From 6891e4f1980596ca91eb99eeb5813d0abac774fd Mon Sep 17 00:00:00 2001 From: Yuhong Sun Date: Tue, 30 May 2023 19:59:57 -0700 Subject: [PATCH] Standardize connectors + permissioning + new frontend for admin pages + small fixes / improvements (#75) Introducing permissioning, standardize onboarding for connectors, re-make the data model for connectors / credentials / index-attempts, making all environment variables optional, a bunch of small fixes + improvements. Co-authored-by: Weves --- .../27c6ecc08586_permission_framework.py | 172 + backend/danswer/auth/users.py | 38 +- backend/danswer/background/update.py | 213 +- backend/danswer/configs/app_configs.py | 16 +- backend/danswer/configs/constants.py | 2 +- backend/danswer/configs/model_configs.py | 2 +- .../connectors/confluence/connector.py | 57 +- backend/danswer/connectors/factory.py | 75 +- .../danswer/connectors/github/connector.py | 18 +- .../connectors/google_drive/connector.py | 20 +- .../connectors/google_drive/connector_auth.py | 106 +- backend/danswer/connectors/interfaces.py | 13 +- backend/danswer/connectors/models.py | 4 - backend/danswer/connectors/slack/config.py | 32 - backend/danswer/connectors/slack/connector.py | 149 +- backend/danswer/connectors/slack/utils.py | 15 +- backend/danswer/connectors/web/connector.py | 11 +- backend/danswer/datastores/interfaces.py | 8 +- backend/danswer/datastores/qdrant/indexing.py | 76 +- backend/danswer/datastores/qdrant/store.py | 32 +- backend/danswer/db/connector.py | 293 ++ backend/danswer/db/credentials.py | 157 + backend/danswer/db/engine.py | 16 + backend/danswer/db/index_attempt.py | 115 +- backend/danswer/db/models.py | 124 +- backend/danswer/direct_qa/__init__.py | 2 +- backend/danswer/direct_qa/question_answer.py | 10 +- backend/danswer/main.py | 13 + .../semantic_search/semantic_search.py | 3 +- backend/danswer/server/admin.py | 578 +++- backend/danswer/server/event_loading.py | 69 +- backend/danswer/server/health.py | 8 +- 
backend/danswer/server/models.py | 104 +- backend/danswer/server/search_backend.py | 16 +- backend/danswer/utils/indexing_pipeline.py | 19 +- backend/scripts/ingestion.py | 156 - deployment/.env | 5 + deployment/README.md | 37 +- deployment/data/nginx/app.conf.template.dev | 56 + deployment/docker-compose.dev.yml | 24 +- deployment/docker-compose.prod.yml | 2 +- deployment/env.dev.template | 14 +- deployment/env.prod.template | 12 +- web/{Dockerfile.prod => Dockerfile} | 2 + web/Dockerfile.dev | 34 - web/next.config.js | 17 +- web/package-lock.json | 2775 ++++++++++++++++- .../app/admin/connectors/confluence/page.tsx | 232 +- web/src/app/admin/connectors/github/page.tsx | 235 +- .../google-drive/auth/callback/route.ts | 16 +- .../admin/connectors/google-drive/page.tsx | 545 +++- .../connectors/slack/InitialSetupForm.tsx | 101 - web/src/app/admin/connectors/slack/page.tsx | 223 +- web/src/app/admin/connectors/web/page.tsx | 142 +- web/src/app/admin/indexing/status/page.tsx | 232 +- web/src/app/admin/keys/openai/page.tsx | 2 +- web/src/components/Button.tsx | 23 + .../connectors/AttachCredentialPopup.tsx | 3 + .../admin/connectors/BasicTable.tsx | 2 +- .../admin/connectors/ConnectorForm.tsx | 114 + .../admin/connectors/ConnectorStatus.tsx | 65 - .../admin/connectors/CredentialForm.tsx | 92 + .../connectors/{Form.tsx => IndexForm.tsx} | 4 +- web/src/components/admin/connectors/Popup.tsx | 4 +- .../AttachCredentialButtonForTable.tsx | 20 + .../buttons/IndexButtonForTable.tsx | 20 + .../connectors/table/ConnectorsTable.tsx | 223 ++ web/src/components/admin/connectors/types.ts | 20 - web/src/components/icons/icons.tsx | 31 +- web/src/components/openai/ApiKeyForm.tsx | 2 +- .../search/SearchResultsDisplay.tsx | 6 +- web/src/components/search/SearchSection.tsx | 12 +- web/src/lib/connector.ts | 39 + web/src/lib/credential.ts | 25 + web/src/lib/time.ts | 4 +- web/src/lib/types.ts | 71 + 76 files changed, 6760 insertions(+), 1468 deletions(-) create mode 100644 
backend/alembic/versions/27c6ecc08586_permission_framework.py delete mode 100644 backend/danswer/connectors/slack/config.py create mode 100644 backend/danswer/db/connector.py create mode 100644 backend/danswer/db/credentials.py delete mode 100644 backend/scripts/ingestion.py create mode 100644 deployment/.env create mode 100644 deployment/data/nginx/app.conf.template.dev rename web/{Dockerfile.prod => Dockerfile} (97%) delete mode 100644 web/Dockerfile.dev delete mode 100644 web/src/app/admin/connectors/slack/InitialSetupForm.tsx create mode 100644 web/src/components/Button.tsx create mode 100644 web/src/components/admin/connectors/AttachCredentialPopup.tsx create mode 100644 web/src/components/admin/connectors/ConnectorForm.tsx delete mode 100644 web/src/components/admin/connectors/ConnectorStatus.tsx create mode 100644 web/src/components/admin/connectors/CredentialForm.tsx rename web/src/components/admin/connectors/{Form.tsx => IndexForm.tsx} (97%) create mode 100644 web/src/components/admin/connectors/buttons/AttachCredentialButtonForTable.tsx create mode 100644 web/src/components/admin/connectors/buttons/IndexButtonForTable.tsx create mode 100644 web/src/components/admin/connectors/table/ConnectorsTable.tsx delete mode 100644 web/src/components/admin/connectors/types.ts create mode 100644 web/src/lib/connector.ts create mode 100644 web/src/lib/credential.ts diff --git a/backend/alembic/versions/27c6ecc08586_permission_framework.py b/backend/alembic/versions/27c6ecc08586_permission_framework.py new file mode 100644 index 000000000..462ab19cc --- /dev/null +++ b/backend/alembic/versions/27c6ecc08586_permission_framework.py @@ -0,0 +1,172 @@ +"""Permission Framework + +Revision ID: 27c6ecc08586 +Revises: 2666d766cb9b +Create Date: 2023-05-24 18:45:17.244495 + +""" +import fastapi_users_db_sqlalchemy +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
+revision = "27c6ecc08586" +down_revision = "2666d766cb9b" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.create_table( + "connector", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sa.String(), nullable=False), + sa.Column( + "source", + sa.Enum( + "SLACK", + "WEB", + "GOOGLE_DRIVE", + "GITHUB", + "CONFLUENCE", + name="documentsource", + native_enum=False, + ), + nullable=False, + ), + sa.Column( + "input_type", + sa.Enum( + "LOAD_STATE", + "POLL", + "EVENT", + name="inputtype", + native_enum=False, + ), + nullable=True, + ), + sa.Column( + "connector_specific_config", + postgresql.JSONB(astext_type=sa.Text()), + nullable=False, + ), + sa.Column("refresh_freq", sa.Integer(), nullable=True), + sa.Column( + "time_created", + sa.DateTime(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.Column( + "time_updated", + sa.DateTime(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.Column("disabled", sa.Boolean(), nullable=False), + sa.PrimaryKeyConstraint("id"), + ) + op.create_table( + "credential", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column( + "credential_json", + postgresql.JSONB(astext_type=sa.Text()), + nullable=True, + ), + sa.Column( + "user_id", + fastapi_users_db_sqlalchemy.generics.GUID(), + nullable=True, + ), + sa.Column("public_doc", sa.Boolean(), nullable=False), + sa.Column( + "time_created", + sa.DateTime(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.Column( + "time_updated", + sa.DateTime(timezone=True), + server_default=sa.text("now()"), + nullable=False, + ), + sa.ForeignKeyConstraint( + ["user_id"], + ["user.id"], + ), + sa.PrimaryKeyConstraint("id"), + ) + op.create_table( + "connector_credential_association", + sa.Column("connector_id", sa.Integer(), nullable=False), + sa.Column("credential_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ["connector_id"], + ["connector.id"], + ), 
+ sa.ForeignKeyConstraint( + ["credential_id"], + ["credential.id"], + ), + sa.PrimaryKeyConstraint("connector_id", "credential_id"), + ) + op.add_column( + "index_attempt", + sa.Column("connector_id", sa.Integer(), nullable=True), + ) + op.add_column( + "index_attempt", + sa.Column("credential_id", sa.Integer(), nullable=True), + ) + op.create_foreign_key( + "fk_index_attempt_credential_id", + "index_attempt", + "credential", + ["credential_id"], + ["id"], + ) + op.create_foreign_key( + "fk_index_attempt_connector_id", + "index_attempt", + "connector", + ["connector_id"], + ["id"], + ) + op.drop_column("index_attempt", "connector_specific_config") + op.drop_column("index_attempt", "source") + op.drop_column("index_attempt", "input_type") + + +def downgrade() -> None: + op.execute("TRUNCATE TABLE index_attempt") + op.add_column( + "index_attempt", + sa.Column("input_type", sa.VARCHAR(), autoincrement=False, nullable=False), + ) + op.add_column( + "index_attempt", + sa.Column("source", sa.VARCHAR(), autoincrement=False, nullable=False), + ) + op.add_column( + "index_attempt", + sa.Column( + "connector_specific_config", + postgresql.JSONB(astext_type=sa.Text()), + autoincrement=False, + nullable=False, + ), + ) + op.drop_constraint( + "fk_index_attempt_credential_id", "index_attempt", type_="foreignkey" + ) + op.drop_constraint( + "fk_index_attempt_connector_id", "index_attempt", type_="foreignkey" + ) + op.drop_column("index_attempt", "credential_id") + op.drop_column("index_attempt", "connector_id") + op.drop_table("connector_credential_association") + op.drop_table("credential") + op.drop_table("connector") diff --git a/backend/danswer/auth/users.py b/backend/danswer/auth/users.py index 0f85a894a..7851f291f 100644 --- a/backend/danswer/auth/users.py +++ b/backend/danswer/auth/users.py @@ -1,3 +1,4 @@ +import contextlib import smtplib import uuid from collections.abc import AsyncGenerator @@ -22,6 +23,7 @@ from danswer.configs.app_configs import WEB_DOMAIN from 
danswer.db.auth import get_access_token_db from danswer.db.auth import get_user_count from danswer.db.auth import get_user_db +from danswer.db.engine import get_async_session from danswer.db.models import AccessToken from danswer.db.models import User from danswer.utils.logging import setup_logger @@ -40,9 +42,13 @@ from fastapi_users.authentication.strategy.db import AccessTokenDatabase from fastapi_users.authentication.strategy.db import DatabaseStrategy from fastapi_users.db import SQLAlchemyUserDatabase from httpx_oauth.clients.google import GoogleOAuth2 +from pydantic import EmailStr logger = setup_logger() +FAKE_USER_EMAIL = "fakeuser@fakedanswermail.com" +FAKE_USER_PASS = "foobar" + def send_user_verification_email(user_email: str, token: str) -> None: msg = MIMEMultipart() @@ -141,14 +147,44 @@ google_oauth_client = GoogleOAuth2(GOOGLE_OAUTH_CLIENT_ID, GOOGLE_OAUTH_CLIENT_S fastapi_users = FastAPIUsers[User, uuid.UUID](get_user_manager, [auth_backend]) + +# Currently unused, maybe useful later +async def create_get_fake_user() -> User: + get_async_session_context = contextlib.asynccontextmanager( + get_async_session + ) # type:ignore + get_user_db_context = contextlib.asynccontextmanager(get_user_db) + get_user_manager_context = contextlib.asynccontextmanager(get_user_manager) + + logger.info("Creating fake user due to Auth being turned off") + async with get_async_session_context() as session: + async with get_user_db_context(session) as user_db: + async with get_user_manager_context(user_db) as user_manager: + user = await user_manager.get_by_email(FAKE_USER_EMAIL) + if user: + return user + user = await user_manager.create( + UserCreate(email=EmailStr(FAKE_USER_EMAIL), password=FAKE_USER_PASS) + ) + logger.info("Created fake user.") + return user + + current_active_user = fastapi_users.current_user( active=True, verified=REQUIRE_EMAIL_VERIFICATION, optional=DISABLE_AUTH ) -def current_admin_user(user: User = Depends(current_active_user)) -> User | None: 
+async def current_user(user: User = Depends(current_active_user)) -> User | None: if DISABLE_AUTH: return None + return user + + +async def current_admin_user(user: User = Depends(current_user)) -> User | None: + if DISABLE_AUTH: + return None + if not user or not hasattr(user, "role") or user.role != UserRole.ADMIN: raise HTTPException( status_code=status.HTTP_403_FORBIDDEN, diff --git a/backend/danswer/background/update.py b/backend/danswer/background/update.py index a53f5a1e8..bed9c33b6 100755 --- a/backend/danswer/background/update.py +++ b/backend/danswer/background/update.py @@ -1,131 +1,146 @@ import time -from typing import cast -from danswer.configs.constants import DocumentSource -from danswer.connectors.factory import build_load_connector +from danswer.connectors.factory import instantiate_connector +from danswer.connectors.interfaces import LoadConnector +from danswer.connectors.interfaces import PollConnector from danswer.connectors.models import InputType -from danswer.connectors.slack.config import get_pull_frequency -from danswer.connectors.slack.connector import SlackConnector -from danswer.db.index_attempt import fetch_index_attempts -from danswer.db.index_attempt import insert_index_attempt -from danswer.db.index_attempt import update_index_attempt +from danswer.db.connector import disable_connector +from danswer.db.connector import fetch_connectors +from danswer.db.credentials import backend_update_credential_json +from danswer.db.engine import build_engine +from danswer.db.engine import get_db_current_time +from danswer.db.index_attempt import create_index_attempt +from danswer.db.index_attempt import get_incomplete_index_attempts +from danswer.db.index_attempt import get_last_finished_attempt +from danswer.db.index_attempt import get_not_started_index_attempts +from danswer.db.index_attempt import mark_attempt_failed +from danswer.db.index_attempt import mark_attempt_in_progress +from danswer.db.index_attempt import mark_attempt_succeeded 
+from danswer.db.models import Connector from danswer.db.models import IndexAttempt -from danswer.db.models import IndexingStatus -from danswer.dynamic_configs import get_dynamic_config_store -from danswer.dynamic_configs.interface import ConfigNotFoundError from danswer.utils.indexing_pipeline import build_indexing_pipeline from danswer.utils.logging import setup_logger +from sqlalchemy.orm import Session logger = setup_logger() -LAST_POLL_KEY_TEMPLATE = "last_poll_{}" + +def should_create_new_indexing( + connector: Connector, last_index: IndexAttempt | None, db_session: Session +) -> bool: + if connector.refresh_freq is None: + return False + if not last_index: + return True + current_db_time = get_db_current_time(db_session) + time_since_index = ( + current_db_time - last_index.time_updated + ) # Maybe better to do time created + return time_since_index.total_seconds() >= connector.refresh_freq -def _check_should_run(current_time: int, last_pull: int, pull_frequency: int) -> bool: - return current_time - last_pull > pull_frequency * 60 +def create_indexing_jobs(db_session: Session) -> None: + connectors = fetch_connectors(db_session, disabled_status=False) + for connector in connectors: + in_progress_indexing_attempts = get_incomplete_index_attempts( + connector.id, db_session + ) + # Currently single threaded so any still in-progress must have errored + for attempt in in_progress_indexing_attempts: + mark_attempt_failed(attempt, db_session) -def run_update() -> None: - logger.info("Running update") - # TODO (chris): implement a more generic way to run updates - # so we don't need to edit this file for future connectors - dynamic_config_store = get_dynamic_config_store() - indexing_pipeline = build_indexing_pipeline() - current_time = int(time.time()) - - # Slack - # TODO (chris): make Slack use the same approach as other connectors / - # make other connectors periodic - try: - pull_frequency = get_pull_frequency() - except ConfigNotFoundError: - pull_frequency 
= 0 - if pull_frequency: - last_slack_pull_key = LAST_POLL_KEY_TEMPLATE.format(SlackConnector.__name__) - try: - last_pull = cast(int, dynamic_config_store.load(last_slack_pull_key)) - except ConfigNotFoundError: - last_pull = None - - if last_pull is None or _check_should_run( - current_time, last_pull, pull_frequency + last_finished_indexing_attempt = get_last_finished_attempt( + connector.id, db_session + ) + if not should_create_new_indexing( + connector, last_finished_indexing_attempt, db_session ): - # TODO (chris): go back to only fetching messages that have changed - # since the last pull. Not supported for now due to how we compute the - # number of documents indexed for the admin dashboard (only look at latest) - logger.info("Scheduling periodic slack pull") - insert_index_attempt( - IndexAttempt( - source=DocumentSource.SLACK, - input_type=InputType.POLL, - status=IndexingStatus.NOT_STARTED, - connector_specific_config={}, - ) - ) - # not 100% accurate, but the inaccuracy will result in more - # frequent pulling rather than less frequent, which is fine - # for now - dynamic_config_store.store(last_slack_pull_key, current_time) + continue - # TODO (chris): make this more efficient / in a single transaction to - # prevent race conditions across multiple background jobs. 
For now, - # this assumes we only ever run a single background job at a time - not_started_index_attempts = fetch_index_attempts( - input_types=[InputType.LOAD_STATE, InputType.POLL], - statuses=[IndexingStatus.NOT_STARTED], - ) - for not_started_index_attempt in not_started_index_attempts: + for association in connector.credentials: + credential = association.credential + create_index_attempt(connector.id, credential.id, db_session) + + +def run_indexing_jobs(last_run_time: float, db_session: Session) -> None: + indexing_pipeline = build_indexing_pipeline() + + new_indexing_attempts = get_not_started_index_attempts(db_session) + logger.info(f"Found {len(new_indexing_attempts)} new indexing tasks.") + for attempt in new_indexing_attempts: logger.info( - "Attempting to index with IndexAttempt id: " - f"{not_started_index_attempt.id}, source: " - f"{not_started_index_attempt.source}, input_type: " - f"{not_started_index_attempt.input_type}, and connector_specific_config: " - f"{not_started_index_attempt.connector_specific_config}" - ) - update_index_attempt( - index_attempt_id=not_started_index_attempt.id, - new_status=IndexingStatus.IN_PROGRESS, + f"Starting new indexing attempt for connector: '{attempt.connector.name}', " + f"with config: '{attempt.connector.connector_specific_config}', and " + f" with credentials: '{[c.credential_id for c in attempt.connector.credentials]}'" ) + mark_attempt_in_progress(attempt, db_session) + + db_connector = attempt.connector + db_credential = attempt.credential + task = db_connector.input_type - error_msg = None try: - # TODO (chris): spawn processes to parallelize / take advantage of - # multiple cores + implement retries - connector = build_load_connector( - source=not_started_index_attempt.source, - connector_specific_config=not_started_index_attempt.connector_specific_config, + runnable_connector, new_credential_json = instantiate_connector( + db_connector.source, + task, + db_connector.connector_specific_config, + 
db_credential.credential_json, ) + if new_credential_json is not None: + backend_update_credential_json( + db_credential, new_credential_json, db_session + ) + except Exception as e: + logger.exception(f"Unable to instantiate connector due to {e}") + disable_connector(db_connector.id, db_session) + continue + + try: + if task == InputType.LOAD_STATE: + assert isinstance(runnable_connector, LoadConnector) + doc_batch_generator = runnable_connector.load_from_state() + + elif task == InputType.POLL: + assert isinstance(runnable_connector, PollConnector) + doc_batch_generator = runnable_connector.poll_source( + last_run_time, time.time() + ) + + else: + # Event types cannot be handled by a background type, leave these untouched + continue document_ids: list[str] = [] - for doc_batch in connector.load_from_state(): - indexing_pipeline(doc_batch) + for doc_batch in doc_batch_generator: + # TODO introduce permissioning here + index_user_id = ( + None if db_credential.public_doc else db_credential.user_id + ) + indexing_pipeline(documents=doc_batch, user_id=index_user_id) document_ids.extend([doc.id for doc in doc_batch]) + + mark_attempt_succeeded(attempt, document_ids, db_session) + except Exception as e: - logger.exception( - "Failed to index for source %s with config %s due to: %s", - not_started_index_attempt.source, - not_started_index_attempt.connector_specific_config, - e, - ) - error_msg = str(e) - - update_index_attempt( - index_attempt_id=not_started_index_attempt.id, - new_status=IndexingStatus.FAILED if error_msg else IndexingStatus.SUCCESS, - document_ids=document_ids if not error_msg else None, - error_msg=error_msg, - ) - - logger.info("Finished update") + logger.exception(f"Indexing job with id {attempt.id} failed due to {e}") + mark_attempt_failed(attempt, db_session, failure_reason=str(e)) -def update_loop(delay: int = 60) -> None: +def update_loop(delay: int = 10) -> None: + last_run_time = 0.0 while True: start = time.time() + logger.info(f"Running 
update, current time: {time.ctime(start)}") try: - run_update() - except Exception: - logger.exception("Failed to run update") + with Session( + build_engine(), future=True, expire_on_commit=False + ) as db_session: + create_indexing_jobs(db_session) + # TODO failed poll jobs won't recover data from failed runs, should fix + run_indexing_jobs(last_run_time, db_session) + except Exception as e: + logger.exception(f"Failed to run update due to {e}") sleep_time = delay - (time.time() - start) if sleep_time > 0: time.sleep(sleep_time) diff --git a/backend/danswer/configs/app_configs.py b/backend/danswer/configs/app_configs.py index 8f887c375..3fe412c4f 100644 --- a/backend/danswer/configs/app_configs.py +++ b/backend/danswer/configs/app_configs.py @@ -44,6 +44,9 @@ VALID_EMAIL_DOMAIN = os.environ.get("VALID_EMAIL_DOMAIN", "") ENABLE_OAUTH = os.environ.get("ENABLE_OAUTH", "").lower() != "false" GOOGLE_OAUTH_CLIENT_ID = os.environ.get("GOOGLE_OAUTH_CLIENT_ID", "") GOOGLE_OAUTH_CLIENT_SECRET = os.environ.get("GOOGLE_OAUTH_CLIENT_SECRET", "") +MASK_CREDENTIAL_PREFIX = ( + os.environ.get("MASK_CREDENTIAL_PREFIX", "True").lower() != "false" +) ##### @@ -72,21 +75,8 @@ POSTGRES_DB = os.environ.get("POSTGRES_DB", "postgres") ##### # Connector Configs ##### -GOOGLE_DRIVE_CREDENTIAL_JSON = os.environ.get( - "GOOGLE_DRIVE_CREDENTIAL_JSON", "/home/storage/google_drive_creds.json" -) -GOOGLE_DRIVE_TOKENS_JSON = os.environ.get( - "GOOGLE_DRIVE_TOKENS_JSON", "/home/storage/google_drive_tokens.json" -) GOOGLE_DRIVE_INCLUDE_SHARED = False -GITHUB_ACCESS_TOKEN = os.environ.get("GITHUB_ACCESS_TOKEN", "") - -# example: username@companyemail.com -CONFLUENCE_USERNAME = os.environ.get("CONFLUENCE_USERNAME", "") -# https://id.atlassian.com/manage-profile/security/api-tokens -CONFLUENCE_ACCESS_TOKEN = os.environ.get("CONFLUENCE_ACCESS_TOKEN", "") - ##### # Query Configs diff --git a/backend/danswer/configs/constants.py b/backend/danswer/configs/constants.py index f89ec139c..0c448ea1f 100644 
--- a/backend/danswer/configs/constants.py +++ b/backend/danswer/configs/constants.py @@ -11,9 +11,9 @@ SEMANTIC_IDENTIFIER = "semantic_identifier" SECTION_CONTINUATION = "section_continuation" ALLOWED_USERS = "allowed_users" ALLOWED_GROUPS = "allowed_groups" -NO_AUTH_USER = "FooBarUser" # TODO rework this temporary solution OPENAI_API_KEY_STORAGE_KEY = "openai_api_key" HTML_SEPARATOR = "\n" +PUBLIC_DOC_PAT = "PUBLIC" class DocumentSource(str, Enum): diff --git a/backend/danswer/configs/model_configs.py b/backend/danswer/configs/model_configs.py index b8e29f1d9..1ed7e56bb 100644 --- a/backend/danswer/configs/model_configs.py +++ b/backend/danswer/configs/model_configs.py @@ -21,5 +21,5 @@ BATCH_SIZE_ENCODE_CHUNKS = 8 # QA Model API Configs # https://platform.openai.com/docs/models/model-endpoint-compatibility INTERNAL_MODEL_VERSION = os.environ.get("INTERNAL_MODEL", "openai-chat-completion") -OPENAI_MODEL_VERSION = os.environ.get("OPENAI_MODEL_VERSION", "gpt-4") +OPENAI_MODEL_VERSION = os.environ.get("OPENAI_MODEL_VERSION", "gpt-3.5-turbo") OPENAI_MAX_OUTPUT_TOKENS = 512 diff --git a/backend/danswer/connectors/confluence/connector.py b/backend/danswer/connectors/confluence/connector.py index 007d33ea0..c124e7106 100644 --- a/backend/danswer/connectors/confluence/connector.py +++ b/backend/danswer/connectors/confluence/connector.py @@ -4,11 +4,10 @@ from urllib.parse import urlparse from atlassian import Confluence # type:ignore from bs4 import BeautifulSoup -from danswer.configs.app_configs import CONFLUENCE_ACCESS_TOKEN -from danswer.configs.app_configs import CONFLUENCE_USERNAME from danswer.configs.app_configs import INDEX_BATCH_SIZE from danswer.configs.constants import DocumentSource from danswer.configs.constants import HTML_SEPARATOR +from danswer.connectors.interfaces import GenerateDocumentsOutput from danswer.connectors.interfaces import LoadConnector from danswer.connectors.models import Document from danswer.connectors.models import Section @@ -41,6 
+40,28 @@ def extract_confluence_keys_from_url(wiki_url: str) -> tuple[str, str]: return wiki_base, space +def _comment_dfs( + comments_str: str, + comment_pages: Generator[dict[str, Any], None, None], + confluence_client: Confluence, +) -> str: + for comment_page in comment_pages: + comment_html = comment_page["body"]["storage"]["value"] + soup = BeautifulSoup(comment_html, "html.parser") + comments_str += "\nComment:\n" + soup.get_text(HTML_SEPARATOR) + child_comment_pages = confluence_client.get_page_child_by_type( + comment_page["id"], + type="comment", + start=None, + limit=None, + expand="body.storage.value", + ) + comments_str = _comment_dfs( + comments_str, child_comment_pages, confluence_client + ) + return comments_str + + class ConfluenceConnector(LoadConnector): def __init__( self, @@ -49,31 +70,25 @@ class ConfluenceConnector(LoadConnector): ) -> None: self.batch_size = batch_size self.wiki_base, self.space = extract_confluence_keys_from_url(wiki_page_url) + self.confluence_client: Confluence | None = None + + def load_credentials(self, credentials: dict[str, Any]) -> dict[str, Any] | None: + username = credentials["confluence_username"] + access_token = credentials["confluence_access_token"] self.confluence_client = Confluence( url=self.wiki_base, - username=CONFLUENCE_USERNAME, - password=CONFLUENCE_ACCESS_TOKEN, + username=username, + password=access_token, cloud=True, ) + return None - def _comment_dfs( - self, comments_str: str, comment_pages: Generator[dict[str, Any], None, None] - ) -> str: - for comment_page in comment_pages: - comment_html = comment_page["body"]["storage"]["value"] - soup = BeautifulSoup(comment_html, "html.parser") - comments_str += "\nComment:\n" + soup.get_text(HTML_SEPARATOR) - child_comment_pages = self.confluence_client.get_page_child_by_type( - comment_page["id"], - type="comment", - start=None, - limit=None, - expand="body.storage.value", + def load_from_state(self) -> GenerateDocumentsOutput: + if 
self.confluence_client is None: + raise PermissionError( + "Confluence Client is not set up, was load_credentials called?" ) - comments_str = self._comment_dfs(comments_str, child_comment_pages) - return comments_str - def load_from_state(self) -> Generator[list[Document], None, None]: start_ind = 0 while True: doc_batch: list[Document] = [] @@ -96,7 +111,7 @@ class ConfluenceConnector(LoadConnector): limit=None, expand="body.storage.value", ) - comments_text = self._comment_dfs("", comment_pages) + comments_text = _comment_dfs("", comment_pages, self.confluence_client) page_text += comments_text page_url = self.wiki_base + page["_links"]["webui"] diff --git a/backend/danswer/connectors/factory.py b/backend/danswer/connectors/factory.py index 61d6c6fa2..447555a0a 100644 --- a/backend/danswer/connectors/factory.py +++ b/backend/danswer/connectors/factory.py @@ -1,6 +1,5 @@ -import time -from collections.abc import Generator from typing import Any +from typing import Type from danswer.configs.constants import DocumentSource from danswer.connectors.confluence.connector import ConfluenceConnector @@ -10,9 +9,9 @@ from danswer.connectors.interfaces import BaseConnector from danswer.connectors.interfaces import EventConnector from danswer.connectors.interfaces import LoadConnector from danswer.connectors.interfaces import PollConnector -from danswer.connectors.models import Document from danswer.connectors.models import InputType -from danswer.connectors.slack.connector import SlackConnector +from danswer.connectors.slack.connector import SlackLoadConnector +from danswer.connectors.slack.connector import SlackPollConnector from danswer.connectors.web.connector import WebConnector _NUM_SECONDS_IN_DAY = 86400 @@ -22,30 +21,35 @@ class ConnectorMissingException(Exception): pass -def build_connector( +def identify_connector_class( source: DocumentSource, input_type: InputType, - connector_specific_config: dict[str, Any], -) -> BaseConnector: - if source == 
DocumentSource.SLACK: - connector: BaseConnector = SlackConnector(**connector_specific_config) - elif source == DocumentSource.GOOGLE_DRIVE: - connector = GoogleDriveConnector(**connector_specific_config) - elif source == DocumentSource.GITHUB: - connector = GithubConnector(**connector_specific_config) - elif source == DocumentSource.WEB: - connector = WebConnector(**connector_specific_config) - elif source == DocumentSource.CONFLUENCE: - connector = ConfluenceConnector(**connector_specific_config) +) -> Type[BaseConnector]: + connector_map = { + DocumentSource.WEB: WebConnector, + DocumentSource.SLACK: { + InputType.LOAD_STATE: SlackLoadConnector, + InputType.POLL: SlackPollConnector, + }, + DocumentSource.GITHUB: GithubConnector, + DocumentSource.GOOGLE_DRIVE: GoogleDriveConnector, + DocumentSource.CONFLUENCE: ConfluenceConnector, + } + connector_by_source = connector_map.get(source, {}) + + if isinstance(connector_by_source, dict): + connector = connector_by_source.get(input_type) else: + connector = connector_by_source + if connector is None: raise ConnectorMissingException(f"Connector not found for source={source}") if any( [ input_type == InputType.LOAD_STATE - and not isinstance(connector, LoadConnector), - input_type == InputType.POLL and not isinstance(connector, PollConnector), - input_type == InputType.EVENT and not isinstance(connector, EventConnector), + and not issubclass(connector, LoadConnector), + input_type == InputType.POLL and not issubclass(connector, PollConnector), + input_type == InputType.EVENT and not issubclass(connector, EventConnector), ] ): raise ConnectorMissingException( @@ -55,25 +59,14 @@ def build_connector( return connector -# TODO this is some jank, rework at some point -def _poll_to_load_connector(range_pull_connector: PollConnector) -> LoadConnector: - class _Connector(LoadConnector): - def __init__(self) -> None: - self._connector = range_pull_connector +def instantiate_connector( + source: DocumentSource, + input_type: 
InputType, + connector_specific_config: dict[str, Any], + credentials: dict[str, Any], +) -> tuple[BaseConnector, dict[str, Any] | None]: + connector_class = identify_connector_class(source, input_type) + connector = connector_class(**connector_specific_config) + new_credentials = connector.load_credentials(credentials) - def load_from_state(self) -> Generator[list[Document], None, None]: - # adding some buffer to make sure we get all documents - return self._connector.poll_source(0, time.time() + _NUM_SECONDS_IN_DAY) - - return _Connector() - - -# TODO this is some jank, rework at some point -def build_load_connector( - source: DocumentSource, connector_specific_config: dict[str, Any] -) -> LoadConnector: - connector = build_connector(source, InputType.LOAD_STATE, connector_specific_config) - if isinstance(connector, PollConnector): - return _poll_to_load_connector(connector) - assert isinstance(connector, LoadConnector) - return connector + return connector, new_credentials diff --git a/backend/danswer/connectors/github/connector.py b/backend/danswer/connectors/github/connector.py index ff2b81976..996373816 100644 --- a/backend/danswer/connectors/github/connector.py +++ b/backend/danswer/connectors/github/connector.py @@ -1,9 +1,10 @@ import itertools from collections.abc import Generator +from typing import Any -from danswer.configs.app_configs import GITHUB_ACCESS_TOKEN from danswer.configs.app_configs import INDEX_BATCH_SIZE from danswer.configs.constants import DocumentSource +from danswer.connectors.interfaces import GenerateDocumentsOutput from danswer.connectors.interfaces import LoadConnector from danswer.connectors.models import Document from danswer.connectors.models import Section @@ -15,8 +16,6 @@ from github.PullRequest import PullRequest logger = setup_logger() -github_client = Github(GITHUB_ACCESS_TOKEN) - def get_pr_batches( pull_requests: PaginatedList, batch_size: int @@ -41,9 +40,18 @@ class GithubConnector(LoadConnector): self.repo_name = 
repo_name self.batch_size = batch_size self.state_filter = state_filter + self.github_client: Github | None = None - def load_from_state(self) -> Generator[list[Document], None, None]: - repo = github_client.get_repo(f"{self.repo_owner}/{self.repo_name}") + def load_credentials(self, credentials: dict[str, Any]) -> dict[str, Any] | None: + self.github_client = Github(credentials["github_access_token"]) + return None + + def load_from_state(self) -> GenerateDocumentsOutput: + if self.github_client is None: + raise PermissionError( + "Github Client is not set up, was load_credentials called?" + ) + repo = self.github_client.get_repo(f"{self.repo_owner}/{self.repo_name}") pull_requests = repo.get_pulls(state=self.state_filter) for pr_batch in get_pr_batches(pull_requests, self.batch_size): doc_batch = [] diff --git a/backend/danswer/connectors/google_drive/connector.py b/backend/danswer/connectors/google_drive/connector.py index 169e366a3..3c7f9bc5e 100644 --- a/backend/danswer/connectors/google_drive/connector.py +++ b/backend/danswer/connectors/google_drive/connector.py @@ -1,10 +1,13 @@ import io from collections.abc import Generator +from typing import Any from danswer.configs.app_configs import GOOGLE_DRIVE_INCLUDE_SHARED from danswer.configs.app_configs import INDEX_BATCH_SIZE from danswer.configs.constants import DocumentSource +from danswer.connectors.google_drive.connector_auth import DB_CREDENTIALS_DICT_KEY from danswer.connectors.google_drive.connector_auth import get_drive_tokens +from danswer.connectors.interfaces import GenerateDocumentsOutput from danswer.connectors.interfaces import LoadConnector from danswer.connectors.models import Document from danswer.connectors.models import Section @@ -89,12 +92,23 @@ class GoogleDriveConnector(LoadConnector): ) -> None: self.batch_size = batch_size self.include_shared = include_shared - self.creds = get_drive_tokens() + self.creds: Credentials | None = None - if not self.creds: + def load_credentials(self, 
credentials: dict[str, Any]) -> dict[str, Any] | None: + access_token_json_str = credentials[DB_CREDENTIALS_DICT_KEY] + creds = get_drive_tokens(token_json_str=access_token_json_str) + if creds is None: raise PermissionError("Unable to access Google Drive.") + self.creds = creds + new_creds_json_str = creds.to_json() + if new_creds_json_str != access_token_json_str: + return {DB_CREDENTIALS_DICT_KEY: new_creds_json_str} + return None + + def load_from_state(self) -> GenerateDocumentsOutput: + if self.creds is None: + raise PermissionError("Not logged into Google Drive") - def load_from_state(self) -> Generator[list[Document], None, None]: service = discovery.build("drive", "v3", credentials=self.creds) for files_batch in get_file_batches( service, self.include_shared, self.batch_size diff --git a/backend/danswer/connectors/google_drive/connector_auth.py b/backend/danswer/connectors/google_drive/connector_auth.py index ac8dd16bf..e360a240f 100644 --- a/backend/danswer/connectors/google_drive/connector_auth.py +++ b/backend/danswer/connectors/google_drive/connector_auth.py @@ -1,52 +1,41 @@ -import os -from typing import Any +import json +from typing import cast from urllib.parse import parse_qs +from urllib.parse import ParseResult from urllib.parse import urlparse -from danswer.configs.app_configs import GOOGLE_DRIVE_CREDENTIAL_JSON -from danswer.configs.app_configs import GOOGLE_DRIVE_TOKENS_JSON from danswer.configs.app_configs import WEB_DOMAIN +from danswer.db.credentials import update_credential_json +from danswer.db.models import User from danswer.dynamic_configs import get_dynamic_config_store +from danswer.server.models import GoogleAppCredentials from danswer.utils.logging import setup_logger from google.auth.transport.requests import Request # type: ignore from google.oauth2.credentials import Credentials # type: ignore from google_auth_oauthlib.flow import InstalledAppFlow # type: ignore +from sqlalchemy.orm import Session logger = setup_logger() 
+DB_CREDENTIALS_DICT_KEY = "google_drive_tokens" +CRED_KEY = "credential_id_{}" +GOOGLE_DRIVE_CRED_KEY = "google_drive_app_credential" SCOPES = ["https://www.googleapis.com/auth/drive.readonly"] -FRONTEND_GOOGLE_DRIVE_REDIRECT = ( - f"{WEB_DOMAIN}/admin/connectors/google-drive/auth/callback" -) -def backend_get_credentials() -> Credentials: - """This approach does not work for production builds as it requires - a browser to be opened. It is used for local development only.""" - creds = None - if os.path.exists(GOOGLE_DRIVE_TOKENS_JSON): - creds = Credentials.from_authorized_user_file(GOOGLE_DRIVE_TOKENS_JSON, SCOPES) - - if not creds or not creds.valid: - if creds and creds.expired and creds.refresh_token: - creds.refresh(Request()) - else: - flow = InstalledAppFlow.from_client_secrets_file( - GOOGLE_DRIVE_CREDENTIAL_JSON, SCOPES - ) - creds = flow.run_local_server() - - with open(GOOGLE_DRIVE_TOKENS_JSON, "w") as token_file: - token_file.write(creds.to_json()) - - return creds +def _build_frontend_google_drive_redirect() -> str: + return f"{WEB_DOMAIN}/admin/connectors/google-drive/auth/callback" -def get_drive_tokens(token_path: str = GOOGLE_DRIVE_TOKENS_JSON) -> Any: - if not os.path.exists(token_path): +def get_drive_tokens( + *, creds: Credentials | None = None, token_json_str: str | None = None +) -> Credentials | None: + if creds is None and token_json_str is None: return None - creds = Credentials.from_authorized_user_file(token_path, SCOPES) + if token_json_str is not None: + creds_json = json.loads(token_json_str) + creds = Credentials.from_authorized_user_info(creds_json, SCOPES) if not creds: return None @@ -57,8 +46,6 @@ def get_drive_tokens(token_path: str = GOOGLE_DRIVE_TOKENS_JSON) -> Any: try: creds.refresh(Request()) if creds.valid: - with open(token_path, "w") as token_file: - token_file.write(creds.to_json()) return creds except Exception as e: logger.exception(f"Failed to refresh google drive access token due to: {e}") @@ -66,8 +53,8 @@ def 
get_drive_tokens(token_path: str = GOOGLE_DRIVE_TOKENS_JSON) -> Any: return None -def verify_csrf(user_id: str, state: str) -> None: - csrf = get_dynamic_config_store().load(user_id) +def verify_csrf(credential_id: int, state: str) -> None: + csrf = get_dynamic_config_store().load(CRED_KEY.format(str(credential_id))) if csrf != state: raise PermissionError( "State from Google Drive Connector callback does not match expected" @@ -75,37 +62,50 @@ def verify_csrf(user_id: str, state: str) -> None: def get_auth_url( - user_id: str, credentials_file: str = GOOGLE_DRIVE_CREDENTIAL_JSON + credential_id: int, ) -> str: - flow = InstalledAppFlow.from_client_secrets_file( - credentials_file, + creds_str = str(get_dynamic_config_store().load(GOOGLE_DRIVE_CRED_KEY)) + credential_json = json.loads(creds_str) + flow = InstalledAppFlow.from_client_config( + credential_json, scopes=SCOPES, - redirect_uri=FRONTEND_GOOGLE_DRIVE_REDIRECT, + redirect_uri=_build_frontend_google_drive_redirect(), ) auth_url, _ = flow.authorization_url(prompt="consent") - parsed_url = urlparse(auth_url) + parsed_url = cast(ParseResult, urlparse(auth_url)) params = parse_qs(parsed_url.query) - get_dynamic_config_store().store(user_id, params.get("state", [None])[0]) # type: ignore + + get_dynamic_config_store().store(CRED_KEY.format(credential_id), params.get("state", [None])[0]) # type: ignore return str(auth_url) -def save_access_tokens( +def update_credential_access_tokens( auth_code: str, - token_path: str = GOOGLE_DRIVE_TOKENS_JSON, - credentials_file: str = GOOGLE_DRIVE_CREDENTIAL_JSON, -) -> Any: - flow = InstalledAppFlow.from_client_secrets_file( - credentials_file, scopes=SCOPES, redirect_uri=FRONTEND_GOOGLE_DRIVE_REDIRECT + credential_id: int, + user: User, + db_session: Session, +) -> Credentials | None: + app_credentials = get_google_app_cred() + flow = InstalledAppFlow.from_client_config( + app_credentials.dict(), + scopes=SCOPES, + redirect_uri=_build_frontend_google_drive_redirect(), ) 
flow.fetch_token(code=auth_code) creds = flow.credentials + token_json_str = creds.to_json() + new_creds_dict = {DB_CREDENTIALS_DICT_KEY: token_json_str} - os.makedirs(os.path.dirname(token_path), exist_ok=True) - with open(token_path, "w+") as token_file: - token_file.write(creds.to_json()) - - if not get_drive_tokens(token_path): - raise PermissionError("Not able to access Google Drive.") - + if not update_credential_json(credential_id, new_creds_dict, user, db_session): + return None return creds + + +def get_google_app_cred() -> GoogleAppCredentials: + creds_str = str(get_dynamic_config_store().load(GOOGLE_DRIVE_CRED_KEY)) + return GoogleAppCredentials(**json.loads(creds_str)) + + +def upsert_google_app_cred(app_credentials: GoogleAppCredentials) -> None: + get_dynamic_config_store().store(GOOGLE_DRIVE_CRED_KEY, app_credentials.json()) diff --git a/backend/danswer/connectors/interfaces.py b/backend/danswer/connectors/interfaces.py index 98c199898..dd109c55e 100644 --- a/backend/danswer/connectors/interfaces.py +++ b/backend/danswer/connectors/interfaces.py @@ -7,16 +7,19 @@ from danswer.connectors.models import Document SecondsSinceUnixEpoch = float +GenerateDocumentsOutput = Generator[list[Document], None, None] + class BaseConnector(abc.ABC): - # Reserved for future shared uses - pass + @abc.abstractmethod + def load_credentials(self, credentials: dict[str, Any]) -> dict[str, Any] | None: + raise NotImplementedError # Large set update or reindex, generally pulling a complete state or from a savestate file class LoadConnector(BaseConnector): @abc.abstractmethod - def load_from_state(self) -> Generator[list[Document], None, None]: + def load_from_state(self) -> GenerateDocumentsOutput: raise NotImplementedError @@ -25,12 +28,12 @@ class PollConnector(BaseConnector): @abc.abstractmethod def poll_source( self, start: SecondsSinceUnixEpoch, end: SecondsSinceUnixEpoch - ) -> Generator[list[Document], None, None]: + ) -> GenerateDocumentsOutput: raise 
NotImplementedError # Event driven class EventConnector(BaseConnector): @abc.abstractmethod - def handle_event(self, event: Any) -> Generator[list[Document], None, None]: + def handle_event(self, event: Any) -> GenerateDocumentsOutput: raise NotImplementedError diff --git a/backend/danswer/connectors/models.py b/backend/danswer/connectors/models.py index f4dcf0053..2bb081909 100644 --- a/backend/danswer/connectors/models.py +++ b/backend/danswer/connectors/models.py @@ -21,10 +21,6 @@ class Document: metadata: dict[str, Any] | None -def get_raw_document_text(document: Document) -> str: - return "\n\n".join([section.text for section in document.sections]) - - class InputType(str, Enum): LOAD_STATE = "load_state" # e.g. loading a current full state or a save state, such as from a file POLL = "poll" # e.g. calling an API to get all documents in the last hour diff --git a/backend/danswer/connectors/slack/config.py b/backend/danswer/connectors/slack/config.py deleted file mode 100644 index d54d6868d..000000000 --- a/backend/danswer/connectors/slack/config.py +++ /dev/null @@ -1,32 +0,0 @@ -from danswer.dynamic_configs import get_dynamic_config_store -from pydantic import BaseModel - - -SLACK_CONFIG_KEY = "slack_connector_config" - - -class SlackConfig(BaseModel): - slack_bot_token: str - workspace_id: str - pull_frequency: int = 0 # in minutes, 0 => no pulling - - -def get_slack_config() -> SlackConfig: - slack_config = get_dynamic_config_store().load(SLACK_CONFIG_KEY) - return SlackConfig.parse_obj(slack_config) - - -def get_slack_bot_token() -> str: - return get_slack_config().slack_bot_token - - -def get_workspace_id() -> str: - return get_slack_config().workspace_id - - -def get_pull_frequency() -> int: - return get_slack_config().pull_frequency - - -def update_slack_config(slack_config: SlackConfig) -> None: - get_dynamic_config_store().store(SLACK_CONFIG_KEY, slack_config.dict()) diff --git a/backend/danswer/connectors/slack/connector.py 
b/backend/danswer/connectors/slack/connector.py index 56db4a0c9..71e3bfeee 100644 --- a/backend/danswer/connectors/slack/connector.py +++ b/backend/danswer/connectors/slack/connector.py @@ -2,20 +2,18 @@ import json import os import time from collections.abc import Callable -from collections.abc import Generator from pathlib import Path from typing import Any from typing import cast -from typing import List from danswer.configs.app_configs import INDEX_BATCH_SIZE from danswer.configs.constants import DocumentSource +from danswer.connectors.interfaces import GenerateDocumentsOutput from danswer.connectors.interfaces import LoadConnector from danswer.connectors.interfaces import PollConnector from danswer.connectors.interfaces import SecondsSinceUnixEpoch from danswer.connectors.models import Document from danswer.connectors.models import Section -from danswer.connectors.slack.utils import get_client from danswer.connectors.slack.utils import get_message_link from danswer.utils.logging import setup_logger from slack_sdk import WebClient @@ -139,13 +137,15 @@ def get_thread(client: WebClient, channel_id: str, thread_id: str) -> ThreadType return threads -def thread_to_doc(channel: ChannelType, thread: ThreadType) -> Document: +def thread_to_doc(workspace: str, channel: ChannelType, thread: ThreadType) -> Document: channel_id = channel["id"] return Document( id=f"{channel_id}__{thread[0]['ts']}", sections=[ Section( - link=get_message_link(m, channel_id=channel_id), + link=get_message_link( + event=m, workspace=workspace, channel_id=channel_id + ), text=cast(str, m["text"]), ) for m in thread @@ -162,6 +162,7 @@ def _default_msg_filter(message: MessageType) -> bool: def get_all_docs( client: WebClient, + workspace: str, oldest: str | None = None, latest: str | None = None, msg_filter_func: Callable[[MessageType], bool] = _default_msg_filter, @@ -197,71 +198,80 @@ def get_all_docs( docs: list[Document] = [] for channel_id, threads in channel_id_to_threads.items(): 
docs.extend( - thread_to_doc(channel=channel_id_to_channel_info[channel_id], thread=thread) + thread_to_doc( + workspace=workspace, + channel=channel_id_to_channel_info[channel_id], + thread=thread, + ) for thread in threads ) logger.info(f"Pulled {len(docs)} documents from slack") return docs -def _process_batch_event( - slack_event: dict[str, Any], - channel: dict[str, Any], - matching_doc: Document | None, - workspace: str | None = None, -) -> Document | None: - if ( - slack_event["type"] == "message" - and slack_event.get("subtype") != "channel_join" - ): - if matching_doc: +class SlackLoadConnector(LoadConnector): + def __init__( + self, workspace: str, export_path_str: str, batch_size: int = INDEX_BATCH_SIZE + ) -> None: + self.workspace = workspace + self.export_path_str = export_path_str + self.batch_size = batch_size + + def load_credentials(self, credentials: dict[str, Any]) -> dict[str, Any] | None: + if credentials: + logger.warning("Unexpected credentials provided for Slack Load Connector") + return None + + @staticmethod + def _process_batch_event( + slack_event: dict[str, Any], + channel: dict[str, Any], + matching_doc: Document | None, + workspace: str, + ) -> Document | None: + if ( + slack_event["type"] == "message" + and slack_event.get("subtype") != "channel_join" + ): + if matching_doc: + return Document( + id=matching_doc.id, + sections=matching_doc.sections + + [ + Section( + link=get_message_link( + event=slack_event, + workspace=workspace, + channel_id=channel["id"], + ), + text=slack_event["text"], + ) + ], + source=matching_doc.source, + semantic_identifier=matching_doc.semantic_identifier, + metadata=matching_doc.metadata, + ) + return Document( - id=matching_doc.id, - sections=matching_doc.sections - + [ + id=slack_event["ts"], + sections=[ Section( link=get_message_link( - slack_event, workspace=workspace, channel_id=channel["id"] + event=slack_event, + workspace=workspace, + channel_id=channel["id"], ), text=slack_event["text"], ) ], 
- source=matching_doc.source, - semantic_identifier=matching_doc.semantic_identifier, - metadata=matching_doc.metadata, + source=DocumentSource.SLACK, + semantic_identifier=channel["name"], + metadata={}, ) - return Document( - id=slack_event["ts"], - sections=[ - Section( - link=get_message_link( - slack_event, workspace=workspace, channel_id=channel["id"] - ), - text=slack_event["text"], - ) - ], - source=DocumentSource.SLACK, - semantic_identifier=channel["name"], - metadata={}, - ) + return None - return None - - -class SlackConnector(LoadConnector, PollConnector): - def __init__( - self, export_path_str: str | None = None, batch_size: int = INDEX_BATCH_SIZE - ) -> None: - self.export_path_str = export_path_str - self.batch_size = batch_size - self.client = get_client() - - def load_from_state(self) -> Generator[list[Document], None, None]: - if self.export_path_str is None: - raise ValueError( - "This Slack connector was not set up with a state-export file." - ) + def load_from_state(self) -> GenerateDocumentsOutput: export_path = Path(self.export_path_str) with open(export_path / "channels.json") as f: @@ -278,12 +288,13 @@ class SlackConnector(LoadConnector, PollConnector): with open(path) as f: events = cast(list[dict[str, Any]], json.load(f)) for slack_event in events: - doc = _process_batch_event( + doc = self._process_batch_event( slack_event=slack_event, channel=channel_info, matching_doc=document_batch.get( slack_event.get("thread_ts", "") ), + workspace=self.workspace, ) if doc: document_batch[doc.id] = doc @@ -292,9 +303,33 @@ class SlackConnector(LoadConnector, PollConnector): yield list(document_batch.values()) + +class SlackPollConnector(PollConnector): + def __init__(self, workspace: str, batch_size: int = INDEX_BATCH_SIZE) -> None: + self.workspace = workspace + self.batch_size = batch_size + self.client: WebClient | None = None + + def load_credentials(self, credentials: dict[str, Any]) -> dict[str, Any] | None: + bot_token = 
credentials["slack_bot_token"] + self.client = WebClient(token=bot_token) + return None + def poll_source( self, start: SecondsSinceUnixEpoch, end: SecondsSinceUnixEpoch - ) -> Generator[List[Document], None, None]: - all_docs = get_all_docs(client=self.client, oldest=str(start), latest=str(end)) + ) -> GenerateDocumentsOutput: + if self.client is None: + raise PermissionError( + "Slack Client is not set up, was load_credentials called?" + ) + all_docs = get_all_docs( + client=self.client, + workspace=self.workspace, + # NOTE: need to impute to `None` instead of using 0.0, since Slack will + # throw an error if we use 0.0 on an account without infinite data + # retention + oldest=str(start) if start else None, + latest=str(end), + ) for i in range(0, len(all_docs), self.batch_size): yield all_docs[i : i + self.batch_size] diff --git a/backend/danswer/connectors/slack/utils.py b/backend/danswer/connectors/slack/utils.py index 7941e8afd..e19264eb1 100644 --- a/backend/danswer/connectors/slack/utils.py +++ b/backend/danswer/connectors/slack/utils.py @@ -1,22 +1,15 @@ from typing import Any from typing import cast -from danswer.connectors.slack.config import get_slack_bot_token -from danswer.connectors.slack.config import get_workspace_id -from slack_sdk import WebClient - - -def get_client() -> WebClient: - """NOTE: assumes token is present in environment variable SLACK_BOT_TOKEN""" - return WebClient(token=get_slack_bot_token()) - def get_message_link( - event: dict[str, Any], workspace: str | None = None, channel_id: str | None = None + event: dict[str, Any], workspace: str, channel_id: str | None = None ) -> str: channel_id = channel_id or cast( str, event["channel"] ) # channel must either be present in the event or passed in message_ts = cast(str, event["ts"]) message_ts_without_dot = message_ts.replace(".", "") - return f"https://{workspace or get_workspace_id()}.slack.com/archives/{channel_id}/p{message_ts_without_dot}" + return ( + 
f"https://{workspace}.slack.com/archives/{channel_id}/p{message_ts_without_dot}" + ) diff --git a/backend/danswer/connectors/web/connector.py b/backend/danswer/connectors/web/connector.py index 8cf0ecf43..d9ae1e17c 100644 --- a/backend/danswer/connectors/web/connector.py +++ b/backend/danswer/connectors/web/connector.py @@ -1,5 +1,4 @@ import io -from collections.abc import Generator from typing import Any from typing import cast from urllib.parse import urljoin @@ -10,6 +9,7 @@ from bs4 import BeautifulSoup from danswer.configs.app_configs import INDEX_BATCH_SIZE from danswer.configs.constants import DocumentSource from danswer.configs.constants import HTML_SEPARATOR +from danswer.connectors.interfaces import GenerateDocumentsOutput from danswer.connectors.interfaces import LoadConnector from danswer.connectors.models import Document from danswer.connectors.models import Section @@ -57,10 +57,17 @@ class WebConnector(LoadConnector): base_url: str, batch_size: int = INDEX_BATCH_SIZE, ) -> None: + if "://" not in base_url: + base_url = "https://" + base_url self.base_url = base_url self.batch_size = batch_size - def load_from_state(self) -> Generator[list[Document], None, None]: + def load_credentials(self, credentials: dict[str, Any]) -> dict[str, Any] | None: + if credentials: + logger.warning("Unexpected credentials provided for Web Connector") + return None + + def load_from_state(self) -> GenerateDocumentsOutput: """Traverses through all pages found on the website and converts them into documents""" visited_links: set[str] = set() diff --git a/backend/danswer/datastores/interfaces.py b/backend/danswer/datastores/interfaces.py index 4bacf9cb1..29df65729 100644 --- a/backend/danswer/datastores/interfaces.py +++ b/backend/danswer/datastores/interfaces.py @@ -8,11 +8,15 @@ DatastoreFilter = dict[str, str | list[str] | None] class Datastore: @abc.abstractmethod - def index(self, chunks: list[EmbeddedIndexChunk]) -> bool: + def index(self, chunks: 
list[EmbeddedIndexChunk], user_id: int | None) -> bool: raise NotImplementedError @abc.abstractmethod def semantic_retrieval( - self, query: str, filters: list[DatastoreFilter] | None, num_to_retrieve: int + self, + query: str, + user_id: int | None, + filters: list[DatastoreFilter] | None, + num_to_retrieve: int, ) -> list[InferenceChunk]: raise NotImplementedError diff --git a/backend/danswer/datastores/qdrant/indexing.py b/backend/danswer/datastores/qdrant/indexing.py index 02e61ab72..c0f8e1da9 100644 --- a/backend/danswer/datastores/qdrant/indexing.py +++ b/backend/danswer/datastores/qdrant/indexing.py @@ -7,6 +7,7 @@ from danswer.configs.constants import BLURB from danswer.configs.constants import CHUNK_ID from danswer.configs.constants import CONTENT from danswer.configs.constants import DOCUMENT_ID +from danswer.configs.constants import PUBLIC_DOC_PAT from danswer.configs.constants import SECTION_CONTINUATION from danswer.configs.constants import SEMANTIC_IDENTIFIER from danswer.configs.constants import SOURCE_LINKS @@ -14,7 +15,9 @@ from danswer.configs.constants import SOURCE_TYPE from danswer.configs.model_configs import DOC_EMBEDDING_DIM from danswer.utils.clients import get_qdrant_client from danswer.utils.logging import setup_logger +from danswer.utils.timing import log_function_time from qdrant_client import QdrantClient +from qdrant_client.http import models from qdrant_client.http.exceptions import ResponseHandlingException from qdrant_client.http.models.models import UpdateResult from qdrant_client.http.models.models import UpdateStatus @@ -44,6 +47,44 @@ def create_collection( raise RuntimeError("Could not create Qdrant collection") +@log_function_time() +def get_document_whitelists( + doc_chunk_id: str, collection_name: str, q_client: QdrantClient +) -> tuple[int, list[str], list[str]]: + results = q_client.retrieve( + collection_name=collection_name, + ids=[doc_chunk_id], + with_payload=[ALLOWED_USERS, ALLOWED_GROUPS], + ) + if len(results) == 
0: + return 0, [], [] + payload = results[0].payload + if not payload: + raise RuntimeError( + "Qdrant Index is corrupted, Document found with no access lists." + ) + return len(results), payload[ALLOWED_USERS], payload[ALLOWED_GROUPS] + + +@log_function_time() +def delete_doc_chunks( + document_id: str, collection_name: str, q_client: QdrantClient +) -> None: + q_client.delete( + collection_name=collection_name, + points_selector=models.FilterSelector( + filter=models.Filter( + must=[ + models.FieldCondition( + key=DOCUMENT_ID, + match=models.MatchValue(value=document_id), + ), + ], + ) + ), + ) + + def recreate_collection( collection_name: str, embedding_dim: int = DOC_EMBEDDING_DIM ) -> None: @@ -63,18 +104,47 @@ def get_uuid_from_chunk(chunk: EmbeddedIndexChunk) -> uuid.UUID: def index_chunks( chunks: list[EmbeddedIndexChunk], + user_id: int | None, collection: str, client: QdrantClient | None = None, batch_upsert: bool = True, ) -> bool: + # Public documents will have the PUBLIC string in ALLOWED_USERS + # If credential that kicked this off has no user associated, either Auth is off or the doc is public + user_str = PUBLIC_DOC_PAT if user_id is None else str(user_id) q_client: QdrantClient = client if client else get_qdrant_client() point_structs = [] + # Maps document id to dict of whitelists for users/groups each containing list of users/groups as strings + doc_user_map: dict[str, dict[str, list[str]]] = {} for chunk in chunks: + chunk_uuid = str(get_uuid_from_chunk(chunk)) document = chunk.source_document + + if document.id not in doc_user_map: + num_doc_chunks, whitelist_users, whitelist_groups = get_document_whitelists( + chunk_uuid, collection, q_client + ) + if num_doc_chunks == 0: + doc_user_map[document.id] = { + ALLOWED_USERS: [user_str], + # TODO introduce groups logic here + ALLOWED_GROUPS: whitelist_groups, + } + else: + if user_str not in whitelist_users: + whitelist_users.append(user_str) + # TODO introduce groups logic here + 
doc_user_map[document.id] = { + ALLOWED_USERS: whitelist_users, + ALLOWED_GROUPS: whitelist_groups, + } + # Need to delete document chunks because number of chunks may decrease + delete_doc_chunks(document.id, collection, q_client) + point_structs.append( PointStruct( - id=str(get_uuid_from_chunk(chunk)), + id=chunk_uuid, payload={ DOCUMENT_ID: document.id, CHUNK_ID: chunk.chunk_id, @@ -84,8 +154,8 @@ def index_chunks( SOURCE_LINKS: chunk.source_links, SEMANTIC_IDENTIFIER: document.semantic_identifier, SECTION_CONTINUATION: chunk.section_continuation, - ALLOWED_USERS: [], # TODO - ALLOWED_GROUPS: [], # TODO + ALLOWED_USERS: doc_user_map[document.id][ALLOWED_USERS], + ALLOWED_GROUPS: doc_user_map[document.id][ALLOWED_GROUPS], }, vector=chunk.embedding, ) diff --git a/backend/danswer/datastores/qdrant/store.py b/backend/danswer/datastores/qdrant/store.py index 886b8110d..a17f6d46f 100644 --- a/backend/danswer/datastores/qdrant/store.py +++ b/backend/danswer/datastores/qdrant/store.py @@ -1,6 +1,8 @@ from danswer.chunking.models import EmbeddedIndexChunk from danswer.chunking.models import InferenceChunk from danswer.configs.app_configs import QDRANT_DEFAULT_COLLECTION +from danswer.configs.constants import ALLOWED_USERS +from danswer.configs.constants import PUBLIC_DOC_PAT from danswer.datastores.interfaces import Datastore from danswer.datastores.interfaces import DatastoreFilter from danswer.datastores.qdrant.indexing import index_chunks @@ -23,14 +25,21 @@ class QdrantDatastore(Datastore): self.collection = collection self.client = get_qdrant_client() - def index(self, chunks: list[EmbeddedIndexChunk]) -> bool: + def index(self, chunks: list[EmbeddedIndexChunk], user_id: int | None) -> bool: return index_chunks( - chunks=chunks, collection=self.collection, client=self.client + chunks=chunks, + user_id=user_id, + collection=self.collection, + client=self.client, ) @log_function_time() def semantic_retrieval( - self, query: str, filters: list[DatastoreFilter] | 
None, num_to_retrieve: int + self, + query: str, + user_id: int | None, + filters: list[DatastoreFilter] | None, + num_to_retrieve: int, ) -> list[InferenceChunk]: query_embedding = get_default_embedding_model().encode( query @@ -41,6 +50,23 @@ class QdrantDatastore(Datastore): hits = [] filter_conditions = [] try: + # Permissions filter + if user_id: + filter_conditions.append( + FieldCondition( + key=ALLOWED_USERS, + match=MatchAny(any=[str(user_id), PUBLIC_DOC_PAT]), + ) + ) + else: + filter_conditions.append( + FieldCondition( + key=ALLOWED_USERS, + match=MatchValue(value=PUBLIC_DOC_PAT), + ) + ) + + # Provided query filters if filters: for filter_dict in filters: valid_filters = { diff --git a/backend/danswer/db/connector.py b/backend/danswer/db/connector.py new file mode 100644 index 000000000..21a9187c7 --- /dev/null +++ b/backend/danswer/db/connector.py @@ -0,0 +1,293 @@ +from typing import cast + +from danswer.configs.constants import DocumentSource +from danswer.connectors.models import InputType +from danswer.db.credentials import fetch_credential_by_id +from danswer.db.models import Connector +from danswer.db.models import ConnectorCredentialAssociation +from danswer.db.models import IndexAttempt +from danswer.db.models import User +from danswer.server.models import ConnectorBase +from danswer.server.models import ObjectCreationIdResponse +from danswer.server.models import StatusResponse +from danswer.utils.logging import setup_logger +from fastapi import HTTPException +from sqlalchemy import and_ +from sqlalchemy import func +from sqlalchemy import select +from sqlalchemy.orm import aliased +from sqlalchemy.orm import Session + +logger = setup_logger() + + +def fetch_connectors( + db_session: Session, + sources: list[DocumentSource] | None = None, + input_types: list[InputType] | None = None, + disabled_status: bool | None = None, +) -> list[Connector]: + stmt = select(Connector) + if sources is not None: + stmt = 
stmt.where(Connector.source.in_(sources)) + if input_types is not None: + stmt = stmt.where(Connector.input_type.in_(input_types)) + if disabled_status is not None: + stmt = stmt.where(Connector.disabled == disabled_status) + results = db_session.scalars(stmt) + return list(results.all()) + + +def connector_by_name_exists(connector_name: str, db_session: Session) -> bool: + stmt = select(Connector).where(Connector.name == connector_name) + result = db_session.execute(stmt) + connector = result.scalar_one_or_none() + return connector is not None + + +def fetch_connector_by_id(connector_id: int, db_session: Session) -> Connector | None: + stmt = select(Connector).where(Connector.id == connector_id) + result = db_session.execute(stmt) + connector = result.scalar_one_or_none() + return connector + + +def create_connector( + connector_data: ConnectorBase, + db_session: Session, +) -> ObjectCreationIdResponse: + if connector_by_name_exists(connector_data.name, db_session): + raise ValueError( + "Connector by this name already exists, duplicate naming not allowed." + ) + + connector = Connector( + name=connector_data.name, + source=connector_data.source, + input_type=connector_data.input_type, + connector_specific_config=connector_data.connector_specific_config, + refresh_freq=connector_data.refresh_freq, + disabled=connector_data.disabled, + ) + db_session.add(connector) + db_session.commit() + + return ObjectCreationIdResponse(id=connector.id) + + +def update_connector( + connector_id: int, + connector_data: ConnectorBase, + db_session: Session, +) -> Connector | None: + connector = fetch_connector_by_id(connector_id, db_session) + if connector is None: + return None + + if connector_data.name != connector.name and connector_by_name_exists( + connector_data.name, db_session + ): + raise ValueError( + "Connector by this name already exists, duplicate naming not allowed." 
+ ) + + connector.name = connector_data.name + connector.source = connector_data.source + connector.input_type = connector_data.input_type + connector.connector_specific_config = connector_data.connector_specific_config + connector.refresh_freq = connector_data.refresh_freq + connector.disabled = connector_data.disabled + + db_session.commit() + return connector + + +def disable_connector( + connector_id: int, + db_session: Session, +) -> StatusResponse[int]: + connector = fetch_connector_by_id(connector_id, db_session) + if connector is None: + raise HTTPException(status_code=404, detail="Connector does not exist") + + connector.disabled = True + db_session.commit() + return StatusResponse( + success=True, message="Connector deleted successfully", data=connector_id + ) + + +def delete_connector( + connector_id: int, + db_session: Session, +) -> StatusResponse[int]: + """Currently unused due to foreign key restriction from IndexAttempt + Use disable_connector instead""" + connector = fetch_connector_by_id(connector_id, db_session) + if connector is None: + return StatusResponse( + success=True, message="Connector was already deleted", data=connector_id + ) + + db_session.delete(connector) + db_session.commit() + return StatusResponse( + success=True, message="Connector deleted successfully", data=connector_id + ) + + +def get_connector_credential_ids( + connector_id: int, + db_session: Session, +) -> list[int]: + connector = fetch_connector_by_id(connector_id, db_session) + if connector is None: + raise ValueError(f"Connector by id {connector_id} does not exist") + + return [association.credential.id for association in connector.credentials] + + +def add_credential_to_connector( + connector_id: int, + credential_id: int, + user: User, + db_session: Session, +) -> StatusResponse[int]: + connector = fetch_connector_by_id(connector_id, db_session) + credential = fetch_credential_by_id(credential_id, user, db_session) + + if connector is None: + raise 
HTTPException(status_code=404, detail="Connector does not exist") + + if credential is None: + raise HTTPException( + status_code=401, + detail="Credential does not exist or does not belong to user", + ) + + existing_association = ( + db_session.query(ConnectorCredentialAssociation) + .filter( + ConnectorCredentialAssociation.connector_id == connector_id, + ConnectorCredentialAssociation.credential_id == credential_id, + ) + .one_or_none() + ) + if existing_association is not None: + return StatusResponse( + success=False, + message=f"Connector already has Credential {credential_id}", + data=connector_id, + ) + + association = ConnectorCredentialAssociation( + connector_id=connector_id, credential_id=credential_id + ) + db_session.add(association) + db_session.commit() + + return StatusResponse( + success=True, + message=f"New Credential {credential_id} added to Connector", + data=connector_id, + ) + + +def remove_credential_from_connector( + connector_id: int, + credential_id: int, + user: User, + db_session: Session, +) -> StatusResponse[int]: + connector = fetch_connector_by_id(connector_id, db_session) + credential = fetch_credential_by_id(credential_id, user, db_session) + + if connector is None: + raise HTTPException(status_code=404, detail="Connector does not exist") + + if credential is None: + raise HTTPException( + status_code=404, + detail="Credential does not exist or does not belong to user", + ) + + association = ( + db_session.query(ConnectorCredentialAssociation) + .filter( + ConnectorCredentialAssociation.connector_id == connector_id, + ConnectorCredentialAssociation.credential_id == credential_id, + ) + .one_or_none() + ) + + if association is not None: + db_session.delete(association) + db_session.commit() + return StatusResponse( + success=True, + message=f"Credential {credential_id} removed from Connector", + data=connector_id, + ) + + return StatusResponse( + success=False, + message=f"Connector already does not have Credential 
{credential_id}", + data=connector_id, + ) + + +def fetch_latest_index_attempt_by_connector( + db_session: Session, + source: DocumentSource | None = None, +) -> list[IndexAttempt]: + latest_index_attempts: list[IndexAttempt] = [] + + if source: + connectors = fetch_connectors( + db_session, sources=[source], disabled_status=False + ) + else: + connectors = fetch_connectors(db_session, disabled_status=False) + + if not connectors: + return [] + + for connector in connectors: + latest_index_attempt = ( + db_session.query(IndexAttempt) + .filter(IndexAttempt.connector_id == connector.id) + .order_by(IndexAttempt.time_updated.desc()) + .first() + ) + + if latest_index_attempt is not None: + latest_index_attempts.append(latest_index_attempt) + + return latest_index_attempts + + +def fetch_latest_index_attempts_by_status( + db_session: Session, +) -> list[IndexAttempt]: + subquery = ( + db_session.query( + IndexAttempt.connector_id, + IndexAttempt.status, + func.max(IndexAttempt.time_updated).label("time_updated"), + ) + .group_by(IndexAttempt.connector_id) + .group_by(IndexAttempt.status) + .subquery() + ) + + alias = aliased(IndexAttempt, subquery) + + query = db_session.query(IndexAttempt).join( + alias, + and_( + IndexAttempt.connector_id == alias.connector_id, + IndexAttempt.status == alias.status, + IndexAttempt.time_updated == alias.time_updated, + ), + ) + return cast(list[IndexAttempt], query.all()) diff --git a/backend/danswer/db/credentials.py b/backend/danswer/db/credentials.py new file mode 100644 index 000000000..872a312ae --- /dev/null +++ b/backend/danswer/db/credentials.py @@ -0,0 +1,157 @@ +from typing import Any + +from danswer.db.engine import build_engine +from danswer.db.models import Credential +from danswer.db.models import User +from danswer.server.models import CredentialBase +from danswer.server.models import ObjectCreationIdResponse +from danswer.utils.logging import setup_logger +from sqlalchemy import select +from sqlalchemy.orm import 
Session +from sqlalchemy.sql.expression import or_ + + +logger = setup_logger() + + +def mask_string(sensitive_str: str) -> str: + return "****...**" + sensitive_str[-4:] + + +def mask_credential_dict(credential_dict: dict[str, Any]) -> dict[str, str]: + masked_creds = {} + for key, val in credential_dict.items(): + if not isinstance(val, str): + raise ValueError( + "Unable to mask credentials of type other than string, cannot process request." + ) + + masked_creds[key] = mask_string(val) + return masked_creds + + +def fetch_credentials( + user: User | None, + db_session: Session, +) -> list[Credential]: + stmt = select(Credential) + if user: + stmt = stmt.where( + or_(Credential.user_id == user.id, Credential.user_id.is_(None)) + ) + results = db_session.scalars(stmt) + return list(results.all()) + + +def fetch_credential_by_id( + credential_id: int, user: User | None, db_session: Session +) -> Credential | None: + stmt = select(Credential).where(Credential.id == credential_id) + if user: + stmt = stmt.where( + or_(Credential.user_id == user.id, Credential.user_id.is_(None)) + ) + result = db_session.execute(stmt) + credential = result.scalar_one_or_none() + return credential + + +def create_credential( + credential_data: CredentialBase, + user: User, + db_session: Session, +) -> ObjectCreationIdResponse: + credential = Credential( + credential_json=credential_data.credential_json, + user_id=int(user.id) if user else None, + public_doc=credential_data.public_doc, + ) + db_session.add(credential) + db_session.commit() + + return ObjectCreationIdResponse(id=credential.id) + + +def update_credential( + credential_id: int, + credential_data: CredentialBase, + user: User, + db_session: Session, +) -> Credential | None: + credential = fetch_credential_by_id(credential_id, user, db_session) + if credential is None: + return None + + credential.credential_json = credential_data.credential_json + credential.user_id = int(user.id) if user is not None else None + 
credential.public_doc = credential_data.public_doc
+
+    db_session.commit()
+    return credential
+
+
+def update_credential_json(
+    credential_id: int,
+    credential_json: dict[str, Any],
+    user: User,
+    db_session: Session,
+) -> Credential | None:
+    """Overwrite the stored credential_json of a credential visible to the user.
+
+    Returns None if the credential does not exist or does not belong to the user."""
+    credential = fetch_credential_by_id(credential_id, user, db_session)
+    if credential is None:
+        return None
+    credential.credential_json = credential_json
+
+    db_session.commit()
+    return credential
+
+
+def backend_update_credential_json(
+    credential: Credential,
+    credential_json: dict[str, Any],
+    db_session: Session,
+) -> None:
+    """This should not be used in any flows involving the frontend or users"""
+    credential.credential_json = credential_json
+    db_session.commit()
+
+
+def delete_credential(
+    credential_id: int,
+    user: User,
+    db_session: Session,
+) -> None:
+    credential = fetch_credential_by_id(credential_id, user, db_session)
+    if credential is None:
+        raise ValueError(
+            f"Credential by provided id {credential_id} does not exist or does not belong to user"
+        )
+
+    db_session.delete(credential)
+    db_session.commit()
+
+
+def create_initial_public_credential() -> None:
+    public_cred_id = 0
+    error_msg = (
+        "DB is not in a valid initial state."
+        "There must exist an empty public credential for data connectors that do not require additional Auth."
+ ) + with Session(build_engine(), future=True, expire_on_commit=False) as db_session: + first_credential = fetch_credential_by_id(public_cred_id, None, db_session) + + if first_credential is not None: + if ( + first_credential.credential_json != {} + or first_credential.public_doc is False + ): + raise ValueError(error_msg) + return + + credential = Credential( + id=public_cred_id, credential_json={}, user_id=None, public_doc=True + ) + db_session.add(credential) + db_session.commit() diff --git a/backend/danswer/db/engine.py b/backend/danswer/db/engine.py index 363b3a88a..9e77b4112 100644 --- a/backend/danswer/db/engine.py +++ b/backend/danswer/db/engine.py @@ -1,21 +1,32 @@ from collections.abc import AsyncGenerator +from collections.abc import Generator +from datetime import datetime from danswer.configs.app_configs import POSTGRES_DB from danswer.configs.app_configs import POSTGRES_HOST from danswer.configs.app_configs import POSTGRES_PASSWORD from danswer.configs.app_configs import POSTGRES_PORT from danswer.configs.app_configs import POSTGRES_USER +from sqlalchemy import text from sqlalchemy.engine import create_engine from sqlalchemy.engine import Engine from sqlalchemy.ext.asyncio import AsyncEngine from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.ext.asyncio import create_async_engine +from sqlalchemy.orm import Session SYNC_DB_API = "psycopg2" ASYNC_DB_API = "asyncpg" +def get_db_current_time(db_session: Session) -> datetime: + result = db_session.execute(text("SELECT NOW()")).scalar() + if result is None: + raise ValueError("Database did not return a time") + return result + + def build_connection_string( *, db_api: str = ASYNC_DB_API, @@ -38,6 +49,11 @@ def build_async_engine() -> AsyncEngine: return create_async_engine(connection_string) +def get_session() -> Generator[Session, None, None]: + with Session(build_engine(), future=True, expire_on_commit=False) as session: + yield session + + async def get_async_session() -> 
AsyncGenerator[AsyncSession, None]: async with AsyncSession( build_async_engine(), future=True, expire_on_commit=False diff --git a/backend/danswer/db/index_attempt.py b/backend/danswer/db/index_attempt.py index dd4c985b2..81ed5c565 100644 --- a/backend/danswer/db/index_attempt.py +++ b/backend/danswer/db/index_attempt.py @@ -1,55 +1,88 @@ -from danswer.configs.constants import DocumentSource -from danswer.connectors.models import InputType -from danswer.db.engine import build_engine from danswer.db.models import IndexAttempt from danswer.db.models import IndexingStatus from danswer.utils.logging import setup_logger +from sqlalchemy import desc from sqlalchemy import select from sqlalchemy.orm import Session + logger = setup_logger() -def insert_index_attempt(index_attempt: IndexAttempt) -> None: - logger.info(f"Inserting {index_attempt}") - with Session(build_engine()) as session: - session.add(index_attempt) - session.commit() +def create_index_attempt( + connector_id: int, + credential_id: int, + db_session: Session, +) -> int: + new_attempt = IndexAttempt( + connector_id=connector_id, + credential_id=credential_id, + status=IndexingStatus.NOT_STARTED, + ) + db_session.add(new_attempt) + db_session.commit() + + return new_attempt.id -def fetch_index_attempts( - *, - sources: list[DocumentSource] | None = None, - statuses: list[IndexingStatus] | None = None, - input_types: list[InputType] | None = None, +def get_incomplete_index_attempts( + connector_id: int | None, + db_session: Session, ) -> list[IndexAttempt]: - with Session(build_engine(), future=True, expire_on_commit=False) as session: - stmt = select(IndexAttempt) - if sources: - stmt = stmt.where(IndexAttempt.source.in_(sources)) - if statuses: - stmt = stmt.where(IndexAttempt.status.in_(statuses)) - if input_types: - stmt = stmt.where(IndexAttempt.input_type.in_(input_types)) - results = session.scalars(stmt) - return list(results.all()) + stmt = select(IndexAttempt) + if connector_id is not None: + stmt 
= stmt.where(IndexAttempt.connector_id == connector_id) + stmt = stmt.where( + IndexAttempt.status.notin_([IndexingStatus.SUCCESS, IndexingStatus.FAILED]) + ) + + incomplete_attempts = db_session.scalars(stmt) + return list(incomplete_attempts.all()) -def update_index_attempt( - *, - index_attempt_id: int, - new_status: IndexingStatus, - document_ids: list[str] | None = None, - error_msg: str | None = None, -) -> bool: - """Returns `True` if successfully updated, `False` if cannot find matching ID""" - with Session(build_engine(), future=True, expire_on_commit=False) as session: - stmt = select(IndexAttempt).where(IndexAttempt.id == index_attempt_id) - result = session.scalar(stmt) - if result: - result.status = new_status - result.document_ids = document_ids - result.error_msg = error_msg - session.commit() - return True - return False +def get_not_started_index_attempts(db_session: Session) -> list[IndexAttempt]: + stmt = select(IndexAttempt) + stmt = stmt.where(IndexAttempt.status == IndexingStatus.NOT_STARTED) + new_attempts = db_session.scalars(stmt) + return list(new_attempts.all()) + + +def mark_attempt_in_progress( + index_attempt: IndexAttempt, + db_session: Session, +) -> None: + index_attempt.status = IndexingStatus.IN_PROGRESS + db_session.add(index_attempt) + db_session.commit() + + +def mark_attempt_succeeded( + index_attempt: IndexAttempt, + docs_indexed: list[str], + db_session: Session, +) -> None: + index_attempt.status = IndexingStatus.SUCCESS + index_attempt.document_ids = docs_indexed + db_session.add(index_attempt) + db_session.commit() + + +def mark_attempt_failed( + index_attempt: IndexAttempt, db_session: Session, failure_reason: str = "Unknown" +) -> None: + index_attempt.status = IndexingStatus.FAILED + index_attempt.error_msg = failure_reason + db_session.add(index_attempt) + db_session.commit() + + +def get_last_finished_attempt( + connector_id: int, + db_session: Session, +) -> IndexAttempt | None: + stmt = select(IndexAttempt) + stmt 
= stmt.where(IndexAttempt.connector_id == connector_id) + stmt = stmt.where(IndexAttempt.status == IndexingStatus.SUCCESS) + stmt = stmt.order_by(desc(IndexAttempt.time_updated)) + + return db_session.execute(stmt).scalars().first() diff --git a/backend/danswer/db/models.py b/backend/danswer/db/models.py index 68d0c85cf..39771e2d4 100644 --- a/backend/danswer/db/models.py +++ b/backend/danswer/db/models.py @@ -9,9 +9,12 @@ from danswer.connectors.models import InputType from fastapi_users.db import SQLAlchemyBaseOAuthAccountTableUUID from fastapi_users.db import SQLAlchemyBaseUserTableUUID from fastapi_users_db_sqlalchemy.access_token import SQLAlchemyBaseAccessTokenTableUUID +from sqlalchemy import Boolean from sqlalchemy import DateTime from sqlalchemy import Enum +from sqlalchemy import ForeignKey from sqlalchemy import func +from sqlalchemy import Integer from sqlalchemy import String from sqlalchemy.dialects import postgresql from sqlalchemy.orm import DeclarativeBase @@ -35,12 +38,93 @@ class User(SQLAlchemyBaseUserTableUUID, Base): role: Mapped[UserRole] = mapped_column( Enum(UserRole, native_enum=False, default=UserRole.BASIC) ) + credentials: Mapped[List["Credential"]] = relationship( + "Credential", back_populates="user", lazy="joined" + ) class AccessToken(SQLAlchemyBaseAccessTokenTableUUID, Base): pass +class ConnectorCredentialAssociation(Base): + """Connectors and Credentials can have a many-to-many relationship + I.e. A Confluence Connector may have multiple admin users who can run it with their own credentials + I.e. 
An admin user may use the same credential to index multiple Confluence Spaces + """ + + __tablename__ = "connector_credential_association" + connector_id: Mapped[int] = mapped_column( + ForeignKey("connector.id"), primary_key=True + ) + credential_id: Mapped[int] = mapped_column( + ForeignKey("credential.id"), primary_key=True + ) + + connector: Mapped["Connector"] = relationship( + "Connector", back_populates="credentials" + ) + credential: Mapped["Credential"] = relationship( + "Credential", back_populates="connectors" + ) + + +class Connector(Base): + __tablename__ = "connector" + + id: Mapped[int] = mapped_column(primary_key=True) + name: Mapped[str] = mapped_column(String) + source: Mapped[DocumentSource] = mapped_column( + Enum(DocumentSource, native_enum=False) + ) + input_type = mapped_column(Enum(InputType, native_enum=False)) + connector_specific_config: Mapped[dict[str, Any]] = mapped_column( + postgresql.JSONB() + ) + refresh_freq: Mapped[int | None] = mapped_column(Integer, nullable=True) + time_created: Mapped[datetime.datetime] = mapped_column( + DateTime(timezone=True), server_default=func.now() + ) + time_updated: Mapped[datetime.datetime] = mapped_column( + DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + ) + disabled: Mapped[bool] = mapped_column(Boolean, default=False) + + credentials: Mapped[List["ConnectorCredentialAssociation"]] = relationship( + "ConnectorCredentialAssociation", + back_populates="connector", + cascade="all, delete-orphan", + ) + index_attempts: Mapped[List["IndexAttempt"]] = relationship( + "IndexAttempt", back_populates="connector" + ) + + +class Credential(Base): + __tablename__ = "credential" + + id: Mapped[int] = mapped_column(primary_key=True) + credential_json: Mapped[dict[str, Any]] = mapped_column(postgresql.JSONB()) + user_id: Mapped[int | None] = mapped_column(ForeignKey("user.id"), nullable=True) + public_doc: Mapped[bool] = mapped_column(Boolean, default=False) + time_created: 
Mapped[datetime.datetime] = mapped_column( + DateTime(timezone=True), server_default=func.now() + ) + time_updated: Mapped[datetime.datetime] = mapped_column( + DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + ) + + connectors: Mapped[List["ConnectorCredentialAssociation"]] = relationship( + "ConnectorCredentialAssociation", + back_populates="credential", + cascade="all, delete-orphan", + ) + index_attempts: Mapped[List["IndexAttempt"]] = relationship( + "IndexAttempt", back_populates="credential" + ) + user: Mapped[User] = relationship("User", back_populates="credentials") + + class IndexingStatus(str, PyEnum): NOT_STARTED = "not_started" IN_PROGRESS = "in_progress" @@ -58,22 +142,11 @@ class IndexAttempt(Base): __tablename__ = "index_attempt" id: Mapped[int] = mapped_column(primary_key=True) - # would like this to be a single JSONB column with structure described by - # `ConnectorDescriptor`, but this is not easily supported and requires - # some difficult to understand magic - source: Mapped[DocumentSource] = mapped_column( - Enum(DocumentSource, native_enum=False) + connector_id: Mapped[int | None] = mapped_column( + ForeignKey("connector.id"), nullable=True ) - input_type: Mapped[InputType] = mapped_column(Enum(InputType, native_enum=False)) - connector_specific_config: Mapped[dict[str, Any]] = mapped_column( - postgresql.JSONB(), nullable=False - ) - # TODO (chris): potentially add metadata for the chunker, embedder, and datastore - time_created: Mapped[datetime.datetime] = mapped_column( - DateTime(timezone=True), server_default=func.now() - ) - time_updated: Mapped[datetime.datetime] = mapped_column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + credential_id: Mapped[int | None] = mapped_column( + ForeignKey("credential.id"), nullable=True ) status: Mapped[IndexingStatus] = mapped_column(Enum(IndexingStatus)) document_ids: Mapped[list[str] | None] = mapped_column( @@ -82,16 +155,27 @@ class 
IndexAttempt(Base): error_msg: Mapped[str | None] = mapped_column( String(), default=None ) # only filled if status = "failed" + time_created: Mapped[datetime.datetime] = mapped_column( + DateTime(timezone=True), server_default=func.now() + ) + time_updated: Mapped[datetime.datetime] = mapped_column( + DateTime(timezone=True), server_default=func.now(), onupdate=func.now() + ) + + connector: Mapped[Connector] = relationship( + "Connector", back_populates="index_attempts" + ) + credential: Mapped[Credential] = relationship( + "Credential", back_populates="index_attempts" + ) def __repr__(self) -> str: return ( f"" + f"time_created={self.time_created!r}, " + f"time_updated={self.time_updated!r}, " ) diff --git a/backend/danswer/direct_qa/__init__.py b/backend/danswer/direct_qa/__init__.py index b40721dcf..2335a9a17 100644 --- a/backend/danswer/direct_qa/__init__.py +++ b/backend/danswer/direct_qa/__init__.py @@ -7,7 +7,7 @@ from danswer.direct_qa.question_answer import OpenAICompletionQA def get_default_backend_qa_model( - internal_model: str = INTERNAL_MODEL_VERSION, **kwargs: dict[str, Any] + internal_model: str = INTERNAL_MODEL_VERSION, **kwargs: Any ) -> QAModel: if internal_model == "openai-completion": return OpenAICompletionQA(**kwargs) diff --git a/backend/danswer/direct_qa/question_answer.py b/backend/danswer/direct_qa/question_answer.py index 3d51c01fb..c6ce3a790 100644 --- a/backend/danswer/direct_qa/question_answer.py +++ b/backend/danswer/direct_qa/question_answer.py @@ -45,8 +45,10 @@ from openai.error import Timeout logger = setup_logger() -def get_openai_api_key(): - return OPENAI_API_KEY or get_dynamic_config_store().load(OPENAI_API_KEY_STORAGE_KEY) +def get_openai_api_key() -> str: + return OPENAI_API_KEY or cast( + str, get_dynamic_config_store().load(OPENAI_API_KEY_STORAGE_KEY) + ) def get_json_line(json_dict: dict) -> str: @@ -198,7 +200,7 @@ ModelType = Literal["ChatCompletion", "Completion"] PromptProcessor = Callable[[str, list[str]], str] 
-def _build_openai_settings(**kwargs: dict[str, Any]) -> dict[str, Any]: +def _build_openai_settings(**kwargs: Any) -> dict[str, Any]: """ Utility to add in some common default values so they don't have to be set every time. """ @@ -218,7 +220,7 @@ def _handle_openai_exceptions_wrapper(openai_call: F, query: str) -> F: # if streamed, the call returns a generator if kwargs.get("stream"): - def _generator(): + def _generator() -> Generator[Any, None, None]: yield from openai_call(*args, **kwargs) return _generator() diff --git a/backend/danswer/main.py b/backend/danswer/main.py index b361b155f..89cc309c1 100644 --- a/backend/danswer/main.py +++ b/backend/danswer/main.py @@ -11,6 +11,7 @@ from danswer.configs.app_configs import ENABLE_OAUTH from danswer.configs.app_configs import SECRET from danswer.configs.app_configs import WEB_DOMAIN from danswer.datastores.qdrant.indexing import list_collections +from danswer.db.credentials import create_initial_public_credential from danswer.server.admin import router as admin_router from danswer.server.event_loading import router as event_processing_router from danswer.server.health import router as health_router @@ -35,6 +36,13 @@ def validation_exception_handler( return JSONResponse(content=content, status_code=422) +def value_error_handler(_: Request, exc: ValueError) -> JSONResponse: + return JSONResponse( + status_code=400, + content={"message": str(exc)}, + ) + + def get_application() -> FastAPI: application = FastAPI(title="Internal Search QA Backend", debug=True, version="0.1") application.include_router(backend_router) @@ -94,6 +102,8 @@ def get_application() -> FastAPI: RequestValidationError, validation_exception_handler ) + application.add_exception_handler(ValueError, value_error_handler) + @application.on_event("startup") def startup_event() -> None: # To avoid circular imports @@ -112,6 +122,9 @@ def get_application() -> FastAPI: warm_up_models() logger.info("Semantic Search models are ready.") + 
logger.info("Verifying public credential exists.") + create_initial_public_credential() + return application diff --git a/backend/danswer/semantic_search/semantic_search.py b/backend/danswer/semantic_search/semantic_search.py index 8a871e4d5..786758678 100644 --- a/backend/danswer/semantic_search/semantic_search.py +++ b/backend/danswer/semantic_search/semantic_search.py @@ -80,11 +80,12 @@ def semantic_reranking( @log_function_time() def retrieve_ranked_documents( query: str, + user_id: int | None, filters: list[DatastoreFilter] | None, datastore: Datastore, num_hits: int = NUM_RETURNED_HITS, ) -> list[InferenceChunk] | None: - top_chunks = datastore.semantic_retrieval(query, filters, num_hits) + top_chunks = datastore.semantic_retrieval(query, user_id, filters, num_hits) if not top_chunks: filters_log_msg = json.dumps(filters, separators=(",", ":")).replace("\n", "") logger.warning( diff --git a/backend/danswer/server/admin.py b/backend/danswer/server/admin.py index f8b0e2154..cdaf2a8ba 100644 --- a/backend/danswer/server/admin.py +++ b/backend/danswer/server/admin.py @@ -1,153 +1,518 @@ -from typing import Any +from collections import defaultdict from typing import cast from danswer.auth.users import current_admin_user +from danswer.configs.app_configs import MASK_CREDENTIAL_PREFIX from danswer.configs.constants import DocumentSource -from danswer.configs.constants import NO_AUTH_USER from danswer.configs.constants import OPENAI_API_KEY_STORAGE_KEY -from danswer.connectors.factory import build_connector +from danswer.connectors.google_drive.connector_auth import DB_CREDENTIALS_DICT_KEY from danswer.connectors.google_drive.connector_auth import get_auth_url from danswer.connectors.google_drive.connector_auth import get_drive_tokens -from danswer.connectors.google_drive.connector_auth import save_access_tokens +from danswer.connectors.google_drive.connector_auth import get_google_app_cred +from danswer.connectors.google_drive.connector_auth import ( + 
update_credential_access_tokens, +) +from danswer.connectors.google_drive.connector_auth import upsert_google_app_cred from danswer.connectors.google_drive.connector_auth import verify_csrf -from danswer.connectors.models import InputType -from danswer.connectors.slack.config import get_slack_config -from danswer.connectors.slack.config import SlackConfig -from danswer.connectors.slack.config import update_slack_config -from danswer.db.index_attempt import fetch_index_attempts -from danswer.db.index_attempt import insert_index_attempt +from danswer.db.connector import add_credential_to_connector +from danswer.db.connector import create_connector +from danswer.db.connector import delete_connector +from danswer.db.connector import fetch_connector_by_id +from danswer.db.connector import fetch_connectors +from danswer.db.connector import fetch_latest_index_attempt_by_connector +from danswer.db.connector import fetch_latest_index_attempts_by_status +from danswer.db.connector import get_connector_credential_ids +from danswer.db.connector import remove_credential_from_connector +from danswer.db.connector import update_connector +from danswer.db.credentials import create_credential +from danswer.db.credentials import delete_credential +from danswer.db.credentials import fetch_credential_by_id +from danswer.db.credentials import fetch_credentials +from danswer.db.credentials import mask_credential_dict +from danswer.db.credentials import update_credential +from danswer.db.engine import get_session +from danswer.db.index_attempt import create_index_attempt from danswer.db.models import IndexAttempt from danswer.db.models import IndexingStatus from danswer.db.models import User -from danswer.direct_qa.key_validation import ( - check_openai_api_key_is_valid, -) +from danswer.direct_qa.key_validation import check_openai_api_key_is_valid from danswer.direct_qa.question_answer import get_openai_api_key from danswer.dynamic_configs import get_dynamic_config_store from 
danswer.dynamic_configs.interface import ConfigNotFoundError from danswer.server.models import ApiKey from danswer.server.models import AuthStatus from danswer.server.models import AuthUrl +from danswer.server.models import ConnectorBase +from danswer.server.models import ConnectorIndexingStatus +from danswer.server.models import ConnectorSnapshot +from danswer.server.models import CredentialBase +from danswer.server.models import CredentialSnapshot from danswer.server.models import GDriveCallback -from danswer.server.models import IndexAttemptRequest +from danswer.server.models import GoogleAppCredentials from danswer.server.models import IndexAttemptSnapshot -from danswer.server.models import ListIndexAttemptsResponse +from danswer.server.models import ObjectCreationIdResponse +from danswer.server.models import RunConnectorRequest +from danswer.server.models import StatusResponse from danswer.utils.logging import setup_logger from fastapi import APIRouter from fastapi import Depends from fastapi import HTTPException -from pydantic import BaseModel +from fastapi import Request +from fastapi import Response +from sqlalchemy.orm import Session router = APIRouter(prefix="/admin") logger = setup_logger() -@router.get("/connectors/google-drive/check-auth", response_model=AuthStatus) -def check_drive_tokens(_: User = Depends(current_admin_user)) -> AuthStatus: - tokens = get_drive_tokens() - authenticated = tokens is not None - return AuthStatus(authenticated=authenticated) - - -@router.get("/connectors/google-drive/authorize", response_model=AuthUrl) -def google_drive_auth(user: User = Depends(current_admin_user)) -> AuthUrl: - user_id = str(user.id) if user else NO_AUTH_USER - return AuthUrl(auth_url=get_auth_url(user_id)) - - -@router.get("/connectors/google-drive/callback", status_code=201) -def google_drive_callback( - callback: GDriveCallback = Depends(), user: User = Depends(current_admin_user) -) -> None: - user_id = str(user.id) if user else NO_AUTH_USER - 
verify_csrf(user_id, callback.state) - return save_access_tokens(callback.code) - - -@router.get("/connectors/slack/config", response_model=SlackConfig) -def fetch_slack_config(_: User = Depends(current_admin_user)) -> SlackConfig: - try: - return get_slack_config() - except ConfigNotFoundError: - return SlackConfig(slack_bot_token="", workspace_id="") - - -@router.post("/connectors/slack/config") -def modify_slack_config( - slack_config: SlackConfig, _: User = Depends(current_admin_user) -) -> None: - update_slack_config(slack_config) - - -@router.post("/connectors/{source}/index-attempt", status_code=201) -def index( - source: DocumentSource, - index_attempt_request: IndexAttemptRequest, +@router.get("/connector/google-drive/app-credential") +def check_google_app_credentials_exist( _: User = Depends(current_admin_user), -) -> None: - # validate that the connector specified by the source / input_type combination - # exists AND that the connector_specific_config is valid for that connector type, should be load - build_connector( - source=source, - input_type=index_attempt_request.input_type, - connector_specific_config=index_attempt_request.connector_specific_config, +) -> dict[str, str]: + try: + return {"client_id": get_google_app_cred().web.client_id} + except ConfigNotFoundError as e: + raise HTTPException(status_code=404, detail="Google App Credentials not found") + + +@router.put("/connector/google-drive/app-credential") +def update_google_app_credentials( + app_credentials: GoogleAppCredentials, _: User = Depends(current_admin_user) +) -> StatusResponse: + try: + upsert_google_app_cred(app_credentials) + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + + return StatusResponse( + success=True, message="Successfully saved Google App Credentials" ) - # once validated, insert the index attempt into the database where it will - # get picked up by a background job - insert_index_attempt( - index_attempt=IndexAttempt( - source=source, 
- input_type=index_attempt_request.input_type, - connector_specific_config=index_attempt_request.connector_specific_config, - status=IndexingStatus.NOT_STARTED, + +@router.get("/connector/google-drive/check-auth/{credential_id}") +def check_drive_tokens( + credential_id: int, + user: User = Depends(current_admin_user), + db_session: Session = Depends(get_session), +) -> AuthStatus: + db_credentials = fetch_credential_by_id(credential_id, user, db_session) + if ( + not db_credentials + or DB_CREDENTIALS_DICT_KEY not in db_credentials.credential_json + ): + return AuthStatus(authenticated=False) + token_json_str = str(db_credentials.credential_json[DB_CREDENTIALS_DICT_KEY]) + google_drive_creds = get_drive_tokens(token_json_str=token_json_str) + if google_drive_creds is None: + return AuthStatus(authenticated=False) + return AuthStatus(authenticated=True) + + +_GOOGLE_DRIVE_CREDENTIAL_ID_COOKIE_NAME = "google_drive_credential_id" + + +@router.get("/connector/google-drive/authorize/{credential_id}", response_model=AuthUrl) +def google_drive_auth( + response: Response, credential_id: str, _: User = Depends(current_admin_user) +) -> AuthUrl: + # set a cookie that we can read in the callback (used for `verify_csrf`) + response.set_cookie( + key=_GOOGLE_DRIVE_CREDENTIAL_ID_COOKIE_NAME, + value=credential_id, + httponly=True, + max_age=600, + ) + return AuthUrl(auth_url=get_auth_url(int(credential_id))) + + +@router.get("/connector/google-drive/callback") +def google_drive_callback( + request: Request, + callback: GDriveCallback = Depends(), + user: User = Depends(current_admin_user), + db_session: Session = Depends(get_session), +) -> StatusResponse: + credential_id_cookie = request.cookies.get(_GOOGLE_DRIVE_CREDENTIAL_ID_COOKIE_NAME) + if credential_id_cookie is None or not credential_id_cookie.isdigit(): + raise HTTPException( + status_code=401, detail="Request did not pass CSRF verification." 
) - ) + credential_id = int(credential_id_cookie) + verify_csrf(credential_id, callback.state) + if ( + update_credential_access_tokens(callback.code, credential_id, user, db_session) + is None + ): + raise HTTPException( + status_code=500, detail="Unable to fetch Google Drive access tokens" + ) + + return StatusResponse(success=True, message="Updated Google Drive access tokens") -@router.get("/connectors/{source}/index-attempt") +@router.get("/latest-index-attempt", response_model=list[IndexAttemptSnapshot]) +def list_all_index_attempts( + _: User = Depends(current_admin_user), + db_session: Session = Depends(get_session), +) -> list[IndexAttemptSnapshot]: + index_attempts = fetch_latest_index_attempt_by_connector(db_session) + return [ + IndexAttemptSnapshot( + source=index_attempt.connector.source, + input_type=index_attempt.connector.input_type, + status=index_attempt.status, + connector_specific_config=index_attempt.connector.connector_specific_config, + docs_indexed=0 + if not index_attempt.document_ids + else len(index_attempt.document_ids), + time_created=index_attempt.time_created, + time_updated=index_attempt.time_updated, + ) + for index_attempt in index_attempts + ] + + +@router.get("/latest-index-attempt/{source}", response_model=list[IndexAttemptSnapshot]) def list_index_attempts( source: DocumentSource, _: User = Depends(current_admin_user), -) -> ListIndexAttemptsResponse: - index_attempts = fetch_index_attempts(sources=[source]) - return ListIndexAttemptsResponse( - index_attempts=[ - IndexAttemptSnapshot( - connector_specific_config=index_attempt.connector_specific_config, - status=index_attempt.status, - source=index_attempt.source, - time_created=index_attempt.time_created, - time_updated=index_attempt.time_updated, - docs_indexed=0 - if not index_attempt.document_ids - else len(index_attempt.document_ids), + db_session: Session = Depends(get_session), +) -> list[IndexAttemptSnapshot]: + index_attempts = 
fetch_latest_index_attempt_by_connector(db_session, source=source) + return [ + IndexAttemptSnapshot( + source=index_attempt.connector.source, + input_type=index_attempt.connector.input_type, + status=index_attempt.status, + connector_specific_config=index_attempt.connector.connector_specific_config, + docs_indexed=0 + if not index_attempt.document_ids + else len(index_attempt.document_ids), + time_created=index_attempt.time_created, + time_updated=index_attempt.time_updated, + ) + for index_attempt in index_attempts + ] + + +@router.get("/connector", response_model=list[ConnectorSnapshot]) +def get_connectors( + _: User = Depends(current_admin_user), + db_session: Session = Depends(get_session), +) -> list[ConnectorSnapshot]: + connectors = fetch_connectors(db_session) + return [ + ConnectorSnapshot.from_connector_db_model(connector) for connector in connectors + ] + + +@router.get("/connector/indexing-status") +def get_connector_indexing_status( + _: User = Depends(current_admin_user), + db_session: Session = Depends(get_session), +) -> list[ConnectorIndexingStatus]: + connector_id_to_connector = { + connector.id: connector for connector in fetch_connectors(db_session) + } + index_attempts = fetch_latest_index_attempts_by_status(db_session) + connector_to_index_attempts: dict[int, list[IndexAttempt]] = defaultdict(list) + for index_attempt in index_attempts: + # don't consider index attempts where the connector has been deleted + if index_attempt.connector_id: + connector_to_index_attempts[index_attempt.connector_id].append( + index_attempt ) - for index_attempt in index_attempts + + indexing_statuses: list[ConnectorIndexingStatus] = [] + for connector_id, index_attempts in connector_to_index_attempts.items(): + # NOTE: index_attempts is guaranteed to be length > 0 + connector = connector_id_to_connector[connector_id] + index_attempts_sorted = sorted( + index_attempts, key=lambda x: x.time_updated, reverse=True + ) + successful_index_attempts_sorted = [ + 
index_attempt + for index_attempt in index_attempts_sorted + if index_attempt.status == IndexingStatus.SUCCESS ] + indexing_statuses.append( + ConnectorIndexingStatus( + connector=ConnectorSnapshot.from_connector_db_model(connector), + last_status=index_attempts_sorted[0].status, + last_success=successful_index_attempts_sorted[0].time_updated + if successful_index_attempts_sorted + else None, + docs_indexed=len(successful_index_attempts_sorted[0].document_ids) + if successful_index_attempts_sorted + and successful_index_attempts_sorted[0].document_ids + else 0, + ), + ) + + # add in the connector that haven't started indexing yet + for connector in connector_id_to_connector.values(): + if connector.id not in connector_to_index_attempts: + indexing_statuses.append( + ConnectorIndexingStatus( + connector=ConnectorSnapshot.from_connector_db_model(connector), + last_status=IndexingStatus.NOT_STARTED, + last_success=None, + docs_indexed=0, + ), + ) + + return indexing_statuses + + +@router.get( + "/connector/{connector_id}", + response_model=ConnectorSnapshot | StatusResponse[int], +) +def get_connector_by_id( + connector_id: int, + _: User = Depends(current_admin_user), + db_session: Session = Depends(get_session), +) -> ConnectorSnapshot | StatusResponse[int]: + connector = fetch_connector_by_id(connector_id, db_session) + if connector is None: + raise HTTPException( + status_code=404, detail=f"Connector {connector_id} does not exist" + ) + + return ConnectorSnapshot( + id=connector.id, + name=connector.name, + source=connector.source, + input_type=connector.input_type, + connector_specific_config=connector.connector_specific_config, + refresh_freq=connector.refresh_freq, + credential_ids=[ + association.credential.id for association in connector.credentials + ], + time_created=connector.time_created, + time_updated=connector.time_updated, + disabled=connector.disabled, ) -@router.get("/connectors/index-attempt") -def list_all_index_attempts( 
+@router.post("/connector", response_model=ObjectCreationIdResponse) +def create_connector_from_model( + connector_info: ConnectorBase, _: User = Depends(current_admin_user), -) -> ListIndexAttemptsResponse: - index_attempts = fetch_index_attempts() - return ListIndexAttemptsResponse( - index_attempts=[ - IndexAttemptSnapshot( - connector_specific_config=index_attempt.connector_specific_config, - status=index_attempt.status, - source=index_attempt.source, - time_created=index_attempt.time_created, - time_updated=index_attempt.time_updated, - docs_indexed=0 - if not index_attempt.document_ids - else len(index_attempt.document_ids), + db_session: Session = Depends(get_session), +) -> ObjectCreationIdResponse: + try: + return create_connector(connector_info, db_session) + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + + +@router.patch( + "/connector/{connector_id}", + response_model=ConnectorSnapshot | StatusResponse[int], +) +def update_connector_from_model( + connector_id: int, + connector_data: ConnectorBase, + _: User = Depends(current_admin_user), + db_session: Session = Depends(get_session), +) -> ConnectorSnapshot | StatusResponse[int]: + updated_connector = update_connector(connector_id, connector_data, db_session) + if updated_connector is None: + raise HTTPException( + status_code=404, detail=f"Connector {connector_id} does not exist" + ) + + return ConnectorSnapshot( + id=updated_connector.id, + name=updated_connector.name, + source=updated_connector.source, + input_type=updated_connector.input_type, + connector_specific_config=updated_connector.connector_specific_config, + refresh_freq=updated_connector.refresh_freq, + credential_ids=[ + association.credential.id for association in updated_connector.credentials + ], + time_created=updated_connector.time_created, + time_updated=updated_connector.time_updated, + disabled=updated_connector.disabled, + ) + + +@router.delete("/connector/{connector_id}", 
response_model=StatusResponse[int]) +def delete_connector_by_id( + connector_id: int, + _: User = Depends(current_admin_user), + db_session: Session = Depends(get_session), +) -> StatusResponse[int]: + return delete_connector(connector_id, db_session) + + +@router.get("/credential", response_model=list[CredentialSnapshot]) +def get_credentials( + user: User = Depends(current_admin_user), + db_session: Session = Depends(get_session), +) -> list[CredentialSnapshot]: + credentials = fetch_credentials(user, db_session) + return [ + CredentialSnapshot( + id=credential.id, + credential_json=mask_credential_dict(credential.credential_json) + if MASK_CREDENTIAL_PREFIX + else credential.credential_json, + user_id=credential.user_id, + public_doc=credential.public_doc, + time_created=credential.time_created, + time_updated=credential.time_updated, + ) + for credential in credentials + ] + + +@router.get( + "/credential/{credential_id}", + response_model=CredentialSnapshot | StatusResponse[int], +) +def get_credential_by_id( + credential_id: int, + user: User = Depends(current_admin_user), + db_session: Session = Depends(get_session), +) -> CredentialSnapshot | StatusResponse[int]: + credential = fetch_credential_by_id(credential_id, user, db_session) + if credential is None: + raise HTTPException( + status_code=401, + detail=f"Credential {credential_id} does not exist or does not belong to user", + ) + + return CredentialSnapshot( + id=credential.id, + credential_json=mask_credential_dict(credential.credential_json) + if MASK_CREDENTIAL_PREFIX + else credential.credential_json, + user_id=credential.user_id, + public_doc=credential.public_doc, + time_created=credential.time_created, + time_updated=credential.time_updated, + ) + + +@router.post("/credential", response_model=ObjectCreationIdResponse) +def create_credential_from_model( + connector_info: CredentialBase, + user: User = Depends(current_admin_user), + db_session: Session = Depends(get_session), +) -> 
ObjectCreationIdResponse: + return create_credential(connector_info, user, db_session) + + +@router.patch( + "/credential/{credential_id}", + response_model=CredentialSnapshot | StatusResponse[int], +) +def update_credential_from_model( + credential_id: int, + credential_data: CredentialBase, + user: User = Depends(current_admin_user), + db_session: Session = Depends(get_session), +) -> CredentialSnapshot | StatusResponse[int]: + updated_credential = update_credential( + credential_id, credential_data, user, db_session + ) + if updated_credential is None: + raise HTTPException( + status_code=401, + detail=f"Credential {credential_id} does not exist or does not belong to user", + ) + + return CredentialSnapshot( + id=updated_credential.id, + credential_json=updated_credential.credential_json, + user_id=updated_credential.user_id, + public_doc=updated_credential.public_doc, + time_created=updated_credential.time_created, + time_updated=updated_credential.time_updated, + ) + + +@router.delete("/credential/{credential_id}", response_model=StatusResponse[int]) +def delete_credential_by_id( + credential_id: int, + user: User = Depends(current_admin_user), + db_session: Session = Depends(get_session), +) -> StatusResponse: + delete_credential(credential_id, user, db_session) + return StatusResponse( + success=True, message="Credential deleted successfully", data=credential_id + ) + + +@router.put("/connector/{connector_id}/credential/{credential_id}") +def associate_credential_to_connector( + connector_id: int, + credential_id: int, + user: User = Depends(current_admin_user), + db_session: Session = Depends(get_session), +) -> StatusResponse[int]: + return add_credential_to_connector(connector_id, credential_id, user, db_session) + + +@router.delete("/connector/{connector_id}/credential/{credential_id}") +def dissociate_credential_from_connector( + connector_id: int, + credential_id: int, + user: User = Depends(current_admin_user), + db_session: Session = 
Depends(get_session), +) -> StatusResponse[int]: + return remove_credential_from_connector( + connector_id, credential_id, user, db_session + ) + + +@router.post("/connector/run-once") +def connector_run_once( + run_info: RunConnectorRequest, + _: User = Depends(current_admin_user), + db_session: Session = Depends(get_session), +) -> StatusResponse[list[int]]: + connector_id = run_info.connector_id + specified_credential_ids = run_info.credential_ids + try: + possible_credential_ids = get_connector_credential_ids( + run_info.connector_id, db_session + ) + except ValueError: + return StatusResponse( + success=False, + message=f"Connector by id {connector_id} does not exist.", + ) + + if not specified_credential_ids: + credential_ids = possible_credential_ids + else: + if set(specified_credential_ids).issubset(set(possible_credential_ids)): + credential_ids = specified_credential_ids + else: + return StatusResponse( + success=False, + message=f"Not all specified credentials are associated with connector", ) - for index_attempt in index_attempts - ] + + if not credential_ids: + return StatusResponse( + success=False, + message=f"Connector has no valid credentials, cannot create index attempts.", + ) + + index_attempt_ids = [ + create_index_attempt(run_info.connector_id, credential_id, db_session) + for credential_id in credential_ids + ] + return StatusResponse( + success=True, + message=f"Successfully created {len(index_attempt_ids)} index attempts", + data=index_attempt_ids, ) @@ -155,7 +520,6 @@ def list_all_index_attempts( def validate_existing_openai_api_key( _: User = Depends(current_admin_user), ) -> None: - is_valid = False try: openai_api_key = get_openai_api_key() is_valid = check_openai_api_key_is_valid(openai_api_key) @@ -168,7 +532,7 @@ def validate_existing_openai_api_key( raise HTTPException(status_code=400, detail="Invalid API key provided") -@router.get("/openai-api-key") +@router.get("/openai-api-key", response_model=ApiKey) def 
get_openai_api_key_from_dynamic_config_store( _: User = Depends(current_admin_user), ) -> ApiKey: diff --git a/backend/danswer/server/event_loading.py b/backend/danswer/server/event_loading.py index ecc8477ea..7eeb59759 100644 --- a/backend/danswer/server/event_loading.py +++ b/backend/danswer/server/event_loading.py @@ -3,7 +3,6 @@ from typing import Any from danswer.connectors.slack.connector import get_channel_info from danswer.connectors.slack.connector import get_thread from danswer.connectors.slack.connector import thread_to_doc -from danswer.connectors.slack.utils import get_client from danswer.utils.indexing_pipeline import build_indexing_pipeline from danswer.utils.logging import setup_logger from fastapi import APIRouter @@ -25,41 +24,43 @@ class EventHandlingResponse(BaseModel): challenge: str | None -@router.post("/process_slack_event", response_model=EventHandlingResponse) -def process_slack_event(event: SlackEvent) -> EventHandlingResponse: - logger.info("Recieved slack event: %s", event.dict()) +# TODO: just store entry in DB and process in the background, until then this +# won't work cleanly since the slack bot token is not easily accessible +# @router.post("/process_slack_event", response_model=EventHandlingResponse) +# def process_slack_event(event: SlackEvent) -> EventHandlingResponse: +# logger.info("Recieved slack event: %s", event.dict()) - if event.type == "url_verification": - return EventHandlingResponse(challenge=event.challenge) +# if event.type == "url_verification": +# return EventHandlingResponse(challenge=event.challenge) - if event.type == "event_callback" and event.event: - try: - # TODO: process in the background as per slack guidelines - message_type = event.event.get("subtype") - if message_type == "message_changed": - message = event.event["message"] - else: - message = event.event +# if event.type == "event_callback" and event.event: +# try: +# # TODO: process in the background as per slack guidelines +# message_type = 
event.event.get("subtype") +# if message_type == "message_changed": +# message = event.event["message"] +# else: +# message = event.event - channel_id = event.event["channel"] - thread_ts = message.get("thread_ts") - slack_client = get_client() - doc = thread_to_doc( - channel=get_channel_info(client=slack_client, channel_id=channel_id), - thread=get_thread( - client=slack_client, channel_id=channel_id, thread_id=thread_ts - ) - if thread_ts - else [message], - ) - if doc is None: - logger.info("Message was determined to not be indexable") - return EventHandlingResponse(challenge=None) # @CHRIS is this right? +# channel_id = event.event["channel"] +# thread_ts = message.get("thread_ts") +# slack_client = get_client() +# doc = thread_to_doc( +# channel=get_channel_info(client=slack_client, channel_id=channel_id), +# thread=get_thread( +# client=slack_client, channel_id=channel_id, thread_id=thread_ts +# ) +# if thread_ts +# else [message], +# ) +# if doc is None: +# logger.info("Message was determined to not be indexable") +# return EventHandlingResponse(challenge=None) - build_indexing_pipeline()([doc]) - except Exception: - logger.exception("Failed to process slack message") - return EventHandlingResponse(challenge=None) +# build_indexing_pipeline()([doc]) +# except Exception: +# logger.exception("Failed to process slack message") +# return EventHandlingResponse(challenge=None) - logger.error("Unsupported event type: %s", event.type) - return EventHandlingResponse(challenge=None) +# logger.error("Unsupported event type: %s", event.type) +# return EventHandlingResponse(challenge=None) diff --git a/backend/danswer/server/health.py b/backend/danswer/server/health.py index a9a652783..54da5dffe 100644 --- a/backend/danswer/server/health.py +++ b/backend/danswer/server/health.py @@ -1,10 +1,10 @@ -from danswer.server.models import HealthCheckResponse +from danswer.server.models import StatusResponse from fastapi import APIRouter router = APIRouter() 
-@router.get("/health") -def healthcheck() -> HealthCheckResponse: - return {"status": "ok"} +@router.get("/health", response_model=StatusResponse) +def healthcheck() -> StatusResponse: + return StatusResponse(success=True, message="ok") diff --git a/backend/danswer/server/models.py b/backend/danswer/server/models.py index ecf98ab28..ee294dcd9 100644 --- a/backend/danswer/server/models.py +++ b/backend/danswer/server/models.py @@ -1,18 +1,56 @@ from datetime import datetime from typing import Any +from typing import Generic from typing import Literal +from typing import Optional +from typing import TYPE_CHECKING +from typing import TypeVar from danswer.configs.constants import DocumentSource from danswer.connectors.models import InputType from danswer.datastores.interfaces import DatastoreFilter +from danswer.db.models import Connector from danswer.db.models import IndexingStatus from pydantic import BaseModel +from pydantic.generics import GenericModel + + +DataT = TypeVar("DataT") + + +class StatusResponse(GenericModel, Generic[DataT]): + success: bool + message: Optional[str] = None + data: Optional[DataT] = None + + +class DataRequest(BaseModel): + data: str + + +class GoogleAppWebCredentials(BaseModel): + client_id: str + project_id: str + auth_uri: str + token_uri: str + auth_provider_x509_cert_url: str + client_secret: str + redirect_uris: list[str] + javascript_origins: list[str] + + +class GoogleAppCredentials(BaseModel): + web: GoogleAppWebCredentials class HealthCheckResponse(BaseModel): status: Literal["ok"] +class ObjectCreationIdResponse(BaseModel): + id: int | str + + class AuthStatus(BaseModel): authenticated: bool @@ -62,17 +100,73 @@ class IndexAttemptRequest(BaseModel): connector_specific_config: dict[str, Any] -class IndexAttemptSnapshot(BaseModel): - connector_specific_config: dict[str, Any] - status: IndexingStatus +class ConnectorBase(BaseModel): + name: str source: DocumentSource + input_type: InputType + connector_specific_config: dict[str, 
Any] + refresh_freq: int | None # In seconds, None for one time index with no refresh + disabled: bool + + +class ConnectorSnapshot(ConnectorBase): + id: int + credential_ids: list[int] time_created: datetime time_updated: datetime + + @classmethod + def from_connector_db_model(cls, connector: Connector) -> "ConnectorSnapshot": + return ConnectorSnapshot( + id=connector.id, + name=connector.name, + source=connector.source, + input_type=connector.input_type, + connector_specific_config=connector.connector_specific_config, + refresh_freq=connector.refresh_freq, + credential_ids=[ + association.credential.id for association in connector.credentials + ], + time_created=connector.time_created, + time_updated=connector.time_updated, + disabled=connector.disabled, + ) + + +class ConnectorIndexingStatus(BaseModel): + """Represents the latest indexing status of a connector""" + + connector: ConnectorSnapshot + last_status: IndexingStatus + last_success: datetime | None docs_indexed: int -class ListIndexAttemptsResponse(BaseModel): - index_attempts: list[IndexAttemptSnapshot] +class RunConnectorRequest(BaseModel): + connector_id: int + credential_ids: list[int] | None + + +class CredentialBase(BaseModel): + credential_json: dict[str, Any] + public_doc: bool + + +class CredentialSnapshot(CredentialBase): + id: int + user_id: int | None + time_created: datetime + time_updated: datetime + + +class IndexAttemptSnapshot(BaseModel): + source: DocumentSource + input_type: InputType + status: IndexingStatus + connector_specific_config: dict[str, Any] + docs_indexed: int + time_created: datetime + time_updated: datetime class ApiKey(BaseModel): diff --git a/backend/danswer/server/search_backend.py b/backend/danswer/server/search_backend.py index 5c8089179..ca0bb281a 100644 --- a/backend/danswer/server/search_backend.py +++ b/backend/danswer/server/search_backend.py @@ -2,8 +2,8 @@ import time from collections.abc import Generator from danswer.auth.schemas import UserRole -from 
danswer.auth.users import current_active_user from danswer.auth.users import current_admin_user +from danswer.auth.users import current_user from danswer.configs.app_configs import KEYWORD_MAX_HITS from danswer.configs.app_configs import NUM_RERANKED_RESULTS from danswer.configs.app_configs import QA_TIMEOUT @@ -36,7 +36,7 @@ router = APIRouter() @router.get("/get-user-role", response_model=UserRoleResponse) -async def get_user_role(user: User = Depends(current_active_user)) -> UserRoleResponse: +async def get_user_role(user: User = Depends(current_user)) -> UserRoleResponse: if user is None: raise ValueError("Invalid or missing user.") return UserRoleResponse(role=user.role) @@ -61,7 +61,7 @@ async def promote_admin( @router.get("/direct-qa", response_model=QAResponse) def direct_qa( - question: QAQuestion = Depends(), _: User = Depends(current_active_user) + question: QAQuestion = Depends(), user: User = Depends(current_user) ) -> QAResponse: start_time = time.time() @@ -70,8 +70,9 @@ def direct_qa( filters = question.filters logger.info(f"Received semantic query: {query}") + user_id = None if user is None else int(user.id) ranked_chunks = retrieve_ranked_documents( - query, filters, create_datastore(collection) + query, user_id, filters, create_datastore(collection) ) if not ranked_chunks: return QAResponse(answer=None, quotes=None, ranked_documents=None) @@ -102,7 +103,7 @@ def direct_qa( @router.get("/stream-direct-qa") def stream_direct_qa( - question: QAQuestion = Depends(), _: User = Depends(current_active_user) + question: QAQuestion = Depends(), user: User = Depends(current_user) ) -> StreamingResponse: top_documents_key = "top_documents" @@ -112,8 +113,9 @@ def stream_direct_qa( filters = question.filters logger.info(f"Received semantic query: {query}") + user_id = None if user is None else int(user.id) ranked_chunks = retrieve_ranked_documents( - query, filters, create_datastore(collection) + query, user_id, filters, create_datastore(collection) ) if 
not ranked_chunks: yield get_json_line({top_documents_key: None}) @@ -151,7 +153,7 @@ def stream_direct_qa( @router.get("/keyword-search", response_model=KeywordResponse) def keyword_search( - question: QAQuestion = Depends(), _: User = Depends(current_active_user) + question: QAQuestion = Depends(), _: User = Depends(current_user) ) -> KeywordResponse: ts_client = TSClient.get_instance() query = question.query diff --git a/backend/danswer/utils/indexing_pipeline.py b/backend/danswer/utils/indexing_pipeline.py index 0c63ccd72..a9235f5a5 100644 --- a/backend/danswer/utils/indexing_pipeline.py +++ b/backend/danswer/utils/indexing_pipeline.py @@ -1,6 +1,8 @@ from collections.abc import Callable from functools import partial from itertools import chain +from typing import Any +from typing import Protocol from danswer.chunking.chunk import Chunker from danswer.chunking.chunk import DefaultChunker @@ -12,17 +14,26 @@ from danswer.semantic_search.biencoder import DefaultEmbedder from danswer.semantic_search.type_aliases import Embedder +class IndexingPipelineProtocol(Protocol): + def __call__( + self, documents: list[Document], user_id: int | None + ) -> list[EmbeddedIndexChunk]: + ... 
+ + def _indexing_pipeline( + *, chunker: Chunker, embedder: Embedder, datastore: Datastore, documents: list[Document], + user_id: int | None, ) -> list[EmbeddedIndexChunk]: # TODO: make entire indexing pipeline async to not block the entire process # when running on async endpoints chunks = list(chain(*[chunker.chunk(document) for document in documents])) chunks_with_embeddings = embedder.embed(chunks) - datastore.index(chunks_with_embeddings) + datastore.index(chunks_with_embeddings, user_id) return chunks_with_embeddings @@ -31,7 +42,7 @@ def build_indexing_pipeline( chunker: Chunker | None = None, embedder: Embedder | None = None, datastore: Datastore | None = None, -) -> Callable[[list[Document]], list[EmbeddedIndexChunk]]: +) -> IndexingPipelineProtocol: """Builds a pipline which takes in a list of docs and indexes them. Default uses _ chunker, _ embedder, and qdrant for the datastore""" @@ -44,4 +55,6 @@ def build_indexing_pipeline( if datastore is None: datastore = QdrantDatastore() - return partial(_indexing_pipeline, chunker, embedder, datastore) + return partial( + _indexing_pipeline, chunker=chunker, embedder=embedder, datastore=datastore + ) diff --git a/backend/scripts/ingestion.py b/backend/scripts/ingestion.py deleted file mode 100644 index 4706b1ed8..000000000 --- a/backend/scripts/ingestion.py +++ /dev/null @@ -1,156 +0,0 @@ -# This file is only for development purposes -import argparse -from itertools import chain - -from danswer.chunking.chunk import Chunker -from danswer.chunking.chunk import DefaultChunker -from danswer.configs.app_configs import INDEX_BATCH_SIZE -from danswer.configs.app_configs import QDRANT_DEFAULT_COLLECTION -from danswer.connectors.confluence.connector import ConfluenceConnector -from danswer.connectors.github.connector import GithubConnector -from danswer.connectors.google_drive.connector import GoogleDriveConnector -from danswer.connectors.google_drive.connector_auth import backend_get_credentials -from 
danswer.connectors.interfaces import LoadConnector -from danswer.connectors.slack.connector import SlackConnector -from danswer.connectors.web.connector import WebConnector -from danswer.datastores.interfaces import Datastore -from danswer.datastores.qdrant.indexing import recreate_collection -from danswer.datastores.qdrant.store import QdrantDatastore -from danswer.semantic_search.biencoder import DefaultEmbedder -from danswer.semantic_search.type_aliases import Embedder -from danswer.utils.logging import setup_logger - - -logger = setup_logger() - - -def load_batch( - doc_loader: LoadConnector, - chunker: Chunker, - embedder: Embedder, - datastore: Datastore, -) -> None: - num_processed = 0 - total_chunks = 0 - for document_batch in doc_loader.load_from_state(): - if not document_batch: - logger.warning("No parseable documents found in batch") - continue - - logger.info(f"Indexed {num_processed} documents") - document_chunks = list( - chain(*[chunker.chunk(document) for document in document_batch]) - ) - num_chunks = len(document_chunks) - total_chunks += num_chunks - logger.info( - f"Document batch yielded {num_chunks} chunks for a total of {total_chunks}" - ) - chunks_with_embeddings = embedder.embed(document_chunks) - datastore.index(chunks_with_embeddings) - num_processed += len(document_batch) - logger.info(f"Finished, indexed a total of {num_processed} documents") - - -def load_slack_batch(file_path: str, qdrant_collection: str) -> None: - logger.info("Loading documents from Slack.") - load_batch( - SlackConnector(export_path_str=file_path, batch_size=INDEX_BATCH_SIZE), - DefaultChunker(), - DefaultEmbedder(), - QdrantDatastore(collection=qdrant_collection), - ) - - -def load_web_batch(url: str, qdrant_collection: str) -> None: - logger.info("Loading documents from web.") - load_batch( - WebConnector(base_url=url, batch_size=INDEX_BATCH_SIZE), - DefaultChunker(), - DefaultEmbedder(), - QdrantDatastore(collection=qdrant_collection), - ) - - -def 
load_google_drive_batch(qdrant_collection: str) -> None: - logger.info("Loading documents from Google Drive.") - backend_get_credentials() - load_batch( - GoogleDriveConnector(batch_size=INDEX_BATCH_SIZE), - DefaultChunker(), - DefaultEmbedder(), - QdrantDatastore(collection=qdrant_collection), - ) - - -def load_github_batch(owner: str, repo: str, qdrant_collection: str) -> None: - logger.info("Loading documents from Github.") - load_batch( - GithubConnector(repo_owner=owner, repo_name=repo, batch_size=INDEX_BATCH_SIZE), - DefaultChunker(), - DefaultEmbedder(), - QdrantDatastore(collection=qdrant_collection), - ) - - -def load_confluence_batch(confluence_wiki_url: str, qdrant_collection: str) -> None: - logger.info("Loading documents from Confluence.") - load_batch( - ConfluenceConnector(confluence_wiki_url, batch_size=INDEX_BATCH_SIZE), - DefaultChunker(), - DefaultEmbedder(), - QdrantDatastore(collection=qdrant_collection), - ) - - -class BatchLoadingArgs(argparse.Namespace): - website_url: str - github_owner: str - github_repo: str - slack_export_dir: str - confluence_link: str - qdrant_collection: str - rebuild_index: bool - - -if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument( - "--website-url", - default="https://docs.github.com/en/actions", - ) - parser.add_argument( - "--github-owner", - default="danswer-ai", - ) - parser.add_argument( - "--github-repo", - default="danswer", - ) - parser.add_argument( - "--slack-export-dir", - default="~/Downloads/test-slack-export", - ) - parser.add_argument( - "--confluence_link", - default="https://danswer.atlassian.net/wiki/spaces/fakespace", - ) - parser.add_argument( - "--qdrant-collection", - default=QDRANT_DEFAULT_COLLECTION, - ) - parser.add_argument( - "--rebuild-index", - action="store_true", - help="Deletes and repopulates the semantic search index", - ) - args = parser.parse_args(namespace=BatchLoadingArgs) - - if args.rebuild_index: - 
recreate_collection(args.qdrant_collection) - - # load_slack_batch(args.slack_export_dir, args.qdrant_collection) - # load_web_batch(args.website_url, args.qdrant_collection) - # load_google_drive_batch(args.qdrant_collection) - # load_github_batch(args.github_owner, args.github_repo, args.qdrant_collection) - load_confluence_batch(args.confluence_link, args.qdrant_collection) diff --git a/deployment/.env b/deployment/.env new file mode 100644 index 000000000..40eacc1fc --- /dev/null +++ b/deployment/.env @@ -0,0 +1,5 @@ +# For a local deployment, no additional setup is needed +# Refer to env.dev.template and env.prod.template for additional options + +# Setting Auth to false for local setup convenience to avoid setting up Google OAuth app in GPC. +DISABLE_AUTH=True diff --git a/deployment/README.md b/deployment/README.md index 39c9f4925..fb4556abf 100644 --- a/deployment/README.md +++ b/deployment/README.md @@ -1,33 +1,32 @@ This serves as an example for how to deploy everything on a single machine. This is not optimal, but can get you started easily and cheaply. To run: -1. Set up a `.env` file in this directory with relevant environment variables. - - Use the `env.template` as a reference. - -2. SKIP this step if running locally. If you are running this for production and need https do the following: - - Set up a `.env.nginx` file in this directory based on `env.nginx.template`. - - `chmod +x init-letsencrypt.sh` + `./init-letsencrypt.sh` to set up https certificate. - - -3. Run one of the docker compose commands below depending on your environment: +1. Run one of the docker compose commands below depending on your environment: - For Local: - `docker compose -f docker-compose.dev.yml -p danswer-stack up -d --build` - This will start Web/API servers, Postgres (backend DB), Qdrant (vector DB), and the background indexing job. 
- - For Prod: - - `docker compose -f docker-compose.prod.yml -p danswer-stack up -d --build` - - This will additionally run certbot and start Nginx. + - Downloading packages/requirements may take 20+ minutes depending on your internet connection. -4. To shut down the deployment run: +2. To shut down the deployment run (use stop to stop containers, down to remove containers): - For Local: - `docker compose -f docker-compose.dev.yml -p danswer-stack stop` - - For Prod: - - `docker compose -f docker-compose.prod.yml -p danswer-stack stop` -5. To completely remove Danswer (**WARNING, this will also erase your indexed data and all users**) run: +3. To completely remove Danswer (**WARNING, this will also erase your indexed data and all users**) run: - For Local: - - `docker compose -f docker-compose.dev.yml -p danswer-stack down` - - For Prod: - - `docker compose -f docker-compose.prod.yml -p danswer-stack down` \ No newline at end of file + - `docker compose -f docker-compose.dev.yml -p danswer-stack down -v` + + +Additional steps for setting up for Prod: + +1. Set up a `.env` file in this directory with relevant environment variables. + - Refer to env.dev.template and env.prod.template + + +2. Set up https: + - Set up a `.env.nginx` file in this directory based on `env.nginx.template`. + - `chmod +x init-letsencrypt.sh` + `./init-letsencrypt.sh` to set up https certificate. + +3. Follow the above steps but replacing dev with prod. 
diff --git a/deployment/data/nginx/app.conf.template.dev b/deployment/data/nginx/app.conf.template.dev new file mode 100644 index 000000000..ad4a3d669 --- /dev/null +++ b/deployment/data/nginx/app.conf.template.dev @@ -0,0 +1,56 @@ +upstream app_server { + # fail_timeout=0 means we always retry an upstream even if it failed + # to return a good HTTP response + + # for UNIX domain socket setups + #server unix:/tmp/gunicorn.sock fail_timeout=0; + + # for a TCP configuration + # TODO: use gunicorn to manage multiple processes + server api_server:8080 fail_timeout=0; +} + +upstream web_server { + server web_server:3000 fail_timeout=0; +} + +server { + listen 80; + server_name ${DOMAIN}; + + location ~ ^/api(.*)$ { + rewrite ^/api(/.*)$ $1 break; + + # misc headers + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header X-Forwarded-Host $host; + proxy_set_header Host $host; + + # need to use 1.1 to support chunked transfers + proxy_http_version 1.1; + proxy_buffering off; + + # we don't want nginx trying to do something clever with + # redirects, we set the Host: header above already. + proxy_redirect off; + proxy_pass http://app_server; + } + + location / { + # misc headers + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header X-Forwarded-Host $host; + proxy_set_header Host $host; + + proxy_http_version 1.1; + + # we don't want nginx trying to do something clever with + # redirects, we set the Host: header above already. 
+ proxy_redirect off; + proxy_pass http://web_server; + } +} diff --git a/deployment/docker-compose.dev.yml b/deployment/docker-compose.dev.yml index 01192fbcb..4d9857031 100644 --- a/deployment/docker-compose.dev.yml +++ b/deployment/docker-compose.dev.yml @@ -35,7 +35,7 @@ services: web_server: build: context: ../web - dockerfile: Dockerfile.dev + dockerfile: Dockerfile depends_on: - api_server restart: always @@ -43,12 +43,12 @@ services: - .env environment: - INTERNAL_URL=http://api_server:8080 - ports: - - "3000:3000" relational_db: image: postgres:15.2-alpine restart: always - # POSTGRES_USER and POSTGRES_PASSWORD should be set in .env file + environment: + POSTGRES_USER: ${POSTGRES_USER:-postgres} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-password} env_file: - .env ports: @@ -62,6 +62,22 @@ services: - "6333:6333" volumes: - qdrant_volume:/qdrant/storage + nginx: + image: nginx:1.23.4-alpine + restart: always + depends_on: + - api_server + - web_server + environment: + - DOMAIN=localhost + ports: + - "80:80" + - "3000:80" # allow for localhost:3000 usage, since that is the norm + volumes: + - ./data/nginx:/etc/nginx/conf.d + command: > + /bin/sh -c "envsubst '$$\{DOMAIN\}' < /etc/nginx/conf.d/app.conf.template.dev > /etc/nginx/conf.d/app.conf + && while :; do sleep 6h & wait $${!}; nginx -s reload; done & nginx -g \"daemon off;\"" volumes: local_dynamic_storage: db_volume: diff --git a/deployment/docker-compose.prod.yml b/deployment/docker-compose.prod.yml index 28561f3fc..177ba31cf 100644 --- a/deployment/docker-compose.prod.yml +++ b/deployment/docker-compose.prod.yml @@ -33,7 +33,7 @@ services: web_server: build: context: ../web - dockerfile: Dockerfile.prod + dockerfile: Dockerfile depends_on: - api_server restart: always diff --git a/deployment/env.dev.template b/deployment/env.dev.template index 7387cbd76..5e4e8d674 100644 --- a/deployment/env.dev.template +++ b/deployment/env.dev.template @@ -1,20 +1,12 @@ -# Fill in the values and copy the contents 
of this file to .env in the deployment directory -# Some valid default values are provided where applicable, delete the variables which you don't set values for +# Very basic .env file with options that are easy to change. Allows you to deploy everything on a single machine. +# We don't suggest using these settings for production. -# Insert your OpenAI API key here, currently the only Generative AI endpoint for QA that we support is OpenAI -OPENAI_API_KEY= - # Choose between "openai-chat-completion" and "openai-completion" INTERNAL_MODEL_VERSION=openai-chat-completion # Use a valid model for the choice above, consult https://platform.openai.com/docs/models/model-endpoint-compatibility -OPENAI_MODEL_VERSION=gpt-3.5-turbo - - -# Can leave these as defaults -POSTGRES_USER=postgres -POSTGRES_PASSWORD=password +OPENAI_MODEL_VERSION=gpt-3.5-turbo # Auth not necessary for local diff --git a/deployment/env.prod.template b/deployment/env.prod.template index 46df0363c..a4c6b27d9 100644 --- a/deployment/env.prod.template +++ b/deployment/env.prod.template @@ -2,8 +2,8 @@ # Some valid default values are provided where applicable, delete the variables which you don't set values for -# THE SECTION BELOW INCLUDE MUST HAVE CONFIGS # Insert your OpenAI API key here, currently the only Generative AI endpoint for QA that we support is OpenAI +# If not provided here, UI will prompt on setup OPENAI_API_KEY= # Choose between "openai-chat-completion" and "openai-completion" INTERNAL_MODEL_VERSION=openai-chat-completion @@ -14,13 +14,6 @@ OPENAI_MODEL_VERSION=gpt-4 WEB_DOMAIN=http://localhost:3000 -# CONNECTOR CONFIGS (set for the ones you are using, delete the others) -GITHUB_ACCESS_TOKEN= - -GOOGLE_DRIVE_CREDENTIAL_JSON= -GOOGLE_DRIVE_TOKENS_JSON= - - # BACKEND DB can leave these as defaults POSTGRES_USER=postgres POSTGRES_PASSWORD=password @@ -28,7 +21,6 @@ POSTGRES_PASSWORD=password # AUTH CONFIGS DISABLE_AUTH=False -# Feel free remove everything after if DISABLE_AUTH=True # 
Currently frontend page doesn't have basic auth, use OAuth if user auth is enabled. ENABLE_OAUTH=True @@ -42,7 +34,7 @@ SECRET= # How long before user needs to reauthenticate, default to 1 day. (cookie expiration time) SESSION_EXPIRE_TIME_SECONDS=86400 -# Only relevant if using basic auth +# Only relevant if using basic auth (not supported on frontend yet) REQUIRE_EMAIL_VERIFICATION=True # The five settings below are only required if REQUIRE_EMAIL_VERIFICATION is True VALID_EMAIL_DOMAIN= diff --git a/web/Dockerfile.prod b/web/Dockerfile similarity index 97% rename from web/Dockerfile.prod rename to web/Dockerfile index 99d7a8287..4b4f3cb2a 100644 --- a/web/Dockerfile.prod +++ b/web/Dockerfile @@ -14,6 +14,8 @@ RUN \ elif [ -f pnpm-lock.yaml ]; then yarn global add pnpm && pnpm i --frozen-lockfile; \ else echo "Lockfile not found." && exit 1; \ fi +# needed for image processing +RUN npm i sharp # Step 2. Rebuild the source code only when needed diff --git a/web/Dockerfile.dev b/web/Dockerfile.dev deleted file mode 100644 index 3c7610ece..000000000 --- a/web/Dockerfile.dev +++ /dev/null @@ -1,34 +0,0 @@ -FROM node:18-alpine - -WORKDIR /app - -# Install dependencies based on the preferred package manager -COPY package.json yarn.lock* package-lock.json* pnpm-lock.yaml* ./ -RUN \ - if [ -f yarn.lock ]; then yarn --frozen-lockfile; \ - elif [ -f package-lock.json ]; then npm ci; \ - elif [ -f pnpm-lock.yaml ]; then yarn global add pnpm && pnpm i; \ - # Allow install without lockfile, so example works even without Node.js installed locally - else echo "Warning: Lockfile not found. It is recommended to commit lockfiles to version control." && yarn install; \ - fi - -COPY src ./src -COPY public ./public -COPY next.config.js . -COPY tsconfig.json . -COPY tailwind.config.js . -COPY postcss.config.js . - -# Next.js collects completely anonymous telemetry data about general usage. 
Learn more here: https://nextjs.org/telemetry -# Uncomment the following line to disable telemetry at run time -ENV NEXT_TELEMETRY_DISABLED 1 - -# Note: Don't expose ports here, Compose will handle that for us - -# Start Next.js in development mode based on the preferred package manager -CMD \ - if [ -f yarn.lock ]; then yarn dev; \ - elif [ -f package-lock.json ]; then npm run dev; \ - elif [ -f pnpm-lock.yaml ]; then pnpm dev; \ - else yarn dev; \ - fi \ No newline at end of file diff --git a/web/next.config.js b/web/next.config.js index 3a6ebb823..ceec6f09f 100644 --- a/web/next.config.js +++ b/web/next.config.js @@ -4,6 +4,19 @@ const nextConfig = { appDir: true, }, output: "standalone", + rewrites: async () => { + // In production, something else (nginx in the one box setup) should take + // care of this rewrite. TODO (chris): better support setups where + // web_server and api_server are on different machines. + if (process.env.NODE_ENV === "production") return []; + + return [ + { + source: "/api/:path*", + destination: "http://127.0.0.1:8080/:path*", // Proxy to Backend + }, + ]; + }, redirects: async () => { // In production, something else (nginx in the one box setup) should take // care of this redirect. 
TODO (chris): better support setups where @@ -12,8 +25,8 @@ const nextConfig = { return [ { - source: "/api/:path*", - destination: "http://localhost:8080/:path*", // Proxy to Backend + source: "/api/stream-direct-qa:params*", + destination: "http://127.0.0.1:8080/stream-direct-qa:params*", // Proxy to Backend permanent: true, }, ]; diff --git a/web/package-lock.json b/web/package-lock.json index 886bc3a30..2c2b060a7 100644 --- a/web/package-lock.json +++ b/web/package-lock.json @@ -1,7 +1,7 @@ { "name": "qa", "version": "0.1.0", - "lockfileVersion": 3, + "lockfileVersion": 2, "requires": true, "packages": { "": { @@ -4097,5 +4097,2778 @@ "url": "https://github.com/sponsors/colinhacks" } } + }, + "dependencies": { + "@babel/runtime": { + "version": "7.21.0", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.21.0.tgz", + "integrity": "sha512-xwII0//EObnq89Ji5AKYQaRYiW/nZ3llSv29d49IuxPhKbtJoLP+9QUUZ4nVragQVtaVGeZrpB+ZtG/Pdy/POw==", + "requires": { + "regenerator-runtime": "^0.13.11" + } + }, + "@eslint-community/eslint-utils": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", + "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", + "requires": { + "eslint-visitor-keys": "^3.3.0" + } + }, + "@eslint-community/regexpp": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.5.0.tgz", + "integrity": "sha512-vITaYzIcNmjn5tF5uxcZ/ft7/RXGrMUIS9HalWckEOF6ESiwXKoMzAQf2UW0aVd6rnOeExTJVd5hmWXucBKGXQ==" + }, + "@eslint/eslintrc": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.0.2.tgz", + "integrity": "sha512-3W4f5tDUra+pA+FzgugqL2pRimUTDJWKr7BINqOpkZrC0uYI0NIc0/JFgBROCU07HR6GieA5m3/rsPIhDmCXTQ==", + "requires": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.5.1", + "globals": "^13.19.0", + "ignore": "^5.2.0", + 
"import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + } + }, + "@eslint/js": { + "version": "8.37.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.37.0.tgz", + "integrity": "sha512-x5vzdtOOGgFVDCUs81QRB2+liax8rFg3+7hqM+QhBG0/G3F1ZsoYl97UrqgHgQ9KKT7G6c4V+aTUCgu/n22v1A==" + }, + "@humanwhocodes/config-array": { + "version": "0.11.8", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz", + "integrity": "sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==", + "requires": { + "@humanwhocodes/object-schema": "^1.2.1", + "debug": "^4.1.1", + "minimatch": "^3.0.5" + } + }, + "@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==" + }, + "@humanwhocodes/object-schema": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", + "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==" + }, + "@jridgewell/gen-mapping": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz", + "integrity": "sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==", + "requires": { + "@jridgewell/set-array": "^1.0.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.9" + } + }, + "@jridgewell/resolve-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz", + "integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==" + }, + "@jridgewell/set-array": { + "version": 
"1.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz", + "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==" + }, + "@jridgewell/sourcemap-codec": { + "version": "1.4.15", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", + "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==" + }, + "@jridgewell/trace-mapping": { + "version": "0.3.18", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz", + "integrity": "sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA==", + "requires": { + "@jridgewell/resolve-uri": "3.1.0", + "@jridgewell/sourcemap-codec": "1.4.14" + }, + "dependencies": { + "@jridgewell/sourcemap-codec": { + "version": "1.4.14", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz", + "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==" + } + } + }, + "@next/env": { + "version": "13.4.1", + "resolved": "https://registry.npmjs.org/@next/env/-/env-13.4.1.tgz", + "integrity": "sha512-eD6WCBMFjLFooLM19SIhSkWBHtaFrZFfg2Cxnyl3vS3DAdFRfnx5TY2RxlkuKXdIRCC0ySbtK9JXXt8qLCqzZg==" + }, + "@next/eslint-plugin-next": { + "version": "13.2.4", + "resolved": "https://registry.npmjs.org/@next/eslint-plugin-next/-/eslint-plugin-next-13.2.4.tgz", + "integrity": "sha512-ck1lI+7r1mMJpqLNa3LJ5pxCfOB1lfJncKmRJeJxcJqcngaFwylreLP7da6Rrjr6u2gVRTfmnkSkjc80IiQCwQ==", + "requires": { + "glob": "7.1.7" + } + }, + "@next/swc-darwin-arm64": { + "version": "13.4.1", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-13.4.1.tgz", + "integrity": "sha512-eF8ARHtYfnoYtDa6xFHriUKA/Mfj/cCbmKb3NofeKhMccs65G6/loZ15a6wYCCx4rPAd6x4t1WmVYtri7EdeBg==", + "optional": true + }, + 
"@next/swc-darwin-x64": { + "version": "13.4.1", + "resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-13.4.1.tgz", + "integrity": "sha512-7cmDgF9tGWTgn5Gw+vP17miJbH4wcraMHDCOHTYWkO/VeKT73dUWG23TNRLfgtCNSPgH4V5B4uLHoZTanx9bAw==", + "optional": true + }, + "@next/swc-linux-arm64-gnu": { + "version": "13.4.1", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-13.4.1.tgz", + "integrity": "sha512-qwJqmCri2ie8aTtE5gjTSr8S6O8B67KCYgVZhv9gKH44yvc/zXbAY8u23QGULsYOyh1islWE5sWfQNLOj9iryg==", + "optional": true + }, + "@next/swc-linux-arm64-musl": { + "version": "13.4.1", + "resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-13.4.1.tgz", + "integrity": "sha512-qcC54tWNGDv/VVIFkazxhqH1Bnagjfs4enzELVRlUOoJPD2BGJTPI7z08pQPbbgxLtRiu8gl2mXvpB8WlOkMeA==", + "optional": true + }, + "@next/swc-linux-x64-gnu": { + "version": "13.4.1", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-13.4.1.tgz", + "integrity": "sha512-9TeWFlpLsBosZ+tsm/rWBaMwt5It9tPH8m3nawZqFUUrZyGRfGcI67js774vtx0k3rL9qbyY6+3pw9BCVpaYUA==", + "optional": true + }, + "@next/swc-linux-x64-musl": { + "version": "13.4.1", + "resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-13.4.1.tgz", + "integrity": "sha512-sNDGaWmSqTS4QRUzw61wl4mVPeSqNIr1OOjLlQTRuyInxMxtqImRqdvzDvFTlDfdeUMU/DZhWGYoHrXLlZXe6A==", + "optional": true + }, + "@next/swc-win32-arm64-msvc": { + "version": "13.4.1", + "resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-13.4.1.tgz", + "integrity": "sha512-+CXZC7u1iXdLRudecoUYbhbsXpglYv8KFYsFxKBPn7kg+bk7eJo738wAA4jXIl8grTF2mPdmO93JOQym+BlYGA==", + "optional": true + }, + "@next/swc-win32-ia32-msvc": { + "version": "13.4.1", + "resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-13.4.1.tgz", + "integrity": 
"sha512-vIoXVVc7UYO68VwVMDKwJC2+HqAZQtCYiVlApyKEeIPIQpz2gpufzGxk1z3/gwrJt/kJ5CDZjlhYDCzd3hdz+g==", + "optional": true + }, + "@next/swc-win32-x64-msvc": { + "version": "13.4.1", + "resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-13.4.1.tgz", + "integrity": "sha512-n8V5ImLQZibKTu10UUdI3nIeTLkliEXe628qxqW9v8My3BAH2a7H0SaCqkV2OgqFnn8sG1wxKYw9/SNJ632kSA==", + "optional": true + }, + "@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "requires": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + } + }, + "@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==" + }, + "@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "requires": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + } + }, + "@phosphor-icons/react": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/@phosphor-icons/react/-/react-2.0.8.tgz", + "integrity": "sha512-VWACI+MkRGpol4htOcVtWKaDCosrcuCg8toJfPS0osgVjxM8i/KoSZSPxQvG5XYPCI8iyJoHKRpSfzOISAXFyg==", + "requires": {} + }, + "@pkgr/utils": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/@pkgr/utils/-/utils-2.3.1.tgz", + "integrity": "sha512-wfzX8kc1PMyUILA+1Z/EqoE4UCXGy0iRGMhPwdfae1+f0OXlLqCk+By+aMzgJBzR9AzS4CDizioG6Ss1gvAFJw==", + "requires": { + "cross-spawn": "^7.0.3", + "is-glob": "^4.0.3", + "open": "^8.4.0", + "picocolors": "^1.0.0", + "tiny-glob": "^0.2.9", + "tslib": "^2.4.0" + } + }, + 
"@rushstack/eslint-patch": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@rushstack/eslint-patch/-/eslint-patch-1.2.0.tgz", + "integrity": "sha512-sXo/qW2/pAcmT43VoRKOJbDOfV3cYpq3szSVfIThQXNt+E4DfKj361vaAt3c88U5tPUxzEswam7GW48PJqtKAg==" + }, + "@swc/helpers": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.1.tgz", + "integrity": "sha512-sJ902EfIzn1Fa+qYmjdQqh8tPsoxyBz+8yBKC2HKUxyezKJFwPGOn7pv4WY6QuQW//ySQi5lJjA/ZT9sNWWNTg==", + "requires": { + "tslib": "^2.4.0" + } + }, + "@types/json5": { + "version": "0.0.29", + "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", + "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==" + }, + "@types/node": { + "version": "18.15.11", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz", + "integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q==" + }, + "@types/prop-types": { + "version": "15.7.5", + "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz", + "integrity": "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==" + }, + "@types/react": { + "version": "18.0.32", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.0.32.tgz", + "integrity": "sha512-gYGXdtPQ9Cj0w2Fwqg5/ak6BcK3Z15YgjSqtyDizWUfx7mQ8drs0NBUzRRsAdoFVTO8kJ8L2TL8Skm7OFPnLUw==", + "requires": { + "@types/prop-types": "*", + "@types/scheduler": "*", + "csstype": "^3.0.2" + } + }, + "@types/react-dom": { + "version": "18.0.11", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.0.11.tgz", + "integrity": "sha512-O38bPbI2CWtgw/OoQoY+BRelw7uysmXbWvw3nLWO21H1HSh+GOlqPuXshJfjmpNlKiiSDG9cc1JZAaMmVdcTlw==", + "requires": { + "@types/react": "*" + } + }, + "@types/scheduler": { + "version": "0.16.3", + "resolved": 
"https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.3.tgz", + "integrity": "sha512-5cJ8CB4yAx7BH1oMvdU0Jh9lrEXyPkar6F9G/ERswkCuvP4KQZfZkSjcMbAICCpQTN4OuZn8tz0HiKv9TGZgrQ==" + }, + "@typescript-eslint/parser": { + "version": "5.57.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.57.0.tgz", + "integrity": "sha512-orrduvpWYkgLCyAdNtR1QIWovcNZlEm6yL8nwH/eTxWLd8gsP+25pdLHYzL2QdkqrieaDwLpytHqycncv0woUQ==", + "requires": { + "@typescript-eslint/scope-manager": "5.57.0", + "@typescript-eslint/types": "5.57.0", + "@typescript-eslint/typescript-estree": "5.57.0", + "debug": "^4.3.4" + } + }, + "@typescript-eslint/scope-manager": { + "version": "5.57.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.57.0.tgz", + "integrity": "sha512-NANBNOQvllPlizl9LatX8+MHi7bx7WGIWYjPHDmQe5Si/0YEYfxSljJpoTyTWFTgRy3X8gLYSE4xQ2U+aCozSw==", + "requires": { + "@typescript-eslint/types": "5.57.0", + "@typescript-eslint/visitor-keys": "5.57.0" + } + }, + "@typescript-eslint/types": { + "version": "5.57.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.57.0.tgz", + "integrity": "sha512-mxsod+aZRSyLT+jiqHw1KK6xrANm19/+VFALVFP5qa/aiJnlP38qpyaTd0fEKhWvQk6YeNZ5LGwI1pDpBRBhtQ==" + }, + "@typescript-eslint/typescript-estree": { + "version": "5.57.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.57.0.tgz", + "integrity": "sha512-LTzQ23TV82KpO8HPnWuxM2V7ieXW8O142I7hQTxWIHDcCEIjtkat6H96PFkYBQqGFLW/G/eVVOB9Z8rcvdY/Vw==", + "requires": { + "@typescript-eslint/types": "5.57.0", + "@typescript-eslint/visitor-keys": "5.57.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.3.7", + "tsutils": "^3.21.0" + } + }, + "@typescript-eslint/visitor-keys": { + "version": "5.57.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.57.0.tgz", + "integrity": 
"sha512-ery2g3k0hv5BLiKpPuwYt9KBkAp2ugT6VvyShXdLOkax895EC55sP0Tx5L0fZaQueiK3fBLvHVvEl3jFS5ia+g==", + "requires": { + "@typescript-eslint/types": "5.57.0", + "eslint-visitor-keys": "^3.3.0" + } + }, + "acorn": { + "version": "8.8.2", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz", + "integrity": "sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==" + }, + "acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "requires": {} + }, + "ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "requires": { + "color-convert": "^2.0.1" + } + }, + "any-promise": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", + "integrity": "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==" + }, + "anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": 
"sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "requires": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + } + }, + "arg": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", + "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==" + }, + "argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" + }, + "aria-query": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.1.3.tgz", + "integrity": "sha512-R5iJ5lkuHybztUfuOAznmboyjWq8O6sqNqtK7CLOqdydi54VNbORp49mb14KbWgG1QD3JFO9hJdZ+y4KutfdOQ==", + "requires": { + "deep-equal": "^2.0.5" + } + }, + "array-buffer-byte-length": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz", + "integrity": "sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A==", + "requires": { + "call-bind": "^1.0.2", + "is-array-buffer": "^3.0.1" + } + }, + "array-includes": { + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.6.tgz", + "integrity": "sha512-sgTbLvL6cNnw24FnbaDyjmvddQ2ML8arZsgaJhoABMoplz/4QRhtrYS+alr1BUM1Bwp6dhx8vVCBSLG+StwOFw==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4", + "get-intrinsic": "^1.1.3", + "is-string": "^1.0.7" + } + }, + "array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==" + }, + "array.prototype.flat": { + "version": "1.3.1", + "resolved": 
"https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.1.tgz", + "integrity": "sha512-roTU0KWIOmJ4DRLmwKd19Otg0/mT3qPNt0Qb3GWW8iObuZXxrjB/pzn0R3hqpRSWg4HCwqx+0vwOnWnvlOyeIA==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4", + "es-shim-unscopables": "^1.0.0" + } + }, + "array.prototype.flatmap": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.1.tgz", + "integrity": "sha512-8UGn9O1FDVvMNB0UlLv4voxRMze7+FpHyF5mSMRjWHUMlpoDViniy05870VlxhfgTnLbpuwTzvD76MTtWxB/mQ==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4", + "es-shim-unscopables": "^1.0.0" + } + }, + "array.prototype.tosorted": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.1.tgz", + "integrity": "sha512-pZYPXPRl2PqWcsUs6LOMn+1f1532nEoPTYowBtqLwAW+W8vSVhkIGnmOX1t/UQjD6YGI0vcD2B1U7ZFGQH9jnQ==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4", + "es-shim-unscopables": "^1.0.0", + "get-intrinsic": "^1.1.3" + } + }, + "ast-types-flow": { + "version": "0.0.7", + "resolved": "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.7.tgz", + "integrity": "sha512-eBvWn1lvIApYMhzQMsu9ciLfkBY499mFZlNqG+/9WR7PVlroQw0vG30cOQQbaKz3sCEc44TAOu2ykzqXSNnwag==" + }, + "autoprefixer": { + "version": "10.4.14", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.14.tgz", + "integrity": "sha512-FQzyfOsTlwVzjHxKEqRIAdJx9niO6VCBCoEwax/VLSoQF29ggECcPuBqUMZ+u8jCZOPSy8b8/8KnuFbp0SaFZQ==", + "requires": { + "browserslist": "^4.21.5", + "caniuse-lite": "^1.0.30001464", + "fraction.js": "^4.2.0", + "normalize-range": "^0.1.2", + "picocolors": "^1.0.0", + "postcss-value-parser": "^4.2.0" + } + }, + "available-typed-arrays": { + "version": "1.0.5", + "resolved": 
"https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", + "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==" + }, + "axe-core": { + "version": "4.6.3", + "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.6.3.tgz", + "integrity": "sha512-/BQzOX780JhsxDnPpH4ZiyrJAzcd8AfzFPkv+89veFSr1rcMjuq2JDCwypKaPeB6ljHp9KjXhPpjgCvQlWYuqg==" + }, + "axobject-query": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-3.1.1.tgz", + "integrity": "sha512-goKlv8DZrK9hUh975fnHzhNIO4jUnFCfv/dszV5VwUGDFjI6vQ2VwoyjYjYNEbBE8AH87TduWP5uyDR1D+Iteg==", + "requires": { + "deep-equal": "^2.0.5" + } + }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==" + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "requires": { + "fill-range": "^7.0.1" + } + }, + "browserslist": { + "version": "4.21.5", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.5.tgz", + "integrity": 
"sha512-tUkiguQGW7S3IhB7N+c2MV/HZPSCPAAiYBZXLsBhFB/PCy6ZKKsZrmBayHV9fdGV/ARIfJ14NkxKzRDjvp7L6w==", + "requires": { + "caniuse-lite": "^1.0.30001449", + "electron-to-chromium": "^1.4.284", + "node-releases": "^2.0.8", + "update-browserslist-db": "^1.0.10" + } + }, + "busboy": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz", + "integrity": "sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==", + "requires": { + "streamsearch": "^1.1.0" + } + }, + "call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "requires": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" + } + }, + "callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==" + }, + "camelcase-css": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", + "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==" + }, + "caniuse-lite": { + "version": "1.0.30001473", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001473.tgz", + "integrity": "sha512-ewDad7+D2vlyy+E4UJuVfiBsU69IL+8oVmTuZnH5Q6CIUbxNfI50uVpRHbUPDD6SUaN2o0Lh4DhTrvLG/Tn1yg==" + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "chokidar": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + 
"integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "requires": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "fsevents": "~2.3.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "dependencies": { + "glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "requires": { + "is-glob": "^4.0.1" + } + } + } + }, + "client-only": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/client-only/-/client-only-0.0.1.tgz", + "integrity": "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==" + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==" + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + }, + "cross-spawn": { + "version": "7.0.3", + "resolved": 
"https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "requires": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + } + }, + "cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==" + }, + "csstype": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.2.tgz", + "integrity": "sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ==" + }, + "damerau-levenshtein": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz", + "integrity": "sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==" + }, + "debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "requires": { + "ms": "2.1.2" + } + }, + "deep-equal": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-2.2.0.tgz", + "integrity": "sha512-RdpzE0Hv4lhowpIUKKMJfeH6C1pXdtT1/it80ubgWqwI3qpuxUBpC1S4hnHg+zjnuOoDkzUtUCEEkG+XG5l3Mw==", + "requires": { + "call-bind": "^1.0.2", + "es-get-iterator": "^1.1.2", + "get-intrinsic": "^1.1.3", + "is-arguments": "^1.1.1", + "is-array-buffer": "^3.0.1", + "is-date-object": "^1.0.5", + "is-regex": "^1.1.4", + "is-shared-array-buffer": "^1.0.2", + "isarray": "^2.0.5", + "object-is": "^1.1.5", + "object-keys": "^1.1.1", + "object.assign": "^4.1.4", + "regexp.prototype.flags": "^1.4.3", + "side-channel": "^1.0.4", + "which-boxed-primitive": "^1.0.2", + "which-collection": "^1.0.1", + "which-typed-array": 
"^1.1.9" + } + }, + "deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==" + }, + "deepmerge": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-2.2.1.tgz", + "integrity": "sha512-R9hc1Xa/NOBi9WRVUWg19rl1UB7Tt4kuPd+thNJgFZoxXsTz7ncaPaeIm+40oSGuP33DfMb4sZt1QIGiJzC4EA==" + }, + "define-lazy-prop": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", + "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==" + }, + "define-properties": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.0.tgz", + "integrity": "sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA==", + "requires": { + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + } + }, + "didyoumean": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", + "integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==" + }, + "dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "requires": { + "path-type": "^4.0.0" + } + }, + "dlv": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", + "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==" + }, + "doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": 
"sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "requires": { + "esutils": "^2.0.2" + } + }, + "electron-to-chromium": { + "version": "1.4.368", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.368.tgz", + "integrity": "sha512-e2aeCAixCj9M7nJxdB/wDjO6mbYX+lJJxSJCXDzlr5YPGYVofuJwGN9nKg2o6wWInjX6XmxRinn3AeJMK81ltw==" + }, + "emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" + }, + "enhanced-resolve": { + "version": "5.12.0", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.12.0.tgz", + "integrity": "sha512-QHTXI/sZQmko1cbDoNAa3mJ5qhWUUNAq3vR0/YiD379fWQrcfuoX1+HW2S0MTt7XmoPLapdaDKUtelUSPic7hQ==", + "requires": { + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + } + }, + "es-abstract": { + "version": "1.21.2", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.21.2.tgz", + "integrity": "sha512-y/B5POM2iBnIxCiernH1G7rC9qQoM77lLIMQLuob0zhp8C56Po81+2Nj0WFKnd0pNReDTnkYryc+zhOzpEIROg==", + "requires": { + "array-buffer-byte-length": "^1.0.0", + "available-typed-arrays": "^1.0.5", + "call-bind": "^1.0.2", + "es-set-tostringtag": "^2.0.1", + "es-to-primitive": "^1.2.1", + "function.prototype.name": "^1.1.5", + "get-intrinsic": "^1.2.0", + "get-symbol-description": "^1.0.0", + "globalthis": "^1.0.3", + "gopd": "^1.0.1", + "has": "^1.0.3", + "has-property-descriptors": "^1.0.0", + "has-proto": "^1.0.1", + "has-symbols": "^1.0.3", + "internal-slot": "^1.0.5", + "is-array-buffer": "^3.0.2", + "is-callable": "^1.2.7", + "is-negative-zero": "^2.0.2", + "is-regex": "^1.1.4", + "is-shared-array-buffer": "^1.0.2", + "is-string": "^1.0.7", + "is-typed-array": "^1.1.10", + "is-weakref": "^1.0.2", + "object-inspect": "^1.12.3", + "object-keys": "^1.1.1", + 
"object.assign": "^4.1.4", + "regexp.prototype.flags": "^1.4.3", + "safe-regex-test": "^1.0.0", + "string.prototype.trim": "^1.2.7", + "string.prototype.trimend": "^1.0.6", + "string.prototype.trimstart": "^1.0.6", + "typed-array-length": "^1.0.4", + "unbox-primitive": "^1.0.2", + "which-typed-array": "^1.1.9" + } + }, + "es-get-iterator": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.3.tgz", + "integrity": "sha512-sPZmqHBe6JIiTfN5q2pEi//TwxmAFHwj/XEuYjTuse78i8KxaqMTTzxPoFKuzRpDpTJ+0NAbpfenkmH2rePtuw==", + "requires": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.3", + "has-symbols": "^1.0.3", + "is-arguments": "^1.1.1", + "is-map": "^2.0.2", + "is-set": "^2.0.2", + "is-string": "^1.0.7", + "isarray": "^2.0.5", + "stop-iteration-iterator": "^1.0.0" + } + }, + "es-set-tostringtag": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz", + "integrity": "sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg==", + "requires": { + "get-intrinsic": "^1.1.3", + "has": "^1.0.3", + "has-tostringtag": "^1.0.0" + } + }, + "es-shim-unscopables": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", + "integrity": "sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==", + "requires": { + "has": "^1.0.3" + } + }, + "es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "requires": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + } + }, + "escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": 
"sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==" + }, + "escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==" + }, + "eslint": { + "version": "8.37.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.37.0.tgz", + "integrity": "sha512-NU3Ps9nI05GUoVMxcZx1J8CNR6xOvUT4jAUMH5+z8lpp3aEdPVCImKw6PWG4PY+Vfkpr+jvMpxs/qoE7wq0sPw==", + "requires": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.4.0", + "@eslint/eslintrc": "^2.0.2", + "@eslint/js": "8.37.0", + "@humanwhocodes/config-array": "^0.11.8", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "ajv": "^6.10.0", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.1.1", + "eslint-visitor-keys": "^3.4.0", + "espree": "^9.5.1", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "grapheme-splitter": "^1.0.4", + "ignore": "^5.2.0", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-sdsl": "^4.1.4", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.1", + "strip-ansi": "^6.0.1", + "strip-json-comments": "^3.1.0", + "text-table": "^0.2.0" + } + }, + "eslint-config-next": { + "version": "13.2.4", + "resolved": "https://registry.npmjs.org/eslint-config-next/-/eslint-config-next-13.2.4.tgz", + "integrity": 
"sha512-lunIBhsoeqw6/Lfkd6zPt25w1bn0znLA/JCL+au1HoEpSb4/PpsOYsYtgV/q+YPsoKIOzFyU5xnb04iZnXjUvg==", + "requires": { + "@next/eslint-plugin-next": "13.2.4", + "@rushstack/eslint-patch": "^1.1.3", + "@typescript-eslint/parser": "^5.42.0", + "eslint-import-resolver-node": "^0.3.6", + "eslint-import-resolver-typescript": "^3.5.2", + "eslint-plugin-import": "^2.26.0", + "eslint-plugin-jsx-a11y": "^6.5.1", + "eslint-plugin-react": "^7.31.7", + "eslint-plugin-react-hooks": "^4.5.0" + } + }, + "eslint-import-resolver-node": { + "version": "0.3.7", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.7.tgz", + "integrity": "sha512-gozW2blMLJCeFpBwugLTGyvVjNoeo1knonXAcatC6bjPBZitotxdWf7Gimr25N4c0AAOo4eOUfaG82IJPDpqCA==", + "requires": { + "debug": "^3.2.7", + "is-core-module": "^2.11.0", + "resolve": "^1.22.1" + }, + "dependencies": { + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "requires": { + "ms": "^2.1.1" + } + } + } + }, + "eslint-import-resolver-typescript": { + "version": "3.5.4", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-3.5.4.tgz", + "integrity": "sha512-9xUpnedEmSfG57sN1UvWPiEhfJ8bPt0Wg2XysA7Mlc79iFGhmJtRUg9LxtkK81FhMUui0YuR2E8iUsVhePkh4A==", + "requires": { + "debug": "^4.3.4", + "enhanced-resolve": "^5.12.0", + "get-tsconfig": "^4.5.0", + "globby": "^13.1.3", + "is-core-module": "^2.11.0", + "is-glob": "^4.0.3", + "synckit": "^0.8.5" + }, + "dependencies": { + "globby": { + "version": "13.1.3", + "resolved": "https://registry.npmjs.org/globby/-/globby-13.1.3.tgz", + "integrity": "sha512-8krCNHXvlCgHDpegPzleMq07yMYTO2sXKASmZmquEYWEmCx6J5UTRbp5RwMJkTJGtcQ44YpiUYUiN0b9mzy8Bw==", + "requires": { + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.11", + "ignore": "^5.2.0", + "merge2": 
"^1.4.1", + "slash": "^4.0.0" + } + }, + "slash": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz", + "integrity": "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==" + } + } + }, + "eslint-module-utils": { + "version": "2.7.4", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.7.4.tgz", + "integrity": "sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA==", + "requires": { + "debug": "^3.2.7" + }, + "dependencies": { + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "requires": { + "ms": "^2.1.1" + } + } + } + }, + "eslint-plugin-import": { + "version": "2.27.5", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.27.5.tgz", + "integrity": "sha512-LmEt3GVofgiGuiE+ORpnvP+kAm3h6MLZJ4Q5HCyHADofsb4VzXFsRiWj3c0OFiV+3DWFh0qg3v9gcPlfc3zRow==", + "requires": { + "array-includes": "^3.1.6", + "array.prototype.flat": "^1.3.1", + "array.prototype.flatmap": "^1.3.1", + "debug": "^3.2.7", + "doctrine": "^2.1.0", + "eslint-import-resolver-node": "^0.3.7", + "eslint-module-utils": "^2.7.4", + "has": "^1.0.3", + "is-core-module": "^2.11.0", + "is-glob": "^4.0.3", + "minimatch": "^3.1.2", + "object.values": "^1.1.6", + "resolve": "^1.22.1", + "semver": "^6.3.0", + "tsconfig-paths": "^3.14.1" + }, + "dependencies": { + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "requires": { + "ms": "^2.1.1" + } + }, + "doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": 
"sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "requires": { + "esutils": "^2.0.2" + } + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + } + } + }, + "eslint-plugin-jsx-a11y": { + "version": "6.7.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.7.1.tgz", + "integrity": "sha512-63Bog4iIethyo8smBklORknVjB0T2dwB8Mr/hIC+fBS0uyHdYYpzM/Ed+YC8VxTjlXHEWFOdmgwcDn1U2L9VCA==", + "requires": { + "@babel/runtime": "^7.20.7", + "aria-query": "^5.1.3", + "array-includes": "^3.1.6", + "array.prototype.flatmap": "^1.3.1", + "ast-types-flow": "^0.0.7", + "axe-core": "^4.6.2", + "axobject-query": "^3.1.1", + "damerau-levenshtein": "^1.0.8", + "emoji-regex": "^9.2.2", + "has": "^1.0.3", + "jsx-ast-utils": "^3.3.3", + "language-tags": "=1.0.5", + "minimatch": "^3.1.2", + "object.entries": "^1.1.6", + "object.fromentries": "^2.0.6", + "semver": "^6.3.0" + }, + "dependencies": { + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + } + } + }, + "eslint-plugin-react": { + "version": "7.32.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.32.2.tgz", + "integrity": "sha512-t2fBMa+XzonrrNkyVirzKlvn5RXzzPwRHtMvLAtVZrt8oxgnTQaYbU6SXTOO1mwQgp1y5+toMSKInnzGr0Knqg==", + "requires": { + "array-includes": "^3.1.6", + "array.prototype.flatmap": "^1.3.1", + "array.prototype.tosorted": "^1.1.1", + "doctrine": "^2.1.0", + "estraverse": "^5.3.0", + "jsx-ast-utils": "^2.4.1 || ^3.0.0", + "minimatch": "^3.1.2", + "object.entries": "^1.1.6", + "object.fromentries": "^2.0.6", + "object.hasown": "^1.1.2", + "object.values": "^1.1.6", 
+ "prop-types": "^15.8.1", + "resolve": "^2.0.0-next.4", + "semver": "^6.3.0", + "string.prototype.matchall": "^4.0.8" + }, + "dependencies": { + "doctrine": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", + "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "requires": { + "esutils": "^2.0.2" + } + }, + "resolve": { + "version": "2.0.0-next.4", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.4.tgz", + "integrity": "sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ==", + "requires": { + "is-core-module": "^2.9.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + } + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + } + } + }, + "eslint-plugin-react-hooks": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-4.6.0.tgz", + "integrity": "sha512-oFc7Itz9Qxh2x4gNHStv3BqJq54ExXmfC+a1NjAta66IAN87Wu0R/QArgIS9qKzX3dXKPI9H5crl9QchNMY9+g==", + "requires": {} + }, + "eslint-scope": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz", + "integrity": "sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==", + "requires": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + } + }, + "eslint-visitor-keys": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.0.tgz", + "integrity": "sha512-HPpKPUBQcAsZOsHAFwTtIKcYlCje62XB7SEAcxjtmW6TD1WVpkS6i6/hOVtTZIl4zGj/mBqpFVGvaDneik+VoQ==" + }, + "espree": { + "version": "9.5.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.5.1.tgz", + 
"integrity": "sha512-5yxtHSZXRSW5pvv3hAlXM5+/Oswi1AUFqBmbibKb5s6bp3rGIDkyXU6xCoyuuLhijr4SFwPrXRoZjz0AZDN9tg==", + "requires": { + "acorn": "^8.8.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.4.0" + } + }, + "esquery": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", + "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", + "requires": { + "estraverse": "^5.1.0" + } + }, + "esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "requires": { + "estraverse": "^5.2.0" + } + }, + "estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==" + }, + "esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==" + }, + "fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + }, + "fast-glob": { + "version": "3.2.12", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", + "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", + "requires": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "dependencies": { + "glob-parent": { + "version": "5.1.2", + "resolved": 
"https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "requires": { + "is-glob": "^4.0.1" + } + } + } + }, + "fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" + }, + "fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==" + }, + "fastq": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz", + "integrity": "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==", + "requires": { + "reusify": "^1.0.4" + } + }, + "file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "requires": { + "flat-cache": "^3.0.4" + } + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "requires": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + } + }, + "flat-cache": { + "version": "3.0.4", + "resolved": 
"https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", + "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", + "requires": { + "flatted": "^3.1.0", + "rimraf": "^3.0.2" + } + }, + "flatted": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz", + "integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==" + }, + "for-each": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", + "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", + "requires": { + "is-callable": "^1.1.3" + } + }, + "formik": { + "version": "2.2.9", + "resolved": "https://registry.npmjs.org/formik/-/formik-2.2.9.tgz", + "integrity": "sha512-LQLcISMmf1r5at4/gyJigGn0gOwFbeEAlji+N9InZF6LIMXnFNkO42sCI8Jt84YZggpD4cPWObAZaxpEFtSzNA==", + "requires": { + "deepmerge": "^2.1.1", + "hoist-non-react-statics": "^3.3.0", + "lodash": "^4.17.21", + "lodash-es": "^4.17.21", + "react-fast-compare": "^2.0.1", + "tiny-warning": "^1.0.2", + "tslib": "^1.10.0" + }, + "dependencies": { + "tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" + } + } + }, + "fraction.js": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.2.0.tgz", + "integrity": "sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA==" + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" + }, + "fsevents": { + "version": "2.3.2", + "resolved": 
"https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "optional": true + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "function.prototype.name": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz", + "integrity": "sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.0", + "functions-have-names": "^1.2.2" + } + }, + "functions-have-names": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", + "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==" + }, + "get-intrinsic": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.0.tgz", + "integrity": "sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==", + "requires": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.3" + } + }, + "get-symbol-description": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", + "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", + "requires": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.1" + } + }, + "get-tsconfig": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.5.0.tgz", + "integrity": 
"sha512-MjhiaIWCJ1sAU4pIQ5i5OfOuHHxVo1oYeNsWTON7jxYkod8pHocXeh+SSbmu5OZZZK73B6cbJ2XADzXehLyovQ==" + }, + "glob": { + "version": "7.1.7", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", + "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "requires": { + "is-glob": "^4.0.3" + } + }, + "globals": { + "version": "13.20.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.20.0.tgz", + "integrity": "sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ==", + "requires": { + "type-fest": "^0.20.2" + } + }, + "globalthis": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", + "integrity": "sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", + "requires": { + "define-properties": "^1.1.3" + } + }, + "globalyzer": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/globalyzer/-/globalyzer-0.1.0.tgz", + "integrity": "sha512-40oNTM9UfG6aBmuKxk/giHn5nQ8RVz/SS4Ir6zgzOv9/qC3kKZ9v4etGTcJbEl/NyVQH7FGU7d+X1egr57Md2Q==" + }, + "globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "requires": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + } + }, + "globrex": { + "version": "0.1.2", + 
"resolved": "https://registry.npmjs.org/globrex/-/globrex-0.1.2.tgz", + "integrity": "sha512-uHJgbwAMwNFf5mLst7IWLNg14x1CkeqglJb/K3doi4dw6q2IvAAmM/Y81kevy83wP+Sst+nutFTYOGg3d1lsxg==" + }, + "gopd": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", + "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "requires": { + "get-intrinsic": "^1.1.3" + } + }, + "graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" + }, + "grapheme-splitter": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz", + "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==" + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-bigints": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", + "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==" + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==" + }, + "has-property-descriptors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", + "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", + "requires": { + "get-intrinsic": 
"^1.1.1" + } + }, + "has-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", + "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==" + }, + "has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" + }, + "has-tostringtag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", + "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "requires": { + "has-symbols": "^1.0.2" + } + }, + "hoist-non-react-statics": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", + "integrity": "sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==", + "requires": { + "react-is": "^16.7.0" + } + }, + "ignore": { + "version": "5.2.4", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz", + "integrity": "sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==" + }, + "import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "requires": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + } + }, + "imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==" + }, + "inflight": { + "version": "1.0.6", + "resolved": 
"https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "internal-slot": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.5.tgz", + "integrity": "sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ==", + "requires": { + "get-intrinsic": "^1.2.0", + "has": "^1.0.3", + "side-channel": "^1.0.4" + } + }, + "is-arguments": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.1.tgz", + "integrity": "sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==", + "requires": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + } + }, + "is-array-buffer": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.2.tgz", + "integrity": "sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==", + "requires": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.2.0", + "is-typed-array": "^1.1.10" + } + }, + "is-bigint": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", + "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", + "requires": { + "has-bigints": "^1.0.1" + } + }, + "is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": 
"sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "requires": { + "binary-extensions": "^2.0.0" + } + }, + "is-boolean-object": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", + "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", + "requires": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + } + }, + "is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==" + }, + "is-core-module": { + "version": "2.11.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.11.0.tgz", + "integrity": "sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==", + "requires": { + "has": "^1.0.3" + } + }, + "is-date-object": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", + "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", + "requires": { + "has-tostringtag": "^1.0.0" + } + }, + "is-docker": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==" + }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==" + }, + "is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": 
"sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-map": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.2.tgz", + "integrity": "sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg==" + }, + "is-negative-zero": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", + "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==" + }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==" + }, + "is-number-object": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", + "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", + "requires": { + "has-tostringtag": "^1.0.0" + } + }, + "is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==" + }, + "is-regex": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", + "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", + "requires": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + } + }, + "is-set": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.2.tgz", + "integrity": "sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g==" + }, + "is-shared-array-buffer": { + "version": "1.0.2", + 
"resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", + "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", + "requires": { + "call-bind": "^1.0.2" + } + }, + "is-string": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", + "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", + "requires": { + "has-tostringtag": "^1.0.0" + } + }, + "is-symbol": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", + "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", + "requires": { + "has-symbols": "^1.0.2" + } + }, + "is-typed-array": { + "version": "1.1.10", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.10.tgz", + "integrity": "sha512-PJqgEHiWZvMpaFZ3uTc8kHPM4+4ADTlDniuQL7cU/UDA0Ql7F70yGfHph3cLNe+c9toaigv+DFzTJKhc2CtO6A==", + "requires": { + "available-typed-arrays": "^1.0.5", + "call-bind": "^1.0.2", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-tostringtag": "^1.0.0" + } + }, + "is-weakmap": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.1.tgz", + "integrity": "sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA==" + }, + "is-weakref": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", + "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", + "requires": { + "call-bind": "^1.0.2" + } + }, + "is-weakset": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.2.tgz", + "integrity": "sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg==", + "requires": { 
+ "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.1" + } + }, + "is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "requires": { + "is-docker": "^2.0.0" + } + }, + "isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==" + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + }, + "jiti": { + "version": "1.18.2", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.18.2.tgz", + "integrity": "sha512-QAdOptna2NYiSSpv0O/BwoHBSmz4YhpzJHyi+fnMRTXFjp7B8i/YG5Z8IfusxB1ufjcD2Sre1F3R+nX3fvy7gg==" + }, + "js-sdsl": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.4.0.tgz", + "integrity": "sha512-FfVSdx6pJ41Oa+CF7RDaFmTnCaFhua+SNYQX74riGOpl96x+2jQCqEfQ2bnXu/5DPCqlRuiqyvTJM0Qjz26IVg==" + }, + "js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" + }, + "js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "requires": { + "argparse": "^2.0.1" + } + }, + "json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + }, + 
"json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==" + }, + "json5": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", + "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", + "requires": { + "minimist": "^1.2.0" + } + }, + "jsx-ast-utils": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.3.tgz", + "integrity": "sha512-fYQHZTZ8jSfmWZ0iyzfwiU4WDX4HpHbMCZ3gPlWYiCl3BoeOTsqKBqnTVfH2rYT7eP5c3sVbeSPHnnJOaTrWiw==", + "requires": { + "array-includes": "^3.1.5", + "object.assign": "^4.1.3" + } + }, + "language-subtag-registry": { + "version": "0.3.22", + "resolved": "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz", + "integrity": "sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w==" + }, + "language-tags": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/language-tags/-/language-tags-1.0.5.tgz", + "integrity": "sha512-qJhlO9cGXi6hBGKoxEG/sKZDAHD5Hnu9Hs4WbOY3pCWXDhw0N8x1NenNzm2EnNLkLkk7J2SdxAkDSbb6ftT+UQ==", + "requires": { + "language-subtag-registry": "~0.3.2" + } + }, + "levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "requires": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + } + }, + "lilconfig": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz", + "integrity": 
"sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==" + }, + "lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==" + }, + "locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "requires": { + "p-locate": "^5.0.0" + } + }, + "lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + }, + "lodash-es": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", + "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==" + }, + "lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==" + }, + "loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "requires": { + "js-tokens": "^3.0.0 || ^4.0.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "merge2": { + "version": "1.4.1", + "resolved": 
"https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==" + }, + "micromatch": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", + "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "requires": { + "braces": "^3.0.2", + "picomatch": "^2.3.1" + } + }, + "minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==" + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "mz": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", + "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", + "requires": { + "any-promise": "^1.0.0", + "object-assign": "^4.0.1", + "thenify-all": "^1.0.0" + } + }, + "nanoid": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.6.tgz", + "integrity": "sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==" + }, + "natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==" + }, + 
"next": { + "version": "13.4.1", + "resolved": "https://registry.npmjs.org/next/-/next-13.4.1.tgz", + "integrity": "sha512-JBw2kAIyhKDpjhEWvNVoFeIzNp9xNxg8wrthDOtMctfn3EpqGCmW0FSviNyGgOSOSn6zDaX48pmvbdf6X2W9xA==", + "requires": { + "@next/env": "13.4.1", + "@next/swc-darwin-arm64": "13.4.1", + "@next/swc-darwin-x64": "13.4.1", + "@next/swc-linux-arm64-gnu": "13.4.1", + "@next/swc-linux-arm64-musl": "13.4.1", + "@next/swc-linux-x64-gnu": "13.4.1", + "@next/swc-linux-x64-musl": "13.4.1", + "@next/swc-win32-arm64-msvc": "13.4.1", + "@next/swc-win32-ia32-msvc": "13.4.1", + "@next/swc-win32-x64-msvc": "13.4.1", + "@swc/helpers": "0.5.1", + "busboy": "1.6.0", + "caniuse-lite": "^1.0.30001406", + "postcss": "8.4.14", + "styled-jsx": "5.1.1", + "zod": "3.21.4" + }, + "dependencies": { + "postcss": { + "version": "8.4.14", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.14.tgz", + "integrity": "sha512-E398TUmfAYFPBSdzgeieK2Y1+1cpdxJx8yXbK/m57nRhKSmk1GB2tO4lbLBtlkfPQTDKfe4Xqv1ASWPpayPEig==", + "requires": { + "nanoid": "^3.3.4", + "picocolors": "^1.0.0", + "source-map-js": "^1.0.2" + } + } + } + }, + "node-releases": { + "version": "2.0.10", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.10.tgz", + "integrity": "sha512-5GFldHPXVG/YZmFzJvKK2zDSzPKhEp0+ZR5SVaoSag9fsL5YgHbUHDfnG5494ISANDcK4KwPXAx2xqVEydmd7w==" + }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==" + }, + "normalize-range": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", + "integrity": "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==" + }, + "object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + 
"integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==" + }, + "object-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", + "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==" + }, + "object-inspect": { + "version": "1.12.3", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz", + "integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==" + }, + "object-is": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.1.5.tgz", + "integrity": "sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3" + } + }, + "object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==" + }, + "object.assign": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", + "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "has-symbols": "^1.0.3", + "object-keys": "^1.1.1" + } + }, + "object.entries": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.6.tgz", + "integrity": "sha512-leTPzo4Zvg3pmbQ3rDK69Rl8GQvIqMWubrkxONG9/ojtFE2rD9fjMKfSI5BxW3osRH1m6VdzmqK8oAY9aT4x5w==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4" + } + }, + "object.fromentries": { + "version": "2.0.6", + "resolved": 
"https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.6.tgz", + "integrity": "sha512-VciD13dswC4j1Xt5394WR4MzmAQmlgN72phd/riNp9vtD7tp4QQWJ0R4wvclXcafgcYK8veHRed2W6XeGBvcfg==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4" + } + }, + "object.hasown": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.2.tgz", + "integrity": "sha512-B5UIT3J1W+WuWIU55h0mjlwaqxiE5vYENJXIXZ4VFe05pNYrkKuK0U/6aFcb0pKywYJh7IhfoqUfKVmrJJHZHw==", + "requires": { + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4" + } + }, + "object.values": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.6.tgz", + "integrity": "sha512-FVVTkD1vENCsAcwNs9k6jea2uHC/X0+JcjG8YA60FN5CMaJmG95wT9jek/xX9nornqGRrBkKtzuAu2wuHpKqvw==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4" + } + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "requires": { + "wrappy": "1" + } + }, + "open": { + "version": "8.4.2", + "resolved": "https://registry.npmjs.org/open/-/open-8.4.2.tgz", + "integrity": "sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==", + "requires": { + "define-lazy-prop": "^2.0.0", + "is-docker": "^2.1.1", + "is-wsl": "^2.2.0" + } + }, + "optionator": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", + "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", + "requires": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.3" + } + }, + "p-limit": { + "version": "3.1.0", + 
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "requires": { + "yocto-queue": "^0.1.0" + } + }, + "p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "requires": { + "p-limit": "^3.0.2" + } + }, + "parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "requires": { + "callsites": "^3.0.0" + } + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==" + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==" + }, + "path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" + }, + "path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" + }, + "path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==" + }, + 
"picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" + }, + "picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==" + }, + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==" + }, + "pirates": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.5.tgz", + "integrity": "sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ==" + }, + "postcss": { + "version": "8.4.23", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.23.tgz", + "integrity": "sha512-bQ3qMcpF6A/YjR55xtoTr0jGOlnPOKAIMdOWiv0EIT6HVPEaJiJB4NLljSbiHoC2RX7DN5Uvjtpbg1NPdwv1oA==", + "requires": { + "nanoid": "^3.3.6", + "picocolors": "^1.0.0", + "source-map-js": "^1.0.2" + } + }, + "postcss-import": { + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/postcss-import/-/postcss-import-14.1.0.tgz", + "integrity": "sha512-flwI+Vgm4SElObFVPpTIT7SU7R3qk2L7PyduMcokiaVKuWv9d/U+Gm/QAd8NDLuykTWTkcrjOeD2Pp1rMeBTGw==", + "requires": { + "postcss-value-parser": "^4.0.0", + "read-cache": "^1.0.0", + "resolve": "^1.1.7" + } + }, + "postcss-js": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.0.1.tgz", + "integrity": "sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw==", + "requires": { + "camelcase-css": "^2.0.1" + } + }, + "postcss-nested": { + "version": "6.0.0", + "resolved": 
"https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.0.0.tgz", + "integrity": "sha512-0DkamqrPcmkBDsLn+vQDIrtkSbNkv5AD/M322ySo9kqFkCIYklym2xEmWkwo+Y3/qZo34tzEPNUw4y7yMCdv5w==", + "requires": { + "postcss-selector-parser": "^6.0.10" + } + }, + "postcss-selector-parser": { + "version": "6.0.11", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.11.tgz", + "integrity": "sha512-zbARubNdogI9j7WY4nQJBiNqQf3sLS3wCP4WfOidu+p28LofJqDH1tcXypGrcmMHhDk2t9wGhCsYe/+szLTy1g==", + "requires": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + } + }, + "postcss-value-parser": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==" + }, + "prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==" + }, + "prop-types": { + "version": "15.8.1", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", + "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", + "requires": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.13.1" + } + }, + "property-expr": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/property-expr/-/property-expr-2.0.5.tgz", + "integrity": "sha512-IJUkICM5dP5znhCckHSv30Q4b5/JA5enCtkRHYaOVOAocnH/1BQEYTC5NMfT3AVl/iXKdr3aqQbQn9DxyWknwA==" + }, + "punycode": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", + "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==" + }, + "queue-microtask": { + "version": "1.2.3", + "resolved": 
"https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==" + }, + "quick-lru": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", + "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==" + }, + "react": { + "version": "18.2.0", + "resolved": "https://registry.npmjs.org/react/-/react-18.2.0.tgz", + "integrity": "sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==", + "requires": { + "loose-envify": "^1.1.0" + } + }, + "react-dom": { + "version": "18.2.0", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.2.0.tgz", + "integrity": "sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==", + "requires": { + "loose-envify": "^1.1.0", + "scheduler": "^0.23.0" + } + }, + "react-fast-compare": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-2.0.4.tgz", + "integrity": "sha512-suNP+J1VU1MWFKcyt7RtjiSWUjvidmQSlqu+eHslq+342xCbGTYmC0mEhPCOHxlW0CywylOC1u2DFAT+bv4dBw==" + }, + "react-icons": { + "version": "4.8.0", + "resolved": "https://registry.npmjs.org/react-icons/-/react-icons-4.8.0.tgz", + "integrity": "sha512-N6+kOLcihDiAnj5Czu637waJqSnwlMNROzVZMhfX68V/9bu9qHaMIJC4UdozWoOk57gahFCNHwVvWzm0MTzRjg==", + "requires": {} + }, + "react-is": { + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" + }, + "read-cache": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", + "integrity": "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==", + 
"requires": { + "pify": "^2.3.0" + } + }, + "readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "requires": { + "picomatch": "^2.2.1" + } + }, + "regenerator-runtime": { + "version": "0.13.11", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz", + "integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==" + }, + "regexp.prototype.flags": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz", + "integrity": "sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "functions-have-names": "^1.2.2" + } + }, + "resolve": { + "version": "1.22.1", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz", + "integrity": "sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==", + "requires": { + "is-core-module": "^2.9.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + } + }, + "resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==" + }, + "reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==" + }, + "rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": 
"sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "requires": { + "glob": "^7.1.3" + } + }, + "run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "requires": { + "queue-microtask": "^1.2.2" + } + }, + "safe-regex-test": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz", + "integrity": "sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==", + "requires": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.3", + "is-regex": "^1.1.4" + } + }, + "scheduler": { + "version": "0.23.0", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.0.tgz", + "integrity": "sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==", + "requires": { + "loose-envify": "^1.1.0" + } + }, + "semver": { + "version": "7.3.8", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", + "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "requires": { + "lru-cache": "^6.0.0" + } + }, + "shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "requires": { + "shebang-regex": "^3.0.0" + } + }, + "shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" + }, + "side-channel": { + "version": "1.0.4", + "resolved": 
"https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "requires": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + } + }, + "slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==" + }, + "source-map-js": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz", + "integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==" + }, + "stop-iteration-iterator": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz", + "integrity": "sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ==", + "requires": { + "internal-slot": "^1.0.4" + } + }, + "streamsearch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz", + "integrity": "sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==" + }, + "string.prototype.matchall": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.8.tgz", + "integrity": "sha512-6zOCOcJ+RJAQshcTvXPHoxoQGONa3e/Lqx90wUA+wEzX78sg5Bo+1tQo4N0pohS0erG9qtCqJDjNCQBjeWVxyg==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4", + "get-intrinsic": "^1.1.3", + "has-symbols": "^1.0.3", + "internal-slot": "^1.0.3", + "regexp.prototype.flags": "^1.4.3", + "side-channel": "^1.0.4" + } + }, + "string.prototype.trim": { + "version": "1.2.7", + "resolved": 
"https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.7.tgz", + "integrity": "sha512-p6TmeT1T3411M8Cgg9wBTMRtY2q9+PNy9EV1i2lIXUN/btt763oIfxwN3RR8VU6wHX8j/1CFy0L+YuThm6bgOg==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4" + } + }, + "string.prototype.trimend": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz", + "integrity": "sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4" + } + }, + "string.prototype.trimstart": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz", + "integrity": "sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA==", + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.4", + "es-abstract": "^1.20.4" + } + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "requires": { + "ansi-regex": "^5.0.1" + } + }, + "strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==" + }, + "strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==" + }, + "styled-jsx": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/styled-jsx/-/styled-jsx-5.1.1.tgz", + "integrity": 
"sha512-pW7uC1l4mBZ8ugbiZrcIsiIvVx1UmTfw7UkC3Um2tmfUq9Bhk8IiyEIPl6F8agHgjzku6j0xQEZbfA5uSgSaCw==", + "requires": { + "client-only": "0.0.1" + } + }, + "sucrase": { + "version": "3.32.0", + "resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.32.0.tgz", + "integrity": "sha512-ydQOU34rpSyj2TGyz4D2p8rbktIOZ8QY9s+DGLvFU1i5pWJE8vkpruCjGCMHsdXwnD7JDcS+noSwM/a7zyNFDQ==", + "requires": { + "@jridgewell/gen-mapping": "^0.3.2", + "commander": "^4.0.0", + "glob": "7.1.6", + "lines-and-columns": "^1.1.6", + "mz": "^2.7.0", + "pirates": "^4.0.1", + "ts-interface-checker": "^0.1.9" + }, + "dependencies": { + "glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + } + } + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "requires": { + "has-flag": "^4.0.0" + } + }, + "supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==" + }, + "swr": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/swr/-/swr-2.1.5.tgz", + "integrity": "sha512-/OhfZMcEpuz77KavXST5q6XE9nrOBOVcBLWjMT+oAE/kQHyE3PASrevXCtQDZ8aamntOfFkbVJp7Il9tNBQWrw==", + "requires": { + "use-sync-external-store": "^1.2.0" + } + }, + "synckit": { + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.8.5.tgz", + "integrity": 
"sha512-L1dapNV6vu2s/4Sputv8xGsCdAVlb5nRDMFU/E27D44l5U6cw1g0dGd45uLc+OXjNMmF4ntiMdCimzcjFKQI8Q==", + "requires": { + "@pkgr/utils": "^2.3.1", + "tslib": "^2.5.0" + } + }, + "tailwindcss": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.3.1.tgz", + "integrity": "sha512-Vkiouc41d4CEq0ujXl6oiGFQ7bA3WEhUZdTgXAhtKxSy49OmKs8rEfQmupsfF0IGW8fv2iQkp1EVUuapCFrZ9g==", + "requires": { + "arg": "^5.0.2", + "chokidar": "^3.5.3", + "color-name": "^1.1.4", + "didyoumean": "^1.2.2", + "dlv": "^1.1.3", + "fast-glob": "^3.2.12", + "glob-parent": "^6.0.2", + "is-glob": "^4.0.3", + "jiti": "^1.17.2", + "lilconfig": "^2.0.6", + "micromatch": "^4.0.5", + "normalize-path": "^3.0.0", + "object-hash": "^3.0.0", + "picocolors": "^1.0.0", + "postcss": "^8.0.9", + "postcss-import": "^14.1.0", + "postcss-js": "^4.0.0", + "postcss-load-config": "^3.1.4", + "postcss-nested": "6.0.0", + "postcss-selector-parser": "^6.0.11", + "postcss-value-parser": "^4.2.0", + "quick-lru": "^5.1.1", + "resolve": "^1.22.1", + "sucrase": "^3.29.0" + }, + "dependencies": { + "postcss-load-config": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-3.1.4.tgz", + "integrity": "sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg==", + "requires": { + "lilconfig": "^2.0.5", + "yaml": "^1.10.2" + } + } + } + }, + "tapable": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", + "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==" + }, + "text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==" + }, + "thenify": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", + 
"integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==", + "requires": { + "any-promise": "^1.0.0" + } + }, + "thenify-all": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz", + "integrity": "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==", + "requires": { + "thenify": ">= 3.1.0 < 4" + } + }, + "tiny-case": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/tiny-case/-/tiny-case-1.0.3.tgz", + "integrity": "sha512-Eet/eeMhkO6TX8mnUteS9zgPbUMQa4I6Kkp5ORiBD5476/m+PIRiumP5tmh5ioJpH7k51Kehawy2UDfsnxxY8Q==" + }, + "tiny-glob": { + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/tiny-glob/-/tiny-glob-0.2.9.tgz", + "integrity": "sha512-g/55ssRPUjShh+xkfx9UPDXqhckHEsHr4Vd9zX55oSdGZc/MD0m3sferOkwWtp98bv+kcVfEHtRJgBVJzelrzg==", + "requires": { + "globalyzer": "0.1.0", + "globrex": "^0.1.2" + } + }, + "tiny-warning": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", + "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==" + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "requires": { + "is-number": "^7.0.0" + } + }, + "toposort": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/toposort/-/toposort-2.0.2.tgz", + "integrity": "sha512-0a5EOkAUp8D4moMi2W8ZF8jcga7BgZd91O/yabJCFY8az+XSzeGyTKs0Aoo897iV1Nj6guFq8orWDS96z91oGg==" + }, + "ts-interface-checker": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz", + "integrity": 
"sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==" + }, + "tsconfig-paths": { + "version": "3.14.2", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz", + "integrity": "sha512-o/9iXgCYc5L/JxCHPe3Hvh8Q/2xm5Z+p18PESBU6Ff33695QnCHBEjcytY2q19ua7Mbl/DavtBOLq+oG0RCL+g==", + "requires": { + "@types/json5": "^0.0.29", + "json5": "^1.0.2", + "minimist": "^1.2.6", + "strip-bom": "^3.0.0" + } + }, + "tslib": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", + "integrity": "sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==" + }, + "tsutils": { + "version": "3.21.0", + "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", + "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", + "requires": { + "tslib": "^1.8.1" + }, + "dependencies": { + "tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" + } + } + }, + "type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "requires": { + "prelude-ls": "^1.2.1" + } + }, + "type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==" + }, + "typed-array-length": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz", + "integrity": "sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==", + "requires": { 
+ "call-bind": "^1.0.2", + "for-each": "^0.3.3", + "is-typed-array": "^1.1.9" + } + }, + "typescript": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.0.3.tgz", + "integrity": "sha512-xv8mOEDnigb/tN9PSMTwSEqAnUvkoXMQlicOb0IUVDBSQCgBSaAAROUZYy2IcUy5qU6XajK5jjjO7TMWqBTKZA==" + }, + "unbox-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", + "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", + "requires": { + "call-bind": "^1.0.2", + "has-bigints": "^1.0.2", + "has-symbols": "^1.0.3", + "which-boxed-primitive": "^1.0.2" + } + }, + "update-browserslist-db": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz", + "integrity": "sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA==", + "requires": { + "escalade": "^3.1.1", + "picocolors": "^1.0.0" + } + }, + "uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "requires": { + "punycode": "^2.1.0" + } + }, + "use-sync-external-store": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.2.0.tgz", + "integrity": "sha512-eEgnFxGQ1Ife9bzYs6VLi8/4X6CObHMw9Qr9tPY43iKwsPw8xE8+EFsf/2cFZ5S3esXgpWgtSCtLNS41F+sKPA==", + "requires": {} + }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": 
"sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "requires": { + "isexe": "^2.0.0" + } + }, + "which-boxed-primitive": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", + "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", + "requires": { + "is-bigint": "^1.0.1", + "is-boolean-object": "^1.1.0", + "is-number-object": "^1.0.4", + "is-string": "^1.0.5", + "is-symbol": "^1.0.3" + } + }, + "which-collection": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.1.tgz", + "integrity": "sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A==", + "requires": { + "is-map": "^2.0.1", + "is-set": "^2.0.1", + "is-weakmap": "^2.0.1", + "is-weakset": "^2.0.1" + } + }, + "which-typed-array": { + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.9.tgz", + "integrity": "sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA==", + "requires": { + "available-typed-arrays": "^1.0.5", + "call-bind": "^1.0.2", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-tostringtag": "^1.0.0", + "is-typed-array": "^1.1.10" + } + }, + "word-wrap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==" + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==" + }, + "yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==" + }, + "yup": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/yup/-/yup-1.1.1.tgz", + "integrity": "sha512-KfCGHdAErqFZWA5tZf7upSUnGKuTOnsI3hUsLr7fgVtx+DK04NPV01A68/FslI4t3s/ZWpvXJmgXhd7q6ICnag==", + "requires": { + "property-expr": "^2.0.5", + "tiny-case": "^1.0.3", + "toposort": "^2.0.2", + "type-fest": "^2.19.0" + }, + "dependencies": { + "type-fest": { + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz", + "integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==" + } + } + }, + "zod": { + "version": "3.21.4", + "resolved": "https://registry.npmjs.org/zod/-/zod-3.21.4.tgz", + "integrity": "sha512-m46AKbrzKVzOzs/DZgVnG5H55N1sv1M8qZU3A8RIKbs3mrACDNeIOeilDymVb2HdmP8uwshOCF4uJ8uM9rCqJw==" + } } } diff --git a/web/src/app/admin/connectors/confluence/page.tsx b/web/src/app/admin/connectors/confluence/page.tsx index b021e3f0a..f574ca957 100644 --- a/web/src/app/admin/connectors/confluence/page.tsx +++ b/web/src/app/admin/connectors/confluence/page.tsx @@ -1,25 +1,147 @@ "use client"; import * as Yup from "yup"; -import { IndexForm } from "@/components/admin/connectors/Form"; -import { ConfluenceIcon } from "@/components/icons/icons"; +import { ConfluenceIcon, TrashIcon } from "@/components/icons/icons"; import { TextFormField } from "@/components/admin/connectors/Field"; import { HealthCheckBanner } from 
"@/components/health/healthcheck"; +import { CredentialForm } from "@/components/admin/connectors/CredentialForm"; +import { + ConfluenceCredentialJson, + ConfluenceConfig, + Credential, + ConnectorIndexingStatus, +} from "@/lib/types"; +import useSWR, { useSWRConfig } from "swr"; +import { fetcher } from "@/lib/fetcher"; +import { LoadingAnimation } from "@/components/Loading"; +import { deleteCredential, linkCredential } from "@/lib/credential"; +import { ConnectorForm } from "@/components/admin/connectors/ConnectorForm"; +import { ConnectorsTable } from "@/components/admin/connectors/table/ConnectorsTable"; + +const Main = () => { + const { mutate } = useSWRConfig(); + const { + data: connectorIndexingStatuses, + isLoading: isConnectorIndexingStatusesLoading, + error: isConnectorIndexingStatusesError, + } = useSWR[]>( + "/api/admin/connector/indexing-status", + fetcher + ); + const { + data: credentialsData, + isLoading: isCredentialsLoading, + isValidating: isCredentialsValidating, + error: isCredentialsError, + } = useSWR[]>( + "/api/admin/credential", + fetcher + ); + + if ( + isConnectorIndexingStatusesLoading || + isCredentialsLoading || + isCredentialsValidating + ) { + return ; + } + + if (isConnectorIndexingStatusesError || !connectorIndexingStatuses) { + return
Failed to load connectors
; + } + + if (isCredentialsError || !credentialsData) { + return
Failed to load credentials
; + } + + const confluenceConnectorIndexingStatuses = connectorIndexingStatuses.filter( + (connectorIndexingStatus) => + connectorIndexingStatus.connector.source === "confluence" + ); + const confluenceCredential = credentialsData.filter( + (credential) => credential.credential_json?.confluence_access_token + )[0]; -export default function Page() { return ( -
-
- -
-
- -

Confluence

-
+ <> +

+ Step 1: Provide your Credentials +

+ + {confluenceCredential ? ( + <> +
+ {/*
+

Existing Username:

+

+ {confluenceCredential.credential_json?.confluence_username} +

{" "} +
*/} +

Existing Access Token:

+

+ {confluenceCredential.credential_json?.confluence_access_token} +

+ +
+ + ) : ( + <> +

+ To use the Confluence connector, you must first follow the guide + described{" "} + + here + {" "} + to generate an Access Token. +

+
+ + formBody={ + <> + + + + } + validationSchema={Yup.object().shape({ + confluence_username: Yup.string().required( + "Please enter your username on Confluence" + ), + confluence_access_token: Yup.string().required( + "Please enter your Confluence access token" + ), + })} + initialValues={{ + confluence_username: "", + confluence_access_token: "", + }} + onSubmit={(isSuccess) => { + if (isSuccess) { + mutate("/api/admin/credential"); + } + }} + /> +
+ + )} {/* TODO: make this periodic */} -

- Request Indexing +

+ Step 2: Which spaces do you want to make searchable?

To use the Confluence connector, you must first follow the guide @@ -34,13 +156,62 @@ export default function Page() { setup, specify any link to a Confluence page below and click "Index" to Index. Based on the provided link, we will index the ENTIRE SPACE, not just the specified page. For example, entering{" "} - https://danswer.atlassian.net/wiki/spaces/SD/overview and - clicking the Index button will index the whole SD Confluence - space. + https://danswer.atlassian.net/wiki/spaces/Engineering/overview{" "} + and clicking the Index button will index the whole Engineering{" "} + Confluence space.

-
- 0 && ( + <> +

+ We pull the latest pages and comments from each space listed below + every 10 minutes. +

+
+ + connectorIndexingStatuses={confluenceConnectorIndexingStatuses} + liveCredential={confluenceCredential} + getCredential={(credential) => { + return ( +
+

{credential.credential_json.confluence_access_token}

+
+ ); + }} + onCredentialLink={async (connectorId) => { + if (confluenceCredential) { + await linkCredential(connectorId, confluenceCredential.id); + mutate("/api/admin/connector/indexing-status"); + } + }} + specialColumns={[ + { + header: "Url", + key: "url", + getValue: (connector) => ( + + {connector.connector_specific_config.wiki_page_url} + + ), + }, + ]} + onUpdate={() => mutate("/api/admin/connector/indexing-status")} + /> +
+ + )} + +
+

Add a New Space

+ + nameBuilder={(values) => + `ConfluenceConnector-${values.wiki_page_url}` + } source="confluence" + inputType="load_state" formBody={ <> @@ -48,15 +219,36 @@ export default function Page() { } validationSchema={Yup.object().shape({ wiki_page_url: Yup.string().required( - "Please enter any link to your confluence e.g. https://danswer.atlassian.net/wiki/spaces/SD/overview" + "Please enter any link to your confluence e.g. https://danswer.atlassian.net/wiki/spaces/Engineering/overview" ), })} initialValues={{ wiki_page_url: "", }} - onSubmit={(isSuccess) => console.log(isSuccess)} + refreshFreq={10 * 60} // 10 minutes + onSubmit={async (isSuccess, responseJson) => { + if (isSuccess && responseJson) { + await linkCredential(responseJson.id, confluenceCredential.id); + mutate("/api/admin/connector/indexing-status"); + } + }} />
+ + ); +}; + +export default function Page() { + return ( +
+
+ +
+
+ +

Confluence

+
+
); } diff --git a/web/src/app/admin/connectors/github/page.tsx b/web/src/app/admin/connectors/github/page.tsx index 9212405ee..79a965438 100644 --- a/web/src/app/admin/connectors/github/page.tsx +++ b/web/src/app/admin/connectors/github/page.tsx @@ -1,14 +1,213 @@ "use client"; import * as Yup from "yup"; -import { IndexForm } from "@/components/admin/connectors/Form"; -import { GithubIcon } from "@/components/icons/icons"; +import { GithubIcon, TrashIcon } from "@/components/icons/icons"; import { TextFormField } from "@/components/admin/connectors/Field"; import { HealthCheckBanner } from "@/components/health/healthcheck"; +import useSWR, { useSWRConfig } from "swr"; +import { fetcher } from "@/lib/fetcher"; +import { + GithubConfig, + GithubCredentialJson, + Credential, + ConnectorIndexingStatus, +} from "@/lib/types"; +import { ConnectorForm } from "@/components/admin/connectors/ConnectorForm"; +import { LoadingAnimation } from "@/components/Loading"; +import { CredentialForm } from "@/components/admin/connectors/CredentialForm"; +import { deleteCredential, linkCredential } from "@/lib/credential"; +import { ConnectorsTable } from "@/components/admin/connectors/table/ConnectorsTable"; + +const Main = () => { + const { mutate } = useSWRConfig(); + const { + data: connectorIndexingStatuses, + isLoading: isConnectorIndexingStatusesLoading, + error: isConnectorIndexingStatusesError, + } = useSWR[]>( + "/api/admin/connector/indexing-status", + fetcher + ); + + const { + data: credentialsData, + isLoading: isCredentialsLoading, + isValidating: isCredentialsValidating, + error: isCredentialsError, + } = useSWR[]>( + "/api/admin/credential", + fetcher + ); + + if ( + isConnectorIndexingStatusesLoading || + isCredentialsLoading || + isCredentialsValidating + ) { + return ; + } + + if (isConnectorIndexingStatusesError || !connectorIndexingStatuses) { + return
Failed to load connectors
; + } + + if (isCredentialsError || !credentialsData) { + return
Failed to load credentials
; + } + + const githubConnectorIndexingStatuses: ConnectorIndexingStatus[] = + connectorIndexingStatuses.filter( + (connectorIndexingStatus) => + connectorIndexingStatus.connector.source === "github" + ); + const githubCredential = credentialsData.filter( + (credential) => credential.credential_json?.github_access_token + )[0]; + + return ( + <> +

+ Step 1: Provide your access token +

+ {githubCredential ? ( + <> + {" "} +
+

Existing Access Token:

+

+ {githubCredential.credential_json.github_access_token} +

{" "} + +
+ + ) : ( + <> +

+ If you don't have an access token, read the guide{" "} + + here + {" "} + on how to get one from Github. +

+
+ + formBody={ + <> + + + } + validationSchema={Yup.object().shape({ + github_access_token: Yup.string().required( + "Please enter the access token for Github" + ), + })} + initialValues={{ + github_access_token: "", + }} + onSubmit={(isSuccess) => { + if (isSuccess) { + mutate("/api/admin/credential"); + } + }} + /> +
+ + )} + +

+ Step 2: Which repositories do you want to make searchable? +

+ + {githubConnectorIndexingStatuses.length > 0 && ( + <> +

+ We pull the latest Pull Requests from each repository listed below + every 10 minutes. +

+
+ + connectorIndexingStatuses={githubConnectorIndexingStatuses} + liveCredential={githubCredential} + getCredential={(credential) => + credential.credential_json.github_access_token + } + onCredentialLink={async (connectorId) => { + if (githubCredential) { + await linkCredential(connectorId, githubCredential.id); + mutate("/api/admin/connector/indexing-status"); + } + }} + specialColumns={[ + { + header: "Repository", + key: "repository", + getValue: (connector) => + `${connector.connector_specific_config.repo_owner}/${connector.connector_specific_config.repo_name}`, + }, + ]} + onUpdate={() => mutate("/api/admin/connector/indexing-status")} + /> +
+ + )} + +
+

Connect to a New Repository

+ + nameBuilder={(values) => + `GithubConnector-${values.repo_owner}/${values.repo_name}` + } + source="github" + inputType="load_state" + formBody={ + <> + + + + } + validationSchema={Yup.object().shape({ + repo_owner: Yup.string().required( + "Please enter the owner of the repository to index e.g. danswer-ai" + ), + repo_name: Yup.string().required( + "Please enter the name of the repository to index e.g. danswer " + ), + })} + initialValues={{ + repo_owner: "", + repo_name: "", + }} + refreshFreq={10 * 60} // 10 minutes + onSubmit={async (isSuccess, responseJson) => { + if (isSuccess && responseJson) { + await linkCredential(responseJson.id, githubCredential.id); + mutate("/api/admin/connector/indexing-status"); + } + }} + /> +
+ + ); +}; export default function Page() { return ( -
+
@@ -16,35 +215,7 @@ export default function Page() {

Github PRs

- - {/* TODO: make this periodic */} -

- Request Indexing -

-
- - - - - } - validationSchema={Yup.object().shape({ - repo_owner: Yup.string().required( - "Please enter the owner of the repo scrape e.g. danswer-ai" - ), - repo_name: Yup.string().required( - "Please enter the name of the repo scrape e.g. danswer " - ), - })} - initialValues={{ - repo_owner: "", - repo_name: "", - }} - onSubmit={(isSuccess) => console.log(isSuccess)} - /> -
+
); } diff --git a/web/src/app/admin/connectors/google-drive/auth/callback/route.ts b/web/src/app/admin/connectors/google-drive/auth/callback/route.ts index 1d2ba2c65..829a49d73 100644 --- a/web/src/app/admin/connectors/google-drive/auth/callback/route.ts +++ b/web/src/app/admin/connectors/google-drive/auth/callback/route.ts @@ -1,16 +1,28 @@ import { getDomain } from "@/lib/redirectSS"; import { buildUrl } from "@/lib/utilsSS"; import { NextRequest, NextResponse } from "next/server"; +import { cookies } from "next/headers"; export const GET = async (request: NextRequest) => { // Wrapper around the FastAPI endpoint /connectors/google-drive/callback, // which adds back a redirect to the Google Drive admin page. - const url = new URL(buildUrl("/admin/connectors/google-drive/callback")); + const url = new URL(buildUrl("/admin/connector/google-drive/callback")); url.search = request.nextUrl.search; - const response = await fetch(url.toString()); + const response = await fetch(url.toString(), { + headers: { + cookie: cookies() + .getAll() + .map((cookie) => `${cookie.name}=${cookie.value}`) + .join("; "), + }, + }); if (!response.ok) { + console.log( + "Error in Google Drive callback:", + (await response.json()).detail + ); return NextResponse.redirect(new URL("/auth/error", getDomain(request))); } diff --git a/web/src/app/admin/connectors/google-drive/page.tsx b/web/src/app/admin/connectors/google-drive/page.tsx index f7e0ffaf0..d9d9221ca 100644 --- a/web/src/app/admin/connectors/google-drive/page.tsx +++ b/web/src/app/admin/connectors/google-drive/page.tsx @@ -1,40 +1,120 @@ "use client"; -import * as Yup from "yup"; -import { - IndexForm, - submitIndexRequest, -} from "@/components/admin/connectors/Form"; -import { - ConnectorStatusEnum, - ConnectorStatus, -} from "@/components/admin/connectors/ConnectorStatus"; import { GoogleDriveIcon } from "@/components/icons/icons"; -import useSWR from "swr"; +import useSWR, { useSWRConfig } from "swr"; import { fetcher } from 
"@/lib/fetcher"; import { LoadingAnimation } from "@/components/Loading"; import { useRouter } from "next/navigation"; -import { Popup } from "@/components/admin/connectors/Popup"; +import { Popup, PopupSpec } from "@/components/admin/connectors/Popup"; import { useState } from "react"; import { HealthCheckBanner } from "@/components/health/healthcheck"; +import { Button } from "@/components/Button"; +import { + Connector, + ConnectorBase, + ConnectorIndexingStatus, + Credential, + GoogleDriveCredentialJson, +} from "@/lib/types"; +import { deleteConnector } from "@/lib/connector"; +import { StatusRow } from "@/components/admin/connectors/table/ConnectorsTable"; -export default function Page() { +const AppCredentialUpload = ({ + setPopup, +}: { + setPopup: (popupSpec: PopupSpec | null) => void; +}) => { + const [appCredentialJsonStr, setAppCredentialJsonStr] = useState< + string | undefined + >(); + + return ( + <> + { + if (!event.target.files) { + return; + } + const file = event.target.files[0]; + const reader = new FileReader(); + + reader.onload = function (loadEvent) { + if (!loadEvent?.target?.result) { + return; + } + const fileContents = loadEvent.target.result; + setAppCredentialJsonStr(fileContents as string); + }; + + reader.readAsText(file); + }} + /> + + + + ); +}; + +const Main = () => { const router = useRouter(); + const { mutate } = useSWRConfig(); const { - data: isAuthenticatedData, - isLoading: isAuthenticatedLoading, - error: isAuthenticatedError, - } = useSWR<{ authenticated: boolean }>( - "/api/admin/connectors/google-drive/check-auth", + data: appCredentialData, + isLoading: isAppCredentialLoading, + error: isAppCredentialError, + } = useSWR<{ client_id: string }>( + "/api/admin/connector/google-drive/app-credential", fetcher ); const { - data: authorizationUrlData, - isLoading: authorizationUrlLoading, - error: authorizationUrlError, - } = useSWR<{ auth_url: string }>( - "/api/admin/connectors/google-drive/authorize", + data: 
connectorIndexingStatuses, + isLoading: isConnectorIndexingStatusesLoading, + error: isConnectorIndexingStatusesError, + } = useSWR[]>( + "/api/admin/connector/indexing-status", + fetcher + ); + const { + data: credentialsData, + isLoading: isCredentialsLoading, + error: isCredentialsError, + } = useSWR[]>( + "/api/admin/credential", fetcher ); @@ -42,9 +122,294 @@ export default function Page() { message: string; type: "success" | "error"; } | null>(null); + const setPopupWithExpiration = (popupSpec: PopupSpec | null) => { + setPopup(popupSpec); + setTimeout(() => { + setPopup(null); + }, 4000); + }; - const header = ( -
+ if ( + isCredentialsLoading || + isAppCredentialLoading || + isConnectorIndexingStatusesLoading + ) { + return ( +
+ +
+ ); + } + + if (isCredentialsError || !credentialsData) { + return ( +
+
Failed to load credentials.
+
+ ); + } + + if (isConnectorIndexingStatusesError || !connectorIndexingStatuses) { + return ( +
+
Failed to load connectors.
+
+ ); + } + + if (isAppCredentialError) { + return ( +
+
+ Error loading Google Drive app credentials. Contact an administrator. +
+
+ ); + } + + const googleDriveConnectorIndexingStatuses: ConnectorIndexingStatus<{}>[] = + connectorIndexingStatuses.filter( + (connectorIndexingStatus) => + connectorIndexingStatus.connector.source === "google_drive" + ); + const googleDriveConnectorIndexingStatus = + googleDriveConnectorIndexingStatuses[0]; + const googleDriveCredential = credentialsData.filter( + (credential) => credential.credential_json?.google_drive_tokens + )[0]; + + return ( + <> + {popup && } +

+ Step 1: Provide your app Credentials +

+
+ {appCredentialData?.client_id ? ( +
+
+ Found existing app credentials with the following{" "} + Client ID: +

{appCredentialData.client_id}

+
+
+ If you want to update these credentials, upload a new + credentials.json file below. +
+ { + mutate("/api/admin/connector/google-drive/app-credential"); + setPopupWithExpiration(popup); + }} + /> +
+
+
+ ) : ( + <> +

+ Follow the guide{" "} + + here + {" "} + to setup your google app in your company workspace. Download the + credentials.json, and upload it here. +

+ { + mutate("/api/admin/connector/google-drive/app-credential"); + setPopupWithExpiration(popup); + }} + /> + + )} +
+ +

+ Step 2: Authenticate with Danswer +

+
+ {googleDriveCredential ? ( +

+ Existing credential already setup! If you want to reset that + credential, click the button below to go through the OAuth flow + again. +

+ ) : ( + <> +

+ Next, you must provide credentials via OAuth. This gives us read + access to the docs you have access to in your google drive + account. +

+ + )} +
+ + +

+ Step 3: Start Indexing! +

+ {googleDriveConnectorIndexingStatus ? ( +
+
+
+ The Google Drive connector is setup!{" "} + Status:{" "} + { + mutate("/api/admin/connector/indexing-status"); + }} + /> +
+

+ Checkout the{" "} + + status page + {" "} + for the latest indexing status. We fetch the latest documents from + Google Drive every 10 minutes. +

+
+ +
+ ) : ( + <> +

+ Click the button below to create a connector. We will refresh the + latest documents from Google Drive every 10 minutes. +

+ + + )} + + ); +}; + +export default function Page() { + return ( +
@@ -52,138 +417,8 @@ export default function Page() {

Google Drive

-
- ); - let body = null; - if (isAuthenticatedLoading || authorizationUrlLoading) { - return ( -
- {header} - -
- ); - } - if ( - isAuthenticatedError || - isAuthenticatedData?.authenticated === undefined - ) { - return ( -
- {header} -
- Error loading Google Drive authentication status. Contact an - administrator. -
-
- ); - } - if (authorizationUrlError || authorizationUrlData?.auth_url === undefined) { - return ( -
- {header} -
- Error loading Google Drive authentication URL. Contact an - administrator. -
-
- ); - } - - if (isAuthenticatedData.authenticated) { - return ( -
- {header} - {popup && } - - {/* TODO: add periodic support */} -

- Request Indexing -

-

- Index the all docs in the setup Google Drive account. -

-
- -
- - {/* TODO: add ability to add more accounts / switch account */} -
-

- Re-Authenticate -

-

- If you want to switch Google Drive accounts, you can re-authenticate - below. -

- - Authenticate with Google Drive - -
-
- ); - } - - return ( -
- {header} - -
-
-

Setup

-

- To use the Google Drive connector, you must first provide - credentials via OAuth. This gives us read access to the docs in your - google drive account. -

- - Authenticate with Google Drive - -
-
+
); } diff --git a/web/src/app/admin/connectors/slack/InitialSetupForm.tsx b/web/src/app/admin/connectors/slack/InitialSetupForm.tsx deleted file mode 100644 index 4bfb20b6c..000000000 --- a/web/src/app/admin/connectors/slack/InitialSetupForm.tsx +++ /dev/null @@ -1,101 +0,0 @@ -import React, { useState } from "react"; -import { Formik, Form, FormikHelpers } from "formik"; -import * as Yup from "yup"; -import { Popup } from "../../../../components/admin/connectors/Popup"; -import { TextFormField } from "../../../../components/admin/connectors/Field"; -import { SlackConfig } from "../../../../components/admin/connectors/types"; - -const validationSchema = Yup.object().shape({ - slack_bot_token: Yup.string().required("Please enter your Slack Bot Token"), - workspace_id: Yup.string().required("Please enter your Workspace ID"), - pull_frequency: Yup.number().optional(), -}); - -const handleSubmit = async ( - values: SlackConfig, - { setSubmitting }: FormikHelpers, - setPopup: ( - popup: { message: string; type: "success" | "error" } | null - ) => void -) => { - let isSuccess = false; - setSubmitting(true); - try { - const response = await fetch("/api/admin/connectors/slack/config", { - method: "POST", - headers: { - "Content-Type": "application/json", - }, - body: JSON.stringify(values), - }); - - if (response.ok) { - isSuccess = true; - setPopup({ message: "Success!", type: "success" }); - } else { - const errorData = await response.json(); - setPopup({ message: `Error: ${errorData.detail}`, type: "error" }); - } - } catch (error) { - setPopup({ message: `Error: ${error}`, type: "error" }); - } finally { - setSubmitting(false); - setTimeout(() => { - setPopup(null); - }, 3000); - } - return isSuccess; -}; - -interface Props { - existingSlackConfig: SlackConfig; - onSubmit: (isSuccess: boolean) => void; -} - -export const InitialSetupForm: React.FC = ({ - existingSlackConfig, - onSubmit, -}) => { - const [popup, setPopup] = useState<{ - message: string; - type: 
"success" | "error"; - } | null>(null); - - return ( - <> - {popup && } - - handleSubmit(values, formikHelpers, setPopup).then((isSuccess) => - onSubmit(isSuccess) - ) - } - > - {({ isSubmitting }) => ( -
- - - -
- -
- - )} -
- - ); -}; diff --git a/web/src/app/admin/connectors/slack/page.tsx b/web/src/app/admin/connectors/slack/page.tsx index 55511028d..fc53185de 100644 --- a/web/src/app/admin/connectors/slack/page.tsx +++ b/web/src/app/admin/connectors/slack/page.tsx @@ -1,65 +1,208 @@ "use client"; -import { SlackIcon } from "@/components/icons/icons"; +import * as Yup from "yup"; +import { SlackIcon, TrashIcon } from "@/components/icons/icons"; import { fetcher } from "@/lib/fetcher"; import useSWR, { useSWRConfig } from "swr"; -import { SlackConfig } from "../../../../components/admin/connectors/types"; import { LoadingAnimation } from "@/components/Loading"; -import { InitialSetupForm } from "./InitialSetupForm"; -import { useRouter } from "next/navigation"; import { HealthCheckBanner } from "@/components/health/healthcheck"; +import { + Connector, + SlackConfig, + Credential, + SlackCredentialJson, + ConnectorIndexingStatus, +} from "@/lib/types"; +import { deleteCredential, linkCredential } from "@/lib/credential"; +import { CredentialForm } from "@/components/admin/connectors/CredentialForm"; +import { TextFormField } from "@/components/admin/connectors/Field"; +import { ConnectorsTable } from "@/components/admin/connectors/table/ConnectorsTable"; +import { ConnectorForm } from "@/components/admin/connectors/ConnectorForm"; const MainSection = () => { - // TODO: add back in once this is ready - // const { data, isLoading, error } = useSWR( - // "/api/admin/connectors/web/index-attempt", - // fetcher - // ); - const { mutate } = useSWRConfig(); - const { data, isLoading, error } = useSWR( - "/api/admin/connectors/slack/config", + const { + data: connectorIndexingStatuses, + isLoading: isConnectorIndexingStatusesLoading, + error: isConnectorIndexingStatusesError, + } = useSWR[]>( + "/api/admin/connector/indexing-status", fetcher ); - if (isLoading) { - return ( -
- -
- ); - } else if (error || !data) { - return
{`Error loading Slack config - ${error}`}
; + const { + data: credentialsData, + isLoading: isCredentialsLoading, + isValidating: isCredentialsValidating, + error: isCredentialsError, + } = useSWR[]>( + "/api/admin/credential", + fetcher + ); + + if ( + isConnectorIndexingStatusesLoading || + isCredentialsLoading || + isCredentialsValidating + ) { + return ; } + if (isConnectorIndexingStatusesError || !connectorIndexingStatuses) { + return
Failed to load connectors
; + } + + if (isCredentialsError || !credentialsData) { + return
Failed to load credentials
; + } + + const slackConnectorIndexingStatuses: ConnectorIndexingStatus[] = + connectorIndexingStatuses.filter( + (connectorIndexingStatus) => + connectorIndexingStatus.connector.source === "slack" + ); + const slackCredential = credentialsData.filter( + (credential) => credential.credential_json?.slack_bot_token + )[0]; + return ( -
-

Config

-

- To use the Slack connector, you must first provide a Slack bot token - corresponding to the Slack App set up in your workspace. For more - details on setting up the Danswer Slack App, see the{" "} - - docs - - . -

-
- mutate("/api/admin/connectors/slack/config")} + <> +

+ Step 1: Provide Credentials +

+ {slackCredential ? ( + <> +
+

Existing Slack Bot Token:

+

+ {slackCredential.credential_json.slack_bot_token} +

{" "} + +
+ + ) : ( + <> +

+ To use the Slack connector, you must first provide a Slack bot token + corresponding to the Slack App set up in your workspace. For more + details on setting up the Danswer Slack App, see the{" "} + + docs + + . +

+
+ + formBody={ + <> + + + } + validationSchema={Yup.object().shape({ + slack_bot_token: Yup.string().required( + "Please enter your Slack bot token" + ), + })} + initialValues={{ + slack_bot_token: "", + }} + onSubmit={(isSuccess) => { + if (isSuccess) { + mutate("/api/admin/credential"); + } + }} + /> +
+ + )} + +

+ Step 2: Which workspaces do you want to make searchable? +

+ + {slackConnectorIndexingStatuses.length > 0 && ( + <> +

+ We pull the latest messages from each workspace listed below every{" "} + 10 minutes. +

+
+ + credential.credential_json.slack_bot_token + } + specialColumns={[ + { + header: "Workspace", + key: "workspace", + getValue: (connector) => + connector.connector_specific_config.workspace, + }, + ]} + onUpdate={() => mutate("/api/admin/connector/indexing-status")} + onCredentialLink={async (connectorId) => { + if (slackCredential) { + await linkCredential(connectorId, slackCredential.id); + mutate("/api/admin/connector/indexing-status"); + } + }} + /> +
+ + )} + +
+

Connect to a New Workspace

+ + nameBuilder={(values) => `SlackConnector-${values.workspace}`} + source="slack" + inputType="poll" + formBody={ + <> + + + } + validationSchema={Yup.object().shape({ + workspace: Yup.string().required( + "Please enter the workspace to index" + ), + })} + initialValues={{ + workspace: "", + }} + refreshFreq={10 * 60} // 10 minutes + onSubmit={async (isSuccess, responseJson) => { + if (isSuccess && responseJson) { + await linkCredential(responseJson.id, slackCredential.id); + mutate("/api/admin/connector/indexing-status"); + } + }} />
-
+ ); }; export default function Page() { return ( -
+
diff --git a/web/src/app/admin/connectors/web/page.tsx b/web/src/app/admin/connectors/web/page.tsx index 4770e7f35..9d24c0039 100644 --- a/web/src/app/admin/connectors/web/page.tsx +++ b/web/src/app/admin/connectors/web/page.tsx @@ -1,60 +1,38 @@ "use client"; -import useSWR from "swr"; +import useSWR, { useSWRConfig } from "swr"; import * as Yup from "yup"; -import { BasicTable } from "@/components/admin/connectors/BasicTable"; import { LoadingAnimation } from "@/components/Loading"; -import { timeAgo } from "@/lib/time"; import { GlobeIcon } from "@/components/icons/icons"; import { fetcher } from "@/lib/fetcher"; -import { - IndexAttempt, - ListIndexingResponse, -} from "../../../../components/admin/connectors/types"; -import { IndexForm } from "@/components/admin/connectors/Form"; import { TextFormField } from "@/components/admin/connectors/Field"; -import { useRouter } from "next/navigation"; import { HealthCheckBanner } from "@/components/health/healthcheck"; - -const COLUMNS = [ - { header: "Base URL", key: "url" }, - { header: "Last Indexed", key: "indexed_at" }, - { header: "Docs Indexed", key: "docs_indexed" }, - { header: "Status", key: "status" }, -]; +import { ConnectorIndexingStatus, WebConfig } from "@/lib/types"; +import { ConnectorsTable } from "@/components/admin/connectors/table/ConnectorsTable"; +import { ConnectorForm } from "@/components/admin/connectors/ConnectorForm"; +import { linkCredential } from "@/lib/credential"; export default function Web() { - const router = useRouter(); + const { mutate } = useSWRConfig(); - const { data, isLoading, error } = useSWR( - "/api/admin/connectors/web/index-attempt", + const { + data: connectorIndexingStatuses, + isLoading: isConnectorIndexingStatusesLoading, + error: isConnectorIndexingStatusesError, + } = useSWR[]>( + "/api/admin/connector/indexing-status", fetcher ); - const urlToLatestIndexAttempt = new Map(); - const urlToLatestIndexSuccess = new Map(); - data?.index_attempts?.forEach((indexAttempt) 
=> { - const url = indexAttempt.connector_specific_config.base_url; - const latestIndexAttempt = urlToLatestIndexAttempt.get(url); - if ( - !latestIndexAttempt || - indexAttempt.time_created > latestIndexAttempt.time_created - ) { - urlToLatestIndexAttempt.set(url, indexAttempt); - } - - const latestIndexSuccess = urlToLatestIndexSuccess.get(url); - if ( - indexAttempt.status === "success" && - (!latestIndexSuccess || indexAttempt.time_updated > latestIndexSuccess) - ) { - urlToLatestIndexSuccess.set(url, indexAttempt.time_updated); - } - }); + const webIndexingStatuses: ConnectorIndexingStatus[] = + connectorIndexingStatuses?.filter( + (connectorIndexingStatus) => + connectorIndexingStatus.connector.source === "web" + ) ?? []; return ( -
+
@@ -62,59 +40,69 @@ export default function Web() {

Web

-

- Request Indexing +

+ Step 1: Specify which websites to index

+

+ We re-fetch the latest state of the website once a day. +

- + nameBuilder={(values) => `WebConnector-${values.base_url}`} source="web" - formBody={} + inputType="load_state" + formBody={ + <> + + + } validationSchema={Yup.object().shape({ base_url: Yup.string().required( - "Please enter the website URL to scrape e.g. https://docs.github.com/en/actions" + "Please enter the website URL to scrape e.g. https://docs.danswer.dev/" ), })} - initialValues={{ base_url: "" }} - onSubmit={(success) => { - if (success) { - router.push("/admin/indexing/status"); + initialValues={{ + base_url: "", + }} + refreshFreq={60 * 60 * 24} // 1 day + onSubmit={async (isSuccess, responseJson) => { + if (isSuccess && responseJson) { + // assumes there is a dummy credential with id 0 + await linkCredential(responseJson.id, 0); + mutate("/api/admin/connector/indexing-status"); } }} />
-

- Indexing History +

+ Already Indexed Websites

- {isLoading ? ( + {isConnectorIndexingStatusesLoading ? ( - ) : error ? ( + ) : isConnectorIndexingStatusesError || !connectorIndexingStatuses ? (
Error loading indexing history
- ) : ( - 0 - ? Array.from(urlToLatestIndexAttempt.values()).map( - (indexAttempt) => { - const url = indexAttempt.connector_specific_config - .base_url as string; - return { - indexed_at: - timeAgo(urlToLatestIndexSuccess.get(url)) || "-", - docs_indexed: indexAttempt.docs_indexed || "-", - url: ( - - {url} - - ), - status: indexAttempt.status, - }; - } - ) - : [] - } + ) : webIndexingStatuses.length > 0 ? ( + + connectorIndexingStatuses={webIndexingStatuses} + specialColumns={[ + { + header: "Base URL", + key: "base_url", + getValue: (connector) => ( + + {connector.connector_specific_config.base_url} + + ), + }, + ]} + onUpdate={() => mutate("/api/admin/connector/indexing-status")} /> + ) : ( +

No indexed websites found

)}
); diff --git a/web/src/app/admin/indexing/status/page.tsx b/web/src/app/admin/indexing/status/page.tsx index d0e7caf5b..edce954ae 100644 --- a/web/src/app/admin/indexing/status/page.tsx +++ b/web/src/app/admin/indexing/status/page.tsx @@ -1,49 +1,55 @@ "use client"; -import useSWR, { useSWRConfig } from "swr"; +import useSWR from "swr"; import { BasicTable } from "@/components/admin/connectors/BasicTable"; import { LoadingAnimation } from "@/components/Loading"; import { timeAgo } from "@/lib/time"; -import { NotebookIcon } from "@/components/icons/icons"; +import { NotebookIcon, XSquareIcon } from "@/components/icons/icons"; import { fetcher } from "@/lib/fetcher"; -import { - IndexAttempt, - ListIndexingResponse, -} from "@/components/admin/connectors/types"; import { getSourceMetadata } from "@/components/source"; import { CheckCircle, XCircle } from "@phosphor-icons/react"; -import { submitIndexRequest } from "@/components/admin/connectors/Form"; import { useState } from "react"; import { Popup } from "@/components/admin/connectors/Popup"; import { HealthCheckBanner } from "@/components/health/healthcheck"; +import { Connector, ConnectorIndexingStatus } from "@/lib/types"; -const getModifiedSource = (indexAttempt: IndexAttempt) => { - return indexAttempt.source === "web" - ? 
indexAttempt.source + indexAttempt.connector_specific_config?.base_url - : indexAttempt.source; -}; +const getSourceDisplay = (connector: Connector) => { + const sourceMetadata = getSourceMetadata(connector.source); + if (connector.source === "web") { + return ( + sourceMetadata.displayName + + (connector.connector_specific_config?.base_url && + ` [${connector.connector_specific_config?.base_url}]`) + ); + } -const getLatestIndexAttemptsBySource = (indexAttempts: IndexAttempt[]) => { - const latestIndexAttemptsBySource = new Map(); - indexAttempts.forEach((indexAttempt) => { - const source = getModifiedSource(indexAttempt); - const existingIndexAttempt = latestIndexAttemptsBySource.get(source); - if ( - !existingIndexAttempt || - indexAttempt.time_updated > existingIndexAttempt.time_updated - ) { - latestIndexAttemptsBySource.set(source, indexAttempt); - } - }); - return latestIndexAttemptsBySource; + if (connector.source === "github") { + return ( + sourceMetadata.displayName + + ` [${connector.connector_specific_config?.repo_owner}/${connector.connector_specific_config?.repo_name}]` + ); + } + + if (connector.source === "confluence") { + return ( + sourceMetadata.displayName + + ` [${connector.connector_specific_config?.wiki_page_url}]` + ); + } + + return sourceMetadata.displayName; }; export default function Status() { - const { mutate } = useSWRConfig(); - const { data, isLoading, error } = useSWR( - "/api/admin/connectors/index-attempt", - fetcher + const { + data: indexAttemptData, + isLoading: indexAttemptIsLoading, + error: indexAttemptIsError, + } = useSWR[]>( + "/api/admin/connector/indexing-status", + fetcher, + { refreshInterval: 30000 } // 30 seconds ); const [popup, setPopup] = useState<{ @@ -51,18 +57,8 @@ export default function Status() { type: "success" | "error"; } | null>(null); - // TODO: don't retrieve all index attempts, just the latest ones for each source - const latestIndexAttemptsBySource = getLatestIndexAttemptsBySource( - 
data?.index_attempts || [] - ); - const latestSuccessfulIndexAttemptsBySource = getLatestIndexAttemptsBySource( - data?.index_attempts?.filter( - (indexAttempt) => indexAttempt.status === "success" - ) || [] - ); - return ( -
+
{popup && }
@@ -72,9 +68,9 @@ export default function Status() {

Indexing Status

- {isLoading ? ( + {indexAttemptIsLoading ? ( - ) : error ? ( + ) : indexAttemptIsError || !indexAttemptData ? (
Error loading indexing history
) : ( { - const sourceMetadata = getSourceMetadata(indexAttempt.source); - const successfulIndexAttempt = - latestSuccessfulIndexAttemptsBySource.get( - getModifiedSource(indexAttempt) - ); - - let statusDisplay = ( -
In Progress...
+ data={indexAttemptData.map((connectorIndexingStatus) => { + const sourceMetadata = getSourceMetadata( + connectorIndexingStatus.connector.source + ); + let statusDisplay = ( +
In Progress...
+ ); + if (connectorIndexingStatus.connector.disabled) { + statusDisplay = ( +
+ + Disabled +
+ ); + } else if (connectorIndexingStatus.last_status === "success") { + statusDisplay = ( +
+ + Enabled +
+ ); + } else if (connectorIndexingStatus.last_status === "failed") { + statusDisplay = ( +
+ + Error +
); - if (indexAttempt.status === "success") { - statusDisplay = ( -
- - Success -
- ); - } else if (indexAttempt.status === "failed") { - statusDisplay = ( -
- - Error -
- ); - } - return { - indexed_at: - timeAgo(successfulIndexAttempt?.time_updated) || "-", - docs_indexed: successfulIndexAttempt?.docs_indexed - ? `${successfulIndexAttempt?.docs_indexed} documents` - : "-", - connector: ( - - {sourceMetadata.icon({ size: "20" })} -
- {sourceMetadata.displayName} - {indexAttempt.source === "web" && - indexAttempt.connector_specific_config?.base_url && - ` [${indexAttempt.connector_specific_config?.base_url}]`} -
-
- ), - status: statusDisplay, - reindex: ( - - ), - }; } - )} + return { + indexed_at: timeAgo(connectorIndexingStatus?.last_success) || "-", + docs_indexed: connectorIndexingStatus?.docs_indexed + ? `${connectorIndexingStatus?.docs_indexed} documents` + : "-", + connector: ( + + {sourceMetadata.icon({ size: "20" })} +
+ {getSourceDisplay(connectorIndexingStatus.connector)} +
+
+ ), + status: statusDisplay, + // TODO: add the below back in after this is supported in the backend + // reindex: ( + // + // ), + }; + })} /> )}
diff --git a/web/src/app/admin/keys/openai/page.tsx b/web/src/app/admin/keys/openai/page.tsx index be1ffdd0d..8cafb3817 100644 --- a/web/src/app/admin/keys/openai/page.tsx +++ b/web/src/app/admin/keys/openai/page.tsx @@ -29,7 +29,7 @@ const ExistingKeys = () => {

Existing Key

-

sk- ...{data?.api_key}

+

sk- ****...**{data?.api_key}

+ ); +}; diff --git a/web/src/components/admin/connectors/AttachCredentialPopup.tsx b/web/src/components/admin/connectors/AttachCredentialPopup.tsx new file mode 100644 index 000000000..e8024e02f --- /dev/null +++ b/web/src/components/admin/connectors/AttachCredentialPopup.tsx @@ -0,0 +1,3 @@ +export const AttachCredentialPopup = () => { + return
; +}; diff --git a/web/src/components/admin/connectors/BasicTable.tsx b/web/src/components/admin/connectors/BasicTable.tsx index 7045bef12..8b2927b7c 100644 --- a/web/src/components/admin/connectors/BasicTable.tsx +++ b/web/src/components/admin/connectors/BasicTable.tsx @@ -16,7 +16,7 @@ interface BasicTableProps { export const BasicTable: FC = ({ columns, data }) => { return ( -
+
diff --git a/web/src/components/admin/connectors/ConnectorForm.tsx b/web/src/components/admin/connectors/ConnectorForm.tsx new file mode 100644 index 000000000..afb6d6620 --- /dev/null +++ b/web/src/components/admin/connectors/ConnectorForm.tsx @@ -0,0 +1,114 @@ +import React, { useState } from "react"; +import { Formik, Form } from "formik"; +import * as Yup from "yup"; +import { Popup } from "./Popup"; +import { + Connector, + ConnectorBase, + ValidInputTypes, + ValidSources, +} from "@/lib/types"; + +export async function submitConnector( + connector: ConnectorBase +): Promise<{ message: string; isSuccess: boolean; response?: Connector }> { + let isSuccess = false; + try { + const response = await fetch(`/api/admin/connector`, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(connector), + }); + + if (response.ok) { + isSuccess = true; + const responseJson = await response.json(); + return { message: "Success!", isSuccess: true, response: responseJson }; + } else { + const errorData = await response.json(); + return { message: `Error: ${errorData.detail}`, isSuccess: false }; + } + } catch (error) { + return { message: `Error: ${error}`, isSuccess: false }; + } +} + +interface Props { + nameBuilder: (values: T) => string; + source: ValidSources; + inputType: ValidInputTypes; + credentialId?: number; + formBody: JSX.Element | null; + validationSchema: Yup.ObjectSchema; + initialValues: T; + onSubmit: (isSuccess: boolean, responseJson?: Connector) => void; + refreshFreq?: number; +} + +export function ConnectorForm({ + nameBuilder, + source, + inputType, + formBody, + validationSchema, + initialValues, + refreshFreq, + onSubmit, +}: Props): JSX.Element { + const [popup, setPopup] = useState<{ + message: string; + type: "success" | "error"; + } | null>(null); + + return ( + <> + {popup && } + { + formikHelpers.setSubmitting(true); + submitConnector({ + name: nameBuilder(values), + source, + input_type: inputType, + 
connector_specific_config: values, + refresh_freq: refreshFreq || 0, + disabled: false, + }).then(({ message, isSuccess, response }) => { + setPopup({ message, type: isSuccess ? "success" : "error" }); + formikHelpers.setSubmitting(false); + if (isSuccess) { + formikHelpers.resetForm(); + } + setTimeout(() => { + setPopup(null); + }, 4000); + onSubmit(isSuccess, response); + }); + }} + > + {({ isSubmitting }) => ( +
+ {formBody} +
+ +
+ + )} +
+ + ); +} diff --git a/web/src/components/admin/connectors/ConnectorStatus.tsx b/web/src/components/admin/connectors/ConnectorStatus.tsx deleted file mode 100644 index 52fa51d63..000000000 --- a/web/src/components/admin/connectors/ConnectorStatus.tsx +++ /dev/null @@ -1,65 +0,0 @@ -"use client"; - -import { - IndexAttempt, - ListIndexingResponse, -} from "@/components/admin/connectors/types"; -import { fetcher } from "@/lib/fetcher"; -import { timeAgo } from "@/lib/time"; -import { ValidSources } from "@/lib/types"; -import { CheckCircle, MinusCircle } from "@phosphor-icons/react"; -import useSWR from "swr"; - -export enum ConnectorStatusEnum { - Setup = "Setup", - Running = "Running", - NotSetup = "Not Setup", -} - -const sortIndexAttemptsByTimeUpdated = (a: IndexAttempt, b: IndexAttempt) => { - if (a.time_updated === b.time_updated) { - return 0; - } - return a.time_updated > b.time_updated ? -1 : 1; -}; - -interface ConnectorStatusProps { - status: ConnectorStatusEnum; - source: ValidSources; -} - -export const ConnectorStatus = ({ status, source }: ConnectorStatusProps) => { - const { data } = useSWR( - `/api/admin/connectors/${source}/index-attempt`, - fetcher - ); - - const lastSuccessfulAttempt = data?.index_attempts - .filter((attempt) => attempt.status === "success") - .sort(sortIndexAttemptsByTimeUpdated)[0]; - - if ( - status === ConnectorStatusEnum.Running || - status == ConnectorStatusEnum.Setup - ) { - return ( -
-
- -

{status}

-
- {lastSuccessfulAttempt && ( -

- Last indexed {timeAgo(lastSuccessfulAttempt.time_updated)} -

- )} -
- ); - } - return ( -
- -

{status}

-
- ); -}; diff --git a/web/src/components/admin/connectors/CredentialForm.tsx b/web/src/components/admin/connectors/CredentialForm.tsx new file mode 100644 index 000000000..9a6592c15 --- /dev/null +++ b/web/src/components/admin/connectors/CredentialForm.tsx @@ -0,0 +1,92 @@ +import React, { useState } from "react"; +import { Formik, Form } from "formik"; +import * as Yup from "yup"; +import { Popup } from "./Popup"; +import { CredentialBase } from "@/lib/types"; + +export async function submitCredential( + connector: CredentialBase +): Promise<{ message: string; isSuccess: boolean }> { + let isSuccess = false; + try { + const response = await fetch(`/api/admin/credential`, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(connector), + }); + + if (response.ok) { + isSuccess = true; + return { message: "Success!", isSuccess: true }; + } else { + const errorData = await response.json(); + return { message: `Error: ${errorData.detail}`, isSuccess: false }; + } + } catch (error) { + return { message: `Error: ${error}`, isSuccess: false }; + } +} + +interface Props { + formBody: JSX.Element | null; + validationSchema: Yup.ObjectSchema; + initialValues: YupObjectType; + onSubmit: (isSuccess: boolean) => void; +} + +export function CredentialForm({ + formBody, + validationSchema, + initialValues, + onSubmit, +}: Props): JSX.Element { + const [popup, setPopup] = useState<{ + message: string; + type: "success" | "error"; + } | null>(null); + + return ( + <> + {popup && } + { + formikHelpers.setSubmitting(true); + submitCredential({ + credential_json: values, + public_doc: true, + }).then(({ message, isSuccess }) => { + setPopup({ message, type: isSuccess ? "success" : "error" }); + formikHelpers.setSubmitting(false); + setTimeout(() => { + setPopup(null); + }, 4000); + onSubmit(isSuccess); + }); + }} + > + {({ isSubmitting }) => ( +
+ {formBody} +
+ +
+ + )} +
+ + ); +} diff --git a/web/src/components/admin/connectors/Form.tsx b/web/src/components/admin/connectors/IndexForm.tsx similarity index 97% rename from web/src/components/admin/connectors/Form.tsx rename to web/src/components/admin/connectors/IndexForm.tsx index b018f88b9..44c610fa2 100644 --- a/web/src/components/admin/connectors/Form.tsx +++ b/web/src/components/admin/connectors/IndexForm.tsx @@ -12,7 +12,7 @@ export const submitIndexRequest = async ( let isSuccess = false; try { const response = await fetch( - `/api/admin/connectors/${source}/index-attempt`, + `/api/admin/connector/${source}/index-attempt`, { method: "POST", headers: { @@ -75,7 +75,7 @@ export function IndexForm({ formikHelpers.setSubmitting(false); setTimeout(() => { setPopup(null); - }, 3000); + }, 4000); onSubmit(isSuccess); }); }} diff --git a/web/src/components/admin/connectors/Popup.tsx b/web/src/components/admin/connectors/Popup.tsx index b1832cf59..dff82243c 100644 --- a/web/src/components/admin/connectors/Popup.tsx +++ b/web/src/components/admin/connectors/Popup.tsx @@ -1,9 +1,9 @@ -interface PopupProps { +export interface PopupSpec { message: string; type: "success" | "error"; } -export const Popup: React.FC = ({ message, type }) => ( +export const Popup: React.FC = ({ message, type }) => (
void; +} + +export const AttachCredentialButtonForTable = ({ onClick }: Props) => { + return ( + + ); +}; diff --git a/web/src/components/admin/connectors/buttons/IndexButtonForTable.tsx b/web/src/components/admin/connectors/buttons/IndexButtonForTable.tsx new file mode 100644 index 000000000..6e6dc388b --- /dev/null +++ b/web/src/components/admin/connectors/buttons/IndexButtonForTable.tsx @@ -0,0 +1,20 @@ +interface Props { + onClick: () => void; +} + +export const IndexButtonForTable = ({ onClick }: Props) => { + return ( + + ); +}; diff --git a/web/src/components/admin/connectors/table/ConnectorsTable.tsx b/web/src/components/admin/connectors/table/ConnectorsTable.tsx new file mode 100644 index 000000000..9a6f69869 --- /dev/null +++ b/web/src/components/admin/connectors/table/ConnectorsTable.tsx @@ -0,0 +1,223 @@ +import { Connector, ConnectorIndexingStatus, Credential } from "@/lib/types"; +import { BasicTable } from "@/components/admin/connectors/BasicTable"; +import { Popup, PopupSpec } from "@/components/admin/connectors/Popup"; +import { useState } from "react"; +import { LinkBreakIcon, LinkIcon, TrashIcon } from "@/components/icons/icons"; +import { deleteConnector, updateConnector } from "@/lib/connector"; +import { AttachCredentialButtonForTable } from "@/components/admin/connectors/buttons/AttachCredentialButtonForTable"; + +interface StatusRowProps { + connectorIndexingStatus: ConnectorIndexingStatus; + hasCredentialsIssue: boolean; + setPopup: (popupSpec: PopupSpec | null) => void; + onUpdate: () => void; +} + +export function StatusRow({ + connectorIndexingStatus, + hasCredentialsIssue, + setPopup, + onUpdate, +}: StatusRowProps) { + const [statusHovered, setStatusHovered] = useState(false); + const connector = connectorIndexingStatus.connector; + + let statusDisplay; + switch (connectorIndexingStatus.last_status) { + case "failed": + statusDisplay =
Failed
; + break; + default: + statusDisplay =
Enabled!
; + } + if (connector.disabled) { + statusDisplay =
Disabled
; + } + + return ( +
+ {statusDisplay} + {!hasCredentialsIssue && ( +
setStatusHovered(true)} + onMouseLeave={() => setStatusHovered(false)} + onClick={() => { + updateConnector({ + ...connector, + disabled: !connector.disabled, + }).then(() => { + setPopup({ + message: connector.disabled + ? "Enabled connector!" + : "Disabled connector!", + type: "success", + }); + setTimeout(() => { + setPopup(null); + }, 4000); + onUpdate(); + }); + }} + > + {statusHovered && ( +
+ {connector.disabled ? "Enable!" : "Disable!"} +
+ )} + {connector.disabled ? ( + + ) : ( + + )} +
+ )} +
+ ); +} + +interface ColumnSpecification { + header: string; + key: string; + getValue: (connector: Connector) => JSX.Element | string; +} + +interface ConnectorsTableProps { + connectorIndexingStatuses: ConnectorIndexingStatus[]; + liveCredential?: Credential | null; + getCredential?: ( + credential: Credential + ) => JSX.Element | string; + onUpdate: () => void; + onCredentialLink?: (connectorId: number) => void; + specialColumns?: ColumnSpecification[]; +} + +export function ConnectorsTable({ + connectorIndexingStatuses, + liveCredential, + getCredential, + specialColumns, + onUpdate, + onCredentialLink, +}: ConnectorsTableProps) { + const [popup, setPopup] = useState<{ + message: string; + type: "success" | "error"; + } | null>(null); + + const connectorIncludesCredential = + getCredential !== undefined && onCredentialLink !== undefined; + + const columns = [ + ...(specialColumns ?? []), + { + header: "Status", + key: "status", + }, + ]; + if (connectorIncludesCredential) { + columns.push({ + header: "Credential", + key: "credential", + }); + } + columns.push({ + header: "Remove", + key: "remove", + }); + + return ( + <> + {popup && } + { + const connector = connectorIndexingStatus.connector; + const hasValidCredentials = + liveCredential && + connector.credential_ids.includes(liveCredential.id); + const credential = connectorIncludesCredential + ? { + credential: hasValidCredentials ? ( +

+ {getCredential(liveCredential)} +

+ ) : liveCredential ? ( + onCredentialLink(connector.id)} + /> + ) : ( +

N/A

+ ), + } + : { credential: "" }; + return { + status: ( + + ), + remove: ( +
{ + deleteConnector(connector.id).then(() => { + setPopup({ + message: "Successfully deleted connector", + type: "success", + }); + setTimeout(() => { + setPopup(null); + }, 4000); + onUpdate(); + }); + }} + > + +
+ ), + ...credential, + ...(specialColumns + ? Object.fromEntries( + specialColumns.map(({ key, getValue }, i) => [ + key, + getValue(connector), + ]) + ) + : {}), + }; + // index: ( + // { + // const { message, isSuccess } = await submitIndexRequest( + // connector.source, + // connector.connector_specific_config + // ); + // setPopup({ + // message, + // type: isSuccess ? "success" : "error", + // }); + // setTimeout(() => { + // setPopup(null); + // }, 4000); + // mutate("/api/admin/connector/index-attempt"); + // }} + // /> + // ), + })} + /> + + ); +} diff --git a/web/src/components/admin/connectors/types.ts b/web/src/components/admin/connectors/types.ts deleted file mode 100644 index 29aeab6de..000000000 --- a/web/src/components/admin/connectors/types.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { ValidSources } from "@/lib/types"; - -export interface SlackConfig { - slack_bot_token: string; - workspace_id: string; - pull_frequency: number; -} - -export interface IndexAttempt { - connector_specific_config: { [key: string]: any }; - status: "success" | "failed" | "in_progress" | "not_started"; - source: ValidSources; - time_created: string; - time_updated: string; - docs_indexed: number; -} - -export interface ListIndexingResponse { - index_attempts: IndexAttempt[]; -} diff --git a/web/src/components/icons/icons.tsx b/web/src/components/icons/icons.tsx index f93f6ea85..ee5b92383 100644 --- a/web/src/components/icons/icons.tsx +++ b/web/src/components/icons/icons.tsx @@ -1,6 +1,14 @@ "use client"; -import { Notebook, Key, Trash, Info } from "@phosphor-icons/react"; +import { + Notebook, + Key, + Trash, + Info, + XSquare, + LinkBreak, + Link, +} from "@phosphor-icons/react"; import { SiConfluence, SiGithub, SiGoogledrive, SiSlack } from "react-icons/si"; import { FaGlobe } from "react-icons/fa"; @@ -32,6 +40,27 @@ export const TrashIcon = ({ return ; }; +export const LinkBreakIcon = ({ + size = "16", + className = defaultTailwindCSS, +}: IconProps) => { + return ; 
+}; + +export const LinkIcon = ({ + size = "16", + className = defaultTailwindCSS, +}: IconProps) => { + return ; +}; + +export const XSquareIcon = ({ + size = "16", + className = defaultTailwindCSS, +}: IconProps) => { + return ; +}; + export const GlobeIcon = ({ size = "16", className = defaultTailwindCSS, diff --git a/web/src/components/openai/ApiKeyForm.tsx b/web/src/components/openai/ApiKeyForm.tsx index ef336c1b2..9fa186830 100644 --- a/web/src/components/openai/ApiKeyForm.tsx +++ b/web/src/components/openai/ApiKeyForm.tsx @@ -50,7 +50,7 @@ export const ApiKeyForm = ({ handleResponse }: Props) => { } setTimeout(() => { setPopup(null); - }, 3000); + }, 4000); } }} > diff --git a/web/src/components/search/SearchResultsDisplay.tsx b/web/src/components/search/SearchResultsDisplay.tsx index 874e26bac..52e73817a 100644 --- a/web/src/components/search/SearchResultsDisplay.tsx +++ b/web/src/components/search/SearchResultsDisplay.tsx @@ -46,7 +46,7 @@ export const SearchResultsDisplay: React.FC = ({ if (answer === null && documents === null && quotes === null) { return ( -
+
Something went wrong, please try again.
); @@ -104,9 +104,9 @@ export const SearchResultsDisplay: React.FC = ({
-
+
GPT hurt itself in its confusion :(
diff --git a/web/src/components/search/SearchSection.tsx b/web/src/components/search/SearchSection.tsx index 6b3178635..de625142f 100644 --- a/web/src/components/search/SearchSection.tsx +++ b/web/src/components/search/SearchSection.tsx @@ -117,10 +117,14 @@ const searchRequestStreamed = async ( updateCurrentAnswer(answer); } } else { - const docs = chunk.top_documents as any[]; - if (docs) { - relevantDocuments = docs.map((doc) => JSON.parse(doc) as Document); - updateDocs(relevantDocuments); + if (Object.hasOwn(chunk, "top_documents")) { + const docs = chunk.top_documents as any[] | null; + if (docs) { + relevantDocuments = docs.map( + (doc) => JSON.parse(doc) as Document + ); + updateDocs(relevantDocuments); + } } else { quotes = chunk as Record; updateQuotes(quotes); diff --git a/web/src/lib/connector.ts b/web/src/lib/connector.ts new file mode 100644 index 000000000..df5f97b61 --- /dev/null +++ b/web/src/lib/connector.ts @@ -0,0 +1,39 @@ +import { Connector, ConnectorBase } from "./types"; + +export async function createConnector( + connector: ConnectorBase +): Promise> { + const response = await fetch(`/api/admin/connector`, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(connector), + }); + return response.json(); +} + +export async function updateConnector( + connector: Connector +): Promise> { + const response = await fetch(`/api/admin/connector/${connector.id}`, { + method: "PATCH", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(connector), + }); + return response.json(); +} + +export async function deleteConnector( + connectorId: number +): Promise> { + const response = await fetch(`/api/admin/connector/${connectorId}`, { + method: "DELETE", + headers: { + "Content-Type": "application/json", + }, + }); + return response.json(); +} diff --git a/web/src/lib/credential.ts b/web/src/lib/credential.ts new file mode 100644 index 000000000..3c95b1135 --- /dev/null +++ 
b/web/src/lib/credential.ts @@ -0,0 +1,25 @@ +export async function deleteCredential(credentialId: number) { + const response = await fetch(`/api/admin/credential/${credentialId}`, { + method: "DELETE", + headers: { + "Content-Type": "application/json", + }, + }); + return response.json(); +} + +export async function linkCredential( + connectorId: number, + credentialId: number +) { + const response = await fetch( + `/api/admin/connector/${connectorId}/credential/${credentialId}`, + { + method: "PUT", + headers: { + "Content-Type": "application/json", + }, + } + ); + return response.json(); +} diff --git a/web/src/lib/time.ts b/web/src/lib/time.ts index 7c27e793e..26e5204fa 100644 --- a/web/src/lib/time.ts +++ b/web/src/lib/time.ts @@ -1,4 +1,6 @@ -export const timeAgo = (dateString: string | undefined): string | null => { +export const timeAgo = ( + dateString: string | undefined | null +): string | null => { if (!dateString) { return null; } diff --git a/web/src/lib/types.ts b/web/src/lib/types.ts index d3f83d027..3fc8208f6 100644 --- a/web/src/lib/types.ts +++ b/web/src/lib/types.ts @@ -14,3 +14,74 @@ export type ValidSources = | "google_drive" | "confluence"; export type ValidInputTypes = "load_state" | "poll" | "event"; + +// CONNECTORS +export interface ConnectorBase { + name: string; + input_type: ValidInputTypes; + source: ValidSources; + connector_specific_config: T; + refresh_freq: number; + disabled: boolean; +} + +export interface Connector extends ConnectorBase { + id: number; + credential_ids: number[]; + time_created: string; + time_updated: string; +} + +export interface WebConfig { + base_url: string; +} + +export interface GithubConfig { + repo_owner: string; + repo_name: string; +} + +export interface ConfluenceConfig { + wiki_page_url: string; +} + +export interface SlackConfig { + workspace: string; +} + +export interface ConnectorIndexingStatus { + connector: Connector; + last_status: "success" | "failed" | "in_progress" | "not_started"; + 
last_success: string | null; + docs_indexed: number; +} + +// CREDENTIALS +export interface CredentialBase { + credential_json: T; + public_doc: boolean; +} + +export interface Credential extends CredentialBase { + id: number; + user_id: number | null; + time_created: string; + time_updated: string; +} + +export interface GithubCredentialJson { + github_access_token: string; +} + +export interface ConfluenceCredentialJson { + confluence_username: string; + confluence_access_token: string; +} + +export interface SlackCredentialJson { + slack_bot_token: string; +} + +export interface GoogleDriveCredentialJson { + google_drive_tokens: string; +}