mirror of
https://github.com/danswer-ai/danswer.git
synced 2025-10-10 21:26:01 +02:00
Implement freshdesk frontend
This commit is contained in:
@@ -128,6 +128,7 @@ class DocumentSource(str, Enum):
|
|||||||
OCI_STORAGE = "oci_storage"
|
OCI_STORAGE = "oci_storage"
|
||||||
XENFORO = "xenforo"
|
XENFORO = "xenforo"
|
||||||
NOT_APPLICABLE = "not_applicable"
|
NOT_APPLICABLE = "not_applicable"
|
||||||
|
FRESHDESK = "freshdesk"
|
||||||
|
|
||||||
|
|
||||||
DocumentSourceRequiringTenantContext: list[DocumentSource] = [DocumentSource.FILE]
|
DocumentSourceRequiringTenantContext: list[DocumentSource] = [DocumentSource.FILE]
|
||||||
|
@@ -46,6 +46,7 @@ from danswer.connectors.wikipedia.connector import WikipediaConnector
|
|||||||
from danswer.connectors.xenforo.connector import XenforoConnector
|
from danswer.connectors.xenforo.connector import XenforoConnector
|
||||||
from danswer.connectors.zendesk.connector import ZendeskConnector
|
from danswer.connectors.zendesk.connector import ZendeskConnector
|
||||||
from danswer.connectors.zulip.connector import ZulipConnector
|
from danswer.connectors.zulip.connector import ZulipConnector
|
||||||
|
from danswer.connectors.freshdesk.connector import FreshdeskConnector
|
||||||
from danswer.db.credentials import backend_update_credential_json
|
from danswer.db.credentials import backend_update_credential_json
|
||||||
from danswer.db.models import Credential
|
from danswer.db.models import Credential
|
||||||
|
|
||||||
@@ -101,6 +102,7 @@ def identify_connector_class(
|
|||||||
DocumentSource.GOOGLE_CLOUD_STORAGE: BlobStorageConnector,
|
DocumentSource.GOOGLE_CLOUD_STORAGE: BlobStorageConnector,
|
||||||
DocumentSource.OCI_STORAGE: BlobStorageConnector,
|
DocumentSource.OCI_STORAGE: BlobStorageConnector,
|
||||||
DocumentSource.XENFORO: XenforoConnector,
|
DocumentSource.XENFORO: XenforoConnector,
|
||||||
|
DocumentSource.FRESHDESK: FreshdeskConnector,
|
||||||
}
|
}
|
||||||
connector_by_source = connector_map.get(source, {})
|
connector_by_source = connector_map.get(source, {})
|
||||||
|
|
||||||
|
0
backend/danswer/connectors/freshdesk/__init__.py
Normal file
0
backend/danswer/connectors/freshdesk/__init__.py
Normal file
116
backend/danswer/connectors/freshdesk/connector.py
Normal file
116
backend/danswer/connectors/freshdesk/connector.py
Normal file
@@ -0,0 +1,116 @@
|
|||||||
|
import requests
|
||||||
|
import json
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from typing import Any, List, Optional
|
||||||
|
from bs4 import BeautifulSoup # Add this import for HTML parsing
|
||||||
|
from danswer.configs.app_configs import INDEX_BATCH_SIZE
|
||||||
|
from danswer.configs.constants import DocumentSource
|
||||||
|
from danswer.connectors.interfaces import GenerateDocumentsOutput, PollConnector
|
||||||
|
from danswer.connectors.models import ConnectorMissingCredentialError, Document, Section
|
||||||
|
from danswer.utils.logger import setup_logger
|
||||||
|
|
||||||
|
logger = setup_logger()
|
||||||
|
|
||||||
|
|
||||||
|
class FreshdeskConnector(PollConnector):
    """Connector that indexes helpdesk tickets from the Freshdesk REST API (v2).

    Authenticates with HTTP basic auth (API key as username, account password
    as password) against ``https://<domain>.freshdesk.com``.
    """

    def __init__(self, api_key: str, domain: str, password: str, batch_size: int = INDEX_BATCH_SIZE) -> None:
        """Store connection settings.

        Args:
            api_key: Freshdesk API key (used as the basic-auth username).
            domain: Freshdesk subdomain, i.e. the ``<domain>`` part of
                ``<domain>.freshdesk.com``.
            password: Account password (basic-auth password).
            batch_size: Number of Documents to accumulate before yielding a batch.
        """
        self.api_key = api_key
        self.domain = domain
        self.password = password
        self.batch_size = batch_size

    def ticket_link(self, tid: int) -> str:
        """Return the public helpdesk URL for ticket id *tid*."""
        return f"https://{self.domain}.freshdesk.com/helpdesk/tickets/{tid}"

    def build_doc_sections_from_ticket(self, ticket: dict) -> List[Section]:
        """Build a single Section whose text is a JSON dump of the ticket's
        string-valued fields, linked to the ticket's helpdesk page."""
        string_fields = {
            key: value for key, value in ticket.items() if isinstance(value, str)
        }
        return [
            Section(
                link=self.ticket_link(int(ticket["id"])),
                text=json.dumps(string_fields, default=str),
            )
        ]

    def strip_html_tags(self, html: str) -> str:
        """Return the plain-text content of an HTML fragment."""
        soup = BeautifulSoup(html, "html.parser")
        return soup.get_text()

    def load_credentials(self, credentials: dict[str, Any]) -> Optional[dict[str, Any]]:
        """Populate api_key / domain / password from a credential dict.

        Returns None (no transformed credentials are produced). Missing keys
        leave the corresponding attribute as None; `_process_tickets` raises
        ConnectorMissingCredentialError in that case.
        """
        logger.info("Loading credentials")
        self.api_key = credentials.get("freshdesk_api_key")
        self.domain = credentials.get("freshdesk_domain")
        self.password = credentials.get("freshdesk_password")
        return None

    def _process_tickets(self, start: datetime, end: datetime) -> GenerateDocumentsOutput:
        """Fetch tickets from Freshdesk and yield them as Document batches.

        NOTE(review): `start`/`end` are currently unused — every poll refetches
        the first page of tickets. TODO: pass `updated_since` and follow the
        `Link` header for pagination (Freshdesk returns at most 30 tickets per
        page by default).

        Raises:
            ConnectorMissingCredentialError: if any credential is unset.
            requests.HTTPError: on a non-2xx API response.
        """
        logger.info("Processing tickets")
        # BUG FIX: the original `any([...]) is None` always evaluated False
        # (`any` returns a bool), so missing credentials were never detected.
        if not all([self.api_key, self.domain, self.password]):
            raise ConnectorMissingCredentialError("freshdesk")

        freshdesk_url = f"https://{self.domain}.freshdesk.com/api/v2/tickets?include=description"
        response = requests.get(freshdesk_url, auth=(self.api_key, self.password))
        response.raise_for_status()  # raises HTTPError when not a 2xx response

        # 204 No Content carries no body to parse.
        if response.status_code != 204:
            tickets = json.loads(response.content)
            logger.info(f"Fetched {len(tickets)} tickets from Freshdesk API")
            doc_batch: List[Document] = []

            for ticket in tickets:
                # Normalize the timestamp fields to "YYYY-MM-DD HH:MM:SS".
                # Freshdesk returns ISO 8601 with a trailing "Z"; fromisoformat
                # only accepts that suffix on Python >= 3.11, so map it to an
                # explicit UTC offset first.
                for date_field in ["created_at", "updated_at", "due_by"]:
                    raw = ticket[date_field]
                    if isinstance(raw, str):
                        raw = raw.replace("Z", "+00:00")
                    ticket[date_field] = datetime.fromisoformat(raw).strftime("%Y-%m-%d %H:%M:%S")

                # Stringify every remaining non-string value so the ticket can
                # be JSON-dumped and used as Document metadata.
                ticket = {
                    key: str(value) if not isinstance(value, str) else value
                    for key, value in ticket.items()
                }

                # Overdue check. String comparison is chronologically correct
                # here because "%Y-%m-%d %H:%M:%S" sorts lexicographically.
                # NOTE(review): `now()` is local time while ticket timestamps
                # appear to be UTC — confirm and switch to
                # `datetime.now(timezone.utc)` if so.
                today = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
                ticket["overdue"] = "true" if today > ticket["due_by"] else "false"

                # Map Freshdesk's numeric status codes to readable names;
                # unknown codes fall back to their stringified value.
                status_mapping = {2: "open", 3: "pending", 4: "resolved", 5: "closed"}
                ticket["status"] = status_mapping.get(ticket["status"], str(ticket["status"]))

                # Description arrives as HTML; reduce it to normalized plain text.
                ticket["description"] = self.strip_html_tags(ticket["description"])
                ticket["description"] = " ".join(ticket["description"].split())

                sections = self.build_doc_sections_from_ticket(ticket)

                # Only index tickets created within roughly the last two months
                # (30.4375 = average days per month).
                created_at = datetime.fromisoformat(ticket["created_at"])
                today = datetime.now()
                if (today - created_at).days / 30.4375 <= 2:
                    doc = Document(
                        id=ticket["id"],
                        sections=sections,
                        source=DocumentSource.FRESHDESK,
                        semantic_identifier=ticket["subject"],
                        # Exclude the (long) description fields from metadata;
                        # they are already carried in the Section text.
                        metadata={
                            key: value
                            for key, value in ticket.items()
                            if isinstance(value, str) and key not in ["description", "description_text"]
                        },
                    )
                    doc_batch.append(doc)

                    if len(doc_batch) >= self.batch_size:
                        yield doc_batch
                        doc_batch = []

            if doc_batch:
                yield doc_batch

    def poll_source(self, start: datetime, end: datetime) -> GenerateDocumentsOutput:
        """PollConnector entry point: delegate to `_process_tickets`."""
        yield from self._process_tickets(start, end)
|
@@ -3,7 +3,7 @@ import json
|
|||||||
from enum import Enum as PyEnum
|
from enum import Enum as PyEnum
|
||||||
from typing import Any
|
from typing import Any
|
||||||
from typing import Literal
|
from typing import Literal
|
||||||
from typing import NotRequired
|
from typing_extensions import NotRequired
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
from uuid import uuid4
|
from uuid import uuid4
|
||||||
from typing_extensions import TypedDict # noreorder
|
from typing_extensions import TypedDict # noreorder
|
||||||
|
@@ -1,6 +1,6 @@
|
|||||||
import base64
|
import base64
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
from typing import NotRequired
|
from typing_extensions import NotRequired
|
||||||
from typing_extensions import TypedDict # noreorder
|
from typing_extensions import TypedDict # noreorder
|
||||||
|
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
|
@@ -28,7 +28,7 @@ os.environ["TOKENIZERS_PARALLELISM"] = "false"
|
|||||||
os.environ["HF_HUB_DISABLE_TELEMETRY"] = "1"
|
os.environ["HF_HUB_DISABLE_TELEMETRY"] = "1"
|
||||||
|
|
||||||
HF_CACHE_PATH = Path("/root/.cache/huggingface/")
|
HF_CACHE_PATH = Path("/root/.cache/huggingface/")
|
||||||
TEMP_HF_CACHE_PATH = Path("/root/.cache/temp_huggingface/")
|
TEMP_HF_CACHE_PATH = Path.home() / ".cache" / "temp_huggingface"
|
||||||
|
|
||||||
transformer_logging.set_verbosity_error()
|
transformer_logging.set_verbosity_error()
|
||||||
|
|
||||||
|
BIN
web/public/Freshdesk.png
Normal file
BIN
web/public/Freshdesk.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 18 KiB |
@@ -75,9 +75,13 @@ async function handleRequest(request: NextRequest, path: string[]) {
|
|||||||
backendUrl.searchParams.append(key, value);
|
backendUrl.searchParams.append(key, value);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Create a new headers object, omitting the 'connection' header
|
||||||
|
const headers = new Headers(request.headers);
|
||||||
|
headers.delete('connection');
|
||||||
|
|
||||||
const response = await fetch(backendUrl, {
|
const response = await fetch(backendUrl, {
|
||||||
method: request.method,
|
method: request.method,
|
||||||
headers: request.headers,
|
headers: headers,
|
||||||
body: request.body,
|
body: request.body,
|
||||||
signal: request.signal,
|
signal: request.signal,
|
||||||
// @ts-ignore
|
// @ts-ignore
|
||||||
|
@@ -75,6 +75,7 @@ import slackIcon from "../../../public/Slack.png";
|
|||||||
import s3Icon from "../../../public/S3.png";
|
import s3Icon from "../../../public/S3.png";
|
||||||
import r2Icon from "../../../public/r2.png";
|
import r2Icon from "../../../public/r2.png";
|
||||||
import salesforceIcon from "../../../public/Salesforce.png";
|
import salesforceIcon from "../../../public/Salesforce.png";
|
||||||
|
import freshdeskIcon from "../../../public/Freshdesk.png";
|
||||||
|
|
||||||
import sharepointIcon from "../../../public/Sharepoint.png";
|
import sharepointIcon from "../../../public/Sharepoint.png";
|
||||||
import teamsIcon from "../../../public/Teams.png";
|
import teamsIcon from "../../../public/Teams.png";
|
||||||
@@ -1301,6 +1302,13 @@ export const AsanaIcon = ({
|
|||||||
className = defaultTailwindCSS,
|
className = defaultTailwindCSS,
|
||||||
}: IconProps) => <LogoIcon size={size} className={className} src={asanaIcon} />;
|
}: IconProps) => <LogoIcon size={size} className={className} src={asanaIcon} />;
|
||||||
|
|
||||||
|
export const FreshdeskIcon = ({
|
||||||
|
size = 16,
|
||||||
|
className = defaultTailwindCSS,
|
||||||
|
}: IconProps) => (
|
||||||
|
<LogoIcon size={size} className={className} src={freshdeskIcon} />
|
||||||
|
);
|
||||||
|
|
||||||
/*
|
/*
|
||||||
EE Icons
|
EE Icons
|
||||||
*/
|
*/
|
||||||
|
@@ -922,6 +922,12 @@ For example, specifying .*-support.* as a "channel" will cause the connector to
|
|||||||
],
|
],
|
||||||
advanced_values: [],
|
advanced_values: [],
|
||||||
},
|
},
|
||||||
|
freshdesk: {
|
||||||
|
description: "Configure Freshdesk connector",
|
||||||
|
values: [],
|
||||||
|
advanced_values: [],
|
||||||
|
},
|
||||||
|
|
||||||
};
|
};
|
||||||
export function createConnectorInitialValues(
|
export function createConnectorInitialValues(
|
||||||
connector: ConfigurableSources
|
connector: ConfigurableSources
|
||||||
@@ -1180,6 +1186,11 @@ export interface AsanaConfig {
|
|||||||
asana_team_id?: string;
|
asana_team_id?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface FreshdeskConfig {
|
||||||
|
requested_objects?: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
export interface MediaWikiConfig extends MediaWikiBaseConfig {
|
export interface MediaWikiConfig extends MediaWikiBaseConfig {
|
||||||
hostname: string;
|
hostname: string;
|
||||||
}
|
}
|
||||||
|
@@ -186,6 +186,12 @@ export interface AxeroCredentialJson {
|
|||||||
axero_api_token: string;
|
axero_api_token: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface FreshdeskCredentialJson {
|
||||||
|
freshdesk_domain: string;
|
||||||
|
freshdesk_password: string;
|
||||||
|
freshdesk_api_key: string;
|
||||||
|
}
|
||||||
|
|
||||||
export interface MediaWikiCredentialJson {}
|
export interface MediaWikiCredentialJson {}
|
||||||
export interface WikipediaCredentialJson extends MediaWikiCredentialJson {}
|
export interface WikipediaCredentialJson extends MediaWikiCredentialJson {}
|
||||||
|
|
||||||
@@ -289,6 +295,11 @@ export const credentialTemplates: Record<ValidSources, any> = {
|
|||||||
access_key_id: "",
|
access_key_id: "",
|
||||||
secret_access_key: "",
|
secret_access_key: "",
|
||||||
} as OCICredentialJson,
|
} as OCICredentialJson,
|
||||||
|
freshdesk: {
|
||||||
|
freshdesk_domain: "",
|
||||||
|
freshdesk_password: "",
|
||||||
|
freshdesk_api_key: "",
|
||||||
|
} as FreshdeskCredentialJson,
|
||||||
xenforo: null,
|
xenforo: null,
|
||||||
google_sites: null,
|
google_sites: null,
|
||||||
file: null,
|
file: null,
|
||||||
@@ -435,6 +446,11 @@ export const credentialDisplayNames: Record<string, string> = {
|
|||||||
// Axero
|
// Axero
|
||||||
base_url: "Axero Base URL",
|
base_url: "Axero Base URL",
|
||||||
axero_api_token: "Axero API Token",
|
axero_api_token: "Axero API Token",
|
||||||
|
|
||||||
|
// Freshdesk
|
||||||
|
freshdesk_domain: "Freshdesk Domain",
|
||||||
|
freshdesk_password: "Freshdesk Password",
|
||||||
|
freshdesk_api_key: "Freshdesk API Key",
|
||||||
};
|
};
|
||||||
export function getDisplayNameForCredentialKey(key: string): string {
|
export function getDisplayNameForCredentialKey(key: string): string {
|
||||||
return credentialDisplayNames[key] || key;
|
return credentialDisplayNames[key] || key;
|
||||||
|
@@ -37,6 +37,7 @@ import {
|
|||||||
GoogleStorageIcon,
|
GoogleStorageIcon,
|
||||||
ColorSlackIcon,
|
ColorSlackIcon,
|
||||||
XenforoIcon,
|
XenforoIcon,
|
||||||
|
FreshdeskIcon,
|
||||||
} from "@/components/icons/icons";
|
} from "@/components/icons/icons";
|
||||||
import { ValidSources } from "./types";
|
import { ValidSources } from "./types";
|
||||||
import {
|
import {
|
||||||
@@ -289,6 +290,12 @@ const SOURCE_METADATA_MAP: SourceMap = {
|
|||||||
displayName: "Ingestion",
|
displayName: "Ingestion",
|
||||||
category: SourceCategory.Other,
|
category: SourceCategory.Other,
|
||||||
},
|
},
|
||||||
|
freshdesk: {
|
||||||
|
icon: FreshdeskIcon,
|
||||||
|
displayName: "Freshdesk",
|
||||||
|
category: SourceCategory.CustomerSupport,
|
||||||
|
docs: "https://docs.danswer.dev/connectors/freshdesk",
|
||||||
|
},
|
||||||
// currently used for the Internet Search tool docs, which is why
|
// currently used for the Internet Search tool docs, which is why
|
||||||
// a globe is used
|
// a globe is used
|
||||||
not_applicable: {
|
not_applicable: {
|
||||||
|
@@ -263,6 +263,7 @@ const validSources = [
|
|||||||
"oci_storage",
|
"oci_storage",
|
||||||
"not_applicable",
|
"not_applicable",
|
||||||
"ingestion_api",
|
"ingestion_api",
|
||||||
|
"freshdesk",
|
||||||
] as const;
|
] as const;
|
||||||
|
|
||||||
export type ValidSources = (typeof validSources)[number];
|
export type ValidSources = (typeof validSources)[number];
|
||||||
|
Reference in New Issue
Block a user