Ruff Styling (#368)
This commit is contained in:
parent 51ec2517cb
commit ac2a4f9051
@@ -1,7 +1,7 @@
 """Create IndexAttempt table
 
 Revision ID: 47433d30de82
-Revises: 
+Revises:
 Create Date: 2023-05-04 00:55:32.971991
 
 """
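The `Revises:` pair above looks identical because the only change is trailing whitespace, which a rendered diff cannot display; Ruff flags this as W291. The docstring hunk below contains three more such pairs. A small self-contained way to spot these violations yourself (the helper name and its argument are mine, not from the commit):

    from pathlib import Path

    def trailing_whitespace_lines(path: str) -> list[int]:
        # Return 1-based numbers of lines that end in spaces or tabs (W291).
        lines = Path(path).read_text().splitlines()
        return [i for i, line in enumerate(lines, start=1) if line != line.rstrip()]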
@@ -1,12 +1,12 @@
 """
 To delete a connector / credential pair:
-(1) find all documents associated with connector / credential pair where there 
+(1) find all documents associated with connector / credential pair where there
 this is the only connector / credential pair that has indexed it
 (2) delete all documents from document stores
 (3) delete all entries from postgres
-(4) find all documents associated with connector / credential pair where there 
+(4) find all documents associated with connector / credential pair where there
 are multiple connector / credential pairs that have indexed it
-(5) update document store entries to remove access associated with the 
+(5) update document store entries to remove access associated with the
 connector / credential pair from the access list
 (6) delete all relevant entries from postgres
 """
@@ -132,7 +132,7 @@ def build_qa_response_blocks(
     quotes_blocks: list[Block] = []
     if not answer:
         answer_block = SectionBlock(
-            text=f"Sorry, I was unable to find an answer, but I did find some potentially relevant docs 🤓"
+            text="Sorry, I was unable to find an answer, but I did find some potentially relevant docs 🤓"
         )
     else:
         answer_block = SectionBlock(text=answer)
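This hunk is Ruff's F541 fix (f-string without any placeholders), which recurs throughout the commit: an `f` prefix on a literal with no `{...}` fields does nothing except invite accidental brace interpretation later. A minimal illustration (variable names are mine):

    greeting = f"Hello, world"   # F541: no placeholders, the prefix is dead weight
    greeting = "Hello, world"    # what the autofix produces

    name = "Ada"
    greeting = f"Hello, {name}"  # a real placeholder, so the prefix stays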
@@ -76,7 +76,7 @@ def _process_slack_event(client: SocketModeClient, req: SocketModeRequest) -> No
     # this should never happen, but we can't continue without a channel since
     # we can't send a response without it
     if not channel:
-        channel_specific_logger.error(f"Found message without channel - skipping")
+        channel_specific_logger.error("Found message without channel - skipping")
         return
 
     message_subtype = event.get("subtype")
@@ -30,7 +30,7 @@ class BookStackApiClient:
 
         try:
             json = response.json()
-        except:
+        except Exception:
            json = {}
 
        if response.status_code >= 300:
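This is the first of several bare `except:` → `except Exception:` fixes (Ruff E722). A bare `except:` catches `BaseException`, including `KeyboardInterrupt` and `SystemExit`, so a Ctrl-C or an intentional shutdown can be silently swallowed. A runnable sketch of the narrower form (sample data is mine):

    import json as json_lib

    raw = "not valid json"
    try:
        json = json_lib.loads(raw)
    except Exception:  # ValueError and friends, but KeyboardInterrupt still propagates
        json = {}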
@@ -110,9 +110,10 @@ class ConfluenceConnector(LoadConnector, PollConnector):
                 limit=batch_size,
                 expand="body.storage.value,version",
             )
-        except:
+        except Exception:
             logger.warning(
-                f"Batch failed with space {self.space} at offset {start_ind} with size {batch_size}, processing pages individually..."
+                f"Batch failed with space {self.space} at offset {start_ind} "
+                f"with size {batch_size}, processing pages individually..."
             )
 
         view_pages: list[dict[str, Any]] = []
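Besides the E722 fix, this hunk splits an over-long log message into adjacent f-string literals, which Python concatenates at compile time; each fragment keeps its own `f` prefix so placeholders still interpolate. A small demonstration (values are made up):

    space, start_ind, batch_size = "ENG", 0, 50
    message = (
        f"Batch failed with space {space} at offset {start_ind} "
        f"with size {batch_size}, processing pages individually..."
    )
    assert "offset 0 with size 50" in message  # joined into a single string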
@@ -130,7 +131,8 @@ class ConfluenceConnector(LoadConnector, PollConnector):
             )
         except HTTPError as e:
             logger.warning(
-                f"Page failed with space {self.space} at offset {start_ind + i}, trying alternative expand option: {e}"
+                f"Page failed with space {self.space} at offset {start_ind + i}, "
+                f"trying alternative expand option: {e}"
             )
             # Use view instead, which captures most info but is less complete
             view_pages.extend(
@@ -155,7 +157,7 @@ class ConfluenceConnector(LoadConnector, PollConnector):
         for i in range(self.batch_size):
             try:
                 pages.extend(_fetch(start_ind + i, 1))
-            except:
+            except Exception:
                 logger.exception(
                     "Ran into exception when fetching pages from Confluence"
                 )
@@ -85,8 +85,8 @@ class LinearConnector(LoadConnector, PollConnector):
         """
             query IterateIssueBatches($first: Int, $after: String) {
                 issues(
-                    orderBy: updatedAt, 
-                    first: $first, 
+                    orderBy: updatedAt,
+                    first: $first,
                     after: $after,
                     filter: {
                         updatedAt: {
@@ -163,7 +163,7 @@ class UserIdReplacer:
         """Simply replaces all channel mentions with `#<CHANNEL_ID>` in order
         to make a message work as part of a link"""
         # Find user IDs in the message
-        channel_matches = re.findall("<#(.*?)\|(.*?)>", message)
+        channel_matches = re.findall(r"<#(.*?)\|(.*?)>", message)
         for channel_id, channel_name in channel_matches:
             message = message.replace(
                 f"<#{channel_id}|{channel_name}>", f"#{channel_name}"
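The regex change is Ruff's W605 (invalid escape sequence): `\|` is not a recognized escape in an ordinary Python string, so it only works by accident, emits a warning on recent Pythons, and is slated to become a syntax error. A raw string hands the backslash to the regex engine verbatim. A quick check (the sample message is mine):

    import re

    message = "see <#C12345|general> for details"
    channel_matches = re.findall(r"<#(.*?)\|(.*?)>", message)
    assert channel_matches == [("C12345", "general")]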
@@ -152,7 +152,7 @@ class WebConnector(LoadConnector):
                 logger.info(f"Redirected to {final_page}")
                 current_url = final_page
                 if current_url in visited_links:
-                    logger.info(f"Redirected page already indexed")
+                    logger.info("Redirected page already indexed")
                     continue
                 visited_links.add(current_url)
 
@@ -58,10 +58,10 @@ CrossConnectorDocumentMetadataFetchCallable = Callable[
 T = TypeVar("T")
 
 
-def _add_if_not_exists(l: list[T], item: T) -> list[T]:
-    if item in l:
-        return l
-    return l + [item]
+def _add_if_not_exists(obj_list: list[T], item: T) -> list[T]:
+    if item in obj_list:
+        return obj_list
+    return obj_list + [item]
 
 
 def update_cross_connector_document_metadata_map(
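Renaming `l` to `obj_list` is Ruff's E741 (ambiguous variable name): `l` is easily misread as `1` or `I` in many fonts. Behavior is unchanged; the helper still returns a new list rather than mutating its input:

    assert _add_if_not_exists([1, 2], 2) == [1, 2]
    assert _add_if_not_exists([1, 2], 3) == [1, 2, 3]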
@@ -437,7 +437,7 @@ class VespaIndex(DocumentIndex):
         VespaIndex.yql_base
         + vespa_where_clauses
         + f"{{targetHits: {10 * num_to_retrieve}}}nearestNeighbor(embeddings, query_embedding) or "
-        + f'{{grammar: "weakAnd"}}userInput(@query)'
+        + '{grammar: "weakAnd"}userInput(@query)'
     )
 
     query_embedding = embed_query(query)
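Note the subtlety in this F541 fix: inside an f-string a literal brace must be doubled (`{{`), so dropping the prefix also requires un-doubling the braces, exactly as the `+` line does. The neighboring literal keeps its `f` prefix because it interpolates `num_to_retrieve`. Illustration:

    yql_f = f'{{grammar: "weakAnd"}}userInput(@query)'
    yql_plain = '{grammar: "weakAnd"}userInput(@query)'
    assert yql_f == yql_plain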
@@ -21,7 +21,7 @@ def get_connector_credential_pairs(
 ) -> list[ConnectorCredentialPair]:
     stmt = select(ConnectorCredentialPair)
     if not include_disabled:
-        stmt = stmt.where(ConnectorCredentialPair.connector.disabled == False)
+        stmt = stmt.where(ConnectorCredentialPair.connector.disabled is False)
     results = db_session.scalars(stmt)
     return list(results.all())
 
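A caution on this hunk: E712 discourages `== False`, but on a SQLAlchemy column `==` is overloaded to build SQL, while `is` cannot be overloaded, so `... is False` is evaluated by Python at statement-build time (an identity test against a column expression, which is never `False`) and the filter silently becomes a constant. The lint-clean SQL-side spelling is `.is_(False)`, or keep `== False` with `# noqa: E712`. A 2.0-style sketch using a hypothetical mapped class (the original expression also reaches through a `connector` relationship, which in real code needs a join or `.has()`):

    from sqlalchemy import Boolean, Integer, select
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column

    class Base(DeclarativeBase):
        pass

    class Connector(Base):
        __tablename__ = "connector"
        id: Mapped[int] = mapped_column(Integer, primary_key=True)
        disabled: Mapped[bool] = mapped_column(Boolean, default=False)

    # SQL-side comparison: renders as "connector.disabled IS false"
    stmt = select(Connector).where(Connector.disabled.is_(False))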
@@ -82,7 +82,7 @@ def get_default_qa_model(
             llm=llm,
             qa_handler=qa_handler,
         )
-    except:
+    except Exception:
         logger.exception(
             "Unable to build a QABlock with the new approach, going back to the "
             "legacy approach"
@@ -15,10 +15,10 @@ UNCERTAINTY_PAT = "?"
 QUOTE_PAT = "Quote:"
 
 BASE_PROMPT = (
-    f"Answer the query based on provided documents and quote relevant sections. "
-    f"Respond with a json containing a concise answer and up to three most relevant quotes from the documents. "
-    f'Respond with "?" for the answer if the query cannot be answered based on the documents. '
-    f"The quotes must be EXACT substrings from the documents."
+    "Answer the query based on provided documents and quote relevant sections. "
+    "Respond with a json containing a concise answer and up to three most relevant quotes from the documents. "
+    'Respond with "?" for the answer if the query cannot be answered based on the documents. '
+    "The quotes must be EXACT substrings from the documents."
 )
 
 SAMPLE_QUESTION = "Where is the Eiffel Tower?"
@@ -81,7 +81,7 @@ def _add_metadata_section(
 
     prompt_current += _prepend(f"DOCUMENT SOURCE: {chunk.source_type}\n", prepend_tab)
     if chunk.metadata:
-        prompt_current += _prepend(f"METADATA:\n", prepend_tab)
+        prompt_current += _prepend("METADATA:\n", prepend_tab)
         connector_class = identify_connector_class(DocumentSource(chunk.source_type))
         for metadata_line in connector_class.parse_metadata(chunk.metadata):
             prompt_current += _prepend(f"\t{metadata_line}\n", prepend_tab)
@@ -54,7 +54,7 @@ def validation_exception_handler(
 def value_error_handler(_: Request, exc: ValueError) -> JSONResponse:
     try:
         raise (exc)
-    except:
+    except Exception:
         # log stacktrace
         logger.exception("ValueError")
         return JSONResponse(
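The surrounding pattern here is worth a note: the handler re-raises the already-caught `ValueError` inside its own `try` purely so that `logger.exception` has an active exception to extract a stacktrace from, since `logger.exception` only attaches a traceback when called while an exception is being handled. A stripped-down version (logger setup is mine):

    import logging

    logger = logging.getLogger(__name__)

    def log_with_stacktrace(exc: ValueError) -> None:
        try:
            raise exc
        except Exception:
            logger.exception("ValueError")  # includes the traceback of exc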
@@ -169,7 +169,7 @@ def stream_direct_qa(
     logger.debug(f"Received QA query: {question.query}")
     logger.debug(f"Query filters: {question.filters}")
     if question.use_keyword:
-        logger.debug(f"User selected Keyword Search")
+        logger.debug("User selected Keyword Search")
 
     @log_generator_function_time()
     def stream_qa_portions(
@@ -29,12 +29,12 @@ def wipe_all_rows(database: str) -> None:
     table_names = cur.fetchall()
 
     # have to delete from these first to not run into psycopg2.errors.ForeignKeyViolation
-    cur.execute(f"DELETE FROM chunk")
-    cur.execute(f"DELETE FROM document_by_connector_credential_pair")
-    cur.execute(f"DELETE FROM document")
-    cur.execute(f"DELETE FROM connector_credential_pair")
-    cur.execute(f"DELETE FROM index_attempt")
-    cur.execute(f"DELETE FROM credential")
+    cur.execute("DELETE FROM chunk")
+    cur.execute("DELETE FROM document_by_connector_credential_pair")
+    cur.execute("DELETE FROM document")
+    cur.execute("DELETE FROM connector_credential_pair")
+    cur.execute("DELETE FROM index_attempt")
+    cur.execute("DELETE FROM credential")
     conn.commit()
 
     for table_name in table_names:
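The deletion order in this helper matters: tables holding foreign keys must be emptied before the tables they reference, or Postgres raises `psycopg2.errors.ForeignKeyViolation`. The six statements could equally be a loop over trusted constant names. A hedged sketch (connection parameters are placeholders):

    import psycopg2

    conn = psycopg2.connect(dbname="postgres", user="postgres", password="...")  # placeholders
    cur = conn.cursor()
    # Children first, parents last, mirroring the script above:
    for table in (
        "chunk",
        "document_by_connector_credential_pair",
        "document",
        "connector_credential_pair",
        "index_attempt",
        "credential",
    ):
        cur.execute(f"DELETE FROM {table}")  # table names are trusted constants, not user input
    conn.commit()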