fixed rebase issue and some cleanup

Evan Lohn 2025-02-03 20:49:45 -08:00
parent b500c914b0
commit b928201397
9 changed files with 8 additions and 27 deletions

View File

@@ -1,7 +1,7 @@
 """agent_tracking
 Revision ID: 98a5008d8711
-Revises: 33ea50e88f24
+Revises: 2f80c6a2550f
 Create Date: 2025-01-29 17:00:00.000001
 """
@@ -12,7 +12,7 @@ from sqlalchemy.dialects.postgresql import UUID
 # revision identifiers, used by Alembic.
 revision = "98a5008d8711"
-down_revision = "33ea50e88f24"
+down_revision = "2f80c6a2550f"
 branch_labels = None
 depends_on = None

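The two-line change above is the actual rebase fix: Alembic orders migrations by these module-level identifiers, not by file name, and each revision's down_revision must name the migration it applies on top of. After a rebase that picks up new migrations from the target branch, that pointer has to move to the new head (2f80c6a2550f), otherwise Alembic sees multiple heads. A minimal sketch of the shape of such a migration file, assuming standard Alembic usage; only the identifier block mirrors the diff, and the table and column in upgrade()/downgrade() are invented for illustration:

    """agent_tracking

    Revision ID: 98a5008d8711
    Revises: 2f80c6a2550f
    Create Date: 2025-01-29 17:00:00.000001
    """
    import sqlalchemy as sa
    from alembic import op

    # revision identifiers, used by Alembic.
    revision = "98a5008d8711"
    # Must point at the migration that is the head of the chain after the rebase;
    # keeping the stale value (33ea50e88f24) would leave two revisions claiming
    # the same parent and "alembic upgrade head" would report multiple heads.
    down_revision = "2f80c6a2550f"
    branch_labels = None
    depends_on = None


    def upgrade() -> None:
        # Illustrative only: the real operations are not part of this diff.
        op.add_column("example_table", sa.Column("example_col", sa.String(), nullable=True))


    def downgrade() -> None:
        op.drop_column("example_table", "example_col")
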
View File

@@ -133,7 +133,5 @@ if __name__ == "__main__":
     for thing in compiled_graph.stream(
         input=inputs,
         config={"configurable": {"config": graph_config}},
-        # debug=True,
-        # subgraphs=True,
     ):
         logger.debug(thing)

View File

@@ -204,12 +204,12 @@ def main_graph_builder(test_mode: bool = False) -> StateGraph:
     )
     graph.add_conditional_edges(
-        source="create_refined_sub_questions",  # DONE
+        source="create_refined_sub_questions",
         path=parallelize_refined_sub_question_answering,
         path_map=["answer_refined_question_subgraphs"],
     )
     graph.add_edge(
-        start_key="answer_refined_question_subgraphs",  # HERE
+        start_key="answer_refined_question_subgraphs",
        end_key="ingest_refined_sub_answers",
     )
@@ -259,8 +259,7 @@ if __name__ == "__main__":
     for thing in compiled_graph.stream(
         input=inputs,
         config={"configurable": {"config": graph_config}},
-        # stream_mode="debug",
-        # debug=True,
+        stream_mode="custom",
         subgraphs=True,
     ):
         logger.debug(thing)

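For readers unfamiliar with the two LangGraph calls touched above: add_conditional_edges routes from a source node through a path function (here parallelize_refined_sub_question_answering), which typically fans work out in parallel by returning Send packets, and the compiled graph's stream() with stream_mode="custom" yields only values that nodes emit through LangGraph's custom stream writer, while subgraphs=True also surfaces events from nested graphs. A self-contained sketch of that fan-out pattern, assuming a recent langgraph release for the import paths; the node names and state fields are invented, and the demo streams in "updates" mode so it prints something without a custom writer:

    import operator
    from typing import Annotated, TypedDict

    from langgraph.graph import END, START, StateGraph
    from langgraph.types import Send


    class State(TypedDict):
        sub_questions: list[str]
        # Reducer lets parallel branches append answers concurrently.
        sub_answers: Annotated[list[str], operator.add]


    def create_sub_questions(state: State) -> dict:
        return {"sub_questions": ["q1", "q2", "q3"]}


    def fan_out(state: State) -> list[Send]:
        # One Send per sub-question; each runs "answer_sub_question" in parallel.
        return [
            Send("answer_sub_question", {"sub_questions": [q], "sub_answers": []})
            for q in state["sub_questions"]
        ]


    def answer_sub_question(state: State) -> dict:
        return {"sub_answers": [f"answer to {state['sub_questions'][0]}"]}


    graph = StateGraph(State)
    graph.add_node("create_sub_questions", create_sub_questions)
    graph.add_node("answer_sub_question", answer_sub_question)
    graph.add_edge(START, "create_sub_questions")
    graph.add_conditional_edges(
        source="create_sub_questions",
        path=fan_out,
        path_map=["answer_sub_question"],
    )
    graph.add_edge("answer_sub_question", END)

    compiled = graph.compile()
    for chunk in compiled.stream(
        input={"sub_questions": [], "sub_answers": []},
        # The commit switches to stream_mode="custom", which only yields values a
        # node pushes through LangGraph's custom stream writer; "updates" is used
        # here so this toy graph prints its node updates out of the box.
        stream_mode="updates",
        subgraphs=True,
    ):
        print(chunk)
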
View File

@@ -261,10 +261,6 @@ def generate_refined_answer(
     response = merge_content(*streamed_tokens)
     answer = cast(str, response)
-    # refined_agent_stats = _calculate_refined_agent_stats(
-    #     state.decomp_answer_results, state.original_question_retrieval_stats
-    # )
     refined_agent_stats = RefinedAgentStats(
         revision_doc_efficiency=refined_doc_effectiveness,
         revision_question_efficiency=revision_question_efficiency,

View File

@@ -70,11 +70,6 @@ class InitialRefinedAnswerComparisonUpdate(LoggerUpdate):
     refined_answer_improvement_eval: bool = False
-# Not used in current graph
-class InitialAnswerBASEUpdate(BaseModel):
-    initial_base_answer: str | None = None
 class InitialAnswerUpdate(LoggerUpdate):
     """
     Initial answer information
@@ -157,7 +152,6 @@ class MainState(
     ToolChoiceUpdate,
     InitialQuestionDecompositionUpdate,
     InitialAnswerUpdate,
-    InitialAnswerBASEUpdate,
     SubQuestionResultsUpdate,
     OrigQuestionRetrievalUpdate,
     EntityTermExtractionUpdate,

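The state module above assembles MainState by inheriting a stack of small per-node update models, so an update class that no node returns anymore (InitialAnswerBASEUpdate, flagged "Not used in current graph") has to be deleted in two places: its definition and MainState's base list. A minimal sketch of the pattern, assuming Pydantic models as in the diff; RefinedAnswerUpdate and the field names are invented stand-ins, not the repo's:

    from pydantic import BaseModel, Field


    class LoggerUpdate(BaseModel):
        log_messages: list[str] = Field(default_factory=list)


    class InitialAnswerUpdate(LoggerUpdate):
        initial_answer: str | None = None


    class RefinedAnswerUpdate(LoggerUpdate):
        refined_answer: str | None = None


    # The graph state is the union of the per-node update models: each node
    # returns only its own slice, and the framework merges the slices into this
    # combined state. Dropping an update model therefore means deleting both its
    # definition and its entry in this base list, as the commit does above.
    class MainState(
        InitialAnswerUpdate,
        RefinedAnswerUpdate,
    ):
        pass
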
View File

@@ -127,9 +127,6 @@ if __name__ == "__main__":
     )
     for thing in compiled_graph.stream(
         input=inputs,
-        # debug=True,
-        # subgraphs=True,
+        stream_mode="custom",
     ):
         logger.debug(thing)
-    # output = compiled_graph.invoke(inputs)
-    # logger.debug(output)

View File

@@ -155,7 +155,7 @@ if __name__ == "__main__":
     for thing in compiled_graph.stream(
         input=inputs,
         config={"configurable": {"config": graph_config}},
-        # debug=True,
+        stream_mode="custom",
         subgraphs=True,
     ):
         logger.debug(thing)

View File

@@ -162,7 +162,6 @@ if __name__ == "__main__":
         # query="What is the difference between astronomy and astrology?",
         query="Do a search to tell me what is the difference between astronomy and astrology?",
     )
-    # Joachim custom persona
     with get_session_context_manager() as db_session:
         config = get_test_config(db_session, primary_llm, fast_llm, search_request)
@@ -177,11 +176,8 @@ if __name__ == "__main__":
             base_question=config.inputs.search_request.query, log_messages=[]
         )
-        # with open("output.txt", "w") as f:
         tool_responses: list = []
         for output in run_graph(compiled_graph, config, input):
-            # pass
             if isinstance(output, ToolCallKickoff):
                 pass
             elif isinstance(output, ExtendedToolResponse):

View File

@@ -129,6 +129,7 @@ const SortableAssistant: React.FC<SortableAssistantProps> = ({
         className="w-3 ml-[2px] mr-[2px] group-hover:visible invisible flex-none cursor-grab"
       />
       <div
+        data-testid={`assistant-[${assistant.id}]`}
         onClick={(e) => {
           e.preventDefault();
           if (!isDragging) {