fix: handle optional content types in LLM response parsing

- Add get_text_content() helper method to PromptMessage class
- Update generate_suggested_questions_after_answer to use the helper method
- Properly handle str, list[PromptMessageContent], and None content types
pull/22809/head
-LAN- 7 months ago
parent bcce68cead
commit 8e8f2fd826
No known key found for this signature in database
GPG Key ID: 6BA0D108DED011FF

@ -114,7 +114,8 @@ class LLMGenerator:
),
)
questions = output_parser.parse(cast(str, response.message.content))
text_content = response.message.get_text_content()
questions = output_parser.parse(text_content) if text_content else []
except InvokeError:
questions = []
except Exception:

@ -10,9 +10,6 @@ class SuggestedQuestionsAfterAnswerOutputParser:
return SUGGESTED_QUESTIONS_AFTER_ANSWER_INSTRUCTION_PROMPT
def parse(self, text: str) -> Any:
if not isinstance(text, str):
# Optionally log a warning here
return []
action_match = re.search(r"\[.*?\]", text.strip(), re.DOTALL)
if action_match is not None:
json_obj = json.loads(action_match.group(0).strip())

@ -156,6 +156,23 @@ class PromptMessage(ABC, BaseModel):
"""
return not self.content
def get_text_content(self) -> str:
    """
    Collect the textual portion of this prompt message's content.

    :return: the content itself when it is a plain string; the concatenated
        ``data`` of every ``TextPromptMessageContent`` item when the content
        is a list of content parts; an empty string otherwise (e.g. ``None``)
    """
    content = self.content
    # Plain-string content is already the text we want.
    if isinstance(content, str):
        return content
    # Multi-part content: keep only the text parts, in order, and join them.
    if isinstance(content, list):
        return "".join(
            part.data for part in content if isinstance(part, TextPromptMessageContent)
        )
    # None (or any other unexpected shape) carries no text.
    return ""
@field_validator("content", mode="before")
@classmethod
def validate_content(cls, v):

Loading…
Cancel
Save