Use the new flash model

Updated tests
This commit is contained in:
Fahad
2025-06-12 13:44:09 +04:00
parent 8b8d966d33
commit 79af2654b9
20 changed files with 297 additions and 63 deletions

View File

@@ -115,7 +115,7 @@ class TestConversationHistoryBugFix:
return Mock(
content="Response with conversation context",
usage={"input_tokens": 10, "output_tokens": 20, "total_tokens": 30},
-model_name="gemini-2.0-flash-exp",
+model_name="gemini-2.0-flash",
metadata={"finish_reason": "STOP"},
)
@@ -175,7 +175,7 @@ class TestConversationHistoryBugFix:
return Mock(
content="Response without history",
usage={"input_tokens": 10, "output_tokens": 20, "total_tokens": 30},
-model_name="gemini-2.0-flash-exp",
+model_name="gemini-2.0-flash",
metadata={"finish_reason": "STOP"},
)
@@ -213,7 +213,7 @@ class TestConversationHistoryBugFix:
return Mock(
content="New conversation response",
usage={"input_tokens": 10, "output_tokens": 20, "total_tokens": 30},
-model_name="gemini-2.0-flash-exp",
+model_name="gemini-2.0-flash",
metadata={"finish_reason": "STOP"},
)
@@ -297,7 +297,7 @@ class TestConversationHistoryBugFix:
return Mock(
content="Analysis of new files complete",
usage={"input_tokens": 10, "output_tokens": 20, "total_tokens": 30},
-model_name="gemini-2.0-flash-exp",
+model_name="gemini-2.0-flash",
metadata={"finish_reason": "STOP"},
)