removing restriction of available models, fixing max-tokens issues in tests

This commit is contained in:
Badri Narayanan S
2025-12-27 12:17:45 +05:30
parent f86c4f4d32
commit 9b7dcf3a6c
10 changed files with 63 additions and 93 deletions

View File

@@ -36,7 +36,7 @@ async function runTests() {
const turn1 = await streamRequest({
model: 'claude-sonnet-4-5-thinking',
max_tokens: 2048,
max_tokens: 16000,
stream: true,
system: LARGE_SYSTEM_PROMPT,
thinking: { type: 'enabled', budget_tokens: 5000 },
@@ -90,7 +90,7 @@ async function runTests() {
const turn2 = await streamRequest({
model: 'claude-sonnet-4-5-thinking',
max_tokens: 2048,
max_tokens: 16000,
stream: true,
system: LARGE_SYSTEM_PROMPT,
thinking: { type: 'enabled', budget_tokens: 5000 },

View File

@@ -28,7 +28,7 @@ async function runTests() {
const result1 = await streamRequest({
model: 'claude-sonnet-4-5-thinking',
max_tokens: 2048,
max_tokens: 16000,
stream: true,
thinking: { type: 'enabled', budget_tokens: 8000 },
messages: [{
@@ -79,7 +79,7 @@ async function runTests() {
const result2 = await streamRequest({
model: 'claude-sonnet-4-5-thinking',
max_tokens: 2048,
max_tokens: 16000,
stream: true,
thinking: { type: 'enabled', budget_tokens: 8000 },
messages: [

View File

@@ -30,7 +30,7 @@ async function runTests() {
const result = await streamRequest({
model: 'claude-opus-4-5-thinking',
max_tokens: 8192,
max_tokens: 32000,
stream: true,
tools,
thinking: { type: 'enabled', budget_tokens: 16000 },
@@ -93,7 +93,7 @@ Please do this step by step, reading each file before modifying.`
const result2 = await streamRequest({
model: 'claude-opus-4-5-thinking',
max_tokens: 8192,
max_tokens: 32000,
stream: true,
tools,
thinking: { type: 'enabled', budget_tokens: 16000 },

View File

@@ -33,7 +33,7 @@ async function runTests() {
const turn1 = await streamRequest({
model: 'claude-sonnet-4-5-thinking',
max_tokens: 4096,
max_tokens: 16000,
stream: true,
tools,
thinking: { type: 'enabled', budget_tokens: 10000 },
@@ -102,7 +102,7 @@ drwxr-xr-x 4 user staff 128 Dec 19 10:00 tests`
const turn2 = await streamRequest({
model: 'claude-sonnet-4-5-thinking',
max_tokens: 4096,
max_tokens: 16000,
stream: true,
tools,
thinking: { type: 'enabled', budget_tokens: 10000 },

View File

@@ -38,7 +38,7 @@ async function runTests() {
const turn1 = await makeRequest({
model: 'claude-sonnet-4-5-thinking',
max_tokens: 4096,
max_tokens: 16000,
stream: false,
tools,
thinking: { type: 'enabled', budget_tokens: 10000 },
@@ -92,7 +92,7 @@ async function runTests() {
const turn2 = await makeRequest({
model: 'claude-sonnet-4-5-thinking',
max_tokens: 4096,
max_tokens: 16000,
stream: false,
tools,
thinking: { type: 'enabled', budget_tokens: 10000 },
@@ -156,7 +156,7 @@ async function runTests() {
const turn3 = await makeRequest({
model: 'claude-sonnet-4-5-thinking',
max_tokens: 4096,
max_tokens: 16000,
stream: false,
tools,
thinking: { type: 'enabled', budget_tokens: 10000 },

View File

@@ -31,7 +31,7 @@ async function runTests() {
const turn1Result = await streamRequest({
model: 'claude-sonnet-4-5-thinking',
max_tokens: 4096,
max_tokens: 16000,
stream: true,
tools,
thinking: { type: 'enabled', budget_tokens: 10000 },
@@ -95,7 +95,7 @@ async function runTests() {
const turn2Result = await streamRequest({
model: 'claude-sonnet-4-5-thinking',
max_tokens: 4096,
max_tokens: 16000,
stream: true,
tools,
thinking: { type: 'enabled', budget_tokens: 10000 },