@@ -452,17 +452,34 @@ function extractTextFromInteractionOutputs(outputs: Interactions.Interaction['ou
 
 /**
  * Extracts token usage from an Interaction's Usage object.
- * The Interactions API provides total_input_tokens, total_output_tokens, and total_tokens.
+ * The Interactions API provides total_input_tokens, total_output_tokens, total_tokens,
+ * and total_reasoning_tokens (for thinking models).
+ *
+ * Also handles the raw API field name total_thought_tokens, which the SDK may
+ * map to total_reasoning_tokens.
  */
 function extractInteractionUsage(usage: Interactions.Usage | undefined): {
   inputTokens: number
   outputTokens: number
+  reasoningTokens: number
   totalTokens: number
 } {
-  const inputTokens = usage?.total_input_tokens ?? 0
-  const outputTokens = usage?.total_output_tokens ?? 0
-  const totalTokens = usage?.total_tokens ?? inputTokens + outputTokens
-  return { inputTokens, outputTokens, totalTokens }
+  if (!usage) {
+    return { inputTokens: 0, outputTokens: 0, reasoningTokens: 0, totalTokens: 0 }
+  }
+
+  const usageLogger = createLogger('DeepResearchUsage')
+  usageLogger.info('Raw interaction usage', { usage: JSON.stringify(usage) })
+
+  const inputTokens = usage.total_input_tokens ?? 0
+  const outputTokens = usage.total_output_tokens ?? 0
+  const reasoningTokens =
+    usage.total_reasoning_tokens ??
+    ((usage as Record<string, unknown>).total_thought_tokens as number) ??
+    0
+  const totalTokens = usage.total_tokens ?? inputTokens + outputTokens
+
+  return { inputTokens, outputTokens, reasoningTokens, totalTokens }
 }
 
 /**
@@ -471,9 +488,15 @@ function extractInteractionUsage(usage: Interactions.Usage | undefined): {
 function buildDeepResearchResponse(
   content: string,
   model: string,
-  usage: { inputTokens: number; outputTokens: number; totalTokens: number },
+  usage: {
+    inputTokens: number
+    outputTokens: number
+    reasoningTokens: number
+    totalTokens: number
+  },
   providerStartTime: number,
-  providerStartTimeISO: string
+  providerStartTimeISO: string,
+  interactionId?: string
 ): ProviderResponse {
   const providerEndTime = Date.now()
   const duration = providerEndTime - providerStartTime
@@ -505,6 +528,7 @@ function buildDeepResearchResponse(
       ],
     },
     cost: calculateCost(model, usage.inputTokens, usage.outputTokens),
+    interactionId,
   }
 }
 
@@ -524,12 +548,19 @@ function createDeepResearchStream(
   stream: AsyncIterable<Interactions.InteractionSSEEvent>,
   onComplete?: (
     content: string,
-    usage: { inputTokens: number; outputTokens: number; totalTokens: number }
+    usage: {
+      inputTokens: number
+      outputTokens: number
+      reasoningTokens: number
+      totalTokens: number
+    },
+    interactionId?: string
   ) => void
 ): ReadableStream<Uint8Array> {
   const streamLogger = createLogger('DeepResearchStream')
   let fullContent = ''
-  let completionUsage = { inputTokens: 0, outputTokens: 0, totalTokens: 0 }
+  let completionUsage = { inputTokens: 0, outputTokens: 0, reasoningTokens: 0, totalTokens: 0 }
+  let completedInteractionId: string | undefined
 
   return new ReadableStream({
     async start(controller) {
@@ -546,6 +577,12 @@ function createDeepResearchStream(
             if (interaction?.usage) {
               completionUsage = extractInteractionUsage(interaction.usage)
             }
+            completedInteractionId = interaction?.id
+          } else if (event.event_type === 'interaction.start') {
+            const interaction = (event as Interactions.InteractionEvent).interaction
+            if (interaction?.id) {
+              completedInteractionId = interaction.id
+            }
           } else if (event.event_type === 'error') {
             const errorEvent = event as { error?: { code?: string; message?: string } }
             const message = errorEvent.error?.message ?? 'Unknown deep research stream error'
@@ -558,7 +595,7 @@ function createDeepResearchStream(
           }
         }
 
-        onComplete?.(fullContent, completionUsage)
+        onComplete?.(fullContent, completionUsage, completedInteractionId)
         controller.close()
       } catch (error) {
         streamLogger.error('Error reading deep research stream', {
@@ -595,6 +632,7 @@ export async function executeDeepResearchRequest(
     hasSystemPrompt: !!request.systemPrompt,
     hasMessages: !!request.messages?.length,
     streaming: !!request.stream,
+    hasPreviousInteractionId: !!request.previousInteractionId,
   })
 
   if (request.tools?.length) {
@@ -620,6 +658,9 @@ export async function executeDeepResearchRequest(
     background: true,
     store: true,
     ...(systemInstruction && { system_instruction: systemInstruction }),
+    ...(request.previousInteractionId && {
+      previous_interaction_id: request.previousInteractionId,
+    }),
     agent_config: {
       type: 'deep-research' as const,
       thinking_summaries: 'auto' as const,
@@ -685,31 +726,35 @@ export async function executeDeepResearchRequest(
       },
     }
 
-    streamingResult.stream = createDeepResearchStream(streamResponse, (content, usage) => {
-      streamingResult.execution.output.content = content
-      streamingResult.execution.output.tokens = {
-        input: usage.inputTokens,
-        output: usage.outputTokens,
-        total: usage.totalTokens,
-      }
+    streamingResult.stream = createDeepResearchStream(
+      streamResponse,
+      (content, usage, streamInteractionId) => {
+        streamingResult.execution.output.content = content
+        streamingResult.execution.output.tokens = {
+          input: usage.inputTokens,
+          output: usage.outputTokens,
+          total: usage.totalTokens,
+        }
+        streamingResult.execution.output.interactionId = streamInteractionId
+
+        const cost = calculateCost(model, usage.inputTokens, usage.outputTokens)
+        streamingResult.execution.output.cost = cost
 
-      const cost = calculateCost(model, usage.inputTokens, usage.outputTokens)
-      streamingResult.execution.output.cost = cost
-
-      const streamEndTime = Date.now()
-      if (streamingResult.execution.output.providerTiming) {
-        streamingResult.execution.output.providerTiming.endTime = new Date(
-          streamEndTime
-        ).toISOString()
-        streamingResult.execution.output.providerTiming.duration =
-          streamEndTime - providerStartTime
-        const segments = streamingResult.execution.output.providerTiming.timeSegments
-        if (segments?.[0]) {
-          segments[0].endTime = streamEndTime
-          segments[0].duration = streamEndTime - providerStartTime
+        const streamEndTime = Date.now()
+        if (streamingResult.execution.output.providerTiming) {
+          streamingResult.execution.output.providerTiming.endTime = new Date(
+            streamEndTime
+          ).toISOString()
+          streamingResult.execution.output.providerTiming.duration =
+            streamEndTime - providerStartTime
+          const segments = streamingResult.execution.output.providerTiming.timeSegments
+          if (segments?.[0]) {
+            segments[0].endTime = streamEndTime
+            segments[0].duration = streamEndTime - providerStartTime
+          }
         }
       }
-    })
+    )
 
     return streamingResult
   }
@@ -764,11 +809,21 @@ export async function executeDeepResearchRequest(
     logger.info('Deep research completed', {
       interactionId,
       contentLength: content.length,
+      inputTokens: usage.inputTokens,
+      outputTokens: usage.outputTokens,
+      reasoningTokens: usage.reasoningTokens,
       totalTokens: usage.totalTokens,
       durationMs: Date.now() - providerStartTime,
     })
 
-    return buildDeepResearchResponse(content, model, usage, providerStartTime, providerStartTimeISO)
+    return buildDeepResearchResponse(
+      content,
+      model,
+      usage,
+      providerStartTime,
+      providerStartTimeISO,
+      interactionId
+    )
   } catch (error) {
     const providerEndTime = Date.now()
     const duration = providerEndTime - providerStartTime
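A minimal caller-side sketch of what these changes enable, chaining a follow-up request through the `interactionId` now returned on the provider response. `previousInteractionId`, `systemPrompt`, `messages`, `stream`, and the returned `interactionId` follow fields referenced in the diff; the import path, `model` option, message text, and the `content` field read off the response are assumptions for illustration only.

```ts
// Hypothetical usage sketch (not part of this diff).
import { executeDeepResearchRequest } from './deep-research'

async function runFollowUp(): Promise<string | undefined> {
  const first = await executeDeepResearchRequest({
    model: 'deep-research-model', // placeholder model id
    systemPrompt: 'Research the topic thoroughly.',
    messages: [{ role: 'user', content: 'Summarize recent work on topic X.' }],
    stream: false,
  })

  // The response now carries the interaction id, so a follow-up request can
  // resume the stored interaction instead of resending the whole transcript.
  const second = await executeDeepResearchRequest({
    model: 'deep-research-model',
    messages: [{ role: 'user', content: 'Now compare the three strongest findings.' }],
    stream: false,
    previousInteractionId: first.interactionId,
  })

  return second.content
}
```

Since requests are created with `store: true`, passing `previous_interaction_id` presumably lets the API resume server-side state, which is why the caller only needs to forward the id rather than the accumulated conversation.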