Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions packages/sdk/server-ai/src/LDAIConfigMapper.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,10 @@ export class LDAIConfigMapper {
return undefined;
}

/**
* @deprecated Use `VercelProvider.toVercelAISDK()` from the `@launchdarkly/server-sdk-ai-vercel` package instead.
* This method will be removed in a future version.
*/
toVercelAISDK<TMod>(
provider: VercelAISDKProvider<TMod> | Record<string, VercelAISDKProvider<TMod>>,
options?: VercelAISDKMapOptions | undefined,
Expand Down
54 changes: 54 additions & 0 deletions packages/sdk/server-ai/src/LDAIConfigTrackerImpl.ts
Original file line number Diff line number Diff line change
Expand Up @@ -119,6 +119,60 @@ export class LDAIConfigTrackerImpl implements LDAIConfigTracker {
return result;
}

trackStreamMetricsOf<TStream>(
  streamCreator: () => TStream,
  metricsExtractor: (stream: TStream) => Promise<LDAIMetrics>,
): TStream {
  const startTime = Date.now();

  try {
    // Create the stream synchronously so the caller can consume it right away.
    const stream = streamCreator();

    // Fire-and-forget background metrics tracking. The `void` marks the
    // promise as intentionally unawaited; _trackStreamMetricsInBackground
    // catches all errors internally, so it never rejects.
    void this._trackStreamMetricsInBackground(stream, metricsExtractor, startTime);

    // Return the stream immediately without waiting on metrics extraction.
    return stream;
  } catch (error) {
    // Stream creation itself failed: record the (short) duration and an
    // error metric, then rethrow so the caller sees the failure.
    this.trackDuration(Date.now() - startTime);
    this.trackError();
    throw error;
  }
}

/**
 * Waits for the stream to complete, then records duration, success/error
 * status, and token usage against this tracker.
 *
 * Runs as a detached background task: every failure path is caught here,
 * so the returned promise never rejects and callers may safely
 * fire-and-forget it.
 *
 * @param stream The in-flight stream result to extract metrics from.
 * @param metricsExtractor Resolves once the stream completes, yielding metrics.
 * @param startTime Epoch milliseconds captured when the stream was created.
 */
private async _trackStreamMetricsInBackground<TStream>(
  stream: TStream,
  metricsExtractor: (stream: TStream) => Promise<LDAIMetrics>,
  startTime: number,
): Promise<void> {
  try {
    // Wait for the stream to complete and metrics to become available.
    const metrics = await metricsExtractor(stream);

    // Duration spans from stream creation to metrics-extraction completion.
    this.trackDuration(Date.now() - startTime);

    // Success/error status is determined by the extracted metrics.
    if (metrics.success) {
      this.trackSuccess();
    } else {
      this.trackError();
    }

    // Token usage is optional; only track it when present.
    if (metrics.usage) {
      this.trackTokens(metrics.usage);
    }
  } catch {
    // Metrics extraction failed: record an error metric, but swallow the
    // exception so stream consumption is never affected.
    // (Optional catch binding: the previous unused `error` parameter
    // triggered no-unused-vars.)
    this.trackError();
  }
}

async trackOpenAIMetrics<
TRes extends {
usage?: {
Expand Down
3 changes: 3 additions & 0 deletions packages/sdk/server-ai/src/api/config/LDAIConfig.ts
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,9 @@ export interface LDAIConfig {
*
* WARNING: this method can throw an exception if a Vercel AI SDK model cannot be determined.
*
* @deprecated Use `VercelProvider.toVercelAISDK()` from the `@launchdarkly/server-sdk-ai-vercel` package instead.
* This method will be removed in a future version.
*
* @param provider A Vercel AI SDK Provider or a map of provider names to Vercel AI SDK Providers.
* @param options Optional mapping options.
* @returns A configuration directly usable in Vercel AI SDK generateText() and streamText()
Expand Down
29 changes: 29 additions & 0 deletions packages/sdk/server-ai/src/api/config/LDAIConfigTracker.ts
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,32 @@ export interface LDAIConfigTracker {
func: () => Promise<TRes>,
): Promise<TRes>;

/**
 * Track metrics for a streaming AI operation.
 *
 * This function will track the duration of the operation, extract metrics using the provided
 * metrics extractor function, and track success or error status accordingly.
 *
 * Unlike trackMetricsOf, this method is designed for streaming operations where:
 * - The stream is created and returned immediately (synchronously)
 * - Metrics are extracted asynchronously in the background once the stream completes
 * - Duration is tracked from stream creation to metrics extraction completion
 *
 * The stream is returned immediately so the caller can begin consuming it without waiting.
 * Metrics extraction happens in the background and does not block stream consumption.
 *
 * If the stream creator throws, then this method will also throw and record an error.
 * If metrics extraction fails, an error metric is recorded but the exception is swallowed,
 * so it does not affect stream consumption.
 *
 * @param streamCreator Function that creates and returns the stream (synchronous)
 * @param metricsExtractor Function that asynchronously extracts metrics from the stream
 * @returns The stream result (returned immediately, not a Promise)
 */
trackStreamMetricsOf<TStream>(
  streamCreator: () => TStream,
  metricsExtractor: (stream: TStream) => Promise<LDAIMetrics>,
): TStream;

/**
* Track an OpenAI operation.
*
Expand Down Expand Up @@ -187,6 +213,9 @@ export interface LDAIConfigTracker {
* In the case the provided function throws, this function will record the duration and an error.
* A failed operation will not have any token usage data.
*
* @deprecated Use `trackStreamMetricsOf()` with `VercelProvider.createStreamMetricsExtractor()` from the
* `@launchdarkly/server-sdk-ai-vercel` package instead. This method will be removed in a future version.
*
* @param func Function which executes the operation.
* @returns The result of the operation.
*/
Expand Down
12 changes: 12 additions & 0 deletions packages/sdk/server-ai/src/api/config/VercelAISDK.ts
Original file line number Diff line number Diff line change
@@ -1,11 +1,23 @@
import { type LDMessage } from './LDAIConfig';

/**
 * Factory that maps a model name to a Vercel AI SDK model instance.
 *
 * @deprecated Use `VercelAISDKProvider` from the `@launchdarkly/server-sdk-ai-vercel` package instead.
 * This type will be removed in a future version.
 */
export type VercelAISDKProvider<TMod> = (modelName: string) => TMod;

/**
 * Options controlling how an AI config is mapped to Vercel AI SDK parameters.
 *
 * @deprecated Use `VercelAISDKMapOptions` from the `@launchdarkly/server-sdk-ai-vercel` package instead.
 * This type will be removed in a future version.
 */
export interface VercelAISDKMapOptions {
  /**
   * Additional messages to include as-is, without variable interpolation —
   * presumably combined with the config's own messages; verify against
   * `LDAIConfigMapper.toVercelAISDK` usage.
   */
  nonInterpolatedMessages?: LDMessage[] | undefined;
}

/**
* @deprecated Use `VercelAISDKConfig` from the `@launchdarkly/server-sdk-ai-vercel` package instead.
* This type will be removed in a future version.
*/
export interface VercelAISDKConfig<TMod> {
model: TMod;
messages?: LDMessage[] | undefined;
Expand Down