diff --git a/packages/sdk/server-ai/src/ldai/client.py b/packages/sdk/server-ai/src/ldai/client.py
index a4605e3..c5fda5a 100644
--- a/packages/sdk/server-ai/src/ldai/client.py
+++ b/packages/sdk/server-ai/src/ldai/client.py
@@ -15,14 +15,35 @@
                          JudgeConfiguration, LDMessage, ModelConfig, ProviderConfig)
 from ldai.providers.ai_provider_factory import AIProviderFactory
+from ldai.sdk_info import AI_SDK_LANGUAGE, AI_SDK_NAME, AI_SDK_VERSION
 from ldai.tracker import LDAIConfigTracker
 
+_TRACK_SDK_INFO = '$ld:ai:sdk-info'
+_TRACK_USAGE_COMPLETION_CONFIG = '$ld:ai:usage:completion-config'
+_TRACK_USAGE_CREATE_CHAT = '$ld:ai:usage:create-chat'
+_TRACK_USAGE_JUDGE_CONFIG = '$ld:ai:usage:judge-config'
+_TRACK_USAGE_CREATE_JUDGE = '$ld:ai:usage:create-judge'
+_TRACK_USAGE_AGENT_CONFIG = '$ld:ai:usage:agent-config'
+_TRACK_USAGE_AGENT_CONFIGS = '$ld:ai:usage:agent-configs'
+
+_INIT_TRACK_CONTEXT = Context.builder('ld-internal-tracking').anonymous(True).build()
+
 
 class LDAIClient:
     """The LaunchDarkly AI SDK client object."""
 
     def __init__(self, client: LDClient):
         self._client = client
+        self._client.track(
+            _TRACK_SDK_INFO,
+            _INIT_TRACK_CONTEXT,
+            {
+                'aiSdkName': AI_SDK_NAME,
+                'aiSdkVersion': AI_SDK_VERSION,
+                'aiSdkLanguage': AI_SDK_LANGUAGE,
+            },
+            1,
+        )
 
     def completion_config(
         self,
@@ -40,7 +61,7 @@ def completion_config(
         :param variables: Additional variables for the completion configuration.
         :return: The completion configuration with a tracker used for gathering metrics.
         """
-        self._client.track('$ld:ai:config:function:single', context, key, 1)
+        self._client.track(_TRACK_USAGE_COMPLETION_CONFIG, context, key, 1)
 
         model, provider, messages, instructions, tracker, enabled, judge_configuration, _ = self.__evaluate(
             key, context, default_value.to_dict(), variables
@@ -94,7 +115,7 @@ def judge_config(
         :param variables: Additional variables for the judge configuration.
         :return: The judge configuration with a tracker used for gathering metrics.
         """
-        self._client.track('$ld:ai:judge:function:single', context, key, 1)
+        self._client.track(_TRACK_USAGE_JUDGE_CONFIG, context, key, 1)
 
         model, provider, messages, instructions, tracker, enabled, judge_configuration, variation = self.__evaluate(
             key, context, default_value.to_dict(), variables
@@ -170,7 +191,7 @@ async def create_judge(
             if relevance_eval:
                 print('Relevance score:', relevance_eval.score)
         """
-        self._client.track('$ld:ai:judge:function:createJudge', context, key, 1)
+        self._client.track(_TRACK_USAGE_CREATE_JUDGE, context, key, 1)
 
         try:
             if variables:
@@ -281,7 +302,7 @@ async def create_chat(
             messages = chat.get_messages()
             print(f"Conversation has {len(messages)} messages")
         """
-        self._client.track('$ld:ai:config:function:createChat', context, key, 1)
+        self._client.track(_TRACK_USAGE_CREATE_CHAT, context, key, 1)
         log.debug(f"Creating chat for key: {key}")
 
         config = self.completion_config(key, context, default_value, variables)
@@ -340,7 +361,7 @@ def agent_config(
         :return: Configured AIAgentConfig instance.
         """
         self._client.track(
-            "$ld:ai:agent:function:single",
+            _TRACK_USAGE_AGENT_CONFIG,
             context,
             key,
             1
@@ -406,7 +427,7 @@
         """
         agent_count = len(agent_configs)
         self._client.track(
-            "$ld:ai:agent:function:multiple",
+            _TRACK_USAGE_AGENT_CONFIGS,
             context,
             agent_count,
             agent_count
diff --git a/packages/sdk/server-ai/src/ldai/sdk_info.py b/packages/sdk/server-ai/src/ldai/sdk_info.py
new file mode 100644
index 0000000..743f729
--- /dev/null
+++ b/packages/sdk/server-ai/src/ldai/sdk_info.py
@@ -0,0 +1,7 @@
+from importlib.metadata import metadata
+
+_meta = metadata('launchdarkly-server-sdk-ai')
+
+AI_SDK_NAME: str = _meta['Name']
+AI_SDK_VERSION: str = _meta['Version']
+AI_SDK_LANGUAGE: str = 'python'
diff --git a/packages/sdk/server-ai/tests/test_model_config.py b/packages/sdk/server-ai/tests/test_model_config.py
index 26a02c9..d0ae24d 100644
--- a/packages/sdk/server-ai/tests/test_model_config.py
+++ b/packages/sdk/server-ai/tests/test_model_config.py
@@ -323,9 +323,31 @@ def test_config_method_tracking(ldai_client: LDAIClient):
 
     config = client.config('test-config-key', context, default_value)
 
-    mock_client.track.assert_called_once_with(
-        '$ld:ai:config:function:single',
+    mock_client.track.assert_any_call(
+        '$ld:ai:usage:completion-config',
         context,
         'test-config-key',
         1
     )
+
+
+def test_sdk_info_tracked_on_init():
+    from unittest.mock import Mock
+
+    from ldai.client import _INIT_TRACK_CONTEXT
+    from ldai.sdk_info import AI_SDK_LANGUAGE, AI_SDK_NAME, AI_SDK_VERSION
+
+    mock_client = Mock()
+
+    client = LDAIClient(mock_client)
+
+    mock_client.track.assert_called_once_with(
+        '$ld:ai:sdk-info',
+        _INIT_TRACK_CONTEXT,
+        {
+            'aiSdkName': AI_SDK_NAME,
+            'aiSdkVersion': AI_SDK_VERSION,
+            'aiSdkLanguage': AI_SDK_LANGUAGE,
+        },
+        1,
+    )