-
Notifications
You must be signed in to change notification settings - Fork 110
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
1 parent
1123c05
commit 4619c90
Showing
2 changed files
with
160 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,127 @@ | ||
# Copyright 2010 New Relic, Inc. | ||
# | ||
# Licensed under the Apache License, Version 2.0 (the "License"); | ||
# you may not use this file except in compliance with the License. | ||
# You may obtain a copy of the License at | ||
# | ||
# http://www.apache.org/licenses/LICENSE-2.0 | ||
# | ||
# Unless required by applicable law or agreed to in writing, software | ||
# distributed under the License is distributed on an "AS IS" BASIS, | ||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
# See the License for the specific language governing permissions and | ||
# limitations under the License. | ||
|
||
import openai | ||
from testing_support.fixtures import reset_core_stats_engine | ||
|
||
from newrelic.api.background_task import background_task | ||
from newrelic.api.ml_model import get_ai_message_ids | ||
from newrelic.api.transaction import add_custom_attribute, current_transaction | ||
|
||
_test_openai_chat_completion_messages_1 = ( | ||
{"role": "system", "content": "You are a scientist."}, | ||
{"role": "user", "content": "What is 212 degrees Fahrenheit converted to Celsius?"}, | ||
) | ||
_test_openai_chat_completion_messages_2 = ( | ||
{"role": "system", "content": "You are a mathematician."}, | ||
{"role": "user", "content": "What is 1 plus 2?"}, | ||
) | ||
expected_message_ids_1 = [ | ||
{ | ||
"conversation_id": "my-awesome-id", | ||
"request_id": "49dbbffbd3c3f4612aa48def69059ccd", | ||
"message_id": "chatcmpl-87sb95K4EF2nuJRcTs43Tm9ntTemv-0", | ||
}, | ||
{ | ||
"conversation_id": "my-awesome-id", | ||
"request_id": "49dbbffbd3c3f4612aa48def69059ccd", | ||
"message_id": "chatcmpl-87sb95K4EF2nuJRcTs43Tm9ntTemv-1", | ||
}, | ||
{ | ||
"conversation_id": "my-awesome-id", | ||
"request_id": "49dbbffbd3c3f4612aa48def69059ccd", | ||
"message_id": "chatcmpl-87sb95K4EF2nuJRcTs43Tm9ntTemv-2", | ||
}, | ||
] | ||
expected_message_ids_2 = [ | ||
{ | ||
"conversation_id": "my-awesome-id", | ||
"request_id": "49dbbffbd3c3f4612aa48def69059aad", | ||
"message_id": "chatcmpl-87sb95K4EF2nuJRcTs43Tm9ntTeat-0", | ||
}, | ||
{ | ||
"conversation_id": "my-awesome-id", | ||
"request_id": "49dbbffbd3c3f4612aa48def69059aad", | ||
"message_id": "chatcmpl-87sb95K4EF2nuJRcTs43Tm9ntTeat-1", | ||
}, | ||
{ | ||
"conversation_id": "my-awesome-id", | ||
"request_id": "49dbbffbd3c3f4612aa48def69059aad", | ||
"message_id": "chatcmpl-87sb95K4EF2nuJRcTs43Tm9ntTeat-2", | ||
}, | ||
] | ||
|
||
|
||
@reset_core_stats_engine()
@background_task()
def test_get_ai_message_ids_when_nr_message_ids_not_set():
    """Inside a transaction that has recorded no message ids, looking up an
    unknown request id returns an empty list."""
    assert get_ai_message_ids("request-id-1") == []
|
||
|
||
@reset_core_stats_engine()
def test_get_ai_message_ids_outside_transaction():
    """With no active transaction at all, get_ai_message_ids degrades
    gracefully and returns an empty list."""
    assert get_ai_message_ids("request-id-1") == []
|
||
|
||
@reset_core_stats_engine()
@background_task()
def test_get_ai_message_ids_mulitple_async(loop, set_trace_info):
    """Two async chat completions within one transaction each record their
    own set of message ids, retrievable afterwards by response id, and
    retrieval removes them from the transaction (no leak).

    NOTE(review): "mulitple" in the name is a typo for "multiple"; the name
    is kept unchanged so test selection by name keeps working.
    """
    set_trace_info()
    add_custom_attribute("conversation_id", "my-awesome-id")

    async def _run():
        res1 = await openai.ChatCompletion.acreate(
            model="gpt-3.5-turbo", messages=_test_openai_chat_completion_messages_1, temperature=0.7, max_tokens=100
        )
        res2 = await openai.ChatCompletion.acreate(
            model="gpt-3.5-turbo", messages=_test_openai_chat_completion_messages_2, temperature=0.7, max_tokens=100
        )
        return [res1, res2]

    results = loop.run_until_complete(_run())

    # list(...) instead of the original identity comprehension
    # ([m for m in ...]) — same result, without the element-by-element copy.
    message_ids = list(get_ai_message_ids(results[0].id))
    assert message_ids == expected_message_ids_1

    message_ids = list(get_ai_message_ids(results[1].id))
    assert message_ids == expected_message_ids_2

    # Make sure we aren't causing a memory leak.
    transaction = current_transaction()
    assert not transaction._nr_message_ids
|
||
|
||
@reset_core_stats_engine()
@background_task()
def test_get_ai_message_ids_mulitple_sync(set_trace_info):
    """Two synchronous chat completions within one transaction each record
    their own set of message ids, retrievable afterwards by response id, and
    retrieval removes them from the transaction (no leak).

    NOTE(review): "mulitple" in the name is a typo for "multiple"; the name
    is kept unchanged so test selection by name keeps working.
    """
    set_trace_info()
    add_custom_attribute("conversation_id", "my-awesome-id")

    results = openai.ChatCompletion.create(
        model="gpt-3.5-turbo", messages=_test_openai_chat_completion_messages_1, temperature=0.7, max_tokens=100
    )
    # list(...) instead of the original identity comprehension
    # ([m for m in ...]) — same result, without the element-by-element copy.
    message_ids = list(get_ai_message_ids(results.id))
    assert message_ids == expected_message_ids_1

    results = openai.ChatCompletion.create(
        model="gpt-3.5-turbo", messages=_test_openai_chat_completion_messages_2, temperature=0.7, max_tokens=100
    )
    message_ids = list(get_ai_message_ids(results.id))
    assert message_ids == expected_message_ids_2

    # Make sure we aren't causing a memory leak.
    transaction = current_transaction()
    assert not transaction._nr_message_ids