# openai_chat_stream.py
# (39 lines, 34 loc, 1.45 KB)
from langchain_openai import ChatOpenAI
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk import trace as trace_sdk
from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor
from openinference.instrumentation import using_attributes
from openinference.instrumentation.langchain import LangChainInstrumentor
# OTLP collector endpoint (a locally running Phoenix instance by default).
endpoint = "http://127.0.0.1:6006/v1/traces"

# Build a tracer provider that ships each span both to the OTLP endpoint
# and to stdout, then hook LangChain instrumentation into it.
tracer_provider = trace_sdk.TracerProvider()
for exporter in (OTLPSpanExporter(endpoint), ConsoleSpanExporter()):
    tracer_provider.add_span_processor(SimpleSpanProcessor(exporter))
LangChainInstrumentor().instrument(tracer_provider=tracer_provider)
if __name__ == "__main__":
    # Example metadata attached to every span created inside the context.
    span_metadata = {
        "test-int": 1,
        "test-str": "string",
        "test-list": [1, 2, 3],
        "test-dict": {
            "key-1": "val-1",
            "key-2": "val-2",
        },
    }
    # Values substituted into the prompt template placeholders.
    template_variables = {
        "city": "Johannesburg",
        "date": "July 11th",
    }
    # using_attributes propagates session/user/prompt context onto the
    # spans the LangChain instrumentor emits for the streamed call below.
    with using_attributes(
        session_id="my-test-session",
        user_id="my-test-user",
        metadata=span_metadata,
        tags=["tag-1", "tag-2"],
        prompt_template="Who won the soccer match in {city} on {date}",
        prompt_template_version="v1.0",
        prompt_template_variables=template_variables,
    ):
        model = ChatOpenAI(model_name="gpt-3.5-turbo")
        for chunk in model.stream("Write a haiku."):
            # Print tokens as they arrive, without buffering or newlines.
            print(chunk.content, end="", flush=True)