LangSmith can capture traces generated by Semantic Kernel using OpenInference’s OpenAI instrumentation. This guide shows you how to automatically capture traces from your Semantic Kernel applications and send them to LangSmith for monitoring and analysis.
3. Create and run your Semantic Kernel application
Once configured, your Semantic Kernel application will automatically send traces to LangSmith. This example includes a minimal app that configures the kernel, defines prompt-based functions, and invokes them to generate traced activity.
Copy
Ask AI
"""Minimal Semantic Kernel app traced to LangSmith via OpenInference.

Configures LangSmith OTel export, instruments the OpenAI client, registers two
prompt-based kernel functions, and invokes both to generate traced activity.
"""
import os
import asyncio

import dotenv
from semantic_kernel import Kernel
from semantic_kernel.connectors.ai.open_ai import OpenAIChatCompletion
from semantic_kernel.prompt_template import InputVariable, PromptTemplateConfig
from openinference.instrumentation.openai import OpenAIInstrumentor
from langsmith.integrations.otel import configure

# Load environment variables (OpenAI / LangSmith API keys)
dotenv.load_dotenv(".env.local")

# Configure LangSmith tracing — all spans go to this project
configure(project_name="semantic-kernel-assistant")

# Instrument OpenAI calls so each LLM request emits an OpenInference span
OpenAIInstrumentor().instrument()

# Configure Semantic Kernel with an OpenAI chat-completion service
kernel = Kernel()
kernel.add_service(OpenAIChatCompletion())

# Create a code analysis prompt template.
# {{$code}} is a Semantic Kernel template variable, filled at invoke time.
code_analysis_prompt = """Analyze the following code and provide insights:

Code: {{$code}}

Please provide:
1. A brief summary of what the code does
2. Any potential improvements
3. Code quality assessment"""

prompt_template_config = PromptTemplateConfig(
    template=code_analysis_prompt,
    name="code_analyzer",
    template_format="semantic-kernel",
    input_variables=[
        InputVariable(name="code", description="The code to analyze", is_required=True),
    ],
)

# Register the analysis function on the kernel
code_analyzer = kernel.add_function(
    function_name="analyzeCode",
    plugin_name="codeAnalysisPlugin",
    prompt_template_config=prompt_template_config,
)

# Create a documentation generator prompt template
doc_prompt = """Generate comprehensive documentation for the following function:

{{$function_code}}

Include:
- Purpose and functionality
- Parameters and return values
- Usage examples
- Any important notes"""

doc_template_config = PromptTemplateConfig(
    template=doc_prompt,
    name="doc_generator",
    template_format="semantic-kernel",
    input_variables=[
        InputVariable(
            name="function_code",
            description="The function code to document",
            is_required=True,
        ),
    ],
)

doc_generator = kernel.add_function(
    function_name="generateDocs",
    plugin_name="documentationPlugin",
    prompt_template_config=doc_template_config,
)


async def main():
    """Invoke both kernel functions on a sample snippet; return their outputs.

    Returns:
        dict with "analysis" and "documentation" keys (stringified results).
    """
    # Example code to analyze (was garbled in the source: "return \n n" -> "return n")
    sample_code = """def fibonacci(n):
    if n <= 1:
        return n
    return fibonacci(n-1) + fibonacci(n-2)
"""

    # Analyze the code — this call is traced end-to-end
    analysis_result = await kernel.invoke(code_analyzer, code=sample_code)
    print("Code Analysis:")
    print(analysis_result)

    print("\n" + "=" * 50 + "\n")

    # Generate documentation for the same snippet
    doc_result = await kernel.invoke(doc_generator, function_code=sample_code)
    print("Generated Documentation:")
    print(doc_result)

    return {"analysis": str(analysis_result), "documentation": str(doc_result)}


if __name__ == "__main__":
    asyncio.run(main())
You can add custom metadata to your traces by setting span attributes:
Copy
Ask AI
"""Attach LangSmith-visible metadata to a trace via OpenTelemetry span attributes."""
from opentelemetry import trace

# Obtain a tracer scoped to this module
tracer = trace.get_tracer(__name__)


async def main():
    # Wrap the workflow in a parent span; attributes set here surface in LangSmith
    with tracer.start_as_current_span("semantic_kernel_workflow") as span:
        # Custom metadata: keys under "langsmith.metadata." become trace metadata
        span.set_attribute("langsmith.metadata.workflow_type", "code_analysis")
        span.set_attribute("langsmith.metadata.user_id", "developer_123")
        # Comma-separated tags attached to the span
        span.set_attribute("langsmith.span.tags", "semantic-kernel,code-analysis")

        # Your Semantic Kernel code here
        result = await kernel.invoke(code_analyzer, code=sample_code)
        return result