@@ -501,8 +501,45 @@ func TestE2EInvokeDeclarativeAgentWithMcpServerTool(t *testing.T) {
 	})
 }
 
-// This function generates a CrewAI agent that uses a mock LLM server
-// Assumes that the image is built and pushed to registry, the agent can be found in python/samples/crewai/poem_flow
+// This function generates an OpenAI BYO agent that uses a mock LLM server
+// Assumes that the image is built and pushed to the registry
+func generateOpenAIAgent(baseURL string) *v1alpha2.Agent {
+	return &v1alpha2.Agent{
+		ObjectMeta: metav1.ObjectMeta{
+			Name:      "basic-openai-test-agent",
+			Namespace: "kagent",
+		},
+		Spec: v1alpha2.AgentSpec{
+			Description: "A basic OpenAI agent with calculator and weather tools",
+			Type:        v1alpha2.AgentType_BYO,
+			BYO: &v1alpha2.BYOAgentSpec{
+				Deployment: &v1alpha2.ByoDeploymentSpec{
+					Image: "localhost:5001/basic-openai:latest",
+					SharedDeploymentSpec: v1alpha2.SharedDeploymentSpec{
+						Env: []corev1.EnvVar{
+							{
+								Name: "OPENAI_API_KEY",
+								ValueFrom: &corev1.EnvVarSource{
+									SecretKeyRef: &corev1.SecretKeySelector{
+										LocalObjectReference: corev1.LocalObjectReference{
+											Name: "kagent-openai",
+										},
+										Key: "OPENAI_API_KEY",
+									},
+								},
+							},
+							{
+								Name:  "OPENAI_API_BASE",
+								Value: baseURL + "/v1",
+							},
+						},
+					},
+				},
+			},
+		},
+	}
+}
+
 func generateCrewAIAgent(baseURL string) *v1alpha2.Agent {
 	return &v1alpha2.Agent{
 		ObjectMeta: metav1.ObjectMeta{
@@ -541,6 +578,59 @@ func generateCrewAIAgent(baseURL string) *v1alpha2.Agent {
 	}
 }
 
+func TestE2EInvokeOpenAIAgent(t *testing.T) {
+	// Setup mock server
+	baseURL, stopServer := setupMockServer(t, "mocks/invoke_openai_agent.json")
+	defer stopServer()
+
+	// Setup Kubernetes client
+	cli := setupK8sClient(t, false)
+
+	// Setup specific resources
+	modelCfg := setupModelConfig(t, cli, baseURL)
+	agent := generateOpenAIAgent(baseURL)
+
+	// Create the agent on the cluster
+	err := cli.Create(t.Context(), agent)
+	require.NoError(t, err)
+
+	// Wait for agent to be ready
+	args := []string{
+		"wait",
+		"--for",
+		"condition=Ready",
+		"--timeout=1m",
+		"agents.kagent.dev",
+		agent.Name,
+		"-n",
+		agent.Namespace,
+	}
+
+	cmd := exec.CommandContext(t.Context(), "kubectl", args...)
+	cmd.Stdout = os.Stdout
+	cmd.Stderr = os.Stderr
+	require.NoError(t, cmd.Run())
+
+	defer func() {
+		cli.Delete(t.Context(), agent)    //nolint:errcheck
+		cli.Delete(t.Context(), modelCfg) //nolint:errcheck
+	}()
+
+	// Setup A2A client - use the agent's actual name
+	a2aURL := a2aUrl("kagent", "basic-openai-test-agent")
+	a2aClient, err := a2aclient.NewA2AClient(a2aURL)
+	require.NoError(t, err)
+
+	useArtifacts := true
+	t.Run("sync_invocation_calculator", func(t *testing.T) {
+		runSyncTest(t, a2aClient, "What is 2+2?", "4", &useArtifacts)
+	})
+
+	t.Run("streaming_invocation_weather", func(t *testing.T) {
+		runStreamingTest(t, a2aClient, "What is the weather in London?", "Rainy, 52°F")
+	})
+}
+
 func TestE2EInvokeCrewAIAgent(t *testing.T) {
 	mockllmCfg, err := mockllm.LoadConfigFromFile("mocks/invoke_crewai_agent.json", mocks)
 	require.NoError(t, err)
@@ -619,6 +709,8 @@ func TestE2EInvokeCrewAIAgent(t *testing.T) {
 	t.Run("streaming_invocation", func(t *testing.T) {
 		runStreamingTest(t, a2aClient, "Generate a poem about CrewAI", "CrewAI is awesome, it makes coding fun.")
 	})
+
+	cli.Delete(t.Context(), agent) //nolint:errcheck
 }
 
 func TestE2EInvokeSTSIntegration(t *testing.T) {