@@ -551,6 +551,56 @@ def test_static_instruction_field_exists(llm_backend):
     assert agent.static_instruction == static_content
 
 
+@pytest.mark.parametrize("llm_backend", ["GOOGLE_AI", "VERTEX"])
+def test_static_instruction_supports_string(llm_backend):
+  """Test that static_instruction field supports simple strings."""
+  static_str = "This is a static instruction as a string"
+  agent = LlmAgent(name="test_agent", static_instruction=static_str)
+  assert agent.static_instruction == static_str
+  assert isinstance(agent.static_instruction, str)
+
+
+@pytest.mark.parametrize("llm_backend", ["GOOGLE_AI", "VERTEX"])
+def test_static_instruction_supports_part(llm_backend):
+  """Test that static_instruction field supports types.Part."""
+  static_part = types.Part(text="This is a static instruction as Part")
+  agent = LlmAgent(name="test_agent", static_instruction=static_part)
+  assert agent.static_instruction == static_part
+  assert isinstance(agent.static_instruction, types.Part)
+
+
+@pytest.mark.parametrize("llm_backend", ["GOOGLE_AI", "VERTEX"])
+def test_static_instruction_supports_file(llm_backend):
+  """Test that static_instruction field supports types.File."""
+  static_file = types.File(uri="gs://bucket/file.txt", mime_type="text/plain")
+  agent = LlmAgent(name="test_agent", static_instruction=static_file)
+  assert agent.static_instruction == static_file
+  assert isinstance(agent.static_instruction, types.File)
+
+
+@pytest.mark.parametrize("llm_backend", ["GOOGLE_AI", "VERTEX"])
+def test_static_instruction_supports_list_of_parts(llm_backend):
+  """Test that static_instruction field supports list[PartUnion]."""
+  static_parts_list = [
+      types.Part(text="First part"),
+      types.Part(text="Second part"),
+  ]
+  agent = LlmAgent(name="test_agent", static_instruction=static_parts_list)
+  assert agent.static_instruction == static_parts_list
+  assert isinstance(agent.static_instruction, list)
+  assert len(agent.static_instruction) == 2
+
+
+@pytest.mark.parametrize("llm_backend", ["GOOGLE_AI", "VERTEX"])
+def test_static_instruction_supports_list_of_strings(llm_backend):
+  """Test that static_instruction field supports list of strings."""
+  static_strings_list = ["First instruction", "Second instruction"]
+  agent = LlmAgent(name="test_agent", static_instruction=static_strings_list)
+  assert agent.static_instruction == static_strings_list
+  assert isinstance(agent.static_instruction, list)
+  assert all(isinstance(s, str) for s in agent.static_instruction)
+
+
 @pytest.mark.parametrize("llm_backend", ["GOOGLE_AI", "VERTEX"])
 def test_static_instruction_supports_multiple_parts(llm_backend):
   """Test that static_instruction supports multiple parts including files."""
@@ -607,6 +657,91 @@ async def test_static_instruction_added_to_contents(llm_backend):
   assert llm_request.config.system_instruction == "Static instruction content"
 
 
+@pytest.mark.parametrize("llm_backend", ["GOOGLE_AI", "VERTEX"])
+@pytest.mark.asyncio
+async def test_static_instruction_string_added_to_system(llm_backend):
+  """Test that string static instructions are added to system_instruction."""
+  agent = LlmAgent(
+      name="test_agent", static_instruction="Static instruction as string"
+  )
+
+  invocation_context = await _create_invocation_context(agent)
+
+  llm_request = LlmRequest()
+
+  # Run the instruction processor
+  async for _ in request_processor.run_async(invocation_context, llm_request):
+    pass
+
+  # Static instruction should be added to system instructions, not contents
+  assert len(llm_request.contents) == 0
+  assert llm_request.config.system_instruction == "Static instruction as string"
+
+
+@pytest.mark.parametrize("llm_backend", ["GOOGLE_AI", "VERTEX"])
+@pytest.mark.asyncio
+async def test_static_instruction_part_converted_to_system(llm_backend):
+  """Test that Part static instructions are converted and added to system_instruction."""
+  static_part = types.Part(text="Static instruction from Part")
+  agent = LlmAgent(name="test_agent", static_instruction=static_part)
+
+  invocation_context = await _create_invocation_context(agent)
+  llm_request = LlmRequest()
+
+  # Run the instruction processor
+  async for _ in request_processor.run_async(invocation_context, llm_request):
+    pass
+
+  # Part should be converted to Content and text extracted to system instruction
+  assert llm_request.config.system_instruction == "Static instruction from Part"
+
+
+@pytest.mark.parametrize("llm_backend", ["GOOGLE_AI", "VERTEX"])
+@pytest.mark.asyncio
+async def test_static_instruction_list_of_parts_converted_to_system(
+    llm_backend,
+):
+  """Test that list of Parts is converted and added to system_instruction."""
+  static_parts_list = [
+      types.Part(text="First part"),
+      types.Part(text="Second part"),
+  ]
+  agent = LlmAgent(name="test_agent", static_instruction=static_parts_list)
+
+  invocation_context = await _create_invocation_context(agent)
+  llm_request = LlmRequest()
+
+  # Run the instruction processor
+  async for _ in request_processor.run_async(invocation_context, llm_request):
+    pass
+
+  # List of parts should be converted to Content with text extracted
+  assert llm_request.config.system_instruction == "First part\n\nSecond part"
+
+
+@pytest.mark.parametrize("llm_backend", ["GOOGLE_AI", "VERTEX"])
+@pytest.mark.asyncio
+async def test_static_instruction_list_of_strings_converted_to_system(
+    llm_backend,
+):
+  """Test that list of strings is converted and added to system_instruction."""
+  static_strings_list = ["First instruction", "Second instruction"]
+  agent = LlmAgent(name="test_agent", static_instruction=static_strings_list)
+
+  invocation_context = await _create_invocation_context(agent)
+  llm_request = LlmRequest()
+
+  # Run the instruction processor
+  async for _ in request_processor.run_async(invocation_context, llm_request):
+    pass
+
+  # List of strings should be converted to Content with text extracted
+  assert (
+      llm_request.config.system_instruction
+      == "First instruction\n\nSecond instruction"
+  )
+
+
 @pytest.mark.parametrize("llm_backend", ["GOOGLE_AI", "VERTEX"])
 @pytest.mark.asyncio
 async def test_dynamic_instruction_without_static_goes_to_system(llm_backend):
@@ -658,6 +793,36 @@ async def test_dynamic_instruction_with_static_not_in_system(llm_backend):
   assert llm_request.contents[0].parts[0].text == "Dynamic instruction content"
 
 
+@pytest.mark.parametrize("llm_backend", ["GOOGLE_AI", "VERTEX"])
+@pytest.mark.asyncio
+async def test_dynamic_instruction_with_string_static_not_in_system(
+    llm_backend,
+):
+  """Test that dynamic instructions go to user content when string static_instruction exists."""
+  agent = LlmAgent(
+      name="test_agent",
+      instruction="Dynamic instruction content",
+      static_instruction="Static instruction as string",
+  )
+
+  invocation_context = await _create_invocation_context(agent)
+
+  llm_request = LlmRequest()
+
+  # Run the instruction processor
+  async for _ in request_processor.run_async(invocation_context, llm_request):
+    pass
+
+  # Static instruction should be in system instructions
+  assert llm_request.config.system_instruction == "Static instruction as string"
+
+  # Dynamic instruction should be added as user content
+  assert len(llm_request.contents) == 1
+  assert llm_request.contents[0].role == "user"
+  assert len(llm_request.contents[0].parts) == 1
+  assert llm_request.contents[0].parts[0].text == "Dynamic instruction content"
+
+
 @pytest.mark.parametrize("llm_backend", ["GOOGLE_AI", "VERTEX"])
 @pytest.mark.asyncio
 async def test_dynamic_instructions_added_to_user_content(llm_backend):