@@ -686,6 +686,34 @@ def test_transfer_learn_with_forces(options_pet, caplog, monkeypatch, tmp_path):
686686 assert f"Starting finetuning from '{MODEL_PATH_PET}'" in caplog.text
687687
688688
def test_transfer_learn_variant(options_pet, caplog, monkeypatch, tmp_path):
    """Finetune a pretrained PET model on a renamed target.

    The pretrained checkpoint's ``energy`` target is remapped to
    ``energy/finetuned`` (keeping the original ``energy``/``forces`` keys in
    the dataset), which exercises transfer learning onto a target name the
    checkpoint does not contain.
    """
    monkeypatch.chdir(tmp_path)

    options_pet_transfer_learn = copy.deepcopy(options_pet)
    # Request "full" finetuning, starting from the pretrained PET checkpoint.
    options_pet_transfer_learn["architecture"]["training"]["finetune"] = {
        "method": "full",
        "read_from": str(MODEL_PATH_PET),
    }
    options_pet_transfer_learn["training_set"]["systems"]["read_from"] = (
        "ethanol_reduced_100.xyz"
    )
    # Rename the target: move the "energy" section to "energy/finetuned",
    # while still reading the "energy"/"forces" keys from the dataset.
    targets = options_pet_transfer_learn["training_set"]["targets"]
    targets["energy/finetuned"] = targets.pop("energy")
    targets["energy/finetuned"]["key"] = "energy"
    targets["energy/finetuned"]["forces"] = {"key": "forces"}
    shutil.copy(DATASET_PATH_ETHANOL, "ethanol_reduced_100.xyz")

    caplog.set_level(logging.INFO)
    train_model(options_pet_transfer_learn)

    assert f"Starting finetuning from '{MODEL_PATH_PET}'" in caplog.text
716+
689717@pytest .mark .parametrize ("move_folder" , [True , False ])
690718def test_restart_auto (options , caplog , monkeypatch , tmp_path , move_folder ):
691719 """Test that continuing with the `auto` keyword results in
0 commit comments