 from lightning.pytorch.utilities.imports import _TORCHVISION_AVAILABLE
 from lightning_utilities import compare_version
 from lightning_utilities.test.warning import no_warning_call
+from packaging.version import Version
 from tensorboard.backend.event_processing import event_accumulator
 from tensorboard.plugins.hparams.plugin_data_pb2 import HParamsPluginData
 from torch.optim import SGD
@@ -64,6 +65,14 @@ def lazy_instance(*args, **kwargs):
     return None


+_xfail_python_ge_3_11_9 = pytest.mark.xfail(
+    # https://github.com/omni-us/jsonargparse/issues/484
+    Version(f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}") >= Version("3.11.9"),
+    strict=False,
+    reason="jsonargparse + Python 3.11.9 compatibility issue",
+)
+
+
 @contextmanager
 def mock_subclasses(baseclass, *subclasses):
     """Mocks baseclass so that it only has the given child subclasses."""
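
For reference, a marker built with pytest.mark.xfail like the one added above is applied as a plain decorator. A minimal standalone sketch, assuming a hypothetical test name that is not part of this change:

    import sys

    import pytest
    from packaging.version import Version

    # Expected to fail on Python >= 3.11.9; strict=False keeps an unexpected pass from failing the suite.
    _xfail_python_ge_3_11_9 = pytest.mark.xfail(
        Version(f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}") >= Version("3.11.9"),
        strict=False,
        reason="jsonargparse + Python 3.11.9 compatibility issue",
    )

    @_xfail_python_ge_3_11_9
    def test_example():  # hypothetical test, only to illustrate how the marker is applied
        assert True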
@@ -347,6 +356,7 @@ def test_save_to_log_dir_false_error():
     )


+@_xfail_python_ge_3_11_9
 def test_lightning_cli_logger_save_config(cleandir):
     class LoggerSaveConfigCallback(SaveConfigCallback):
         def __init__(self, *args, **kwargs) -> None:
@@ -736,6 +746,7 @@ def add_arguments_to_parser(self, parser):
     assert cli.trainer.lr_scheduler_configs[0].scheduler.step_size == 50


+@_xfail_python_ge_3_11_9
 @pytest.mark.parametrize("use_generic_base_class", [False, True])
 def test_lightning_cli_optimizers_and_lr_scheduler_with_link_to(use_generic_base_class):
     class MyLightningCLI(LightningCLI):
@@ -782,7 +793,7 @@ def __init__(self, optim1: dict, optim2: dict, scheduler: dict):
     assert isinstance(cli.model.scheduler, torch.optim.lr_scheduler.ExponentialLR)


-@pytest.mark.skipif(compare_version("jsonargparse", operator.lt, "4.21.3"), reason="vulnerability with failing imports")
+@_xfail_python_ge_3_11_9
 def test_lightning_cli_optimizers_and_lr_scheduler_with_callable_type():
     class TestModel(BoringModel):
         def __init__(
@@ -1031,6 +1042,7 @@ def __init__(self, foo, bar=5):
         self.bar = bar


+@_xfail_python_ge_3_11_9
 def test_lightning_cli_model_short_arguments():
     with mock.patch("sys.argv", ["any.py", "fit", "--model=BoringModel"]), mock.patch(
         "lightning.pytorch.Trainer._fit_impl"
@@ -1055,6 +1067,7 @@ def __init__(self, foo, bar=5):
         self.bar = bar


+@_xfail_python_ge_3_11_9
 def test_lightning_cli_datamodule_short_arguments():
     # with set model
     with mock.patch("sys.argv", ["any.py", "fit", "--data=BoringDataModule"]), mock.patch(
@@ -1100,6 +1113,7 @@ def test_lightning_cli_datamodule_short_arguments():
     assert cli.parser.groups["data"].group_class is BoringDataModule


+@_xfail_python_ge_3_11_9
 @pytest.mark.parametrize("use_class_path_callbacks", [False, True])
 def test_callbacks_append(use_class_path_callbacks):
     """This test validates registries are used when simplified command line are being used."""
@@ -1143,6 +1157,7 @@ def test_callbacks_append(use_class_path_callbacks):
     assert all(t in callback_types for t in expected)


+@_xfail_python_ge_3_11_9
 def test_optimizers_and_lr_schedulers_reload(cleandir):
     base = ["any.py", "--trainer.max_epochs=1"]
     input = base + [
@@ -1174,6 +1189,7 @@ def test_optimizers_and_lr_schedulers_reload(cleandir):
         LightningCLI(BoringModel, run=False)


+@_xfail_python_ge_3_11_9
 def test_optimizers_and_lr_schedulers_add_arguments_to_parser_implemented_reload(cleandir):
     class TestLightningCLI(LightningCLI):
         def __init__(self, *args):
@@ -1427,6 +1443,7 @@ def test_cli_help_message():
     assert "Implements Adam" in shorthand_help.getvalue()


+@_xfail_python_ge_3_11_9
 def test_cli_reducelronplateau():
     with mock.patch(
         "sys.argv", ["any.py", "--optimizer=Adam", "--lr_scheduler=ReduceLROnPlateau", "--lr_scheduler.monitor=foo"]
@@ -1437,6 +1454,7 @@ def test_cli_reducelronplateau():
     assert config["lr_scheduler"]["scheduler"].monitor == "foo"


+@_xfail_python_ge_3_11_9
 def test_cli_configureoptimizers_can_be_overridden():
     class MyCLI(LightningCLI):
         def __init__(self):
@@ -1481,6 +1499,7 @@ def __init__(self, activation: torch.nn.Module = lazy_instance(torch.nn.LeakyReL
     assert cli.model.activation is not model.activation


+@_xfail_python_ge_3_11_9
 def test_ddpstrategy_instantiation_and_find_unused_parameters(mps_count_0):
     strategy_default = lazy_instance(DDPStrategy, find_unused_parameters=True)
     with mock.patch("sys.argv", ["any.py", "--trainer.strategy.process_group_backend=group"]):
@@ -1496,6 +1515,7 @@ def test_ddpstrategy_instantiation_and_find_unused_parameters(mps_count_0):
     assert strategy_default is not cli.config_init.trainer.strategy


+@_xfail_python_ge_3_11_9
 def test_cli_logger_shorthand():
     with mock.patch("sys.argv", ["any.py"]):
         cli = LightningCLI(TestModel, run=False, trainer_defaults={"logger": False})
@@ -1526,6 +1546,7 @@ def _test_logger_init_args(logger_name, init, unresolved=None):
     assert data["dict_kwargs"] == unresolved


+@_xfail_python_ge_3_11_9
 def test_comet_logger_init_args():
     _test_logger_init_args(
         "CometLogger",
@@ -1541,6 +1562,7 @@ def test_comet_logger_init_args():
     strict=False,
     reason="TypeError on Windows when parsing",
 )
+@_xfail_python_ge_3_11_9
 def test_neptune_logger_init_args():
     _test_logger_init_args(
         "NeptuneLogger",
@@ -1549,6 +1571,7 @@ def test_neptune_logger_init_args():
     )


+@_xfail_python_ge_3_11_9
 def test_tensorboard_logger_init_args():
     _test_logger_init_args(
         "TensorBoardLogger",
@@ -1560,6 +1583,7 @@ def test_tensorboard_logger_init_args():
     )


+@_xfail_python_ge_3_11_9
 def test_wandb_logger_init_args():
     _test_logger_init_args(
         "WandbLogger",
@@ -1644,6 +1668,7 @@ def __init__(self, a_func: Callable = torch.nn.Softmax):
     assert "a_func: torch.nn.Softmax" in out.getvalue()


+@_xfail_python_ge_3_11_9
 def test_pytorch_profiler_init_args():
     from lightning.pytorch.profilers import Profiler, PyTorchProfiler
