diff --git a/bayesflow/networks/summary_network.py b/bayesflow/networks/summary_network.py
index e821be3f3..d7df0b476 100644
--- a/bayesflow/networks/summary_network.py
+++ b/bayesflow/networks/summary_network.py
@@ -53,4 +53,6 @@ def compute_metrics(self, x: Tensor, stage: str = "training", **kwargs) -> dict[
 
     @classmethod
     def from_config(cls, config, custom_objects=None):
+        if hasattr(cls.get_config, "_is_default") and cls.get_config._is_default:
+            return cls(**config)
         return cls(**deserialize(config, custom_objects=custom_objects))
diff --git a/bayesflow/workflows/basic_workflow.py b/bayesflow/workflows/basic_workflow.py
index c30271eb1..34fa03794 100644
--- a/bayesflow/workflows/basic_workflow.py
+++ b/bayesflow/workflows/basic_workflow.py
@@ -914,6 +914,7 @@ def build_optimizer(self, epochs: int, num_batches: int, strategy: str) -> keras
             self.optimizer = keras.optimizers.Adam(learning_rate, clipnorm=1.5)
         else:
             self.optimizer = keras.optimizers.AdamW(learning_rate, weight_decay=5e-3, clipnorm=1.5)
+        return self.optimizer
 
     def _fit(
         self,
@@ -955,9 +956,10 @@ def _fit(
         else:
             kwargs["callbacks"] = [model_checkpoint_callback]
 
-        self.build_optimizer(epochs, dataset.num_batches, strategy=strategy)
-
-        if not self.approximator.built:
+        # returns None if no new optimizer was built and assigned to self.optimizer, which indicates we do not have
+        # to (re)compile the approximator.
+        optimizer = self.build_optimizer(epochs, dataset.num_batches, strategy=strategy)
+        if optimizer is not None:
             self.approximator.compile(optimizer=self.optimizer, metrics=kwargs.pop("metrics", None))
 
         try:
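For context, a minimal sketch of the calling contract the `_fit` change assumes: `build_optimizer` returns `None` when an optimizer already exists (the early-return guard is not shown in this diff and is assumed here) and returns the newly built optimizer otherwise, so the approximator is only (re)compiled when a fresh optimizer was created. Class and parameter names below are illustrative, not the actual BasicWorkflow implementation.

```python
import keras


class WorkflowSketch:
    """Illustrative stand-in for BasicWorkflow; only the optimizer/compile logic is sketched."""

    def __init__(self):
        self.optimizer = None

    def build_optimizer(self, epochs: int, num_batches: int, strategy: str):
        if self.optimizer is not None:
            # Nothing new was built -> signal the caller to skip recompiling (assumed guard).
            return None
        if strategy.lower() == "online":
            self.optimizer = keras.optimizers.Adam(1e-3, clipnorm=1.5)
        else:
            self.optimizer = keras.optimizers.AdamW(1e-3, weight_decay=5e-3, clipnorm=1.5)
        # Returning the optimizer tells the caller a (re)compile is needed.
        return self.optimizer

    def _fit(self, approximator, epochs: int, num_batches: int, strategy: str):
        optimizer = self.build_optimizer(epochs, num_batches, strategy=strategy)
        if optimizer is not None:
            # Only compile when build_optimizer actually created a new optimizer.
            approximator.compile(optimizer=self.optimizer)
```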