task:
  benchmark_suite: sbibm            # simulation-based inference benchmarking suite
  name: lotka_volterra              # predator-prey task with time-series observations
  num_posterior_samples: 10000      # posterior draws per test observation at evaluation time

training:
  initial_learning_rate: 1e-3
  optimizer: adamw
  mode: offline                     # train on a fixed, pre-simulated dataset
  num_simulations: 10000            # size of the offline training set
  epochs: 100
  batch_size: 32

architecture:
  summary_network:
    type: "bayesflow.networks>TimeSeriesNetwork"   # learned embedding of the simulated time series
  inference_network:
    type: "bayesflow.networks>FreeFormFlow"
    kwargs:
      beta: 500.0                   # weight of the reconstruction term in the free-form flow loss
      encoder_subnet_kwargs:
        widths: [128, 128, 128]     # hidden-layer widths of the encoder subnetwork
      decoder_subnet_kwargs:
        widths: [128, 128, 128]     # hidden-layer widths of the decoder subnetwork
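
# ---------------------------------------------------------------------------
# Reference sketch (kept in comments so this file remains valid YAML): how the
# settings above could be instantiated with BayesFlow in Python. The network
# class names and keyword arguments are taken from the "type"/"kwargs" fields
# above; whether these classes accept exactly these constructor arguments, and
# how the networks are then combined into an approximator/workflow and trained
# offline, depends on the installed BayesFlow version and the surrounding
# benchmark code, and is not shown here.
#
#   import keras
#   import bayesflow as bf
#
#   # Summary network: embeds the simulated Lotka-Volterra time series.
#   summary_net = bf.networks.TimeSeriesNetwork()
#
#   # Inference network: free-form flow configured as in this file.
#   inference_net = bf.networks.FreeFormFlow(
#       beta=500.0,
#       encoder_subnet_kwargs={"widths": [128, 128, 128]},
#       decoder_subnet_kwargs={"widths": [128, 128, 128]},
#   )
#
#   # Optimizer matching the training block (adamw, initial_learning_rate).
#   optimizer = keras.optimizers.AdamW(learning_rate=1e-3)
# ---------------------------------------------------------------------------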