diff --git a/docs/vision/semantic_segmentation.ipynb b/docs/vision/semantic_segmentation.ipynb
index 76e1230ad1e..ca4ae43dbfe 100644
--- a/docs/vision/semantic_segmentation.ipynb
+++ b/docs/vision/semantic_segmentation.ipynb
@@ -170,8 +170,8 @@
    },
    "outputs": [],
    "source": [
-    "(train_ds, val_ds, test_ds), info = tfds.load(\n",
-    "    'oxford_iiit_pet:3.*.*',\n",
+    "(train_ds, val_ds, test_ds), info = tfds.data_source(\n",
+    "    'oxford_iiit_pet:4.*.*',\n",
     "    split=['train+test[:50%]', 'test[50%:80%]', 'test[80%:100%]'],\n",
     "    with_info=True)\n",
     "info"
@@ -465,12 +465,12 @@
     "\n",
     "\n",
     "train_steps = 2000\n",
-    "exp_config.trainer.steps_per_loop = int(train_ds.__len__().numpy() // BATCH_SIZE)\n",
+    "exp_config.trainer.steps_per_loop = int(train_ds.__len__() // BATCH_SIZE)\n",
     "\n",
     "exp_config.trainer.summary_interval = exp_config.trainer.steps_per_loop # steps_per_loop = num_of_validation_examples // eval_batch_size\n",
     "exp_config.trainer.checkpoint_interval = exp_config.trainer.steps_per_loop\n",
     "exp_config.trainer.validation_interval = exp_config.trainer.steps_per_loop\n",
-    "exp_config.trainer.validation_steps = int(train_ds.__len__().numpy() // BATCH_SIZE) # validation_steps = num_of_validation_examples // eval_batch_size\n",
+    "exp_config.trainer.validation_steps = int(train_ds.__len__() // BATCH_SIZE) # validation_steps = num_of_validation_examples // eval_batch_size\n",
     "exp_config.trainer.train_steps = train_steps\n",
     "exp_config.trainer.optimizer_config.warmup.linear.warmup_steps = exp_config.trainer.steps_per_loop\n",
     "exp_config.trainer.optimizer_config.learning_rate.type = 'cosine'\n",
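
For context, a minimal sketch of the behavior the second hunk relies on: `tfds.load` returns `tf.data.Dataset` objects, whose `__len__()` yields a scalar `tf.Tensor` (hence the old `.numpy()` calls), whereas `tfds.data_source` returns random-access data sources whose length is a plain Python `int`. The call below mirrors the diff; `BATCH_SIZE` and the `'image'` feature key are illustrative assumptions, not part of this change.

```python
# Sketch only -- not part of the diff. Mirrors the updated notebook cell.
import tensorflow_datasets as tfds

# tfds.data_source returns random-access sources backed by on-disk files
# instead of a tf.data pipeline, so len() is a plain Python int.
(train_src, val_src, test_src), info = tfds.data_source(
    'oxford_iiit_pet:4.*.*',
    split=['train+test[:50%]', 'test[50%:80%]', 'test[80%:100%]'],
    with_info=True)

BATCH_SIZE = 16  # placeholder; the notebook defines its own BATCH_SIZE

# No .numpy() needed: __len__() is already an int, matching the diff.
steps_per_loop = int(train_src.__len__() // BATCH_SIZE)

example = train_src[0]  # indexing returns a dict of NumPy arrays
print(steps_per_loop, example['image'].shape)
```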