diff --git a/assets/diff_class.png b/assets/diff_class.png
new file mode 100644
index 0000000..a742395
Binary files /dev/null and b/assets/diff_class.png differ
diff --git a/assets/same_class.png b/assets/same_class.png
new file mode 100644
index 0000000..f082b4b
Binary files /dev/null and b/assets/same_class.png differ
diff --git a/assets/same_class_diff_color.png b/assets/same_class_diff_color.png
deleted file mode 100644
index c5d98c8..0000000
Binary files a/assets/same_class_diff_color.png and /dev/null differ
diff --git a/assets/same_color_diff_class.png b/assets/same_color_diff_class.png
deleted file mode 100644
index 775ce42..0000000
Binary files a/assets/same_color_diff_class.png and /dev/null differ
diff --git a/solution.py b/solution.py
index c70861b..a5f0c6f 100644
--- a/solution.py
+++ b/solution.py
@@ -967,7 +967,7 @@ def visualize_color_attribution_and_counterfactual(
 # %%
 for idx in range(batch_size):
     print("Source class:", y[idx].item())
-    print("Target class:", 0)
+    print("Target class:", target_class)
     visualize_color_attribution_and_counterfactual(
         attributions[idx].cpu().numpy(), x[idx].cpu().numpy(), x_fake[idx].cpu().numpy()
     )
@@ -980,6 +980,16 @@ def visualize_color_attribution_and_counterfactual(
 #
 #
 # %% [markdown]
+# By now you will have hopefully noticed that it isn't the exact color of the image that determines its class, but that two images with a very similar color can be of different classes!
+#
+# Here are two examples of image-counterfactual-attribution triplets.
+# You'll notice that they are *very* similar in every way! But in one triplet the two images belong to different classes, while in the other they belong to the same class!
+#
+# ![same_class](assets/same_class.png)
+# ![diff_class](assets/diff_class.png)
+#
+# We are missing a crucial step of the explanation pipeline: a quantification of how the class changes over the interpolation.
+#
 # In the lecture, we used the attribution to act as a mask, to gradually go from the original image to the counterfactual image.
 # This allowed us to classify all of the intermediate images, and learn how the class changed over the interpolation.
 # Here we have a much simpler task so we have some advantages:
@@ -1053,17 +1063,6 @@ def visualize_color_attribution_and_counterfactual(
 #
 # %% [markdown]
 # # Part 5: Exploring the Style Space, finding the answer
-# By now you will have hopefully noticed that it isn't the exact color of the image that determines its class, but that two images with a very similar color can be of different classes!
-#
-# Here is an example of two images that are very similar in color, but are of different classes.
-# ![same_color_diff_class](assets/same_color_diff_class.png)
-# While both of the images are yellow, the attribution tells us (if you squint!) that one of the yellows has slightly more blue in it!
-#
-# Conversely, here is an example of two images with very different colors, but that are of the same class:
-# ![same_class_diff_color](assets/same_class_diff_color.png)
-# Here the attribution is empty! Using the discriminative attribution we can see that the significant color change doesn't matter at all!
-#
-#
 # So color is important... but not always? What's going on!?
 # There is a final piece of information that we can use to solve the puzzle: the style space.
 # %% [markdown]
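
Note on the markdown cell added in the second hunk: it refers to using the attribution as a mask to build intermediate images between the original and the counterfactual, classifying each one, and so quantifying how the class changes over the interpolation. Below is a minimal sketch of that quantification step, assuming a trained `classifier` module and numpy arrays `x`, `x_fake`, and `attribution` shaped `(C, H, W)` as passed around in the notebook; the function name and signature are illustrative, not the notebook's actual helpers.

```python
import numpy as np
import torch


def class_change_over_interpolation(classifier, x, x_fake, attribution, target_class, num_steps=10):
    """Classify hybrids that copy progressively more of the counterfactual
    into the original image, ordered by attribution magnitude."""
    # Collapse the channel dimension so each pixel gets one importance score.
    importance = np.abs(attribution).sum(axis=0)  # (H, W)
    # Thresholds from the highest quantile down to the lowest: the mask grows
    # from "most important pixels only" to "every pixel".
    thresholds = np.quantile(importance, np.linspace(1.0, 0.0, num_steps))
    probabilities = []
    for t in thresholds:
        mask = (importance >= t).astype(np.float32)[None, ...]  # broadcast over channels
        hybrid = mask * x_fake + (1.0 - mask) * x
        with torch.no_grad():
            logits = classifier(torch.from_numpy(hybrid).float().unsqueeze(0))
            probabilities.append(torch.softmax(logits, dim=1)[0, target_class].item())
    return thresholds, probabilities
```

Plotting `probabilities` against the fraction of pixels swapped in gives the curve of how the class changes over the interpolation, i.e. the step the new cell says is missing from the explanation pipeline.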