""" Credits: This code is adapted from code originally posted by François Chollet at URL https://keras.io/examples/vision/visualizing_what_convnets_learn/ . """ #%% import numpy as np import tensorflow as tf from tensorflow import keras from tensorflow.keras.applications.resnet_v2 import preprocess_input, decode_predictions from tensorflow.keras.preprocessing import image from IPython.display import Image, display # The dimensions of our input image img_width = 224 img_height = 224 #%% # Build a ResNet50V2 model loaded with pre-trained ImageNet weights model = keras.applications.ResNet50V2(weights="imagenet", include_top=True) #%% # Here we create a list that contains some useful information for each of the # 1000 classes recognized by the ResNet50V2 model. # class_info[i] contains the triple (index, imagenet_label, english_label), where: # - index is i # - imagenet_label is a string used in imagenet filenames and annotation files. # - english_label is a string describing the class in English numbers = np.array(range(1000,0,-1)) numbers = numbers.reshape((1,1000,)) class_info_temp = decode_predictions(numbers, top=1000)[0] class_info = [[1000-x[2], x[0], x[1]] for x in class_info_temp] #%% #@tf.function def gradient_ascent_step(img, model, class_index, learning_rate): with tf.GradientTape() as tape: tape.watch(img) outputs = model(img) gain = outputs[:, class_index] # Compute gradients. grads = tape.gradient(gain, img) # Normalize gradients. grads = tf.math.l2_normalize(grads) img += learning_rate * grads return gain, img def make_adversarial(class_index, img, thr=0.99, learning_rate=1): # We run gradient ascent for at most the specified number of iterations. iterations = 200 for iteration in range(iterations): previous = img gain, img = gradient_ascent_step(img, model, class_index, learning_rate) print("iteration %3d, gain %.4f" % (iteration, gain)) if (gain > thr): break # Note that we return previous, NOT img. If we reached a gain > thr, # that gain was reached by previous, NOT by img. # Also, previous is a 4D array, so we return previous.numpy()[0] to make # it a 3D array. 
#%%
def classify_files(filenames):
    for path in filenames:
        img = image.load_img(path, target_size=(img_width, img_height))
        img = image.img_to_array(img)
        x = np.expand_dims(img, axis=0)
        x = preprocess_input(x)
        preds = model.predict(x)
        # Decode the results into a list of tuples (class, description, probability)
        # (one such list for each sample in the batch).
        print('Path:', path)
        print('Predicted:', decode_predictions(preds, top=3)[0])
        print()


filenames = """
001_n01443537_goldfish_1.jpg
001_n01443537_goldfish_2.jpg
015_n01558993_robin_1.jpg
015_n01558993_robin_2.jpg
071_n01770393_scorpion_1.jpg
071_n01770393_scorpion_2.jpg
105_n01882714_koala_1.jpg
105_n01882714_koala_2.jpg
164_n02088632_bluetick_1.jpg
164_n02088632_bluetick_2.jpg
292_n02129604_tiger_1.jpg
292_n02129604_tiger_2.jpg
367_n02481823_chimp_1.jpg
367_n02481823_chimp_2.jpg
404_n02690373_airliner_1.jpg
404_n02690373_airliner_2.jpg
409_n02708093_clock_1.jpg
409_n02708093_clock_2.jpg
526_n03179701_desk_1.jpg
526_n03179701_desk_2.jpg
677_n03804744_nail_1.jpg
677_n03804744_nail_2.jpg
698_n03877845_palace_1.jpg
698_n03877845_palace_2.jpg
"""
filenames = ['data/' + x for x in filenames.split()]
classify_files(filenames)

#%%
# Specify the filename for the input real image.
path = filenames[14]
thr = 0.7
learning_rate = 1

img0 = image.load_img(path, target_size=(224, 224))
display(img0)
img = image.img_to_array(img0)
x = np.expand_dims(img, axis=0)
x = preprocess_input(x)
tx = tf.convert_to_tensor(x)

(gain, result) = make_adversarial(105, tx, thr=thr, learning_rate=learning_rate)
keras.preprocessing.image.save_img("0.png", result)
display(Image("0.png"))

img0 = image.load_img("0.png", target_size=(224, 224))
img = image.img_to_array(img0)
rx = np.expand_dims(img, axis=0)
rx = preprocess_input(rx)

print('Path: %s, thr=%.4f, learning_rate = %.2f' % (path, thr, learning_rate))
preds = model.predict(x)
print('Prediction on original:', decode_predictions(preds, top=3)[0])
preds = model.predict(rx)
print('Prediction on adversarial:', decode_predictions(preds, top=3)[0])

#%%
# Generate results for all combinations of real images and target classes.
thr = 0.7
learning_rate = 1
for filename in filenames:
    img0 = image.load_img(filename, target_size=(224, 224))
    img = image.img_to_array(img0)
    x = np.expand_dims(img, axis=0)
    x = preprocess_input(x)
    tx = tf.convert_to_tensor(x)
    for c in [1, 15, 71, 105, 164, 292, 367, 404, 409, 526, 677, 698]:
        (gain, result) = make_adversarial(c, tx, thr=thr, learning_rate=learning_rate)
        keras.preprocessing.image.save_img("0.png", result)
        display(Image("0.png"))

        img0 = image.load_img("0.png", target_size=(224, 224))
        img = image.img_to_array(img0)
        rx = np.expand_dims(img, axis=0)
        rx = preprocess_input(rx)

        print('Path: %s, thr=%.4f, learning_rate = %.2f' % (filename, thr, learning_rate))
        preds = model.predict(x)
        print('Prediction on original:', decode_predictions(preds, top=3)[0])
        preds = model.predict(rx)
        print('Prediction on adversarial:', decode_predictions(preds, top=3)[0])

        target_name = class_info[c][2]
        output_name = class_info[preds.argmax()][2]
        out_file = 'generated_200_1b_7/%s_%s_%s.png' % (filename[5:-4], target_name, output_name)
        print(out_file)
        keras.preprocessing.image.save_img(out_file, result)
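#%%
# A rough check (a sketch, not part of the original pipeline): compare the last
# adversarial image produced by the loop above with its preprocessed original.
# At this point x still holds the last original and result the last adversarial
# image, both in preprocess_input units (roughly [-1, 1]), so the numbers below
# are not raw pixel differences.
perturbation = result - x[0]
print('max |perturbation|  = %.4f' % np.abs(perturbation).max())
print('mean |perturbation| = %.4f' % np.abs(perturbation).mean())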
#%%
# Measure the success rate (for what percentage of the generated images
# we got ResNet50V2 to output the target class).
directory = 'generated_200_10_7'
count = 0
source_files = os.listdir('data')
out_files = os.listdir(directory)
errors = []
for file in source_files:
    for c in [1, 15, 71, 105, 164, 292, 367, 404, 409, 526, 677, 698]:
        target_name = class_info[c][2]
        out_file = '%s/%s_%s_%s.png' % (directory, file[:-4], target_name, target_name)
        if os.path.exists(out_file):
            count += 1
        else:
            errors.append((file, c, target_name))
print(errors)

# The directory contains 24 real images; we exclude them from the counting.
number_real = len(source_files)
number_success = count - number_real
total = len(out_files) - number_real
print('%d successful results, %.2f%% success' % (number_success, number_success / total * 100))

errors_200_1 = errors
#errors_200_10 = errors
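#%%
# A possible follow-up (a sketch building on the check above, assuming the same
# '<source>_<target>_<predicted>.png' naming convention): break the successes
# down per target class, i.e. count how many source images were successfully
# pushed towards each of the 12 targets.
per_class = {}
for c in [1, 15, 71, 105, 164, 292, 367, 404, 409, 526, 677, 698]:
    target_name = class_info[c][2]
    hits = sum(os.path.exists('%s/%s_%s_%s.png' % (directory, f[:-4], target_name, target_name))
               for f in source_files)
    per_class[target_name] = hits
print(per_class)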