Mirror of https://github.com/Shawn-Shan/fawkes.git

1. Correct a typo ("orginal" → "original").

2. Rewrite the distance function to drop the sklearn dependency.


Former-commit-id: a9e6234a80e371b559750654fc60f3b1642eb74a [formerly d2dc50e6357b516398beb374a032b7cc2f169d70]
Former-commit-id: 3e566674f17cca4b89afe4d1fecfe1009c8166b8
Author: smy17
Date:   2020-07-09 23:26:45 +08:00
Parent: feb2294993
Commit: 886a04ded0
3 changed files with 26 additions and 12 deletions

File 1 of 3:

@@ -102,26 +102,26 @@ class Fawkes(object):
         faces = Faces(image_paths, self.sess, verbose=1)
-        orginal_images = faces.cropped_faces
-        orginal_images = np.array(orginal_images)
+        original_images = faces.cropped_faces
+        original_images = np.array(original_images)
         if separate_target:
             target_embedding = []
-            for org_img in orginal_images:
+            for org_img in original_images:
                 org_img = org_img.reshape([1] + list(org_img.shape))
                 tar_emb = select_target_label(org_img, self.feature_extractors_ls, self.fs_names)
                 target_embedding.append(tar_emb)
             target_embedding = np.concatenate(target_embedding)
         else:
-            target_embedding = select_target_label(orginal_images, self.feature_extractors_ls, self.fs_names)
+            target_embedding = select_target_label(original_images, self.feature_extractors_ls, self.fs_names)
-        protected_images = generate_cloak_images(self.sess, self.feature_extractors_ls, orginal_images,
+        protected_images = generate_cloak_images(self.sess, self.feature_extractors_ls, original_images,
                                                  target_emb=target_embedding, th=th, faces=faces, sd=sd,
                                                  lr=lr, max_step=max_step, batch_size=batch_size)
         faces.cloaked_cropped_faces = protected_images
-        cloak_perturbation = reverse_process_cloaked(protected_images) - reverse_process_cloaked(orginal_images)
+        cloak_perturbation = reverse_process_cloaked(protected_images) - reverse_process_cloaked(original_images)
         final_images = faces.merge_faces(cloak_perturbation)
         for p_img, cloaked_img, path in zip(final_images, protected_images, image_paths):
@@ -129,7 +129,7 @@ class Fawkes(object):
             dump_image(p_img, file_name, format=format)
         elapsed_time = time.time() - start_time
-        print('attack cost %f s' % (elapsed_time))
+        print('attack cost %f s' % elapsed_time)
         print("Done!")

File 2 of 3:

@@ -25,7 +25,6 @@ from keras.layers import Dense, Activation
 from keras.models import Model
 from keras.preprocessing import image
 from skimage.transform import resize
-from sklearn.metrics import pairwise_distances
 from fawkes.align_face import align, aligner
 from six.moves.urllib.request import urlopen
@@ -422,11 +421,27 @@ def extractor_ls_predict(feature_extractors_ls, X):
     return concated_feature_ls
+
+def pairwise_l2_distance(A, B):
+    BT = B.transpose()
+    vecProd = np.dot(A, BT)
+    SqA = A ** 2
+    sumSqA = np.matrix(np.sum(SqA, axis=1))
+    sumSqAEx = np.tile(sumSqA.transpose(), (1, vecProd.shape[1]))
+    SqB = B ** 2
+    sumSqB = np.sum(SqB, axis=1)
+    sumSqBEx = np.tile(sumSqB, (vecProd.shape[0], 1))
+    SqED = sumSqBEx + sumSqAEx - 2 * vecProd
+    SqED[SqED < 0] = 0.0
+    ED = np.sqrt(SqED)
+    return ED
+
 def calculate_dist_score(a, b, feature_extractors_ls, metric='l2'):
     features1 = extractor_ls_predict(feature_extractors_ls, a)
     features2 = extractor_ls_predict(feature_extractors_ls, b)
-    pair_cos = pairwise_distances(features1, features2, metric)
+    pair_cos = pairwise_l2_distance(features1, features2)
     max_sum = np.min(pair_cos, axis=0)
     max_sum_arg = np.argsort(max_sum)[::-1]
     max_sum_arg = max_sum_arg[:len(a)]
@@ -447,7 +462,7 @@ def select_target_label(imgs, feature_extractors_ls, feature_extractors_names, m
     embs = [p[1] for p in items]
     embs = np.array(embs)
-    pair_dist = pairwise_distances(original_feature_x, embs, metric)
+    pair_dist = pairwise_l2_distance(original_feature_x, embs)
     max_sum = np.min(pair_dist, axis=0)
     max_id = np.argmax(max_sum)
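The committed pairwise_l2_distance reproduces the Euclidean distance matrix that sklearn.metrics.pairwise_distances returned here with metric='l2', using the expansion ||a - b||^2 = ||a||^2 + ||b||^2 - 2 a·b. Below is a minimal, assumed sanity check of that identity in plain NumPy (not part of the commit; pairwise_l2_reference and pairwise_l2_expansion are illustrative names):

import numpy as np

def pairwise_l2_reference(A, B):
    # Brute-force O(n*m) Euclidean distances, used only as ground truth here.
    return np.array([[np.linalg.norm(a - b) for b in B] for a in A])

def pairwise_l2_expansion(A, B):
    # Same algebraic trick as the committed pairwise_l2_distance,
    # written with broadcasting instead of np.matrix / np.tile.
    sq_a = np.sum(A ** 2, axis=1)[:, np.newaxis]      # shape (n, 1)
    sq_b = np.sum(B ** 2, axis=1)[np.newaxis, :]      # shape (1, m)
    sq_dist = sq_a + sq_b - 2.0 * (A @ B.T)
    np.clip(sq_dist, 0.0, None, out=sq_dist)          # guard against tiny negative round-off
    return np.sqrt(sq_dist)

rng = np.random.default_rng(0)
A = rng.standard_normal((5, 128))
B = rng.standard_normal((7, 128))
assert np.allclose(pairwise_l2_expansion(A, B), pairwise_l2_reference(A, B), atol=1e-6)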

File 3 of 3:

@@ -82,8 +82,7 @@ install_requires = [
     'keras==2.2.5',
     'scikit-image',
     'pillow>=7.0.0',
-    'opencv-python>=4.2.0.34',
-    'sklearn',
+    'opencv-python>=4.2.0.34'
 ]

 setup(