Mirror of https://github.com/Shawn-Shan/fawkes.git · synced 2024-12-22 07:09:33 +05:30
fix issue on get_file download
parent 3c89d43cf1 · commit aaaea79378
fawkes/__init__.py
@@ -4,7 +4,7 @@
 # @Link : https://www.shawnshan.com/
 
-__version__ = '0.0.6'
+__version__ = '0.0.8'
 
 from .detect_faces import create_mtcnn, run_detect_face
 from .differentiator import FawkesMaskGeneration
fawkes/protection.py
@@ -19,6 +19,7 @@ from fawkes.differentiator import FawkesMaskGeneration
 from fawkes.utils import load_extractor, init_gpu, select_target_label, dump_image, reverse_process_cloaked, \
     Faces
 from fawkes.align_face import aligner
+from fawkes.utils import get_file
 random.seed(12243)
 np.random.seed(122412)
 
@@ -61,6 +62,13 @@ class Fawkes(object):
         self.gpu = gpu
         self.batch_size = batch_size
         self.sess = init_gpu(gpu)
 
+        model_dir = os.path.join(os.path.expanduser('~'), '.fawkes')
+        if not os.path.exists(os.path.join(model_dir, "mtcnn.p.gz")):
+            os.makedirs(model_dir, exist_ok=True)
+            get_file("mtcnn.p.gz", "http://sandlab.cs.uchicago.edu/fawkes/files/mtcnn.p.gz", cache_dir=model_dir,
+                     cache_subdir='')
+
         self.aligner = aligner(self.sess)
         self.fs_names = [feature_extractor]
         if isinstance(feature_extractor, list):
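Note: this hunk moves the mtcnn.p.gz download into Fawkes.__init__, so the weights are fetched once when the protector is constructed and reused from the ~/.fawkes cache afterwards. Below is a minimal, self-contained sketch of the same check-then-download pattern; the path and URL mirror the hunk above, while the function name ensure_mtcnn_weights is invented for illustration and urllib.request.urlretrieve stands in for the project's get_file helper.

import os
from urllib.request import urlretrieve

def ensure_mtcnn_weights():
    # Cache directory used by fawkes (~/.fawkes), as in the diff above.
    model_dir = os.path.join(os.path.expanduser('~'), '.fawkes')
    target = os.path.join(model_dir, "mtcnn.p.gz")
    if not os.path.exists(target):
        os.makedirs(model_dir, exist_ok=True)
        # Download only on the first run; later constructions reuse the cached file.
        urlretrieve("http://sandlab.cs.uchicago.edu/fawkes/files/mtcnn.p.gz", target)
    return target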
@@ -76,7 +84,7 @@ class Fawkes(object):
         elif mode == 'mid':
             th = 0.005
             max_step = 100
-            lr = 30
+            lr = 20
         elif mode == 'high':
             th = 0.008
             max_step = 200
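Only the learning rate of the 'mid' mode changes here (30 to 20); the thresholds and step counts stay the same. As a design note, the elif chain could equally be written as a lookup table. The sketch below is illustrative only: MODE_PARAMS and params_for are invented names, and it includes just the 'mid' values visible in this hunk.

# Illustrative refactor only; values shown are the ones visible in this hunk.
MODE_PARAMS = {
    'mid': {'th': 0.005, 'max_step': 100, 'lr': 20},  # lr lowered from 30 to 20 in this commit
}

def params_for(mode):
    # Map a protection mode name to its perturbation parameters.
    if mode not in MODE_PARAMS:
        raise ValueError("unknown or unlisted protection mode: %r" % mode)
    return MODE_PARAMS[mode]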
fawkes/utils.py
@@ -55,7 +55,6 @@ if sys.version_info[0] == 2:
 else:
     from six.moves.urllib.request import urlretrieve
 
-
 def clip_img(X, preprocessing='raw'):
     X = reverse_preprocess(X, preprocessing)
     X = np.clip(X, 0.0, 255.0)
@@ -92,11 +91,6 @@ def load_image(path):
 
 class Faces(object):
     def __init__(self, image_paths, aligner, verbose=1, eval_local=False):
-        model_dir = os.path.join(os.path.expanduser('~'), '.fawkes')
-        if not os.path.exists(os.path.join(model_dir, "mtcnn.p.gz")):
-            os.makedirs(model_dir, exist_ok=True)
-            get_file("mtcnn.p.gz", "http://sandlab.cs.uchicago.edu/fawkes/files/mtcnn.p.gz", cache_dir=model_dir,
-                     cache_subdir='')
 
         self.verbose = verbose
         self.aligner = aligner
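This is the other half of the fix: the download that used to run inside Faces.__init__ is deleted, since it now happens once in Fawkes.__init__ (hunk above). With the signature shown here, constructing Faces only needs image paths and an already-initialised aligner. A hedged usage sketch follows; the variable names image_paths and aligner are placeholders supplied by the caller.

# Hypothetical usage; by this point Fawkes.__init__ has already fetched
# mtcnn.p.gz into ~/.fawkes, so Faces no longer touches the network.
faces = Faces(image_paths, aligner, verbose=1, eval_local=False)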
@@ -366,13 +360,6 @@ def load_extractor(name):
     if hasattr(model.layers[-1], "activation") and model.layers[-1].activation == "softmax":
         raise Exception(
             "Given extractor's last layer is softmax, need to remove the top layers to make it into a feature extractor")
-    # if "extract" in name.split("/")[-1]:
-    #     pass
-    # else:
-    #     print("Convert a model to a feature extractor")
-    #     model = build_bottleneck_model(model, model.layers[layer_idx].name)
-    #     model.save(name + "extract")
-    #     model = keras.models.load_model(name + "extract")
     return model
 
 
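The commented-out conversion code is removed, so load_extractor now simply rejects models whose last layer is a softmax. If you need to turn such a classifier into a feature extractor yourself, a minimal Keras sketch (not part of this commit; strip_softmax_top and the layer index are illustrative) is to rebuild the model with an internal layer as its output:

from keras.models import Model

def strip_softmax_top(model, feature_layer_index=-2):
    # Re-wire the model so its output is a penultimate feature layer
    # instead of the final softmax predictions.
    return Model(inputs=model.input, outputs=model.layers[feature_layer_index].output)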
@@ -524,13 +511,13 @@ def get_file(fname,
              archive_format='auto',
              cache_dir=None):
     if cache_dir is None:
-        cache_dir = os.path.join(os.path.expanduser('~'), '.keras')
+        cache_dir = os.path.join(os.path.expanduser('~'), '.fawkes')
     if md5_hash is not None and file_hash is None:
         file_hash = md5_hash
         hash_algorithm = 'md5'
     datadir_base = os.path.expanduser(cache_dir)
     if not os.access(datadir_base, os.W_OK):
-        datadir_base = os.path.join('/tmp', '.keras')
+        datadir_base = os.path.join('/tmp', '.fawkes')
     datadir = os.path.join(datadir_base, cache_subdir)
     _makedirs_exist_ok(datadir)
 
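Both fallbacks now point at a .fawkes directory instead of Keras's .keras, so downloads land in the same cache that Fawkes.__init__ checks. The resolution order is: explicit cache_dir argument, then ~/.fawkes, then /tmp/.fawkes when the home cache is not writable. A small standalone sketch of that resolution logic; resolve_cache_dir is an invented name, and the empty cache_subdir default matches the get_file calls shown earlier in this diff.

import os

def resolve_cache_dir(cache_dir=None, cache_subdir=''):
    # Mirrors the order used by get_file after this change.
    if cache_dir is None:
        cache_dir = os.path.join(os.path.expanduser('~'), '.fawkes')
    datadir_base = os.path.expanduser(cache_dir)
    if not os.access(datadir_base, os.W_OK):
        # Fall back to a world-writable location when $HOME is not writable.
        datadir_base = os.path.join('/tmp', '.fawkes')
    return os.path.join(datadir_base, cache_subdir)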
setup.py (5 changed lines)
@@ -77,11 +77,12 @@ setup_requires = []
 install_requires = [
     'numpy==1.16.4',
     # 'tensorflow-gpu>=1.13.1, <=1.14.0',
-    'tensorflow>=1.11.1, <=1.14.0',
+    'tensorflow>=1.12.0, <=1.15.0',
     'argparse',
-    'keras==2.2.5',
+    'keras>=2.2.5, <=2.3.1',
     'scikit-image',
     'pillow>=7.0.0',
+    'bleach>=2.1.0'
 ]
 
 setup(