# Path: blob/main/Dreambooth/smart_crop.py
# Based on A1111 cropping script
import cv2
import os
from math import log, sqrt
import numpy as np
from PIL import Image, ImageDraw

# Debug-drawing colors (currently unused by the cropper itself).
GREEN = "#0F0"
BLUE = "#00F"
RED = "#F00"


def crop_image(im, size):
    """Smart-crop ``im`` to a ``size`` x ``size`` square around its focal point.

    The focal point is the weighted average of three detectors: Haar-cascade
    face detection, Shi-Tomasi corner features, and a sliding-window entropy
    scan along the long axis.  The image is first scaled so its short side
    equals ``size``, then cropped so the window is centered on the focal
    point but clamped back inside the frame.

    Returns a one-element list containing the cropped PIL image (a list is
    kept for backward compatibility with callers that iterate the result).
    """

    def focal_point(im, settings):
        # Run each detector only if its weight is enabled, then fold the
        # per-detector centroids into one weighted point of interest.
        corner_points = image_corner_points(im, settings) if settings.corner_points_weight > 0 else []
        entropy_points = image_entropy_points(im, settings) if settings.entropy_points_weight > 0 else []
        face_points = image_face_points(im, settings) if settings.face_points_weight > 0 else []

        pois = []

        # Normalize weights over the detectors that actually found something,
        # so missing detectors don't drag the average toward (0, 0).
        weight_pref_total = 0
        if corner_points:
            weight_pref_total += settings.corner_points_weight
        if entropy_points:
            weight_pref_total += settings.entropy_points_weight
        if face_points:
            weight_pref_total += settings.face_points_weight

        if corner_points:
            corner_centroid = centroid(corner_points)
            corner_centroid.weight = settings.corner_points_weight / weight_pref_total
            pois.append(corner_centroid)

        if entropy_points:
            entropy_centroid = centroid(entropy_points)
            entropy_centroid.weight = settings.entropy_points_weight / weight_pref_total
            pois.append(entropy_centroid)

        if face_points:
            face_centroid = centroid(face_points)
            face_centroid.weight = settings.face_points_weight / weight_pref_total
            pois.append(face_centroid)

        return poi_average(pois, settings)

    def image_face_points(im, settings):
        # Try a series of Haar cascades (most specific first) and return the
        # rect centers from the first cascade that detects anything.
        np_im = np.array(im)
        # np.array of a PIL image is RGB-ordered, so use COLOR_RGB2GRAY.
        # (The original used COLOR_BGR2GRAY, which swaps the red/blue
        # luminance weights.)
        gray = cv2.cvtColor(np_im, cv2.COLOR_RGB2GRAY)

        tries = [
            [f'{cv2.data.haarcascades}haarcascade_eye.xml', 0.01],
            [f'{cv2.data.haarcascades}haarcascade_frontalface_default.xml', 0.05],
            [f'{cv2.data.haarcascades}haarcascade_profileface.xml', 0.05],
            [f'{cv2.data.haarcascades}haarcascade_frontalface_alt.xml', 0.05],
            [f'{cv2.data.haarcascades}haarcascade_frontalface_alt2.xml', 0.05],
            [f'{cv2.data.haarcascades}haarcascade_frontalface_alt_tree.xml', 0.05],
            [f'{cv2.data.haarcascades}haarcascade_eye_tree_eyeglasses.xml', 0.05],
            [f'{cv2.data.haarcascades}haarcascade_upperbody.xml', 0.05],
        ]
        for cascade_path, min_fraction in tries:
            classifier = cv2.CascadeClassifier(cascade_path)
            # Require detections to be at least N percent of the smallest side.
            minsize = int(min(im.width, im.height) * min_fraction)
            try:
                faces = classifier.detectMultiScale(
                    gray, scaleFactor=1.1, minNeighbors=7,
                    minSize=(minsize, minsize), flags=cv2.CASCADE_SCALE_IMAGE)
            except cv2.error:
                # Narrowed from a bare `except:`; keep the best-effort
                # behavior of falling through to the next cascade.
                continue

            if len(faces) > 0:
                # detectMultiScale yields (x, y, w, h); convert to corner
                # rects, then to weighted center points.
                rects = [[f[0], f[1], f[0] + f[2], f[1] + f[3]] for f in faces]
                return [PointOfInterest((r[0] + r[2]) // 2, (r[1] + r[3]) // 2,
                                        size=abs(r[0] - r[2]), weight=1 / len(rects))
                        for r in rects]
        return []

    def image_corner_points(im, settings):
        # Shi-Tomasi corner features on a grayscale copy.
        grayscale = im.convert("L")

        # Naive attempt at preventing focal points from collecting at
        # watermarks near the bottom: paint the bottom 10% flat gray.
        gd = ImageDraw.Draw(grayscale)
        gd.rectangle([0, im.height * .9, im.width, im.height], fill="#999")

        np_im = np.array(grayscale)

        points = cv2.goodFeaturesToTrack(
            np_im,
            maxCorners=100,
            qualityLevel=0.04,
            minDistance=min(grayscale.width, grayscale.height) * 0.06,
            useHarrisDetector=False,
        )

        if points is None:
            return []

        # Each point is a 1x2 array; every corner shares an equal weight.
        return [PointOfInterest(x, y, size=4, weight=1 / len(points))
                for x, y in (point.ravel() for point in points)]

    def image_entropy_points(im, settings):
        # Slide a crop-sized window along the long axis in 4-px steps and
        # return the center of the window with maximum entropy.
        landscape = im.height < im.width
        portrait = im.height > im.width
        if landscape:
            move_idx = [0, 2]       # slide the x-coordinates of the box
            move_max = im.size[0]
        elif portrait:
            move_idx = [1, 3]       # slide the y-coordinates of the box
            move_max = im.size[1]
        else:
            # Square image: no long axis to scan.
            return []

        e_max = 0
        crop_current = [0, 0, settings.crop_width, settings.crop_height]
        crop_best = crop_current
        while crop_current[move_idx[1]] < move_max:
            crop = im.crop(tuple(crop_current))
            e = image_entropy(crop)

            if e > e_max:
                e_max = e
                crop_best = list(crop_current)

            crop_current[move_idx[0]] += 4
            crop_current[move_idx[1]] += 4

        x_mid = int(crop_best[0] + settings.crop_width / 2)
        y_mid = int(crop_best[1] + settings.crop_height / 2)

        return [PointOfInterest(x_mid, y_mid, size=25, weight=1.0)]

    def image_entropy(im):
        # Entropy of the 1-bit (black/white) rendering — a cheap detail proxy.
        # band = np.asarray(im.convert("L"))
        band = np.asarray(im.convert("1"), dtype=np.uint8)
        hist, _ = np.histogram(band, bins=range(0, 256))
        hist = hist[hist > 0]
        return -np.log2(hist / hist.sum()).sum()

    def centroid(pois):
        # Unweighted mean position of a non-empty list of points.
        x = [poi.x for poi in pois]
        y = [poi.y for poi in pois]
        return PointOfInterest(sum(x) / len(pois), sum(y) / len(pois))

    def poi_average(pois, settings):
        # Weighted mean position; `weight and x / weight` yields 0 when the
        # list is empty, guarding the division.
        weight = 0.0
        x = 0.0
        y = 0.0
        for poi in pois:
            weight += poi.weight
            x += poi.x * poi.weight
            y += poi.y * poi.weight
        avg_x = round(weight and x / weight)
        avg_y = round(weight and y / weight)

        return PointOfInterest(avg_x, avg_y)

    def is_landscape(w, h):
        return w > h

    def is_portrait(w, h):
        return h > w

    def is_square(w, h):
        return w == h

    class PointOfInterest:
        # A weighted 2-D point; `size` is only used for debug drawing.
        def __init__(self, x, y, weight=1.0, size=10):
            self.x = x
            self.y = y
            self.weight = weight
            self.size = size

        def bounding(self, size):
            # Square box of side `size` centered on the point.
            return [
                self.x - size // 2,
                self.y - size // 2,
                self.x + size // 2,
                self.y + size // 2,
            ]

    class Settings:
        # Bag of cropper parameters; detector weights are relative, not
        # required to sum to 1 (they get normalized in focal_point).
        def __init__(self, crop_width=512, crop_height=512,
                     corner_points_weight=0.5, entropy_points_weight=0.5,
                     face_points_weight=0.5):
            self.crop_width = crop_width
            self.crop_height = crop_height
            self.corner_points_weight = corner_points_weight
            self.entropy_points_weight = entropy_points_weight
            self.face_points_weight = face_points_weight

    settings = Settings(
        crop_width=size,
        crop_height=size,
        face_points_weight=0.9,
        entropy_points_weight=0.15,
        corner_points_weight=0.5,
    )

    # Scale so the short side of the image matches the crop window.
    scale_by = 1
    if is_landscape(im.width, im.height):
        scale_by = settings.crop_height / im.height
    elif is_portrait(im.width, im.height):
        scale_by = settings.crop_width / im.width
    elif is_square(im.width, im.height):
        if is_square(settings.crop_width, settings.crop_height):
            scale_by = settings.crop_width / im.width
        elif is_landscape(settings.crop_width, settings.crop_height):
            scale_by = settings.crop_width / im.width
        elif is_portrait(settings.crop_width, settings.crop_height):
            scale_by = settings.crop_height / im.height

    im = im.resize((int(im.width * scale_by), int(im.height * scale_by)))
    im_debug = im.copy()

    focus = focal_point(im_debug, settings)

    # Take the focal point and turn it into crop coordinates that try to
    # center over the focal point but then get adjusted back into the frame.
    y_half = int(settings.crop_height / 2)
    x_half = int(settings.crop_width / 2)

    x1 = focus.x - x_half
    if x1 < 0:
        x1 = 0
    elif x1 + settings.crop_width > im.width:
        x1 = im.width - settings.crop_width

    y1 = focus.y - y_half
    if y1 < 0:
        y1 = 0
    elif y1 + settings.crop_height > im.height:
        y1 = im.height - settings.crop_height

    x2 = x1 + settings.crop_width
    y2 = y1 + settings.crop_height

    crop = [x1, y1, x2, y2]

    results = []
    results.append(im.crop(tuple(crop)))

    return results