forked from Hitmare/Eris_api_tensor_patch

Add anti child porn patch

parent b5dbfc2bca
commit 16b3903d80

api.py (69 changed lines)
@@ -27,7 +27,7 @@ from PIL import PngImagePlugin, Image
 from modules.sd_models_config import find_checkpoint_config_near_filename
 from modules.realesrgan_model import get_realesrgan_models
 from modules import devices
-from typing import Any
+from typing import Any, Tuple
 import piexif
 import piexif.helper
 from contextlib import closing
@@ -63,6 +63,7 @@ eris_imagelog = False
 eris_consolelog = False
 eris_TRTpatch = False
 eris_imagelimit = False
+eris_antichildporn = False
 # Limits for text2image. needs eris_imagelimit = True
 txt_max_width_height = 1024
 txt_min_width_height = 320
@@ -75,6 +76,63 @@ img_min_width_height = 320
 img_max_pixel_count = 589824
 img_max_steps = 35
 img_max_characters = 2000
+# Anti child porn tags. Needs eris_antichildporn = True
+# If a tag is found in the prompt, anti child porn will be triggered,
+# and the tag will be removed from the prompt and added to the neg_prompt.
+child_porn_tags = [
+    "loli",
+    "baby",
+    "newborn",
+]
+# Soft tags will be removed if they are found and added to the neg_prompt, but they
+# cannot trigger the anti child porn check.
+soft_child_porn_tags = [
+    "small",
+    "little",
+    "1 year",
+    "2 years",
+    "3 years",
+    "4 years",
+    "5 years",
+    "6 years",
+    "7 years",
+    "8 years",
+    "9 years",
+    "10 years",
+    "11 years",
+    "12 years",
+    "13 years",
+    "14 years",
+    "15 years",
+]
+
+
+def anti_child_porn(prompt: str, neg_prompt: str) -> Tuple[str, str]:
+    """Detect child porn and remove it from the prompt.
+
+    This can be easily bypassed, but we assume that pedophiles are not smart
+    enough to understand how to do it.
+
+    Args:
+        prompt (str): Positive prompt.
+        neg_prompt (str): Negative prompt.
+
+    Returns:
+        Tuple[str, str]: Updated positive prompt and negative prompt.
+    """
+    tags = prompt.replace(",", " ").split(" ")
+    detected = False
+    for tag in child_porn_tags:
+        if tag in tags:
+            if not detected:
+                print("[Anti-Child-Porn] Detected child porn, original prompt:", (prompt, neg_prompt))
+            prompt = prompt.replace(tag, "")
+            detected = True
+    if detected:
+        for tag in soft_child_porn_tags:
+            prompt = prompt.replace(tag, "")
+        neg_prompt += " , " + ", ".join(child_porn_tags + soft_child_porn_tags)
+        print("[Anti-Child-Porn] Patched:", (prompt, neg_prompt))
+    return (prompt, neg_prompt)
 
 def script_name_to_index(name, scripts):
     try:
@@ -461,6 +519,10 @@ class Api:
         if eris_consolelog:
             print('[t2i]', txt2imgreq.width, 'x', txt2imgreq.height, '|', txt2imgreq.prompt)
         # Eris ______
+
+        # Eris AntiChildPorn
+        if eris_antichildporn:
+            txt2imgreq.prompt, txt2imgreq.negative_prompt = anti_child_porn(txt2imgreq.prompt, txt2imgreq.negative_prompt)
 
 
 
@@ -600,6 +662,11 @@ class Api:
         # Eris console prompt log -> writes promts into the console/terminal window
         if eris_consolelog:
             print('[i2i]', img2imgreq.width, 'x', img2imgreq.height, '|', img2imgreq.prompt)
+
+        # Eris AntiChildPorn
+        if eris_antichildporn:
+            img2imgreq.prompt, img2imgreq.negative_prompt = anti_child_porn(img2imgreq.prompt, img2imgreq.negative_prompt)
+
         # Eris ______
         init_images = img2imgreq.init_images
         if init_images is None:
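Below is a condensed, self-contained sketch (not part of the commit) of how the new anti_child_porn helper rewrites a request. The tag lists are truncated and the logging prints and docstring are omitted for brevity; the control flow mirrors the function added in the hunk above.

from typing import Tuple

# Truncated stand-ins for the module-level lists added by the patch (illustration only).
child_porn_tags = ["loli", "baby", "newborn"]
soft_child_porn_tags = ["small", "little", "1 year"]

def anti_child_porn(prompt: str, neg_prompt: str) -> Tuple[str, str]:
    # Same flow as the patched api.py: a hard tag triggers the filter,
    # soft tags are only stripped once a hard tag has been found.
    tags = prompt.replace(",", " ").split(" ")
    detected = False
    for tag in child_porn_tags:
        if tag in tags:
            prompt = prompt.replace(tag, "")
            detected = True
    if detected:
        for tag in soft_child_porn_tags:
            prompt = prompt.replace(tag, "")
        neg_prompt += " , " + ", ".join(child_porn_tags + soft_child_porn_tags)
    return (prompt, neg_prompt)

prompt, neg = anti_child_porn("small baby portrait", "lowres")
print(prompt)  # -> "  portrait"  ("baby" triggered the filter, "small" was stripped as a soft tag)
print(neg)     # -> "lowres , loli, baby, newborn, small, little, 1 year"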
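And a hypothetical client-side call showing where the check takes effect. This assumes the webui's standard /sdapi/v1/txt2img endpoint, a local server on port 7860, and eris_antichildporn flipped to True in api.py; none of these details are part of the diff itself.

import requests

payload = {
    "prompt": "small baby portrait, masterpiece",
    "negative_prompt": "lowres",
    "width": 512,
    "height": 512,
    "steps": 20,
}
# With the patch active, the txt2img handler rewrites the request before generation:
# flagged tags are removed from the prompt and appended to the negative prompt,
# and the change is logged by the [Anti-Child-Porn] console prints.
resp = requests.post("http://127.0.0.1:7860/sdapi/v1/txt2img", json=payload)
resp.raise_for_status()
print(resp.json().keys())  # typically includes "images", "parameters", "info"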