forked from Hitmare/Eris_api_tensor_patch
Merge pull request 'Add anti child porn patch' (#3) from cofob/Eris_api_tensor_patch:anti-child-porn into main
Reviewed-on: Hitmare/Eris_api_tensor_patch#3
Reviewed-by: Hitmare <hitmare@noreply.foxo.me>
Commit: 8f3d7ca9fd

README.md
@@ -25,6 +25,8 @@ The modified API.py file can do the following:
5. Eris Imagelimit:
   - Resizes and limits every request down to a maximum of 1024 px in height and width, keeping the aspect ratio. Also reduces Steps to 35 if the requested Steps exceed 35 (a rough sketch of this clamping follows the list).
   - Compatible with TRTpatch. Please read the Readme in the `TRT-Patch` folder for the additional instructions.
6. Eris AntiChildPorn:
   - Prevents the generation of child porn images.
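The clamping itself lives in `api.py` (see the `txt_max_width_height` and `img_max_steps` settings further down in this diff). Purely as an illustration of the idea, not the patch's literal code, limiting a request to the 1024 px ceiling while keeping the aspect ratio and capping the steps could look roughly like this:

```python
# Rough illustration of the Imagelimit idea, not the literal patch code:
# scale the requested size so neither side exceeds 1024 px, keep the
# aspect ratio, and cap the step count at 35.
def clamp_request(width: int, height: int, steps: int) -> tuple[int, int, int]:
    max_side = 1024
    max_steps = 35
    scale = min(1.0, max_side / max(width, height))
    return int(width * scale), int(height * scale), min(steps, max_steps)

print(clamp_request(2048, 1024, 50))  # -> (1024, 512, 35)
```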

# Enabling the Modifications
@@ -38,11 +40,12 @@ eris_imagelog = False
eris_consolelog = False
eris_TRTpatch = False
eris_imagelimit = False
eris_antichildporn = False
```

Change the `False` to `True` to enable any of the modifications.

For example to enable the Promptlog and Imagelog:
For example, to enable the Promptlog, Imagelog and AntiChildPorn:

```python
eris_promtlog = True
@@ -50,6 +53,7 @@ eris_imagelog = True
eris_consolelog = False
eris_TRTpatch = False
eris_imagelimit = False
eris_antichildporn = True
```

Don't forget to save the `api.py` file again and (re)start your A1111 Stable-Diffusion.
@@ -57,4 +61,4 @@ Don't forget to save the `api.py` file again and (re)start your A1111 Stable-dif
::: warn
Additional and !!**__necessary__**!! instructions for the TRT-Patch are in the `TRT-Patch` folder.
:::
:::
api.py (70 changed lines)
@@ -27,7 +27,7 @@ from PIL import PngImagePlugin, Image
from modules.sd_models_config import find_checkpoint_config_near_filename
from modules.realesrgan_model import get_realesrgan_models
from modules import devices
from typing import Any
from typing import Any, Tuple
import piexif
import piexif.helper
from contextlib import closing
@@ -63,6 +63,7 @@ eris_imagelog = False
eris_consolelog = False
eris_TRTpatch = False
eris_imagelimit = False
eris_antichildporn = False
# Limits for text2image. needs eris_imagelimit = True
txt_max_width_height = 1024
txt_min_width_height = 320
@@ -75,6 +76,64 @@ img_min_width_height = 320
img_max_pixel_count = 589824
img_max_steps = 35
img_max_characters = 2000
# Anti child porn tags. Needs eris_antichildporn = True
# If a tag is found in the prompt, anti child porn will be triggered,
# and the tag will be removed from the prompt and added to the neg_prompt.
child_porn_tags = [
    "loli",
    "baby",
    "newborn",
    "kid",
]
# Soft tags will be removed if they are found and added to the neg_prompt, but they
# cannot trigger the anti child porn check.
soft_child_porn_tags = [
    "small",
    "little",
    "1 year",
    "2 years",
    "3 years",
    "4 years",
    "5 years",
    "6 years",
    "7 years",
    "8 years",
    "9 years",
    "10 years",
    "11 years",
    "12 years",
    "13 years",
    "14 years",
    "15 years",
]


def anti_child_porn(prompt: str, neg_prompt: str) -> Tuple[str, str]:
    """Detect child porn and remove it from the prompt.

    This can be easily bypassed, but we assume that pedophiles are not smart
    enough to understand how to do it.

    Args:
        prompt (str): Positive prompt.
        neg_prompt (str): Negative prompt.

    Returns:
        Tuple[str, str]: Updated positive prompt and negative prompt.
    """
    # Tokenise the prompt so that trigger tags only match whole words.
    tags = prompt.replace(",", " ").lower().split(" ")
    detected = False
    for tag in child_porn_tags:
        if tag in tags:
            if not detected:
                print("[Anti-Child-Porn] Detected child porn, original prompt:", (prompt, neg_prompt))
            prompt = prompt.replace(tag, "")
            detected = True
    if detected:
        # Strip the soft tags as well and push every tag into the negative prompt.
        for tag in soft_child_porn_tags:
            prompt = prompt.replace(tag, "")
        neg_prompt += " , " + ", ".join(child_porn_tags + soft_child_porn_tags)
        print("[Anti-Child-Porn] Patched:", (prompt, neg_prompt))
    return (prompt, neg_prompt)


def script_name_to_index(name, scripts):
    try:
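For a quick sanity check of the filter outside the webui, the snippet below exercises `anti_child_porn` on its own. It assumes the tag lists and the function above have been copied into a standalone file named `eris_filter.py` (a hypothetical module name used only for this example):

```python
# Standalone sanity check for anti_child_porn(). Assumes the tag lists and
# the function above were copied into eris_filter.py (hypothetical module
# name, used only for this illustration).
from eris_filter import anti_child_porn

prompt = "portrait of a loli girl in a park"
neg_prompt = "blurry, lowres"

patched_prompt, patched_neg = anti_child_porn(prompt, neg_prompt)

# "loli" is removed from the positive prompt, and every tag from both lists
# is appended to the negative prompt.
print(patched_prompt)  # e.g. "portrait of a  girl in a park"
print(patched_neg)     # e.g. "blurry, lowres , loli, baby, ..., 15 years"
```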
@@ -461,6 +520,10 @@ class Api:
        if eris_consolelog:
            print('[t2i]', txt2imgreq.width, 'x', txt2imgreq.height, '|', txt2imgreq.prompt)
        # Eris ______

        # Eris AntiChildPorn
        if eris_antichildporn:
            txt2imgreq.prompt, txt2imgreq.negative_prompt = anti_child_porn(txt2imgreq.prompt, txt2imgreq.negative_prompt)
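Seen from the client side, nothing changes: requests still go to the standard A1111 txt2img endpoint and the filtering happens on the server. The sketch below assumes a local webui started with `--api` on the default port 7860; the endpoint and payload fields are the stock A1111 API, not part of this patch:

```python
# Send a txt2img request to a locally running A1111 instance that uses the
# patched api.py. Assumes the webui was started with --api on port 7860.
import requests

payload = {
    "prompt": "portrait of a loli girl in a park",  # "loli" gets filtered server-side
    "negative_prompt": "blurry",
    "width": 512,
    "height": 512,
    "steps": 20,
}

resp = requests.post("http://127.0.0.1:7860/sdapi/v1/txt2img", json=payload)
resp.raise_for_status()

# With eris_antichildporn = True the server console shows the
# [Anti-Child-Porn] log lines; the response keeps the usual shape.
print(resp.json().keys())  # typically: images, parameters, info
```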
@@ -600,6 +663,11 @@ class Api:
        # Eris console prompt log -> writes prompts into the console/terminal window
        if eris_consolelog:
            print('[i2i]', img2imgreq.width, 'x', img2imgreq.height, '|', img2imgreq.prompt)

        # Eris AntiChildPorn
        if eris_antichildporn:
            img2imgreq.prompt, img2imgreq.negative_prompt = anti_child_porn(img2imgreq.prompt, img2imgreq.negative_prompt)

        # Eris ______
        init_images = img2imgreq.init_images
        if init_images is None:
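The img2img path is wired the same way; a matching client-side sketch against the stock A1111 img2img endpoint (same assumptions as above, plus an existing `input.png`) would be:

```python
# Send an img2img request to the same patched instance. Assumes --api on
# port 7860 and an input image at input.png.
import base64
import requests

with open("input.png", "rb") as f:
    init_image = base64.b64encode(f.read()).decode("utf-8")

payload = {
    "init_images": [init_image],
    "prompt": "portrait of a loli girl in a park",  # filtered server-side
    "negative_prompt": "blurry",
    "denoising_strength": 0.6,
    "steps": 20,
}

resp = requests.post("http://127.0.0.1:7860/sdapi/v1/img2img", json=payload)
resp.raise_for_status()
print(resp.json().keys())  # typically: images, parameters, info
```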