Compare commits


2 Commits

Author SHA1 Message Date
pinks 5a7ba81f46 retry optimizations 2023-09-11 22:48:38 +02:00
pinks 82a253b616 feat: extract pnginfo 2023-09-11 22:43:12 +02:00
7 changed files with 248 additions and 35 deletions

bot/mod.ts

@@ -1,8 +1,9 @@
import { Grammy, GrammyAutoQuote, GrammyParseMode, Log } from "../deps.ts";
import { Grammy, GrammyAutoQuote, GrammyFiles, GrammyParseMode, Log } from "../deps.ts";
import { formatUserChat } from "../utils.ts";
import { session, SessionFlavor } from "./session.ts";
import { queueCommand } from "./queueCommand.ts";
import { txt2imgCommand, txt2imgQuestion } from "./txt2imgCommand.ts";
import { pnginfoCommand, pnginfoQuestion } from "./pnginfoCommand.ts";
export const logger = () => Log.getLogger();
@@ -12,7 +13,9 @@ type WithRetryApi<T extends Grammy.RawApi> = {
: T[M];
};
export type Context = GrammyParseMode.ParseModeFlavor<Grammy.Context> & SessionFlavor;
export type Context =
& GrammyFiles.FileFlavor<GrammyParseMode.ParseModeFlavor<Grammy.Context>>
& SessionFlavor;
export const bot = new Grammy.Bot<Context, Grammy.Api<WithRetryApi<Grammy.RawApi>>>(
Deno.env.get("TG_BOT_TOKEN") ?? "",
);
@@ -20,9 +23,7 @@ bot.use(GrammyAutoQuote.autoQuote);
bot.use(GrammyParseMode.hydrateReply);
bot.use(session);
bot.catch((err) => {
logger().error(`Handling update from ${formatUserChat(err.ctx)} failed: ${err}`);
});
bot.api.config.use(GrammyFiles.hydrateFiles(bot.token));
// Automatically retry bot requests if we get a "too many requests" or telegram internal error
bot.api.config.use(async (prev, method, payload, signal) => {
@@ -39,13 +40,19 @@ bot.api.config.use(async (prev, method, payload, signal) => {
return result;
}
logger().warning(
`Retrying ${method} after attempt ${attempt} failed with ${result.error_code} error`,
`${method} (attempt ${attempt}) failed: ${result.error_code} ${result.description}`,
);
const retryAfterMs = (result.parameters?.retry_after ?? (attempt * 5)) * 1000;
await new Promise((resolve) => setTimeout(resolve, retryAfterMs));
}
});
bot.catch((err) => {
logger().error(
`Handling update from ${formatUserChat(err.ctx)} failed: ${err.name} ${err.message}`,
);
});
// if error happened, try to reply to the user with the error
bot.use(async (ctx, next) => {
try {
@@ -68,6 +75,7 @@ bot.api.setMyDescription(
);
bot.api.setMyCommands([
{ command: "txt2img", description: "Generate an image" },
{ command: "pnginfo", description: "Show generation parameters of an image" },
{ command: "queue", description: "Show the current queue" },
]);
@@ -76,6 +84,9 @@ bot.command("start", (ctx) => ctx.reply("Hello! Use the /txt2img command to gene
bot.command("txt2img", txt2imgCommand);
bot.use(txt2imgQuestion.middleware());
bot.command("pnginfo", pnginfoCommand);
bot.use(pnginfoQuestion.middleware());
bot.command("queue", queueCommand);
bot.command("pause", (ctx) => {

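The retry transformer added above uses grammY's API transformer mechanism (installed via bot.api.config.use and called with prev, method, payload, signal). Since the hunk hides the top of the function, here is a minimal standalone sketch of the same idea; the attempt limit and fallback delay are illustrative and not taken from this diff:

import { Bot } from "https://deno.land/x/grammy/mod.ts";

const bot = new Bot(Deno.env.get("TG_BOT_TOKEN") ?? "");

// Retry "too many requests" (429) and Telegram-internal (5xx) errors
// instead of failing the call immediately.
bot.api.config.use(async (prev, method, payload, signal) => {
  let attempt = 0;
  while (true) {
    attempt++;
    const result = await prev(method, payload, signal);
    if (result.ok) return result;
    const retriable = result.error_code === 429 || result.error_code >= 500;
    if (!retriable || attempt >= 3) return result;
    // Telegram may say how long to wait; otherwise back off linearly.
    const retryAfterMs = (result.parameters?.retry_after ?? attempt * 5) * 1000;
    await new Promise((resolve) => setTimeout(resolve, retryAfterMs));
  }
});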
bot/pnginfoCommand.ts (new file, 51 lines)

@@ -0,0 +1,51 @@
import { Grammy, GrammyParseMode, GrammyStatelessQ } from "../deps.ts";
import { fmt } from "../utils.ts";
import { getPngInfo, parsePngInfo } from "../sd.ts";
import { Context } from "./mod.ts";
export const pnginfoQuestion = new GrammyStatelessQ.StatelessQuestion<Context>(
"pnginfo",
async (ctx) => {
await pnginfo(ctx, false);
},
);
export async function pnginfoCommand(ctx: Grammy.CommandContext<Context>) {
await pnginfo(ctx, true);
}
async function pnginfo(ctx: Context, includeRepliedTo: boolean): Promise<void> {
const document = ctx.message?.document ||
(includeRepliedTo ? ctx.message?.reply_to_message?.document : undefined);
if (document?.mime_type !== "image/png") {
await ctx.reply(
"Please send me a PNG file." +
pnginfoQuestion.messageSuffixMarkdown(),
{ reply_markup: { force_reply: true, selective: true }, parse_mode: "Markdown" },
);
return;
}
const file = await ctx.api.getFile(document.file_id);
const buffer = await fetch(file.getUrl()).then((resp) => resp.arrayBuffer());
const params = parsePngInfo(getPngInfo(new Uint8Array(buffer)) ?? "");
const { bold } = GrammyParseMode;
const paramsText = fmt([
`${params.prompt}\n`,
params.negative_prompt ? fmt`${bold("Negative prompt:")} ${params.negative_prompt}\n` : "",
params.steps ? fmt`${bold("Steps:")} ${params.steps}, ` : "",
params.sampler_name ? fmt`${bold("Sampler:")} ${params.sampler_name}, ` : "",
params.cfg_scale ? fmt`${bold("CFG scale:")} ${params.cfg_scale}, ` : "",
params.seed ? fmt`${bold("Seed:")} ${params.seed}, ` : "",
params.width && params.height ? fmt`${bold("Size")}: ${params.width}x${params.height}` : "",
]);
await ctx.reply(paramsText.text, {
reply_to_message_id: ctx.message?.message_id,
entities: paramsText.entities,
});
}
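The command relies on getPngInfo from sd.ts, whose implementation is not part of this diff. Assuming it reads the tEXt "parameters" chunk that stable-diffusion-webui embeds in its output PNGs, a sketch built on the png-chunks-extract dependency already re-exported from deps.ts might look like this (the real implementation may differ):

import extractChunks from "npm:png-chunks-extract@1.0.0";

// Sketch only: return the text of the "parameters" tEXt chunk, if present.
export function getPngInfo(png: Uint8Array): string | undefined {
  const decoder = new TextDecoder("latin1"); // tEXt chunk text is Latin-1
  for (const chunk of extractChunks(png)) {
    if (chunk.name !== "tEXt") continue;
    const separator = chunk.data.indexOf(0); // keyword, NUL byte, then text
    const keyword = decoder.decode(chunk.data.slice(0, separator));
    if (keyword === "parameters") {
      return decoder.decode(chunk.data.slice(separator + 1));
    }
  }
  return undefined;
}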

bot/txt2imgCommand.ts

@@ -1,7 +1,7 @@
import { Grammy, GrammyStatelessQ } from "../deps.ts";
import { formatUserChat } from "../utils.ts";
import { jobStore } from "../db/jobStore.ts";
import { parsePngInfo } from "../sd.ts";
import { getPngInfo, parsePngInfo, SdTxt2ImgRequest } from "../sd.ts";
import { Context, logger } from "./mod.ts";
export const txt2imgQuestion = new GrammyStatelessQ.StatelessQuestion<Context>(
@@ -43,8 +43,23 @@ async function txt2img(ctx: Context, match: string, includeRepliedTo: boolean):
return;
}
let params = parsePngInfo(match);
let params: Partial<SdTxt2ImgRequest> = {};
const repliedToMsg = ctx.message.reply_to_message;
if (includeRepliedTo && repliedToMsg?.document?.mime_type === "image/png") {
const file = await ctx.api.getFile(repliedToMsg.document.file_id);
const buffer = await fetch(file.getUrl()).then((resp) => resp.arrayBuffer());
const fileParams = parsePngInfo(getPngInfo(new Uint8Array(buffer)) ?? "");
params = {
...params,
...fileParams,
prompt: [params.prompt, fileParams.prompt].filter(Boolean).join("\n"),
negative_prompt: [params.negative_prompt, fileParams.negative_prompt]
.filter(Boolean).join("\n"),
};
}
const repliedToText = repliedToMsg?.text || repliedToMsg?.caption;
if (includeRepliedTo && repliedToText) {
// TODO: remove bot command from replied to text
@@ -53,8 +68,18 @@ async function txt2img(ctx: Context, match: string, includeRepliedTo: boolean):
...originalParams,
...params,
prompt: [originalParams.prompt, params.prompt].filter(Boolean).join("\n"),
negative_prompt: [originalParams.negative_prompt, params.negative_prompt]
.filter(Boolean).join("\n"),
};
}
const messageParams = parsePngInfo(match);
params = {
...params,
...messageParams,
prompt: [params.prompt, messageParams.prompt].filter(Boolean).join("\n"),
};
if (!params.prompt) {
await ctx.reply(
"Please tell me what you want to see." +

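Reading the three merge steps above together (replied-to PNG, replied-to text, then the command message itself), the resulting precedence appears to be: fields typed in the command message override fields taken from a replied-to PNG, which in turn override fields parsed from replied-to text, while prompts are concatenated rather than replaced. A hypothetical illustration of the spread logic shown above (values invented for clarity):

// Hypothetical inputs for the three sources handled above.
const fromRepliedText = { prompt: "red fox", steps: 20 };
const fromRepliedPng = { prompt: "forest, night", steps: 30, cfg_scale: 6 };
const fromMessage = { prompt: "detailed fur", cfg_scale: 7 };
// After the merges: steps is 30 (PNG beats replied text), cfg_scale is 7
// (message beats PNG), and prompt is "red fox\nforest, night\ndetailed fur".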
deps.ts

@@ -12,6 +12,7 @@ export * as GrammyAutoQuote from "https://deno.land/x/grammy_autoquote@v1.1.2/mo
export * as GrammyParseMode from "https://deno.land/x/grammy_parse_mode@1.7.1/mod.ts";
export * as GrammyKvStorage from "https://deno.land/x/grammy_storages@v2.3.1/denokv/src/mod.ts";
export * as GrammyStatelessQ from "https://deno.land/x/grammy_stateless_question_alpha@v3.0.3/mod.ts";
export * as GrammyFiles from "https://deno.land/x/grammy_files@v1.0.4/mod.ts";
export * as FileType from "npm:file-type@18.5.0";
// @deno-types="./types/png-chunks-extract.d.ts"
export * as PngChunksExtract from "npm:png-chunks-extract@1.0.0";

pnginfo.test.ts (new file, 83 lines)

@@ -0,0 +1,83 @@
import {
assert,
assertEquals,
assertMatch,
} from "https://deno.land/std@0.135.0/testing/asserts.ts";
import { parsePngInfo } from "./sd.ts";
Deno.test("parses pnginfo", async (t) => {
await t.step("1", () => {
const params = parsePngInfo(
`female, red fox, pink hair, (long hair), eyeshadow, lipstick, armband, midriff, leather shorts, (fishnet) legwear, fingerless gloves, <3, tongue out,
presenting breasts, ((flashing)) breasts, shirt lift, raised shirt, public exposure,
lgbt pride, pride colors, pride color clothing, flag \\(object\\), pride march, public,
braeburned, keadonger, zaush, jishinu, pixelsketcher, detailed background, insane detail, soft shading, masterpiece
Negative prompt: bad-hands-5, boring_e621
Steps: 40, Sampler: Euler a, CFG scale: 8, Seed: 2843818575, Size: 768x768, Model hash: e2d72a81a3, Model: bb95FurryMix_v90, Denoising strength: 0.28,
SD upscale overlap: 64, SD upscale upscaler: Lanczos, Version: v1.4.0`,
);
assertMatch(params.prompt ?? "", /\blong hair\b/);
assertMatch(params.prompt ?? "", /\bflag \\\(object\\\)/);
assertMatch(params.prompt ?? "", /\bmasterpiece\b/);
assert(!params.prompt?.includes("2843818575"));
assertMatch(params.negative_prompt ?? "", /\bbad-hands-5\b/);
assertMatch(params.negative_prompt ?? "", /\bboring_e621\b/);
assert(!params.negative_prompt?.includes("2843818575"));
assertEquals(params.steps, 40);
assertEquals(params.cfg_scale, 8);
assertEquals(params.width, 768);
assertEquals(params.height, 768);
});
await t.step("2", () => {
const params = parsePngInfo(
`anthro, female, wolf:1.2, long hair, fluffy tail, thick thighs,
wraps, loincloth, tribal clothing, tribal body markings, bone necklace, feathers in hair, skimpy,
holding spear, weapon, crouching, digitigrade, action pose, perspective, motion lines, forest, hunting, female pred, grin, angry,
by kenket, by ruaidri, by keadonger, by braeburned, by twiren, detailed fur, hires, masterpiece, <lora:add_detail:0.8>
Negative prompt: boring_e621_fluffyrock
Steps: 40, Sampler: Euler a, CFG scale: 7, Seed: 2876880391, Size: 1536x1536, Model hash: 06ac6055bd,
Model: fluffyrock-576-704-832-960-1088-lion-low-lr-e61-terminal-snr-e34, Denoising strength: 0.2,
Ultimate SD upscale upscaler: None, Ultimate SD upscale tile_width: 768, Ultimate SD upscale tile_height: 768,
Ultimate SD upscale mask_blur: 8, Ultimate SD upscale padding: 48,
Lora hashes: "add_detail: 7c6bad76eb54", Version: v1.3.2`,
);
assertMatch(params.prompt ?? "", /\bwolf\b/);
assertMatch(params.prompt ?? "", /\bdigitigrade\b/);
assertMatch(params.prompt ?? "", /\bby ruaidri\b/);
assert(!params.prompt?.includes("7c6bad76eb54"));
assert(!params.prompt?.includes("tile_width"));
assert(!params.prompt?.includes("boring_e621_fluffyrock"));
assertMatch(params.negative_prompt ?? "", /\bboring_e621_fluffyrock\b/);
assert(!params.negative_prompt?.includes("7c6bad76eb54"));
assert(!params.negative_prompt?.includes("tile_width"));
assert(!params.negative_prompt?.includes("add_detail"));
assertEquals(params.steps, 40);
assertEquals(params.cfg_scale, 7);
assertEquals(params.width, 1536);
assertEquals(params.height, 1536);
});
await t.step("3", () => {
const params = parsePngInfo(
`anthro, female, red fox, long black hair with pink highlights, bra, underwear, digitigrade, pawpads, foot focus, foot fetish, sitting, 4 toes,
sticker, outline, simple background, by braeburned, by alibi-cami, by ultrabondagefairy, by dripponi, <lora:easy_sticker:0.5>
Negative prompt: boring_e621_v4, happy, smile,
Steps: 40, Sampler: Euler a, CFG scale: 7, Seed: 3154849350, Size: 512x512, Model hash: fd926f7598, Model: bb95FurryMix_v100,
Denoising strength: 0.65, Lora hashes: "easy_sticker: 2c98dc945091", Version: v1.3.2`,
);
assertMatch(params.prompt ?? "", /\bbra\b/);
assertMatch(params.prompt ?? "", /\blora:easy_sticker\b/);
assert(!params.prompt?.includes("smile"));
assert(!params.prompt?.includes("Euler a"));
assert(!params.prompt?.includes("bb95FurryMix_v100"));
assertMatch(params.negative_prompt ?? "", /\bboring_e621_v4\b/);
assert(!params.negative_prompt?.includes("simple background"));
assert(!params.negative_prompt?.includes("easy_sticker"));
assert(!params.negative_prompt?.includes("bb95FurryMix_v100"));
assertEquals(params.steps, 40);
assertEquals(params.cfg_scale, 7);
assertEquals(params.width, 512);
assertEquals(params.height, 512);
});
});
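As a usage note, the new test file should run on its own with Deno's built-in runner, e.g. deno test pnginfo.test.ts; parsePngInfo is plain string parsing, so no extra permission flags should be needed.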

sd.ts (21 lines changed)

@@ -52,7 +52,7 @@ export async function sdTxt2Img(
try {
while (true) {
await Async.abortable(Promise.race([request, Async.delay(3000)]), signal);
await Async.abortable(Promise.race([request, Async.delay(4000)]), signal);
if (await AsyncX.promiseState(request) !== "pending") return await request;
onProgress?.(await fetchSdApi<SdProgressResponse>(api, "sdapi/v1/progress"));
}
@@ -249,15 +249,20 @@ export function parsePngInfo(pngInfo: string): Partial<SdTxt2ImgRequest> {
const prompt: string[] = [];
const negativePrompt: string[] = [];
for (const tag of tags) {
const paramValuePair = tag.trim().match(/^(\w+\s*\w*):\s+([\d\w. ]+)\s*$/u);
const paramValuePair = tag.trim().match(/^(\w+\s*\w*):\s+(.*)$/u);
if (paramValuePair) {
const [, param, value] = paramValuePair;
switch (param.replace(/\s+/u, "").toLowerCase()) {
case "positiveprompt":
case "positive":
case "prompt":
case "pos":
part = "prompt";
prompt.push(value.trim());
break;
case "negativeprompt":
case "negative":
case "neg":
part = "negative_prompt";
negativePrompt.push(value.trim());
break;
@@ -269,6 +274,7 @@ export function parsePngInfo(pngInfo: string): Partial<SdTxt2ImgRequest> {
break;
}
case "cfgscale":
case "cfg":
case "detail": {
part = "params";
const cfgScale = Number(value.trim());
@@ -288,6 +294,17 @@ export function parsePngInfo(pngInfo: string): Partial<SdTxt2ImgRequest> {
}
break;
}
case "seed":
case "model":
case "modelhash":
case "modelname":
case "sampler":
case "denoisingstrength":
case "denoising":
case "denoise":
part = "params";
// ignore for now
break;
default:
break;
}

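For a quick sense of what the loosened value regex and the new key aliases accept, a small usage sketch (the expected values mirror the assertions in pnginfo.test.ts above):

import { parsePngInfo } from "./sd.ts";

// Sketch: a free-form prompt line plus a webui-style parameter line.
const params = parsePngInfo(
  "masterpiece, detailed fur\n" +
    "Negative prompt: blurry\n" +
    "Steps: 40, CFG scale: 7, Seed: 123456789, Size: 512x768",
);
// Per the tests: params.steps === 40, params.cfg_scale === 7,
// params.width === 512, params.height === 768, the seed is ignored,
// "masterpiece" lands in params.prompt and "blurry" in params.negative_prompt.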
(job-processing worker module, path not shown)

@@ -1,4 +1,14 @@
import { Base64, FileType, FmtDuration, Grammy, GrammyParseMode, IKV, Log } from "../deps.ts";
import {
Async,
Base64,
FileType,
FmtDuration,
Grammy,
GrammyParseMode,
GrammyTypes,
IKV,
Log,
} from "../deps.ts";
import { bot } from "../bot/mod.ts";
import { getGlobalSession, GlobalData, WorkerData } from "../bot/session.ts";
import { fmt, formatUserChat } from "../utils.ts";
@@ -43,7 +53,7 @@ export async function processJobs(): Promise<never> {
if (err instanceof Grammy.GrammyError || err instanceof SdApiError) {
await bot.api.sendMessage(
job.value.request.chat.id,
`Failed to generate your prompt: ${err.message}`,
`Failed to generate your prompt using ${worker.name}: ${err.message}`,
{ reply_to_message_id: job.value.request.message_id },
).catch(() => undefined);
await job.update({ status: { type: "waiting" } }).catch(() => undefined);
@@ -141,7 +151,6 @@ async function processJob(job: IKV.Model<JobSchema>, worker: WorkerData, config:
job.value.reply.chat.id,
job.value.reply.message_id,
`Uploading your images...`,
{ maxAttempts: 1 },
).catch(() => undefined);
}
@@ -168,31 +177,45 @@ async function processJob(job: IKV.Model<JobSchema>, worker: WorkerData, config:
: [],
]);
// parse files from reply JSON
const inputFiles = await Promise.all(
response.images.map(async (imageBase64, idx) => {
const imageBuffer = Base64.decode(imageBase64);
const imageType = await FileType.fileTypeFromBuffer(imageBuffer);
if (!imageType) throw new Error("Unknown file type returned from worker");
return Grammy.InputMediaBuilder.photo(
new Grammy.InputFile(imageBuffer, `image${idx}.${imageType.ext}`),
// if it can fit, add caption for first photo
idx === 0 && caption.text.length <= 1024
? { caption: caption.text, caption_entities: caption.entities }
: undefined,
);
}),
);
let sendMediaAttempt = 0;
let resultMessages: GrammyTypes.Message.MediaMessage[] | undefined;
while (true) {
sendMediaAttempt++;
// parse files from reply JSON
const inputFiles = await Promise.all(
response.images.map(async (imageBase64, idx) => {
const imageBuffer = Base64.decode(imageBase64);
const imageType = await FileType.fileTypeFromBuffer(imageBuffer);
if (!imageType) throw new Error("Unknown file type returned from worker");
return Grammy.InputMediaBuilder.photo(
new Grammy.InputFile(imageBuffer, `image${idx}.${imageType.ext}`),
// if it can fit, add caption for first photo
idx === 0 && caption.text.length <= 1024
? { caption: caption.text, caption_entities: caption.entities }
: undefined,
);
}),
);
// send the result to telegram
try {
resultMessages = await bot.api.sendMediaGroup(job.value.request.chat.id, inputFiles, {
reply_to_message_id: job.value.request.message_id,
maxAttempts: 5,
});
break;
} catch (err) {
logger().warning(`Sending images (attempt ${sendMediaAttempt}) failed: ${err}`);
if (sendMediaAttempt >= 5) throw err;
await Async.delay(15000);
}
}
// send the result to telegram
const resultMessage = await bot.api.sendMediaGroup(job.value.request.chat.id, inputFiles, {
reply_to_message_id: job.value.request.message_id,
maxAttempts: 5,
});
// send caption in separate message if it couldn't fit
if (caption.text.length > 1024 && caption.text.length <= 4096) {
await bot.api.sendMessage(job.value.request.chat.id, caption.text, {
reply_to_message_id: resultMessage[0].message_id,
reply_to_message_id: resultMessages[0].message_id,
entities: caption.entities,
});
}
@@ -210,7 +233,9 @@ async function processJob(job: IKV.Model<JobSchema>, worker: WorkerData, config:
status: { type: "done", info: response.info, startDate, endDate: new Date() },
});
logger().debug(
`Job finished for ${formatUserChat(job.value.request)} using ${worker.name}`,
`Job finished for ${formatUserChat(job.value.request)} using ${worker.name}${
sendMediaAttempt > 1 ? ` after ${sendMediaAttempt} attempts` : ""
}`,
);
}