facemorph.ts

import { isValidImage } from "../util";
import Jimp from "jimp";
import { client } from "../client";
import * as cv from "opencv4nodejs";
import * as path from "path";
import request from "request-promise-native";
import { Message, MessageAttachment } from "discord.js";
import { getRepository } from "typeorm";
import { FaceCaptionMessage, FaceCaptionType } from "@shared/db/entity/FaceCaptionMessage";
import { KnownChannel } from "@shared/db/entity/KnownChannel";
import { CommandSet, Action, ActionType, Command } from "src/model/command";
import { logger } from "src/logging";

const EMOTE_GUILD = "505333548694241281";

const animeCascade = new cv.CascadeClassifier(path.resolve(process.cwd(), "animu.xml"));
const faceCascade = new cv.CascadeClassifier(cv.HAAR_FRONTALFACE_ALT2);

const CAPTION_IMG_SIZE = 300;
const CAPTION_PROBABILITY = 0.33;
const CAPTION_OFFSET = 5;

type ImageProcessor = (faces: cv.Rect[], data: Buffer) => Promise<Jimp>;
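
/**
 * Command set that looks for faces (both regular and anime) in posted images
 * and either covers them with server emotes or crops and captions them.
 */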
@CommandSet
export class Facemorph {
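    /** Returns true when the two rectangles overlap. */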
    intersects(r1: cv.Rect, r2: cv.Rect): boolean {
        return (
            r1.x <= r2.x + r2.width &&
            r1.x + r1.width >= r2.x &&
            r1.y <= r2.y + r2.height &&
            r1.y + r1.height >= r2.y
        );
    }
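
    /**
     * Covers every detected face with a random static emote from EMOTE_GUILD.
     * Around Christmas the pool is restricted to PADORU emotes; when FOOLS
     * mode is enabled a fixed list of emote IDs is used instead.
     */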
    morphFaces = async (faces: cv.Rect[], data: Buffer): Promise<Jimp> => {
        const padoru = Math.random() <= this.getPadoruChance();
        let jimpImage = await Jimp.read(data);

        const emoteGuild = client.bot.guilds.resolve(EMOTE_GUILD);
        if (!emoteGuild)
            return jimpImage;

        const emojiKeys = process.env.FOOLS != "TRUE"
            ? [
                ...emoteGuild.emojis.cache
                    .filter(e => !e.animated && e.name.startsWith("PADORU") == padoru)
                    .keys()
            ]
            : [
                "505335829565276160",
                "430434087157760003",
                "456472341874999297",
                "649677767348060170",
                "589706788782342183",
                "665272109227835422"
            ];

        for (const rect of faces) {
            const dx = rect.x + rect.width / 2;
            const dy = rect.y + rect.height / 2;
            const emojiKey = emojiKeys[Math.floor(Math.random() * emojiKeys.length)];
            const emoji = client.bot.emojis.resolve(emojiKey);
            if (!emoji)
                throw new Error("Failed to resolve emoji!");

            let emojiImage = await Jimp.read(emoji.url);
            let ew = emojiImage.getWidth();
            let eh = emojiImage.getHeight();

            const CONSTANT_SCALE = 1.1;
            const scaleFactor = (Math.max(rect.width, rect.height) / Math.min(ew, eh)) * CONSTANT_SCALE;
            ew *= scaleFactor;
            eh *= scaleFactor;

            emojiImage = emojiImage.scale(scaleFactor);
            jimpImage = jimpImage.composite(emojiImage, dx - ew / 2, dy - eh / 2);
        }

        return jimpImage;
    };
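
    /** Fetches a random caption fragment of the given type from the database. */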
    async getRandomCaption(type: FaceCaptionType): Promise<FaceCaptionMessage | null> {
        const repo = getRepository(FaceCaptionMessage);
        const caption = await repo.query(
            `select message
             from face_caption_message
             where type = $1
             order by random()
             limit 1`,
            [type]
        ) as FaceCaptionMessage[];

        if (caption.length == 0)
            return null;
        return caption[0];
    }
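
    /**
     * Crops a random detected face into a square, scales it to CAPTION_IMG_SIZE
     * and prints a caption above it (PADORU or FOOLS variants, otherwise a
     * random prefix/postfix pair from the database).
     */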
    captionFace = async (faces: cv.Rect[], data: Buffer): Promise<Jimp> => {
        const padoru = Math.random() <= this.getPadoruChance();
        const face = faces[Math.floor(Math.random() * faces.length)];
        const squaredFace = await face.toSquareAsync();
        const targetSize = CAPTION_IMG_SIZE;

        const img = await Jimp.read(data);
        let tempImg = await Jimp.create(squaredFace.width, squaredFace.height);
        tempImg = await tempImg.blit(
            img,
            0,
            0,
            squaredFace.x,
            squaredFace.y,
            squaredFace.width,
            squaredFace.height
        );
        tempImg = await tempImg.scale(targetSize / squaredFace.width);

        const font = await Jimp.loadFont(padoru ? Jimp.FONT_SANS_16_WHITE : Jimp.FONT_SANS_16_BLACK);

        let text = "";
        if (padoru)
            text = "PADORU PADORU";
        else if (process.env.FOOLS == "TRUE") {
            const titles = ["They are horse", "Neigh!", "Insert carrots into them!", "They will become horse!", "They will serve Geoffrey!", "tfw no carrots"];
            text = titles[Math.floor(Math.random() * titles.length)];
        } else {
            const prefixMessage = (await this.getRandomCaption(FaceCaptionType.PREFIX))?.message ?? "Feed them";
            const postfixMessage = (await this.getRandomCaption(FaceCaptionType.POSTFIX))?.message ?? "carrots";
            text = `${prefixMessage} ${postfixMessage}`;
        }

        const h = Jimp.measureTextHeight(font, text, targetSize - CAPTION_OFFSET * 2);
        let finalImage = await Jimp.create(targetSize, targetSize + h + CAPTION_OFFSET * 2, padoru ? "#FD2027" : "#FFFFFF");
        finalImage = await finalImage.print(
            font,
            CAPTION_OFFSET,
            CAPTION_OFFSET,
            text,
            finalImage.getWidth() - CAPTION_OFFSET
        );
        finalImage = await finalImage.composite(tempImg, 0, CAPTION_OFFSET * 2 + h);
        return finalImage;
    };
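
    /**
     * Probability of PADORU mode: zero outside December 1–25, then ramps up
     * towards Christmas as 1 / (27 - day of month).
     */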
    getPadoruChance(): number {
        const now = new Date();
        if (now.getUTCMonth() != 11 || now.getUTCDate() > 25)
            return 0;
        return 1 / (27.0 - now.getUTCDate());
    }
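
    /**
     * Downloads the attachment, detects faces with both the anime and the
     * frontal-face cascades, drops overlapping duplicate detections (keeping
     * the larger rectangle), runs the given processor (or a random
     * caption/morph) and posts the result back to the channel.
     */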
    async processFaceSwap(message: Message, attachmentUrl: string, processor?: ImageProcessor, failMessage?: string, successMessage?: string): Promise<void> {
        const data = await request(attachmentUrl, { encoding: null }) as Buffer;
        const im = await cv.imdecodeAsync(data, cv.IMREAD_COLOR);
        const gray = await im.cvtColorAsync(cv.COLOR_BGR2GRAY);
        const normGray = await gray.equalizeHistAsync();

        const animeFaces = await animeCascade.detectMultiScaleAsync(
            normGray,
            1.1,
            5,
            0,
            new cv.Size(24, 24)
        );
        const normalFaces = await faceCascade.detectMultiScaleAsync(gray);

        if (animeFaces.objects.length == 0 && normalFaces.objects.length == 0) {
            if (failMessage) message.channel.send(failMessage);
            return;
        }

        const faces = [...normalFaces.objects, ...animeFaces.objects];
        let normalCount = normalFaces.objects.length;
        let animeCount = animeFaces.objects.length;

        for (let i = 0; i < normalCount; i++) {
            const rNormal = faces[i];
            if (animeCount == 0) break;
            for (let j = normalCount; j < faces.length; j++) {
                const rAnime = faces[j];
                if (this.intersects(rAnime, rNormal)) {
                    const animeA = rAnime.width * rAnime.height;
                    const faceA = rNormal.width * rNormal.height;
                    if (animeA > faceA) {
                        faces.splice(i, 1);
                        normalCount--;
                        i--;
                        break;
                    } else {
                        faces.splice(j, 1);
                        animeCount--;
                        j--;
                    }
                }
            }
        }

        let jimpImage: Jimp;
        if (processor)
            jimpImage = await processor(faces, data);
        else {
            if (Math.random() <= CAPTION_PROBABILITY)
                jimpImage = await this.captionFace(faces, data);
            else
                jimpImage = await this.morphFaces(faces, data);
        }

        jimpImage.quality(90);
        const buffer = await jimpImage.getBufferAsync(Jimp.MIME_JPEG);
        const messageContents =
            successMessage ||
            `I noticed a face in the image. I think this looks better ${client.bot.emojis.resolve("505076258753740810")?.toString() ?? ":)"}`;
        message.channel.send(messageContents, {
            files: [buffer]
        });
    }
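
    /**
     * Finds the most recent cached message in the channel that has a valid
     * image attachment and runs the given processor on it.
     */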
    processLastImage(msg: Message, processor: ImageProcessor): void {
        type AttachedMessage = { msg: Message, att: MessageAttachment };
        const lastImagedMessage = msg.channel.messages.cache
            .mapValues(m => ({ msg: m, att: m.attachments.find(v => isValidImage(v.name)) }))
            .filter(v => v.att != undefined)
            .last() as AttachedMessage;

        if (!lastImagedMessage) {
            msg.channel.send(`${msg.author.toString()} Sorry, I couldn't find any recent messages with images.`);
            return;
        }

        const replyEmoji = client.bot.emojis.resolve("505076258753740810");
        const emojiText = replyEmoji ? replyEmoji.toString() : "Jiiii~";
        this.processFaceSwap(
            msg,
            lastImagedMessage.att.url,
            processor,
            `${msg.author.toString()} Nice image! I don't see anything interesting, though.`,
            `${msg.author.toString()} ${emojiText}`
        ).catch(err => logger.error(`Failed to run faceapp on message ${msg.id}`, err));
    }
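
    /**
     * MESSAGE action: when an image is posted in a known channel, face-swaps it
     * with probability faceMorphProbability. Messages that directly mention the
     * bot are skipped, presumably so morphProvidedImage can handle them instead.
     */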
    @Action(ActionType.MESSAGE)
    async morphRandomImage(actionsDone: boolean, msg: Message): Promise<boolean> {
        if (actionsDone) return false;
        if (msg.mentions.users.size > 0 && msg.mentions.users.first()?.id == client.botUser.id)
            return false;

        const imageAttachment = msg.attachments.find(v => isValidImage(v.name));
        if (imageAttachment) {
            const repo = getRepository(KnownChannel);
            const knownChannel = await repo.findOne({
                where: { channelId: msg.channel.id },
                select: ["faceMorphProbability"]
            });
            if (!knownChannel || Math.random() > knownChannel.faceMorphProbability)
                return false;

            this.processFaceSwap(msg, imageAttachment.url).catch(err =>
                logger.error(`Failed to run faceapp on message ${msg.id}`, err)
            );
            return true;
        }
        return false;
    }
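
    /**
     * DIRECT_MENTION action: when the bot is mentioned with an image attached,
     * captions the face if the message starts with "caption this", otherwise
     * morphs the faces with emotes.
     */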
    @Action(ActionType.DIRECT_MENTION)
    async morphProvidedImage(actionsDone: boolean, msg: Message, content: string): Promise<boolean> {
        if (actionsDone) return false;

        const image = msg.attachments.find(v => isValidImage(v.name));
        if (!image) {
            if (msg.attachments.size > 0) {
                msg.channel.send(
                    `${msg.author.toString()} Nice, but I can't do anything to it! (Invalid file type)`
                );
                return true;
            }
            return false;
        }

        let processor: ImageProcessor;
        if (content.startsWith("caption this"))
            processor = this.captionFace;
        else
            processor = this.morphFaces;

        const replyEmoji = client.bot.emojis.resolve("505076258753740810");
        const emojiText = replyEmoji ? replyEmoji.toString() : "Jiiii~";
        this.processFaceSwap(
            msg,
            image.url,
            processor,
            `${msg.author.toString()} Nice image! I don't see anything interesting, though.`,
            `${msg.author.toString()} ${emojiText}`
        ).catch(err => logger.error(`Failed to run faceapp on message ${msg.id}`, err));
        return true;
    }
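
    /** Chat commands that run the processors on the last image posted in the channel. */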
    @Command({
        pattern: "caption last image"
    })
    captionLastImage(msg: Message): void {
        this.processLastImage(msg, this.captionFace);
    }

    @Command({
        pattern: "look at last image"
    })
    lookLastImage(msg: Message): void {
        this.processLastImage(msg, this.morphFaces);
    }
}