|
@@ -1,8 +1,6 @@
|
|
|
import { isValidImage } from "../util";
|
|
|
import Jimp from "jimp";
|
|
|
import { client } from "../client";
|
|
|
-import * as cv from "opencv4nodejs";
|
|
|
-import * as path from "path";
|
|
|
import request from "request-promise-native";
|
|
|
import { Message, MessageAttachment } from "discord.js";
|
|
|
import { getRepository } from "typeorm";
|
|
@@ -10,30 +8,58 @@ import { FaceCaptionMessage, FaceCaptionType } from "@shared/db/entity/FaceCapti
|
|
|
import { KnownChannel } from "@shared/db/entity/KnownChannel";
|
|
|
import { CommandSet, Action, ActionType, Command } from "src/model/command";
|
|
|
import { logger } from "src/logging";
|
|
|
+import { Response } from "request";
|
|
|
+import needle from "needle";
|
|
|
|
|
|
const EMOTE_GUILD = "505333548694241281";
|
|
|
|
|
|
-const animeCascade = new cv.CascadeClassifier(path.resolve(process.cwd(), "animu.xml"));
|
|
|
-const faceCascade = new cv.CascadeClassifier(cv.HAAR_FRONTALFACE_ALT2);
|
|
|
-
|
|
|
const CAPTION_IMG_SIZE = 300;
|
|
|
const CAPTION_PROBABILITY = 0.33;
|
|
|
|
|
|
-type ImageProcessor = (faces: cv.Rect[], data: Buffer) => Promise<Jimp>;
|
|
|
+interface Rect {
|
|
|
+ x: number,
|
|
|
+ y: number,
|
|
|
+ w: number,
|
|
|
+ h: number
|
|
|
+}
|
|
|
+
|
|
|
+interface ErrorInfo {
|
|
|
+  ok: false;
|
|
|
+ error: string;
|
|
|
+}
|
|
|
+
|
|
|
+interface FaceData {
|
|
|
+  ok: true,
|
|
|
+ animeFaces: Rect[],
|
|
|
+ normalFaces: Rect[]
|
|
|
+}
|
|
|
+
|
|
|
+type FaceDetectionResponse = FaceData | ErrorInfo;
|
|
|
+
|
|
|
+function isError(resp: FaceDetectionResponse): resp is ErrorInfo {
|
|
|
+ return !resp.ok;
|
|
|
+}
|
|
|
+
|
|
|
+type ImageProcessor = (faces: Rect[], data: Buffer) => Promise<Jimp>;
|
|
|
const CAPTION_OFFSET = 5;
|
|
|
|
|
|
@CommandSet
|
|
|
export class Facemorph {
|
|
|
|
|
|
- intersects(r1: cv.Rect, r2: cv.Rect): boolean {
|
|
|
+ squareFace(rect: Rect): Rect {
|
|
|
+ const s = Math.min(rect.w, rect.h);
|
|
|
+ return {...rect, w: s, h: s};
|
|
|
+ }
|
|
|
+
|
|
|
+ intersects(r1: Rect, r2: Rect): boolean {
|
|
|
return (
|
|
|
- r1.x <= r2.x + r2.width &&
|
|
|
- r1.x + r1.width >= r2.x &&
|
|
|
- (r1.y <= r2.y + r2.height && r1.y + r1.height >= r2.y)
|
|
|
+ r1.x <= r2.x + r2.w &&
|
|
|
+ r1.x + r1.w >= r2.x &&
|
|
|
+ (r1.y <= r2.y + r2.h && r1.y + r1.h >= r2.y)
|
|
|
);
|
|
|
}
|
|
|
|
|
|
- morphFaces = async (faces: cv.Rect[], data: Buffer): Promise<Jimp> => {
|
|
|
+ morphFaces = async (faces: Rect[], data: Buffer): Promise<Jimp> => {
|
|
|
const padoru = Math.random() <= this.getPadoruChance();
|
|
|
let jimpImage = await Jimp.read(data);
|
|
|
const emoteGuild = client.bot.guilds.resolve(EMOTE_GUILD);
|
|
@@ -54,8 +80,8 @@ export class Facemorph {
|
|
|
];
|
|
|
|
|
|
for (const rect of faces) {
|
|
|
- const dx = rect.x + rect.width / 2;
|
|
|
- const dy = rect.y + rect.height / 2;
|
|
|
+ const dx = rect.x + rect.w / 2;
|
|
|
+ const dy = rect.y + rect.h / 2;
|
|
|
const emojiKey = emojiKeys[Math.floor(Math.random() * emojiKeys.length)];
|
|
|
const emoji = client.bot.emojis.resolve(emojiKey);
|
|
|
if (!emoji)
|
|
@@ -65,7 +91,7 @@ export class Facemorph {
|
|
|
let eh = emojiImage.getHeight();
|
|
|
|
|
|
const CONSTANT_SCALE = 1.1;
|
|
|
- const scaleFactor = (Math.max(rect.width, rect.height) / Math.min(ew, eh)) * CONSTANT_SCALE;
|
|
|
+ const scaleFactor = (Math.max(rect.w, rect.h) / Math.min(ew, eh)) * CONSTANT_SCALE;
|
|
|
ew *= scaleFactor;
|
|
|
eh *= scaleFactor;
|
|
|
|
|
@@ -89,24 +115,24 @@ export class Facemorph {
|
|
|
return caption[0];
|
|
|
}
|
|
|
|
|
|
- captionFace = async (faces: cv.Rect[], data: Buffer): Promise<Jimp> => {
|
|
|
+ captionFace = async (faces: Rect[], data: Buffer): Promise<Jimp> => {
|
|
|
const padoru = Math.random() <= this.getPadoruChance();
|
|
|
const face = faces[Math.floor(Math.random() * faces.length)];
|
|
|
- const squaredFace = await face.toSquareAsync();
|
|
|
+ const squaredFace = this.squareFace(face);
|
|
|
const targetSize = CAPTION_IMG_SIZE;
|
|
|
const img = await Jimp.read(data);
|
|
|
|
|
|
- let tempImg = await Jimp.create(squaredFace.width, squaredFace.height);
|
|
|
+ let tempImg = await Jimp.create(squaredFace.w, squaredFace.h);
|
|
|
tempImg = await tempImg.blit(
|
|
|
img,
|
|
|
0,
|
|
|
0,
|
|
|
squaredFace.x,
|
|
|
squaredFace.y,
|
|
|
- squaredFace.width,
|
|
|
- squaredFace.height
|
|
|
+ squaredFace.w,
|
|
|
+ squaredFace.h
|
|
|
);
|
|
|
- tempImg = await tempImg.scale(targetSize / squaredFace.width);
|
|
|
+ tempImg = await tempImg.scale(targetSize / squaredFace.w);
|
|
|
|
|
|
const font = await Jimp.loadFont(padoru ? Jimp.FONT_SANS_16_WHITE : Jimp.FONT_SANS_16_BLACK);
|
|
|
let text = "";
|
|
@@ -145,27 +171,36 @@ export class Facemorph {
|
|
|
|
|
|
async processFaceSwap(message: Message, attachmentUrl: string, processor?: ImageProcessor, failMessage?: string, successMessage?: string): Promise<void> {
|
|
|
const data = await request(attachmentUrl, { encoding: null }) as Buffer;
|
|
|
- const im = await cv.imdecodeAsync(data, cv.IMREAD_COLOR);
|
|
|
- const gray = await im.cvtColorAsync(cv.COLOR_BGR2GRAY);
|
|
|
- const normGray = await gray.equalizeHistAsync();
|
|
|
- const animeFaces = await animeCascade.detectMultiScaleAsync(
|
|
|
- normGray,
|
|
|
- 1.1,
|
|
|
- 5,
|
|
|
- 0,
|
|
|
- new cv.Size(24, 24)
|
|
|
- );
|
|
|
- const normalFaces = await faceCascade.detectMultiScaleAsync(gray);
|
|
|
|
|
|
- if (animeFaces.objects.length == 0 && normalFaces.objects.length == 0) {
|
|
|
+ const result = await needle("post", `http://${process.env.FACEDETECT_URL}/process`, {
|
|
|
+ img_data: {
|
|
|
+ buffer: data,
|
|
|
+ filename: "image.png",
|
|
|
+ content_type: "application/octet-stream"
|
|
|
+ }
|
|
|
+ }, { multipart: true });
|
|
|
+
|
|
|
+    if (result.statusCode !== 200) {
|
|
|
+ logger.error("Face detection failed! Got response %s", result.statusCode);
|
|
|
+ return;
|
|
|
+ }
|
|
|
+
|
|
|
+ const faceRects = result.body as FaceDetectionResponse;
|
|
|
+
|
|
|
+ if (isError(faceRects)) {
|
|
|
+      logger.error("Face detection failed! Got error %s", faceRects.error);
|
|
|
+ return;
|
|
|
+ }
|
|
|
+
|
|
|
+ if (faceRects.animeFaces.length == 0 && faceRects.normalFaces.length == 0) {
|
|
|
if (failMessage) message.channel.send(failMessage);
|
|
|
return;
|
|
|
}
|
|
|
|
|
|
- const faces = [...normalFaces.objects, ...animeFaces.objects];
|
|
|
+ const faces = [...faceRects.normalFaces, ...faceRects.animeFaces];
|
|
|
|
|
|
- let normalCount = normalFaces.objects.length;
|
|
|
- let animeCount = animeFaces.objects.length;
|
|
|
+ let normalCount = faceRects.normalFaces.length;
|
|
|
+ let animeCount = faceRects.animeFaces.length;
|
|
|
|
|
|
for (let i = 0; i < normalCount; i++) {
|
|
|
const rNormal = faces[i];
|
|
@@ -176,8 +211,8 @@ export class Facemorph {
|
|
|
const rAnime = faces[j];
|
|
|
|
|
|
if (this.intersects(rAnime, rNormal)) {
|
|
|
- const animeA = rAnime.width * rAnime.height;
|
|
|
- const faceA = rNormal.width * rNormal.height;
|
|
|
+ const animeA = rAnime.w * rAnime.h;
|
|
|
+ const faceA = rNormal.w * rNormal.h;
|
|
|
|
|
|
if (animeA > faceA) {
|
|
|
faces.splice(i, 1);
|