
/*
 * SPDX-FileCopyrightText: syuilo and misskey-project
 * SPDX-License-Identifier: AGPL-3.0-only
 */
import * as fs from 'node:fs';
import { fileURLToPath } from 'node:url';
import { dirname } from 'node:path';
import { Injectable } from '@nestjs/common';
import * as nsfw from 'nsfwjs';
import si from 'systeminformation';
import { Mutex } from 'async-mutex';
import fetch from 'node-fetch';
import { bindThis } from '@/decorators.js';
const _filename = fileURLToPath(import.meta.url);
|
|
const _dirname = dirname(_filename);
|
|
|
|
const REQUIRED_CPU_FLAGS_X64 = ['avx2', 'fma'];
|
|
let isSupportedCpu: undefined | boolean = undefined;
@Injectable()
|
|
export class AiService {
|
|
private model: nsfw.NSFWJS;
|
|
private modelLoadMutex: Mutex = new Mutex();
|
|
|
|
constructor(
|
|
) {
|
|
}
|
|
|
|
@bindThis
|
|
public async detectSensitive(path: string): Promise<nsfw.PredictionType[] | null> {
|
|
try {
|
|
if (isSupportedCpu === undefined) {
|
|
isSupportedCpu = await this.computeIsSupportedCpu();
|
|
}
|
|
|
|
if (!isSupportedCpu) {
|
|
console.error('This CPU cannot use TensorFlow.');
|
|
return null;
|
|
}
|
|
|
|
const tf = await import('@tensorflow/tfjs-node');
|
|
tf.env().global.fetch = fetch;
|
|
|
|
if (this.model == null) {
|
|
await this.modelLoadMutex.runExclusive(async () => {
|
|
if (this.model == null) {
|
|
this.model = await nsfw.load(`file://${_dirname}/../../nsfw-model/`, { size: 299 });
|
|
}
|
|
});
|
|
}
|
|
|
|
const buffer = await fs.promises.readFile(path);
|
|
const image = await tf.node.decodeImage(buffer, 3) as any;
|
|
try {
|
|
const predictions = await this.model.classify(image);
|
|
return predictions;
|
|
} finally {
|
|
image.dispose();
|
|
}
|
|
} catch (err) {
|
|
console.error(err);
|
|
return null;
|
|
}
|
|
}
|
|
|
|
private async computeIsSupportedCpu(): Promise<boolean> {
|
|
switch (process.arch) {
|
|
case 'x64': {
|
|
const cpuFlags = await this.getCpuFlags();
|
|
return REQUIRED_CPU_FLAGS_X64.every(required => cpuFlags.includes(required));
|
|
}
|
|
case 'arm64': {
|
|
// As far as I know, no required CPU flags for ARM64.
|
|
return true;
|
|
}
|
|
default: {
|
|
return false;
|
|
}
|
|
}
|
|
}
|
|
|
|
@bindThis
|
|
private async getCpuFlags(): Promise<string[]> {
|
|
const str = await si.cpuFlags();
|
|
return str.split(/\s+/);
|
|
}
|
|
}