1114 lines
28 KiB
TypeScript
1114 lines
28 KiB
TypeScript
import { removeBackgroundFromRgba } from '../../../packages/shared/src/assets/chromaKey';
|
||
import {
|
||
AnimationState,
|
||
type Character,
|
||
type CharacterAnimationConfig,
|
||
} from '../../types';
|
||
|
||
// Dimensions (px) of the square "master visual" canvas that animation frames
// are derived from.
export const MASTER_VISUAL_WIDTH = 1024;
export const MASTER_VISUAL_HEIGHT = 1024;
// Dimensions (px) of each generated animation frame.
export const GENERATED_FRAME_WIDTH = 192;
export const GENERATED_FRAME_HEIGHT = 256;

// Animations a character draft must provide.
export const REQUIRED_BASE_ANIMATIONS: AnimationState[] = [
  AnimationState.ATTACK,
  AnimationState.RUN,
];

// Animations that improve a draft but are not mandatory.
export const OPTIONAL_BASE_ANIMATIONS: AnimationState[] = [
  AnimationState.IDLE,
  AnimationState.DIE,
];
|
||
|
||
/** One rendered master-visual candidate: a PNG data URL plus its pixel size. */
export type DraftVisualCandidate = {
  id: string;
  label: string;
  dataUrl: string;
  width: number;
  height: number;
};

/** A generated animation clip: per-frame image data URLs plus playback metadata. */
export type DraftAnimationClip = {
  animation: AnimationState;
  // Frame images as data URLs, in playback order.
  frames: string[];
  fps: number;
  loop: boolean;
  frameWidth: number;
  frameHeight: number;
  // Optional source video the clip was sampled from (kept for preview).
  previewVideoPath?: string;
};
|
||
|
||
// Fallback per-animation asset config (frame count, file prefix, folder name)
// used when a character's own animationMap has no entry for an animation.
// NOTE(review): prefix/folder casing is inconsistent ('Attack'/'attack',
// 'Wall Slide'/'Wall Slide') — presumably this mirrors the on-disk asset
// names exactly; confirm against the asset tree before normalizing.
const DEFAULT_CHARACTER_ANIMATIONS: Record<
  AnimationState,
  CharacterAnimationConfig
> = {
  [AnimationState.ACQUIRE]: {
    frames: 1,
    prefix: 'acquire',
    folder: 'acquire',
  },
  [AnimationState.ATTACK]: { frames: 1, prefix: 'Attack', folder: 'attack' },
  [AnimationState.RUN]: { frames: 1, prefix: 'Run', folder: 'run' },
  [AnimationState.DOUBLE_JUMP]: {
    frames: 1,
    prefix: 'double jump',
    folder: 'double jump',
  },
  [AnimationState.JUMP_ATTACK]: {
    frames: 1,
    prefix: 'jump attack',
    folder: 'jump attack',
  },
  [AnimationState.DASH]: { frames: 1, prefix: 'dash', folder: 'dash' },
  [AnimationState.HURT]: { frames: 1, prefix: 'hurt', folder: 'hurt' },
  [AnimationState.DIE]: { frames: 1, prefix: 'die', folder: 'die' },
  [AnimationState.CLIMB]: { frames: 1, prefix: 'Climb', folder: 'climb' },
  [AnimationState.SKILL1]: { frames: 1, prefix: 'skill1', folder: 'skill1' },
  [AnimationState.SKILL1_JUMP]: {
    frames: 1,
    prefix: 'skill1 jump',
    folder: 'skill1 jump',
  },
  [AnimationState.SKILL1_BULLET]: {
    frames: 1,
    prefix: 'skill1 bullet',
    folder: 'skill1 bullet',
  },
  [AnimationState.SKILL1_BULLET_FX]: {
    frames: 1,
    prefix: 'skill1 bullet FX',
    folder: 'skill1 bullet FX',
  },
  [AnimationState.SKILL2]: { frames: 1, prefix: 'skill2', folder: 'skill2' },
  [AnimationState.SKILL2_JUMP]: {
    frames: 1,
    prefix: 'skill2 jump',
    folder: 'skill2 jump',
  },
  [AnimationState.SKILL3]: { frames: 1, prefix: 'skill3', folder: 'skill3' },
  [AnimationState.SKILL3_JUMP]: {
    frames: 1,
    prefix: 'skill3 jump',
    folder: 'skill3 jump',
  },
  [AnimationState.SKILL3_BULLET]: {
    frames: 1,
    prefix: 'skill3 bullet',
    folder: 'skill3 bullet',
  },
  [AnimationState.SKILL3_BULLET_FX]: {
    frames: 1,
    prefix: 'skill3 bullet FX',
    folder: 'skill3 bullet FX',
  },
  [AnimationState.SKILL4]: { frames: 1, prefix: 'skill4', folder: 'skill4' },
  [AnimationState.WALL_SLIDE]: {
    frames: 1,
    prefix: 'Wall Slide',
    folder: 'Wall Slide',
  },
  [AnimationState.IDLE]: { frames: 1, prefix: 'Idle', folder: 'idle' },
  [AnimationState.JUMP]: { frames: 1, prefix: 'Jump', folder: 'jump' },
};
|
||
|
||
/** Transform (plus optional effects) applied to the sprite for a single frame. */
type PoseTransform = {
  offsetX: number; // horizontal displacement in px
  offsetY: number; // vertical displacement in px (negative moves the sprite up)
  scaleX: number;
  scaleY: number;
  rotation: number; // radians
  alpha?: number; // sprite opacity; treated as 1 when omitted
  tintColor?: string; // CSS color composited over the sprite via 'source-atop'
  afterImage?: boolean; // draw trailing ghost copies behind the sprite (DASH)
};

/** Procedural animation recipe: frame count, playback rate, and a pose curve. */
type ActionTemplate = {
  frames: number;
  fps: number;
  loop: boolean;
  // progress runs 0..1 across the clip; frameIndex/totalFrames give the
  // discrete position for templates that need it.
  poseAt: (
    progress: number,
    frameIndex: number,
    totalFrames: number,
  ) => PoseTransform;
};
|
||
|
||
const ACTION_TEMPLATES: Record<AnimationState, ActionTemplate> = {
|
||
[AnimationState.IDLE]: {
|
||
frames: 8,
|
||
fps: 8,
|
||
loop: true,
|
||
poseAt: (progress) => {
|
||
const wave = Math.sin(progress * Math.PI * 2);
|
||
return {
|
||
offsetX: wave * 1.5,
|
||
offsetY: wave * -4,
|
||
scaleX: 1 - wave * 0.015,
|
||
scaleY: 1 + wave * 0.02,
|
||
rotation: wave * 0.01,
|
||
};
|
||
},
|
||
},
|
||
[AnimationState.ACQUIRE]: {
|
||
frames: 6,
|
||
fps: 10,
|
||
loop: false,
|
||
poseAt: (progress) => ({
|
||
offsetX: progress < 0.5 ? progress * 6 : (1 - progress) * 6,
|
||
offsetY: progress < 0.5 ? progress * -18 : -18 + (progress - 0.5) * 18,
|
||
scaleX: 1,
|
||
scaleY: 1,
|
||
rotation: progress < 0.5 ? -0.08 * progress : -0.04 * (1 - progress),
|
||
}),
|
||
},
|
||
[AnimationState.ATTACK]: {
|
||
frames: 6,
|
||
fps: 12,
|
||
loop: false,
|
||
poseAt: (progress) => ({
|
||
offsetX: progress < 0.55 ? progress * 30 : 30 - (progress - 0.55) * 30,
|
||
offsetY: progress < 0.55 ? progress * -12 : -12 + (progress - 0.55) * 10,
|
||
scaleX: 1 + Math.max(0, Math.sin(progress * Math.PI)) * 0.06,
|
||
scaleY: 1 - Math.max(0, Math.sin(progress * Math.PI)) * 0.03,
|
||
rotation: progress < 0.55 ? -0.12 : 0.05 * (progress - 0.55),
|
||
}),
|
||
},
|
||
[AnimationState.RUN]: {
|
||
frames: 8,
|
||
fps: 10,
|
||
loop: true,
|
||
poseAt: (progress) => {
|
||
const cycle = Math.sin(progress * Math.PI * 2);
|
||
return {
|
||
offsetX: cycle * 8,
|
||
offsetY: Math.abs(cycle) * -10,
|
||
scaleX: 1 + Math.max(0, cycle) * 0.04,
|
||
scaleY: 1 - Math.abs(cycle) * 0.04,
|
||
rotation: cycle * 0.05,
|
||
};
|
||
},
|
||
},
|
||
[AnimationState.JUMP]: {
|
||
frames: 6,
|
||
fps: 10,
|
||
loop: false,
|
||
poseAt: (progress) => {
|
||
const arc = Math.sin(progress * Math.PI);
|
||
return {
|
||
offsetX: 0,
|
||
offsetY: -36 * arc,
|
||
scaleX: 1,
|
||
scaleY: 1 - arc * 0.04,
|
||
rotation: -0.02 + progress * 0.04,
|
||
};
|
||
},
|
||
},
|
||
[AnimationState.DOUBLE_JUMP]: {
|
||
frames: 6,
|
||
fps: 10,
|
||
loop: false,
|
||
poseAt: (progress) => {
|
||
const arc = Math.sin(progress * Math.PI);
|
||
return {
|
||
offsetX: progress < 0.5 ? 6 : -6,
|
||
offsetY: -48 * arc,
|
||
scaleX: 1 + arc * 0.03,
|
||
scaleY: 1 - arc * 0.05,
|
||
rotation: -0.08 + progress * 0.16,
|
||
};
|
||
},
|
||
},
|
||
[AnimationState.JUMP_ATTACK]: {
|
||
frames: 6,
|
||
fps: 12,
|
||
loop: false,
|
||
poseAt: (progress) => {
|
||
const arc = Math.sin(progress * Math.PI);
|
||
return {
|
||
offsetX: progress * 18,
|
||
offsetY: -28 * arc,
|
||
scaleX: 1 + arc * 0.05,
|
||
scaleY: 1 - arc * 0.05,
|
||
rotation: -0.12 + progress * 0.18,
|
||
};
|
||
},
|
||
},
|
||
[AnimationState.DASH]: {
|
||
frames: 5,
|
||
fps: 14,
|
||
loop: false,
|
||
poseAt: (progress) => ({
|
||
offsetX: progress * 42,
|
||
offsetY: -6,
|
||
scaleX: 1 + progress * 0.08,
|
||
scaleY: 1 - progress * 0.04,
|
||
rotation: -0.04,
|
||
afterImage: progress > 0.15,
|
||
}),
|
||
},
|
||
[AnimationState.HURT]: {
|
||
frames: 5,
|
||
fps: 10,
|
||
loop: false,
|
||
poseAt: (progress) => ({
|
||
offsetX: -18 * Math.sin(progress * Math.PI),
|
||
offsetY: 4 * progress,
|
||
scaleX: 1,
|
||
scaleY: 1 - progress * 0.02,
|
||
rotation: 0.08 * Math.sin(progress * Math.PI),
|
||
tintColor: 'rgba(248, 113, 113, 0.22)',
|
||
}),
|
||
},
|
||
[AnimationState.DIE]: {
|
||
frames: 7,
|
||
fps: 8,
|
||
loop: false,
|
||
poseAt: (progress) => ({
|
||
offsetX: progress * 18,
|
||
offsetY: progress * 34,
|
||
scaleX: 1,
|
||
scaleY: 1,
|
||
rotation: progress * 1.35,
|
||
alpha: 1 - progress * 0.18,
|
||
}),
|
||
},
|
||
[AnimationState.CLIMB]: {
|
||
frames: 6,
|
||
fps: 8,
|
||
loop: true,
|
||
poseAt: (progress) => {
|
||
const cycle = Math.sin(progress * Math.PI * 2);
|
||
return {
|
||
offsetX: cycle * 2,
|
||
offsetY: cycle * -12,
|
||
scaleX: 1,
|
||
scaleY: 1,
|
||
rotation: cycle * 0.02,
|
||
};
|
||
},
|
||
},
|
||
[AnimationState.WALL_SLIDE]: {
|
||
frames: 4,
|
||
fps: 8,
|
||
loop: true,
|
||
poseAt: (progress) => ({
|
||
offsetX: -8,
|
||
offsetY: progress * 18,
|
||
scaleX: 1,
|
||
scaleY: 1,
|
||
rotation: -0.05,
|
||
alpha: 0.96,
|
||
}),
|
||
},
|
||
[AnimationState.SKILL1]: {
|
||
frames: 6,
|
||
fps: 10,
|
||
loop: false,
|
||
poseAt: () => ({
|
||
offsetX: 0,
|
||
offsetY: 0,
|
||
scaleX: 1,
|
||
scaleY: 1,
|
||
rotation: 0,
|
||
}),
|
||
},
|
||
[AnimationState.SKILL1_JUMP]: {
|
||
frames: 6,
|
||
fps: 10,
|
||
loop: false,
|
||
poseAt: () => ({
|
||
offsetX: 0,
|
||
offsetY: 0,
|
||
scaleX: 1,
|
||
scaleY: 1,
|
||
rotation: 0,
|
||
}),
|
||
},
|
||
[AnimationState.SKILL1_BULLET]: {
|
||
frames: 4,
|
||
fps: 10,
|
||
loop: false,
|
||
poseAt: () => ({
|
||
offsetX: 0,
|
||
offsetY: 0,
|
||
scaleX: 1,
|
||
scaleY: 1,
|
||
rotation: 0,
|
||
}),
|
||
},
|
||
[AnimationState.SKILL1_BULLET_FX]: {
|
||
frames: 4,
|
||
fps: 10,
|
||
loop: false,
|
||
poseAt: () => ({
|
||
offsetX: 0,
|
||
offsetY: 0,
|
||
scaleX: 1,
|
||
scaleY: 1,
|
||
rotation: 0,
|
||
}),
|
||
},
|
||
[AnimationState.SKILL2]: {
|
||
frames: 6,
|
||
fps: 10,
|
||
loop: false,
|
||
poseAt: () => ({
|
||
offsetX: 0,
|
||
offsetY: 0,
|
||
scaleX: 1,
|
||
scaleY: 1,
|
||
rotation: 0,
|
||
}),
|
||
},
|
||
[AnimationState.SKILL2_JUMP]: {
|
||
frames: 6,
|
||
fps: 10,
|
||
loop: false,
|
||
poseAt: () => ({
|
||
offsetX: 0,
|
||
offsetY: 0,
|
||
scaleX: 1,
|
||
scaleY: 1,
|
||
rotation: 0,
|
||
}),
|
||
},
|
||
[AnimationState.SKILL3]: {
|
||
frames: 6,
|
||
fps: 10,
|
||
loop: false,
|
||
poseAt: () => ({
|
||
offsetX: 0,
|
||
offsetY: 0,
|
||
scaleX: 1,
|
||
scaleY: 1,
|
||
rotation: 0,
|
||
}),
|
||
},
|
||
[AnimationState.SKILL3_JUMP]: {
|
||
frames: 6,
|
||
fps: 10,
|
||
loop: false,
|
||
poseAt: () => ({
|
||
offsetX: 0,
|
||
offsetY: 0,
|
||
scaleX: 1,
|
||
scaleY: 1,
|
||
rotation: 0,
|
||
}),
|
||
},
|
||
[AnimationState.SKILL3_BULLET]: {
|
||
frames: 4,
|
||
fps: 10,
|
||
loop: false,
|
||
poseAt: () => ({
|
||
offsetX: 0,
|
||
offsetY: 0,
|
||
scaleX: 1,
|
||
scaleY: 1,
|
||
rotation: 0,
|
||
}),
|
||
},
|
||
[AnimationState.SKILL3_BULLET_FX]: {
|
||
frames: 4,
|
||
fps: 10,
|
||
loop: false,
|
||
poseAt: () => ({
|
||
offsetX: 0,
|
||
offsetY: 0,
|
||
scaleX: 1,
|
||
scaleY: 1,
|
||
rotation: 0,
|
||
}),
|
||
},
|
||
[AnimationState.SKILL4]: {
|
||
frames: 6,
|
||
fps: 10,
|
||
loop: false,
|
||
poseAt: () => ({
|
||
offsetX: 0,
|
||
offsetY: 0,
|
||
scaleX: 1,
|
||
scaleY: 1,
|
||
rotation: 0,
|
||
}),
|
||
},
|
||
};
|
||
|
||
export function readFileAsDataUrl(file: File) {
|
||
return new Promise<string>((resolve, reject) => {
|
||
const reader = new FileReader();
|
||
reader.onload = () => resolve(String(reader.result ?? ''));
|
||
reader.onerror = () => reject(reader.error ?? new Error('读取文件失败'));
|
||
reader.readAsDataURL(file);
|
||
});
|
||
}
|
||
|
||
export function loadImageFromSource(source: string) {
|
||
return new Promise<HTMLImageElement>((resolve, reject) => {
|
||
const image = new Image();
|
||
image.crossOrigin = 'anonymous';
|
||
image.onload = () => resolve(image);
|
||
image.onerror = () => reject(new Error(`加载图片失败:${source}`));
|
||
image.src = source;
|
||
});
|
||
}
|
||
|
||
function loadVideoFromSource(source: string) {
|
||
return new Promise<HTMLVideoElement>((resolve, reject) => {
|
||
const video = document.createElement('video');
|
||
video.crossOrigin = 'anonymous';
|
||
video.preload = 'auto';
|
||
video.muted = true;
|
||
video.playsInline = true;
|
||
video.onloadeddata = () => resolve(video);
|
||
video.onerror = () => reject(new Error(`加载视频失败:${source}`));
|
||
video.src = source;
|
||
});
|
||
}
|
||
|
||
function createCanvas(width: number, height: number) {
|
||
const canvas = document.createElement('canvas');
|
||
canvas.width = width;
|
||
canvas.height = height;
|
||
const context = canvas.getContext('2d');
|
||
if (!context) {
|
||
throw new Error('无法创建画布上下文');
|
||
}
|
||
return { canvas, context };
|
||
}
|
||
|
||
function drawContainedSource(
|
||
context: CanvasRenderingContext2D,
|
||
source: CanvasImageSource,
|
||
sourceWidth: number,
|
||
sourceHeight: number,
|
||
options: {
|
||
width: number;
|
||
height: number;
|
||
translateX?: number;
|
||
translateY?: number;
|
||
scale?: number;
|
||
rotation?: number;
|
||
alpha?: number;
|
||
},
|
||
) {
|
||
const {
|
||
width,
|
||
height,
|
||
translateX = 0,
|
||
translateY = 0,
|
||
scale = 1,
|
||
rotation = 0,
|
||
alpha = 1,
|
||
} = options;
|
||
const fitScale = Math.min(width / sourceWidth, height / sourceHeight);
|
||
const drawWidth = sourceWidth * fitScale * scale;
|
||
const drawHeight = sourceHeight * fitScale * scale;
|
||
const centerX = width / 2 + translateX;
|
||
const centerY = height / 2 + translateY;
|
||
|
||
context.save();
|
||
context.globalAlpha = alpha;
|
||
context.translate(centerX, centerY);
|
||
context.rotate(rotation);
|
||
context.drawImage(
|
||
source,
|
||
-drawWidth / 2,
|
||
-drawHeight / 2,
|
||
drawWidth,
|
||
drawHeight,
|
||
);
|
||
context.restore();
|
||
}
|
||
|
||
function drawContainedImage(
|
||
context: CanvasRenderingContext2D,
|
||
image: HTMLImageElement,
|
||
options: {
|
||
width: number;
|
||
height: number;
|
||
translateX?: number;
|
||
translateY?: number;
|
||
scale?: number;
|
||
rotation?: number;
|
||
alpha?: number;
|
||
},
|
||
) {
|
||
const {
|
||
width,
|
||
height,
|
||
translateX = 0,
|
||
translateY = 0,
|
||
scale = 1,
|
||
rotation = 0,
|
||
alpha = 1,
|
||
} = options;
|
||
drawContainedSource(context, image, image.width, image.height, {
|
||
width,
|
||
height,
|
||
translateX,
|
||
translateY,
|
||
scale,
|
||
rotation,
|
||
alpha,
|
||
});
|
||
}
|
||
|
||
/**
 * Renders several composition variants (framing/scale/tint) of one source
 * image onto the 1024×1024 master canvas and returns them as PNG candidates.
 */
export async function buildVisualCandidatesFromSource(source: string) {
  const image = await loadImageFromSource(source);
  const variants: Array<{
    id: string;
    label: string;
    scale: number;
    translateY: number;
    tint?: string;
  }> = [
    { id: 'balanced', label: '平衡构图', scale: 1, translateY: 0 },
    { id: 'closer', label: '主体更近', scale: 1.08, translateY: 18 },
    {
      id: 'lighter',
      label: '轻提主体',
      scale: 0.96,
      translateY: -22,
      tint: 'rgba(16, 185, 129, 0.08)',
    },
  ];

  return variants.map((variant) => {
    const { canvas, context } = createCanvas(
      MASTER_VISUAL_WIDTH,
      MASTER_VISUAL_HEIGHT,
    );
    context.clearRect(0, 0, canvas.width, canvas.height);
    // Draw slightly smaller than the canvas (82% × 86%) to leave a margin
    // around the subject.
    drawContainedImage(context, image, {
      width: canvas.width * 0.82,
      height: canvas.height * 0.86,
      translateY: variant.translateY,
      scale: variant.scale,
    });

    if (variant.tint) {
      // 'source-atop' restricts the tint fill to already-painted (subject)
      // pixels, leaving the transparent background untouched.
      context.save();
      context.globalCompositeOperation = 'source-atop';
      context.fillStyle = variant.tint;
      context.fillRect(0, 0, canvas.width, canvas.height);
      context.restore();
    }
    return {
      id: variant.id,
      label: variant.label,
      dataUrl: canvas.toDataURL('image/png'),
      width: canvas.width,
      height: canvas.height,
    } satisfies DraftVisualCandidate;
  });
}
|
||
|
||
function drawShadow(
|
||
context: CanvasRenderingContext2D,
|
||
width: number,
|
||
height: number,
|
||
pose: PoseTransform,
|
||
) {
|
||
context.save();
|
||
context.fillStyle = 'rgba(0, 0, 0, 0.2)';
|
||
context.beginPath();
|
||
context.ellipse(
|
||
width / 2 + pose.offsetX * 0.15,
|
||
height * 0.92 + pose.offsetY * 0.05,
|
||
width * 0.18,
|
||
height * 0.04,
|
||
0,
|
||
0,
|
||
Math.PI * 2,
|
||
);
|
||
context.fill();
|
||
context.restore();
|
||
}
|
||
|
||
function drawTintOverlay(
|
||
context: CanvasRenderingContext2D,
|
||
tintColor: string,
|
||
width: number,
|
||
height: number,
|
||
) {
|
||
context.save();
|
||
context.globalCompositeOperation = 'source-atop';
|
||
context.fillStyle = tintColor;
|
||
context.fillRect(0, 0, width, height);
|
||
context.restore();
|
||
}
|
||
|
||
/**
 * Renders one posed frame of the master image onto a 192×256 canvas and
 * returns it as a PNG data URL. Draw order matters: ground shadow first,
 * then optional after-images, then the sprite, then the optional tint.
 */
function renderPoseFrame(image: HTMLImageElement, pose: PoseTransform) {
  const { canvas, context } = createCanvas(
    GENERATED_FRAME_WIDTH,
    GENERATED_FRAME_HEIGHT,
  );
  context.clearRect(0, 0, canvas.width, canvas.height);
  drawShadow(context, canvas.width, canvas.height, pose);

  // Size the sprite from its natural aspect ratio: 82% of the canvas height
  // tall, bottom-anchored at 90% of the canvas height (plus the pose offset).
  const naturalAspect = image.width / image.height;
  const baseHeight = canvas.height * 0.82;
  const drawWidth = baseHeight * naturalAspect * pose.scaleX;
  const drawHeight = baseHeight * pose.scaleY;
  const bottomY = canvas.height * 0.9 + pose.offsetY;
  const centerX = canvas.width / 2 + pose.offsetX;

  // Draws the sprite bottom-anchored at (centerX + offsetX, bottomY),
  // rotated about that anchor point.
  const drawSprite = (alpha: number, offsetX: number) => {
    context.save();
    context.globalAlpha = alpha;
    context.translate(centerX + offsetX, bottomY);
    context.rotate(pose.rotation);
    context.drawImage(
      image,
      -drawWidth / 2,
      -drawHeight,
      drawWidth,
      drawHeight,
    );
    context.restore();
  };

  // Motion-trail ghosts behind the sprite (used by DASH).
  if (pose.afterImage) {
    drawSprite(0.18, -18);
    drawSprite(0.1, -28);
  }

  drawSprite(pose.alpha ?? 1, 0);

  // Tint last so it covers the sprite and any after-images.
  if (pose.tintColor) {
    drawTintOverlay(context, pose.tintColor, canvas.width, canvas.height);
  }

  return canvas.toDataURL('image/png');
}
|
||
|
||
export async function buildAnimationClipFromMaster(
|
||
masterSource: string,
|
||
animation: AnimationState,
|
||
) {
|
||
const image = await loadImageFromSource(masterSource);
|
||
const template = ACTION_TEMPLATES[animation];
|
||
const frames = Array.from({ length: template.frames }, (_, frameIndex) => {
|
||
const progress =
|
||
template.frames <= 1 ? 0 : frameIndex / Math.max(1, template.frames - 1);
|
||
return renderPoseFrame(
|
||
image,
|
||
template.poseAt(progress, frameIndex, template.frames),
|
||
);
|
||
});
|
||
|
||
return {
|
||
animation,
|
||
frames,
|
||
fps: template.fps,
|
||
loop: template.loop,
|
||
frameWidth: GENERATED_FRAME_WIDTH,
|
||
frameHeight: GENERATED_FRAME_HEIGHT,
|
||
} satisfies DraftAnimationClip;
|
||
}
|
||
|
||
// Runs the shared chroma-key pass over the canvas pixels and writes the
// result back, making the (presumably green-screen) background transparent.
function applyGreenScreenAlpha(
  context: CanvasRenderingContext2D,
  width: number,
  height: number,
) {
  const imageData = context.getImageData(0, 0, width, height);
  // Mutates imageData.data (RGBA byte array) in place.
  removeBackgroundFromRgba(imageData.data, width, height);

  context.putImageData(imageData, 0, 0);
}
|
||
|
||
async function normalizeFrameSourceToDataUrl(
|
||
frameSource: string,
|
||
options: {
|
||
frameWidth: number;
|
||
frameHeight: number;
|
||
applyChromaKey: boolean;
|
||
},
|
||
) {
|
||
const image = await loadImageFromSource(frameSource);
|
||
const { canvas, context } = createCanvas(
|
||
options.frameWidth,
|
||
options.frameHeight,
|
||
);
|
||
context.clearRect(0, 0, canvas.width, canvas.height);
|
||
drawContainedImage(context, image, {
|
||
width: canvas.width,
|
||
height: canvas.height,
|
||
});
|
||
|
||
if (options.applyChromaKey) {
|
||
applyGreenScreenAlpha(context, canvas.width, canvas.height);
|
||
}
|
||
|
||
return canvas.toDataURL('image/png');
|
||
}
|
||
|
||
export async function normalizeMasterVisualSourceToDataUrl(
|
||
source: string,
|
||
options: {
|
||
applyChromaKey?: boolean;
|
||
} = {},
|
||
) {
|
||
const image = await loadImageFromSource(source);
|
||
const { canvas, context } = createCanvas(
|
||
MASTER_VISUAL_WIDTH,
|
||
MASTER_VISUAL_HEIGHT,
|
||
);
|
||
context.clearRect(0, 0, canvas.width, canvas.height);
|
||
drawContainedImage(context, image, {
|
||
width: canvas.width,
|
||
height: canvas.height,
|
||
});
|
||
|
||
if (options.applyChromaKey !== false) {
|
||
applyGreenScreenAlpha(context, canvas.width, canvas.height);
|
||
}
|
||
|
||
return {
|
||
dataUrl: canvas.toDataURL('image/png'),
|
||
width: canvas.width,
|
||
height: canvas.height,
|
||
};
|
||
}
|
||
|
||
/**
 * Seeks a video element to `targetTime` and resolves once the 'seeked' event
 * fires (or rejects on 'error'). If the video is already within 1ms of the
 * target, resolves on the next animation frame without seeking.
 */
function seekVideo(video: HTMLVideoElement, targetTime: number) {
  return new Promise<void>((resolve, reject) => {
    if (Math.abs(video.currentTime - targetTime) < 0.001) {
      // Already there: setting currentTime to the same value may not fire
      // 'seeked', so resolve on the next frame instead.
      window.requestAnimationFrame(() => resolve());
      return;
    }

    const handleSeeked = () => {
      cleanup();
      resolve();
    };
    const handleError = () => {
      cleanup();
      reject(new Error('视频定位失败'));
    };
    // Removes BOTH listeners: { once: true } only auto-removes the one that
    // fired, so the other must be detached manually.
    const cleanup = () => {
      video.removeEventListener('seeked', handleSeeked);
      video.removeEventListener('error', handleError);
    };

    video.addEventListener('seeked', handleSeeked, { once: true });
    video.addEventListener('error', handleError, { once: true });
    video.currentTime = Math.max(0, targetTime);
  });
}
|
||
|
||
export async function buildAnimationClipFromImageSources(
|
||
sources: string[],
|
||
options: {
|
||
animation: AnimationState;
|
||
fps: number;
|
||
loop: boolean;
|
||
frameWidth?: number;
|
||
frameHeight?: number;
|
||
applyChromaKey?: boolean;
|
||
},
|
||
) {
|
||
const frameWidth = options.frameWidth ?? GENERATED_FRAME_WIDTH;
|
||
const frameHeight = options.frameHeight ?? GENERATED_FRAME_HEIGHT;
|
||
const frames = await Promise.all(
|
||
sources.map((source) =>
|
||
normalizeFrameSourceToDataUrl(source, {
|
||
frameWidth,
|
||
frameHeight,
|
||
applyChromaKey: options.applyChromaKey ?? false,
|
||
}),
|
||
),
|
||
);
|
||
|
||
return {
|
||
animation: options.animation,
|
||
frames,
|
||
fps: Math.max(1, options.fps),
|
||
loop: options.loop,
|
||
frameWidth,
|
||
frameHeight,
|
||
} satisfies DraftAnimationClip;
|
||
}
|
||
|
||
/**
 * Samples frames from a video into a DraftAnimationClip. Frame count defaults
 * to duration×fps (minimum 2); sampling can be restricted to a window of the
 * video via sampleStartRatio/sampleEndRatio (0..1 of the duration).
 */
export async function buildAnimationClipFromVideoSource(
  videoSource: string,
  options: {
    animation: AnimationState;
    fps: number;
    loop: boolean;
    frameCount?: number;
    frameWidth?: number;
    frameHeight?: number;
    applyChromaKey?: boolean;
    sampleStartRatio?: number;
    sampleEndRatio?: number;
  },
) {
  const video = await loadVideoFromSource(videoSource);
  const frameWidth = options.frameWidth ?? GENERATED_FRAME_WIDTH;
  const frameHeight = options.frameHeight ?? GENERATED_FRAME_HEIGHT;
  // Streaming/live sources can report NaN/Infinity duration; fall back to 1s.
  const duration =
    Number.isFinite(video.duration) && video.duration > 0 ? video.duration : 1;
  const derivedFrameCount = Math.max(
    2,
    options.frameCount ?? Math.round(duration * Math.max(1, options.fps)),
  );
  // Clamp the window: start in [0, 0.85], end at least start+0.05, at most 1.
  const sampleStartRatio = Math.min(
    0.85,
    Math.max(0, options.sampleStartRatio ?? 0),
  );
  const sampleEndRatio = Math.min(
    1,
    Math.max(sampleStartRatio + 0.05, options.sampleEndRatio ?? 1),
  );
  const sampleWindowDuration = duration * (sampleEndRatio - sampleStartRatio);
  const { canvas, context } = createCanvas(frameWidth, frameHeight);
  const frames: string[] = [];

  for (let frameIndex = 0; frameIndex < derivedFrameCount; frameIndex += 1) {
    // Looping clips exclude the endpoint (last frame wraps to the first);
    // non-looping clips include it.
    const progress = options.loop
      ? frameIndex / derivedFrameCount
      : frameIndex / Math.max(1, derivedFrameCount - 1);
    // Back off 1ms from the end so the seek never lands past the last frame.
    const targetTime = Math.min(
      duration - 0.001,
      duration * sampleStartRatio + sampleWindowDuration * progress,
    );

    await seekVideo(video, targetTime);

    context.clearRect(0, 0, canvas.width, canvas.height);
    drawContainedSource(context, video, video.videoWidth, video.videoHeight, {
      width: canvas.width,
      height: canvas.height,
    });

    if (options.applyChromaKey) {
      applyGreenScreenAlpha(context, canvas.width, canvas.height);
    }

    frames.push(canvas.toDataURL('image/png'));
  }

  return {
    animation: options.animation,
    frames,
    fps: Math.max(1, options.fps),
    loop: options.loop,
    frameWidth,
    frameHeight,
    previewVideoPath: videoSource,
  } satisfies DraftAnimationClip;
}
|
||
|
||
/**
 * Records the given frame images into a short WebM video (via canvas capture
 * + MediaRecorder), looping them `repeatLoops` times, and returns the result
 * as a data URL. Throws when MediaRecorder is unavailable.
 */
async function buildReferenceVideoFromFrameSources(
  frameSources: string[],
  options: {
    fps?: number;
    width?: number;
    height?: number;
    repeatLoops?: number;
  } = {},
) {
  if (typeof MediaRecorder === 'undefined') {
    throw new Error('当前浏览器不支持 MediaRecorder,无法生成参考视频。');
  }

  const images = await Promise.all(
    frameSources.map((frameSource) => loadImageFromSource(frameSource)),
  );
  const width = options.width ?? GENERATED_FRAME_WIDTH;
  const height = options.height ?? GENERATED_FRAME_HEIGHT;
  const fps = Math.max(1, options.fps ?? 8);
  const repeatLoops = Math.max(1, options.repeatLoops ?? 2);
  const { canvas, context } = createCanvas(width, height);
  const stream = canvas.captureStream(fps);
  const mimeType = pickRecordMimeType();
  // Empty mimeType means "let the browser pick" — don't pass it explicitly.
  const recorder = mimeType
    ? new MediaRecorder(stream, { mimeType })
    : new MediaRecorder(stream);
  const chunks: BlobPart[] = [];

  recorder.ondataavailable = (event) => {
    if (event.data.size > 0) {
      chunks.push(event.data);
    }
  };

  const stopPromise = new Promise<Blob>((resolve) => {
    recorder.onstop = () => {
      resolve(new Blob(chunks, { type: recorder.mimeType || 'video/webm' }));
    };
  });

  recorder.start();

  // Paint each frame and hold it on screen for one frame period (>= 40ms)
  // so the captured stream records it.
  for (let loopIndex = 0; loopIndex < repeatLoops; loopIndex += 1) {
    for (const image of images) {
      context.clearRect(0, 0, canvas.width, canvas.height);
      drawContainedImage(context, image, {
        width: canvas.width,
        height: canvas.height,
      });
      await waitFrame(Math.max(40, Math.round(1000 / fps)));
    }
  }

  // Small tail so the last frame is flushed before stopping the recorder.
  await waitFrame(80);
  recorder.stop();
  const blob = await stopPromise;
  return blobToDataUrl(blob);
}
|
||
|
||
export async function buildReferenceVideoFromMasterAnimation(
|
||
masterSource: string,
|
||
animation: AnimationState,
|
||
options: {
|
||
fps?: number;
|
||
repeatLoops?: number;
|
||
width?: number;
|
||
height?: number;
|
||
} = {},
|
||
) {
|
||
const clip = await buildAnimationClipFromMaster(masterSource, animation);
|
||
return buildReferenceVideoFromFrameSources(clip.frames, {
|
||
fps: options.fps ?? clip.fps,
|
||
repeatLoops: options.repeatLoops ?? 2,
|
||
width: options.width ?? clip.frameWidth,
|
||
height: options.height ?? clip.frameHeight,
|
||
});
|
||
}
|
||
|
||
function getCharacterAnimationConfig(
|
||
character: Character,
|
||
animation: AnimationState,
|
||
) {
|
||
return (
|
||
character.animationMap?.[animation] ??
|
||
DEFAULT_CHARACTER_ANIMATIONS[animation] ??
|
||
character.animationMap?.[AnimationState.IDLE] ??
|
||
DEFAULT_CHARACTER_ANIMATIONS[AnimationState.IDLE]
|
||
);
|
||
}
|
||
|
||
/**
 * Builds the list of frame image URLs for a character animation, either from
 * an explicit basePath in the config or from the conventional
 * /character/<folder>/<variant>/Hero/<animation>/ layout.
 */
function getCharacterAnimationFrameSources(
  character: Character,
  animation: AnimationState,
) {
  const config = getCharacterAnimationConfig(character, animation);
  // NOTE(review): `||` treats startFrame 0 as "unset" and bumps it to 1 —
  // presumably frame numbering starts at 1; confirm before switching to `??`.
  const startFrame = config.startFrame || 1;
  const frameCount = Math.max(1, config.frames);
  // Strip any trailing slashes so path joining below is consistent.
  const normalizedBasePath = config.basePath?.replace(/\/+$/u, '');

  return Array.from({ length: frameCount }, (_, index) => {
    // Frame numbers are zero-padded to 2 digits (e.g. "01", "02").
    const frameNumber = String(startFrame + index).padStart(2, '0');

    if (normalizedBasePath) {
      // A fixed `file` overrides per-frame naming (same URL for every frame).
      return config.file
        ? `${normalizedBasePath}/${encodeURIComponent(config.file)}`
        : `${normalizedBasePath}/${config.prefix}${frameNumber}.${config.extension ?? 'png'}`;
    }

    const folder = encodeURIComponent(character.assetFolder);
    const variant = encodeURIComponent(character.assetVariant);
    const animationFolder = encodeURIComponent(config.folder);
    return config.file
      ? `/character/${folder}/${variant}/Hero/${animationFolder}/${encodeURIComponent(config.file)}`
      : `/character/${folder}/${variant}/Hero/${animationFolder}/${config.prefix}${frameNumber}.${config.extension ?? 'png'}`;
  });
}
|
||
|
||
function waitFrame(ms: number) {
|
||
return new Promise((resolve) => {
|
||
window.setTimeout(resolve, ms);
|
||
});
|
||
}
|
||
|
||
function blobToDataUrl(blob: Blob) {
|
||
return new Promise<string>((resolve, reject) => {
|
||
const reader = new FileReader();
|
||
reader.onload = () => resolve(String(reader.result ?? ''));
|
||
reader.onerror = () => reject(reader.error ?? new Error('读取 Blob 失败'));
|
||
reader.readAsDataURL(blob);
|
||
});
|
||
}
|
||
|
||
function pickRecordMimeType() {
|
||
const candidates = [
|
||
'video/webm;codecs=vp9',
|
||
'video/webm;codecs=vp8',
|
||
'video/webm',
|
||
];
|
||
|
||
if (typeof MediaRecorder === 'undefined') {
|
||
return '';
|
||
}
|
||
|
||
return (
|
||
candidates.find((candidate) => MediaRecorder.isTypeSupported(candidate)) ??
|
||
''
|
||
);
|
||
}
|
||
|
||
export async function buildReferenceVideoFromCharacterAnimation(
|
||
character: Character,
|
||
animation: AnimationState,
|
||
options: {
|
||
fps?: number;
|
||
width?: number;
|
||
height?: number;
|
||
repeatLoops?: number;
|
||
} = {},
|
||
) {
|
||
if (typeof MediaRecorder === 'undefined') {
|
||
throw new Error('当前浏览器不支持 MediaRecorder,无法生成内置模板视频。');
|
||
}
|
||
|
||
const frameSources = getCharacterAnimationFrameSources(character, animation);
|
||
const images = await Promise.all(
|
||
frameSources.map((frameSource) => loadImageFromSource(frameSource)),
|
||
);
|
||
const width = options.width ?? GENERATED_FRAME_WIDTH;
|
||
const height = options.height ?? GENERATED_FRAME_HEIGHT;
|
||
const fps = Math.max(1, options.fps ?? 8);
|
||
const repeatLoops = Math.max(1, options.repeatLoops ?? 2);
|
||
const { canvas, context } = createCanvas(width, height);
|
||
const stream = canvas.captureStream(fps);
|
||
const mimeType = pickRecordMimeType();
|
||
const recorder = mimeType
|
||
? new MediaRecorder(stream, { mimeType })
|
||
: new MediaRecorder(stream);
|
||
const chunks: BlobPart[] = [];
|
||
|
||
recorder.ondataavailable = (event) => {
|
||
if (event.data.size > 0) {
|
||
chunks.push(event.data);
|
||
}
|
||
};
|
||
|
||
const stopPromise = new Promise<Blob>((resolve) => {
|
||
recorder.onstop = () => {
|
||
resolve(new Blob(chunks, { type: recorder.mimeType || 'video/webm' }));
|
||
};
|
||
});
|
||
|
||
recorder.start();
|
||
|
||
for (let loopIndex = 0; loopIndex < repeatLoops; loopIndex += 1) {
|
||
for (const image of images) {
|
||
context.clearRect(0, 0, canvas.width, canvas.height);
|
||
drawContainedImage(context, image, {
|
||
width: canvas.width,
|
||
height: canvas.height,
|
||
});
|
||
await waitFrame(Math.max(40, Math.round(1000 / fps)));
|
||
}
|
||
}
|
||
|
||
await waitFrame(80);
|
||
recorder.stop();
|
||
const blob = await stopPromise;
|
||
return blobToDataUrl(blob);
|
||
}
|