Rendering Bezier expressions on Desmos and saving them as screenshots.


I have the following code that takes a video, converts it to frames using ffmpeg, runs OpenCV Canny edge detection on each frame, and then traces the edge images into JSON files of Bezier curve expressions.

const { Canvas, createCanvas, Image, ImageData, loadImage } = require('canvas');
const { JSDOM } = require('jsdom');
const { exec } = require('child_process');
const fs = require('fs');
const potrace = require('potrace');
const ffmpeg = require('ffmpeg');
const chalk = require('chalk');

const darkRed = chalk.hex('#b51b1b');

// Traces one edge-detected frame with potrace and writes its segments out as
// Desmos-style parametric Bezier expressions in a JSON file under ./curves.
async function getCurveArray(file) {
	let curveArray = [];
	let paths;
	let x0, x1, x2, x3;
	let y0, y1, y2, y3;
	let trace = new potrace.Potrace();
	trace.loadImage(`./frames/${file}`, function (err) {
		if (err) throw err;
		trace.getPathTag();
		paths = trace._pathlist;
		for (let h = 0; h < paths.length; h++) {
			x0 = paths[h].x0;
			y0 = paths[h].y0;
			for (let k = 0; k < paths[h].curve.n; k++) {
				let curveDataIndex = k * 3;
				if (paths[h].curve.tag[k] === 'CORNER') {
					// A corner is two straight segments, emitted as linear interpolations.
					x1 = paths[h].curve.c[curveDataIndex + 1].x;
					x2 = paths[h].curve.c[curveDataIndex + 2].x;
					y1 = paths[h].curve.c[curveDataIndex + 1].y;
					y2 = paths[h].curve.c[curveDataIndex + 2].y;
					x0 = Math.round((x0 + Number.EPSILON) * 100) / 100;
					x1 = Math.round((x1 + Number.EPSILON) * 100) / 100;
					x2 = Math.round((x2 + Number.EPSILON) * 100) / 100;
					y0 = Math.round((y0 + Number.EPSILON) * 100) / 100;
					y1 = Math.round((y1 + Number.EPSILON) * 100) / 100;
					y2 = Math.round((y2 + Number.EPSILON) * 100) / 100;
					curveArray.push(`((1-t)${x0}+t${x1}, (1-t)${y0}+t${y1})`);
					curveArray.push(`((1-t)${x1}+t${x2}, (1-t)${y1}+t${y2})`);
				} else {
					// A cubic Bezier segment, expanded via De Casteljau's construction.
					x1 = paths[h].curve.c[curveDataIndex].x;
					x2 = paths[h].curve.c[curveDataIndex + 1].x;
					x3 = paths[h].curve.c[curveDataIndex + 2].x;
					y1 = paths[h].curve.c[curveDataIndex].y;
					y2 = paths[h].curve.c[curveDataIndex + 1].y;
					y3 = paths[h].curve.c[curveDataIndex + 2].y;
					x0 = Math.round((x0 + Number.EPSILON) * 100) / 100;
					x1 = Math.round((x1 + Number.EPSILON) * 100) / 100;
					x2 = Math.round((x2 + Number.EPSILON) * 100) / 100;
					x3 = Math.round((x3 + Number.EPSILON) * 100) / 100;
					y0 = Math.round((y0 + Number.EPSILON) * 100) / 100;
					y1 = Math.round((y1 + Number.EPSILON) * 100) / 100;
					y2 = Math.round((y2 + Number.EPSILON) * 100) / 100;
					y3 = Math.round((y3 + Number.EPSILON) * 100) / 100;
					curveArray.push(
						`((1-t)((1-t)((1-t)${x0}+t${x1})+t((1-t)${x1}+t${x2}))+t((1-t)((1-t)${x1}+t${x2})+t((1-t)${x2}+t${x3})), (1-t)((1-t)((1-t)${y0}+t${y1})+t((1-t)${y1}+t${y2}))+t((1-t)((1-t)${y1}+t${y2})+t((1-t)${y2}+t${y3})))`
					);
				}
				// The end point of this segment becomes the start point of the next one.
				x0 = paths[h].curve.c[curveDataIndex + 2].x;
				y0 = paths[h].curve.c[curveDataIndex + 2].y;
			}
		}
		fs.writeFileSync(`./curves/curves_${file.split('_')[1].split('.')[0]}.json`, JSON.stringify(curveArray));
	});
}

// Loads the opencv.js WASM build and resolves once its runtime has initialized.
function loadOpenCV() {
	return new Promise((resolve) => {
		global.Module = {
			onRuntimeInitialized: resolve,
		};
		global.cv = require('./lib/opencv.js');
	});
}

// opencv.js expects browser globals, so fake them with jsdom and node-canvas.
function installDOM() {
	const dom = new JSDOM();
	global.document = dom.window.document;
	global.Image = Image;
	global.HTMLCanvasElement = Canvas;
	global.ImageData = ImageData;
	global.HTMLImageElement = Image;
}

// Extracts 480p JPEG frames from ./video/<fileName>.<extension> into ./frames.
function videoToFrames(fileName, extension) {
	let fileExtension = extension?.length > 1 ? extension : 'mp4';
	try {
		var process = new ffmpeg(`./video/${fileName}.${fileExtension}`);
		process.then(
			function (video) {
				video.fnExtractFrameToJPG('./frames', {
					file_name: 'frame',
					size: `?x480`,
					frame_rate: 30,
				});
			},
			function (err) {
				console.log('Error: ' + err);
			}
		);
	} catch (e) {
		console.log(e.code);
		console.log(e.msg);
	}
	console.log(darkRed('Video processed'));
	return 0;
}

// Recreates the frames and curves directories between runs (PowerShell).
async function cleanDirectories() {
	exec('rm -Force -r frames', { shell: 'powershell.exe' });
	exec('mkdir frames', { shell: 'powershell.exe' });
	exec('rm -Force -r curves', { shell: 'powershell.exe' });
	exec('mkdir curves', { shell: 'powershell.exe' });
}

videoToFrames('ganyu');

fs.readdir('frames', async function (err, files) {
	installDOM();
	await loadOpenCV();
	if (!files || files?.length == 0) {
		return console.log(darkRed('Frame directory was found empty or does not exist, attempt to rerun the program with the same parameters.'));
	}
	// Pass 1: grayscale, denoise, rotate and Canny edge-detect every frame in place.
	for (let i = 0; i < files.length; i++) {
		const image = await loadImage(`./frames/${files[i]}`);
		let src = cv.imread(image);
		cv.cvtColor(src, src, cv.COLOR_BGR2GRAY);
		let src2 = new cv.Mat();
		cv.bilateralFilter(src, src2, 5, 50, 50, cv.BORDER_DEFAULT);
		let src3 = new cv.Mat();
		cv.rotate(src2, src3, cv.ROTATE_180);
		let dst = new cv.Mat();
		cv.Canny(src3, dst, 30, 200, 3, true);
		const canvas = createCanvas(300, 300);
		cv.imshow(canvas, dst);
		fs.writeFileSync(`./frames/${files[i]}`, canvas.toBuffer('image/jpeg'));
		src.delete();
		src2.delete();
		src3.delete();
		dst.delete();
		console.clear();
		console.log(darkRed(`${i + 1}/${files.length} frames prepared.`));
	}
	// Pass 2: trace every edge image into Bezier expressions.
	for (let j = 0; j < files.length; j++) {
		await getCurveArray(files[j]);
		console.clear();
		console.log(darkRed(`Frame ${j + 1}/${files.length} has been traced.`));
	}
});

My goal is to be able to render these frames individually in Desmos, capture the result and stitch the rendered frames back together into an .mp4.
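For the final stitching step, I'm assuming I can shell out to the ffmpeg CLI once the captured screenshots are on disk, the same way I already use exec elsewhere. Something along these lines is what I have in mind (the ./renders folder and the render_%d.png naming are just placeholders, and the frame rate mirrors the 30 fps I extract at):

const { exec } = require('child_process');

// Hypothetical layout: the Desmos captures were saved as ./renders/render_1.png, render_2.png, ...
// libx264 + yuv420p keeps the resulting .mp4 playable almost everywhere.
exec('ffmpeg -framerate 30 -i ./renders/render_%d.png -c:v libx264 -pix_fmt yuv420p output.mp4', (err) => {
	if (err) return console.error('Stitching failed: ' + err);
	console.log('Rendered frames stitched into output.mp4');
});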

My question is: how should I even approach this? How could I render each frame, detect that it has "finished" rendering, and somehow save the image shown in the Desmos API?

I am also curious whether there are any obvious ways to decrease the number of curves without severely damaging the quality of the frames, other than lowering the resolution in the ffmpeg function.
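One direction I have considered, though I am not sure it is the right one, is tuning the tracer itself: if I am reading the potrace module's parameters correctly, something like the following should suppress speckle paths and let potrace merge segments before any expressions are generated (the values here are guesses on my part):

let trace = new potrace.Potrace();
trace.setParameters({
	turdSize: 10,      // ignore speckle paths below this area so they never become curves
	alphaMax: 1.3,     // smoother corner detection, i.e. fewer CORNER segments
	optCurve: true,    // allow consecutive Bezier segments to be joined
	optTolerance: 0.4, // how aggressively segments may be merged
});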

Answer 1 (score: 0)

The Desmos API's GraphingCalculator.asyncScreenshot() does this. It waits for the current graph state to be fully evaluated, and it accepts bounds, so anything unneeded can easily be cropped out.
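A minimal sketch of how this could slot into the pipeline, assuming the page has the Desmos API script loaded, the curve strings for one frame are available as an array named curves, and something like a Puppeteer-exposed window.saveScreenshot bridge exists to hand the data URI back to Node (all of those names are placeholders):

// Runs in the browser context of a page that loads the Desmos API.
const elt = document.getElementById('calculator');
const calculator = Desmos.GraphingCalculator(elt, { expressions: false });

curves.forEach((curve, idx) => {
	// Each entry is already a parametric "(x(t), y(t))" string with t in [0, 1],
	// which matches Desmos' default parametric domain.
	calculator.setExpression({ id: `curve${idx}`, latex: curve });
});

calculator.asyncScreenshot(
	{
		width: 640,
		height: 480,
		// Match these bounds to the traced frame dimensions to crop away empty space.
		mathBounds: { left: 0, right: 640, bottom: 0, top: 480 },
	},
	(dataUri) => {
		// Called once the graph state has finished evaluating; dataUri is a PNG data URL.
		window.saveScreenshot(dataUri);
	}
);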
