Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
64 changes: 36 additions & 28 deletions geo_operators.js
Original file line number Diff line number Diff line change
Expand Up @@ -88,35 +88,43 @@ const planarize = function(vertices, faces) {
};

// combines above three constraint adjustments in iterative cycle
/**
 * Combines the three constraint adjustments (tangentify, recenter, planarize)
 * in an iterative cycle to "canonicalize" a polyhedron.
 *
 * Async so the browser can repaint between iterations, giving an animated
 * view of the convergence progress.
 *
 * @param {polyhedron} poly - the polyhedron to canonicalize; its `vertices`
 *   are mutated in place during iteration so the display shows progress.
 * @param {number} [Niter] - iteration budget; falsy values default to 1.
 * @returns {Promise<polyhedron>} a new polyhedron with the adjusted vertices
 *   (same faces and name as the input).
 */
const canonicalize = async function (poly, Niter) {
  if (!Niter) {
    Niter = 1;
  }
  console.log(`Canonicalizing ${poly.name}...`);
  const faces = poly.faces;
  const edges = poly.edges();
  let newVs = poly.vertices;
  let maxChange = 1.0; // convergence tracker: largest per-vertex displacement
  // NOTE(review): "<=" runs Niter+1 iterations — confirm whether "<" was intended.
  for (let i = 0; i <= Niter; i++) {
    const oldVs = copyVecArray(newVs); // copy vertices so we can measure the delta
    newVs = tangentify(newVs, edges);
    newVs = recenter(newVs, edges);
    newVs = planarize(newVs, faces);
    // largest vertex movement this iteration
    maxChange = _.max(_.map(_.zip(newVs, oldVs),
      ([x, y]) => mag(sub(x, y))
    ));
    if (maxChange < 1e-8) {
      break; // converged
    }

    // Update the polyhedron vertices to show progress
    poly.vertices = newVs;
    // update the display to show animated progress
    // TODO: test and debug
    drawShape();
    // Yield to browser to allow display update
    await new Promise(resolve => setTimeout(resolve, 0));
  }
  // one should now rescale, but not rescaling here makes for very interesting numerical
  // instabilities that make interesting mutants on multiple applications...
  // more experience will tell what to do
  //newVs = rescale(newVs)
  console.log(`[canonicalization done, last |deltaV|=${maxChange}]`);
  const newpoly = new polyhedron(newVs, poly.faces, poly.name);
  console.log("canonicalize", newpoly);
  return newpoly;
};

// Hacky Canonicalization Algorithm
Expand Down
2 changes: 1 addition & 1 deletion polyhedronisme.html
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
<body>
<div class="container">
<h2 style="text-align:center;">
<span class="fadey">poly</span>H&eacute;d<span class="fadey">r</span>onisme<span style="font-size:10px;">v0.2.1<span>
<span class="fadey">poly</span>H&eacute;d<span class="fadey">r</span>onisme<span style="font-size:10px;">v0.2.1</span>
</h2>

<div class="canvaswrapper clearfix">
Expand Down
64 changes: 36 additions & 28 deletions polyhedronisme.js
Original file line number Diff line number Diff line change
Expand Up @@ -7055,35 +7055,43 @@ const planarize = function(vertices, faces) {
};

// combines above three constraint adjustments in iterative cycle
/**
 * Combines the three constraint adjustments (tangentify, recenter, planarize)
 * in an iterative cycle to "canonicalize" a polyhedron.
 *
 * Async so the browser can repaint between iterations, giving an animated
 * view of the convergence progress.
 *
 * @param {polyhedron} poly - the polyhedron to canonicalize; its `vertices`
 *   are mutated in place during iteration so the display shows progress.
 * @param {number} [Niter] - iteration budget; falsy values default to 1.
 * @returns {Promise<polyhedron>} a new polyhedron with the adjusted vertices
 *   (same faces and name as the input).
 */
const canonicalize = async function (poly, Niter) {
  if (!Niter) {
    Niter = 1;
  }
  console.log(`Canonicalizing ${poly.name}...`);
  const faces = poly.faces;
  const edges = poly.edges();
  let newVs = poly.vertices;
  let maxChange = 1.0; // convergence tracker: largest per-vertex displacement
  // NOTE(review): "<=" runs Niter+1 iterations — confirm whether "<" was intended.
  for (let i = 0; i <= Niter; i++) {
    const oldVs = copyVecArray(newVs); // copy vertices so we can measure the delta
    newVs = tangentify(newVs, edges);
    newVs = recenter(newVs, edges);
    newVs = planarize(newVs, faces);
    // largest vertex movement this iteration
    maxChange = _.max(_.map(_.zip(newVs, oldVs),
      ([x, y]) => mag(sub(x, y))
    ));
    if (maxChange < 1e-8) {
      break; // converged
    }

    // Update the polyhedron vertices to show progress
    poly.vertices = newVs;
    // update the display to show animated progress
    // TODO: test and debug
    drawShape();
    // Yield to browser to allow display update
    await new Promise(resolve => setTimeout(resolve, 0));
  }
  // one should now rescale, but not rescaling here makes for very interesting numerical
  // instabilities that make interesting mutants on multiple applications...
  // more experience will tell what to do
  //newVs = rescale(newVs)
  console.log(`[canonicalization done, last |deltaV|=${maxChange}]`);
  const newpoly = new polyhedron(newVs, poly.faces, poly.name);
  console.log("canonicalize", newpoly);
  return newpoly;
};

// Hacky Canonicalization Algorithm
Expand Down