diff --git a/src-frontend/package-lock.json b/src-frontend/package-lock.json index a8354ddb..9eeef18f 100644 --- a/src-frontend/package-lock.json +++ b/src-frontend/package-lock.json @@ -24,7 +24,8 @@ "eslint-plugin-react-refresh": "^0.5.2", "globals": "^17.5.0", "tailwindcss": "^4.2.4", - "vite": "^8.0.10" + "vite": "^8.0.10", + "vitest": "^4.1.5" } }, "node_modules/@babel/code-frame": { @@ -838,6 +839,13 @@ "dev": true, "license": "MIT" }, + "node_modules/@standard-schema/spec": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz", + "integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==", + "dev": true, + "license": "MIT" + }, "node_modules/@tailwindcss/node": { "version": "4.2.4", "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.2.4.tgz", @@ -1140,6 +1148,24 @@ "tslib": "^2.4.0" } }, + "node_modules/@types/chai": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.3.tgz", + "integrity": "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/deep-eql": "*", + "assertion-error": "^2.0.1" + } + }, + "node_modules/@types/deep-eql": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz", + "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/esrecurse": { "version": "4.3.1", "resolved": "https://registry.npmjs.org/@types/esrecurse/-/esrecurse-4.3.1.tgz", @@ -1207,6 +1233,119 @@ } } }, + "node_modules/@vitest/expect": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-4.1.5.tgz", + "integrity": "sha512-PWBaRY5JoKuRnHlUHfpV/KohFylaDZTupcXN1H9vYryNLOnitSw60Mw9IAE2r67NbwwzBw/Cc/8q9BK3kIX8Kw==", + 
"dev": true, + "license": "MIT", + "dependencies": { + "@standard-schema/spec": "^1.1.0", + "@types/chai": "^5.2.2", + "@vitest/spy": "4.1.5", + "@vitest/utils": "4.1.5", + "chai": "^6.2.2", + "tinyrainbow": "^3.1.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/mocker": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-4.1.5.tgz", + "integrity": "sha512-/x2EmFC4mT4NNzqvC3fmesuV97w5FC903KPmey4gsnJiMQ3Be1IlDKVaDaG8iqaLFHqJ2FVEkxZk5VmeLjIItw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "4.1.5", + "estree-walker": "^3.0.3", + "magic-string": "^0.30.21" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "msw": "^2.4.9", + "vite": "^6.0.0 || ^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "msw": { + "optional": true + }, + "vite": { + "optional": true + } + } + }, + "node_modules/@vitest/pretty-format": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.1.5.tgz", + "integrity": "sha512-7I3q6l5qr03dVfMX2wCo9FxwSJbPdwKjy2uu/YPpU3wfHvIL4QHwVRp57OfGrDFeUJ8/8QdfBKIV12FTtLn00g==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyrainbow": "^3.1.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/runner": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-4.1.5.tgz", + "integrity": "sha512-2D+o7Pr82IEO46YPpoA/YU0neeyr6FTerQb5Ro7BUnBuv6NQtT/kmVnczngiMEBhzgqz2UZYl5gArejsyERDSQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/utils": "4.1.5", + "pathe": "^2.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/snapshot": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-4.1.5.tgz", + "integrity": 
"sha512-zypXEt4KH/XgKGPUz4eC2AvErYx0My5hfL8oDb1HzGFpEk1P62bxSohdyOmvz+d9UJwanI68MKwr2EquOaOgMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "4.1.5", + "@vitest/utils": "4.1.5", + "magic-string": "^0.30.21", + "pathe": "^2.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/spy": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-4.1.5.tgz", + "integrity": "sha512-2lNOsh6+R2Idnf1TCZqSwYlKN2E/iDlD8sgU59kYVl+OMDmvldO1VDk39smRfpUNwYpNRVn3w4YfuC7KfbBnkQ==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/utils": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-4.1.5.tgz", + "integrity": "sha512-76wdkrmfXfqGjueGgnb45ITPyUi1ycZ4IHgC2bhPDUfWHklY/q3MdLOAB+TF1e6xfl8NxNY0ZYaPCFNWSsw3Ug==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "4.1.5", + "convert-source-map": "^2.0.0", + "tinyrainbow": "^3.1.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, "node_modules/acorn": { "version": "8.16.0", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.16.0.tgz", @@ -1247,6 +1386,16 @@ "url": "https://github.com/sponsors/epoberezkin" } }, + "node_modules/assertion-error": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + } + }, "node_modules/balanced-match": { "version": "4.0.4", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz", @@ -1338,6 +1487,16 @@ ], "license": "CC-BY-4.0" }, + "node_modules/chai": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/chai/-/chai-6.2.2.tgz", + "integrity": 
"sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, "node_modules/convert-source-map": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", @@ -1423,6 +1582,13 @@ "node": ">=10.13.0" } }, + "node_modules/es-module-lexer": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-2.1.0.tgz", + "integrity": "sha512-n27zTYMjYu1aj4MjCWzSP7G9r75utsaoc8m61weK+W8JMBGGQybd43GstCXZ3WNmSFtGT9wi59qQTW6mhTR5LQ==", + "dev": true, + "license": "MIT" + }, "node_modules/escalade": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", @@ -1618,6 +1784,16 @@ "node": ">=4.0" } }, + "node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, "node_modules/esutils": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", @@ -1628,6 +1804,16 @@ "node": ">=0.10.0" } }, + "node_modules/expect-type": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.3.0.tgz", + "integrity": "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.0.0" + } + }, "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", @@ -2284,6 +2470,17 @@ "dev": true, "license": "MIT" }, + "node_modules/obug": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/obug/-/obug-2.1.1.tgz", + 
"integrity": "sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==", + "dev": true, + "funding": [ + "https://github.com/sponsors/sxzz", + "https://opencollective.com/debug" + ], + "license": "MIT" + }, "node_modules/optionator": { "version": "0.9.4", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", @@ -2354,6 +2551,13 @@ "node": ">=8" } }, + "node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, "node_modules/picocolors": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", @@ -2524,6 +2728,13 @@ "node": ">=8" } }, + "node_modules/siginfo": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz", + "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", + "dev": true, + "license": "ISC" + }, "node_modules/source-map-js": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", @@ -2534,6 +2745,20 @@ "node": ">=0.10.0" } }, + "node_modules/stackback": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", + "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==", + "dev": true, + "license": "MIT" + }, + "node_modules/std-env": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-4.1.0.tgz", + "integrity": "sha512-Rq7ybcX2RuC55r9oaPVEW7/xu3tj8u4GeBYHBWCychFtzMIr86A7e3PPEBPT37sHStKX3+TiX/Fr/ACmJLVlLQ==", + "dev": true, + "license": "MIT" + }, "node_modules/tailwindcss": { "version": "4.2.4", "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.2.4.tgz", @@ -2555,6 
+2780,23 @@ "url": "https://opencollective.com/webpack" } }, + "node_modules/tinybench": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", + "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==", + "dev": true, + "license": "MIT" + }, + "node_modules/tinyexec": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.1.1.tgz", + "integrity": "sha512-VKS/ZaQhhkKFMANmAOhhXVoIfBXblQxGX1myCQ2faQrfmobMftXeJPcZGp0gS07ocvGJWDLZGyOZDadDBqYIJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + } + }, "node_modules/tinyglobby": { "version": "0.2.16", "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.16.tgz", @@ -2572,6 +2814,16 @@ "url": "https://github.com/sponsors/SuperchupuDev" } }, + "node_modules/tinyrainbow": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-3.1.0.tgz", + "integrity": "sha512-Bf+ILmBgretUrdJxzXM0SgXLZ3XfiaUuOj/IKQHuTXip+05Xn+uyEYdVg0kYDipTBcLrCVyUzAPz7QmArb0mmw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, "node_modules/tslib": { "version": "2.8.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", @@ -2712,6 +2964,96 @@ } } }, + "node_modules/vitest": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-4.1.5.tgz", + "integrity": "sha512-9Xx1v3/ih3m9hN+SbfkUyy0JAs72ap3r7joc87XL6jwF0jGg6mFBvQ1SrwaX+h8BlkX6Hz9shdd1uo6AF+ZGpg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/expect": "4.1.5", + "@vitest/mocker": "4.1.5", + "@vitest/pretty-format": "4.1.5", + "@vitest/runner": "4.1.5", + "@vitest/snapshot": "4.1.5", + "@vitest/spy": "4.1.5", + "@vitest/utils": "4.1.5", + "es-module-lexer": "^2.0.0", + "expect-type": "^1.3.0", + "magic-string": "^0.30.21", + "obug": "^2.1.1", + "pathe": "^2.0.3", + "picomatch": "^4.0.3", + "std-env": 
"^4.0.0-rc.1", + "tinybench": "^2.9.0", + "tinyexec": "^1.0.2", + "tinyglobby": "^0.2.15", + "tinyrainbow": "^3.1.0", + "vite": "^6.0.0 || ^7.0.0 || ^8.0.0", + "why-is-node-running": "^2.3.0" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": "^20.0.0 || ^22.0.0 || >=24.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@edge-runtime/vm": "*", + "@opentelemetry/api": "^1.9.0", + "@types/node": "^20.0.0 || ^22.0.0 || >=24.0.0", + "@vitest/browser-playwright": "4.1.5", + "@vitest/browser-preview": "4.1.5", + "@vitest/browser-webdriverio": "4.1.5", + "@vitest/coverage-istanbul": "4.1.5", + "@vitest/coverage-v8": "4.1.5", + "@vitest/ui": "4.1.5", + "happy-dom": "*", + "jsdom": "*", + "vite": "^6.0.0 || ^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { + "optional": true + }, + "@opentelemetry/api": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser-playwright": { + "optional": true + }, + "@vitest/browser-preview": { + "optional": true + }, + "@vitest/browser-webdriverio": { + "optional": true + }, + "@vitest/coverage-istanbul": { + "optional": true + }, + "@vitest/coverage-v8": { + "optional": true + }, + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + }, + "vite": { + "optional": false + } + } + }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", @@ -2728,6 +3070,23 @@ "node": ">= 8" } }, + "node_modules/why-is-node-running": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz", + "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==", + "dev": true, + "license": "MIT", + "dependencies": { + "siginfo": "^2.0.0", + "stackback": "0.0.2" + }, + "bin": { + "why-is-node-running": "cli.js" + }, + 
"engines": { + "node": ">=8" + } + }, "node_modules/word-wrap": { "version": "1.2.5", "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", diff --git a/src-frontend/package.json b/src-frontend/package.json index 25af8d1a..d5ae49c8 100644 --- a/src-frontend/package.json +++ b/src-frontend/package.json @@ -6,6 +6,8 @@ "scripts": { "dev": "vite", "build": "vite build", + "test": "vitest run", + "test:watch": "vitest", "lint": "eslint .", "preview": "vite preview" }, @@ -26,6 +28,7 @@ "eslint-plugin-react-refresh": "^0.5.2", "globals": "^17.5.0", "tailwindcss": "^4.2.4", - "vite": "^8.0.10" + "vite": "^8.0.10", + "vitest": "^4.1.5" } } diff --git a/src-frontend/src/App.jsx b/src-frontend/src/App.jsx index 7dc63615..cdae5eaf 100644 --- a/src-frontend/src/App.jsx +++ b/src-frontend/src/App.jsx @@ -6,6 +6,7 @@ import PerfPanel from './components/PerfPanel.jsx' import Slider from './components/Slider.jsx' import { defaultPass, defaultGcodeConfig, PAPER_SIZES } from './store.js' import * as tauri from './hooks/useTauri.js' +import { serialize, deserialize } from './project.js' import { useFps } from './hooks/useFps.js' const VIEW_MODES = ['source', 'detection', 'contours', 'gcode'] @@ -13,7 +14,7 @@ const VIEW_MODES = ['source', 'detection', 'contours', 'gcode'] export default function App() { const [image, setImage] = useState(null) const [passes, setPasses] = useState([defaultPass(0)]) - const [activePass, setActivePass] = useState(0) + // Single pass — multi-pass is replaced by PenOutput nodes in the graph const [gcodeConfig, setGcodeConfig] = useState(defaultGcodeConfig()) const [viewMode, setViewMode] = useState('source') const [displayB64, setDisplayB64] = useState(null) // current image shown in viewport @@ -26,8 +27,25 @@ export default function App() { const fps = useFps() const [sidebarWidth, setSidebarWidth] = useState(320) + const [nodeWidth, setNodeWidth] = useState(450) + const [dpi, setDpi] = useState(150) + const [projectPath, 
setProjectPath] = useState(null) // null = unsaved const resizing = useRef(false) + // Ctrl+S / Ctrl+Shift+S — ref pattern keeps listener stable across renders + const saveProjectRef = useRef(null) + saveProjectRef.current = saveProject + useEffect(() => { + function onKey(e) { + if ((e.metaKey || e.ctrlKey) && e.key === 's') { + e.preventDefault() + saveProjectRef.current(e.shiftKey) + } + } + window.addEventListener('keydown', onKey) + return () => window.removeEventListener('keydown', onKey) + }, []) + // Long-task observer — fires whenever the JS main thread blocks > 50ms useEffect(() => { const obs = new PerformanceObserver(list => { @@ -58,13 +76,15 @@ export default function App() { window.addEventListener('mouseup', onUp) } - // Always-fresh ref so debounced callbacks never close over stale passes - const passesRef = useRef(passes) - const imageRef = useRef(image) - const activePasRef = useRef(activePass) - passesRef.current = passes - imageRef.current = image - activePasRef.current = activePass + // Always-fresh refs so debounced callbacks never close over stale state + const passesRef = useRef(passes) + const imageRef = useRef(image) + const dpiRef = useRef(dpi) + const gcodeConfigRef = useRef(gcodeConfig) + passesRef.current = passes + imageRef.current = image + dpiRef.current = dpi + gcodeConfigRef.current = gcodeConfig // Debounce timers: { 'idx-detection': timer, 'idx-fill': timer } const debounceTimers = useRef({}) @@ -84,13 +104,13 @@ export default function App() { setDisplayB64(image.preview_b64) break case 'detection': - setDisplayB64(passes[activePass]?.vizB64 ?? null) + setDisplayB64(passes[0]?.vizB64 ?? null) break case 'contours': - if (passes[activePass]?.hullCount > 0) { + if (passes[0]?.hullCount > 0) { try { const tv = performance.now() - const b64 = await tauri.getPassViz(activePass, viewMode) + const b64 = await tauri.getPassViz(0, viewMode) setPerfData(pd => ({ ...(pd ?? 
{}), js_viz: Math.round(performance.now() - tv) })) setDisplayB64(b64) } catch (e) { @@ -105,7 +125,7 @@ export default function App() { if (passes.some(p => p.strokeCount > 0)) { try { const tv = performance.now() - const b64 = await tauri.getGcodeViz(passes.map(p => p.penColor)) + const b64 = await tauri.getGcodeViz() setPerfData(pd => ({ ...(pd ?? {}), js_viz: Math.round(performance.now() - tv) })) setDisplayB64(b64) } catch (e) { @@ -119,7 +139,7 @@ export default function App() { } } refresh() - }, [viewMode, activePass, image, passes[activePass]?.vizB64, passes[activePass]?.hullCount, totalStrokeCount]) + }, [viewMode, image, passes[0]?.vizB64, passes[0]?.hullCount, totalStrokeCount]) // ── File open ────────────────────────────────────────────────────────────── async function openImage() { @@ -134,7 +154,7 @@ export default function App() { setViewMode('source') setStrokes(null) setGlobalStatus(`${info.width} × ${info.height}px`) - for (let i = 0; i < passesRef.current.length; i++) processPass(i, true) + processPass(0, true) } catch (e) { setGlobalStatus(`Error loading image: ${e}`) } @@ -147,14 +167,15 @@ export default function App() { if (!imageRef.current) return if (!silent) setBusy(true) const pass = passesRef.current[idx] - // Reset strokeCount NOW so the gcode viz useEffect sees 0 strokes and - // doesn't fire getGcodeViz while generateFill is about to start. + // Reset counts so viewport doesn't show stale data during reprocessing. updatePass(idx, { status: 'Processing…', vizB64: null, hullCount: 0, strokeCount: 0 }) const t0 = performance.now() try { const result = await tauri.processPass({ pass_index: idx, graph: pass.graph, + dpi: dpiRef.current, + img_w_mm: gcodeConfigRef.current.img_w_mm, }) const js_process = Math.round(performance.now() - t0) setPerfData(pd => ({ ...(pd ?? {}), process: result.timings, js_process })) @@ -165,8 +186,7 @@ export default function App() { strokeCount: result.stroke_count, nodePreviews: result.node_previews ?? 
{}, }) - const colors = passesRef.current.map(p => p.penColor) - tauri.getAllStrokes(colors).then(s => setStrokes(s)).catch(() => {}) + tauri.getAllStrokes().then(s => setStrokes(s)).catch(() => {}) } catch (e) { updatePass(idx, { status: `Error: ${e}` }) setGlobalStatus(`Process error: ${e}`) @@ -175,31 +195,21 @@ export default function App() { }, []) // stable — uses refs // ── Debounced auto-reprocess triggered by slider changes ─────────────────── - const scheduleProcess = useCallback((idx) => { - const key = `${idx}-detect` - clearTimeout(debounceTimers.current[key]) - debounceTimers.current[key] = setTimeout(() => processPass(idx, true), 400) + const scheduleProcess = useCallback(() => { + clearTimeout(debounceTimers.current['detect']) + debounceTimers.current['detect'] = setTimeout(() => processPass(0, true), 400) }, [processPass]) - // ── Export ───────────────────────────────────────────────────────────────── - async function exportActivePass() { - const pass = passes[activePass] - const fname = `pass${activePass + 1}_${pass.label.replace(/\s+/g, '_')}.gcode` - const path = await tauri.pickSaveFile(fname) - if (!path) return - try { - await tauri.exportGcode(activePass, gcodeConfig) - setGlobalStatus(`Saved ${path}`) - } catch (e) { - setGlobalStatus(`Export error: ${e}`) - } - } + useEffect(() => { + if (imageRef.current) scheduleProcess() + }, [dpi, gcodeConfig.img_w_mm]) + // ── Export ───────────────────────────────────────────────────────────────── async function exportAll() { const dir = await tauri.pickFolder() if (!dir) return try { - const saved = await tauri.exportAllGcode(passes.map(p => p.penColor), gcodeConfig, dir) + const saved = await tauri.exportAllGcode(gcodeConfig, dir) setGlobalStatus(`Saved ${saved.length} file(s) to ${dir}`) } catch (e) { setGlobalStatus(`Export error: ${e}`) @@ -208,6 +218,78 @@ export default function App() { function setGcode(patch) { setGcodeConfig(c => ({ ...c, ...patch })) } + // ── Project save 
─────────────────────────────────────────────────────────── + async function saveProject(saveAs = false) { + let path = saveAs ? null : projectPath + if (!path) { + const suggested = image + ? image.path.replace(/\.[^.]+$/, '.trac3r').split('/').pop() + : 'project.trac3r' + path = await tauri.pickProjectSavePath(suggested) + if (!path) return + } + try { + const json = serialize({ + imagePath: image?.path ?? null, + dpi, + nodeWidth, + graph: passes[0].graph, + gcodeConfig, + }) + await tauri.writeProjectFile(path, json) + setProjectPath(path) + setGlobalStatus(`Saved: ${path.split('/').pop()}`) + } catch (e) { + setGlobalStatus(`Save error: ${e}`) + } + } + + // ── Project load ─────────────────────────────────────────────────────────── + async function loadProject() { + const path = await tauri.pickProjectOpenPath() + if (!path) return + setBusy(true) + try { + const json = await tauri.readProjectFile(path) + const restored = deserialize(json) + + // Apply non-image state immediately + if (restored.gcodeConfig) setGcodeConfig(restored.gcodeConfig) + if (restored.dpi) setDpi(restored.dpi) + if (restored.nodeWidth) setNodeWidth(restored.nodeWidth) + + // Replace the pass graph + if (restored.graph) { + setPasses([{ ...defaultPass(0), graph: restored.graph }]) + } + + setProjectPath(path) + setStrokes(null) + + // Load the image if the path is still valid + if (restored.imagePath) { + try { + const info = await tauri.loadImage(restored.imagePath) + setImage(info) + imageRef.current = info + setDisplayB64(info.preview_b64) + setViewMode('source') + setGlobalStatus(`Loaded: ${path.split('/').pop()}`) + processPass(0, true) + } catch { + setImage(null) + setDisplayB64(null) + setGlobalStatus(`Project loaded — image not found at: ${restored.imagePath}`) + } + } else { + setGlobalStatus(`Loaded: ${path.split('/').pop()}`) + } + } catch (e) { + setGlobalStatus(`Load error: ${e}`) + } + setBusy(false) + } + async function dumpDebugState() { try { const configs = 
passes.map(p => ({ @@ -230,16 +312,32 @@ export default function App() { {/* Toolbar */}
- Trac3r + Trac3r + {projectPath && ( + + {projectPath.split('/').pop()} + + )}
+ +
)} - {/* Pass tabs */} -
- {passes.map((p, i) => { - const color = `rgb(${p.penColor.join(',')})` - return ( - - ) - })} - {passes.length < 3 && ( - - )} - {passes.length > 1 && ( - - )} -
+ {/* Scrollable sidebar content */} +
- {/* Active pass config — scrollable */} -
- updatePass(activePass, p)} - onDetectionChange={() => scheduleProcess(activePass)} - /> + {/* Status */} + + +
+ + {/* Graph */} +
+

Graph

+ setNodeWidth(v)} unit="px" /> +
+ + {/* Pipeline */} +
+

Pipeline

+ setDpi(v)} /> +
+ + {/* Paper */} +
+

Paper

+
+ {PAPER_SIZES.map(ps => { + const isPortrait = Math.abs(gcodeConfig.paper_w_mm - ps.w) < 1 && Math.abs(gcodeConfig.paper_h_mm - ps.h) < 1 + const isLandscape = Math.abs(gcodeConfig.paper_w_mm - ps.h) < 1 && Math.abs(gcodeConfig.paper_h_mm - ps.w) < 1 + return ( + + ) + })} + +
+
+ + {/* Placement */} +
+

Placement

+ setGcode({ img_w_mm: v })} /> + setGcode({ offset_x_mm: v })} /> + setGcode({ offset_y_mm: v })} /> +
+ + {/* Plotter */} +
+
+

Plotter

+ +
+ setGcode({ feed_draw: v })} unit=" mm/m" /> + setGcode({ feed_travel: v })} unit=" mm/m" /> +
+ + {/* Export */} +
+

Export

+ +
+ +
@@ -316,7 +453,7 @@ export default function App() { {/* Top bar — accent colors match the section dots in the left panel */}
{VIEW_MODES.map(m => { - const accent = { detection: '#6366f1', contours: '#14b8a6', fill: '#a855f7', gcode: '#f59e0b' }[m] + const accent = { detection: '#6366f1', contours: '#14b8a6', gcode: '#f59e0b' }[m] const label = m === 'gcode' ? 'G-code' : m.charAt(0).toUpperCase() + m.slice(1) return ( - ) - })} - {/* Portrait / Landscape toggle */} - -
-
- - {/* Placement */} -
-

Placement

- setGcode({ img_w_mm: v })} /> - setGcode({ offset_x_mm: v })} /> - setGcode({ offset_y_mm: v })} /> -
- - {/* Plotter */} -
-
-

Plotter

- -
- setGcode({ feed_draw: v })} unit=" mm/m" /> - setGcode({ feed_travel: v })} unit=" mm/m" /> -
- - {/* Export */} -
-

Export

- - -
- - ) diff --git a/src-frontend/src/components/ColorFilter.jsx b/src-frontend/src/components/ColorFilter.jsx index 44c7ee88..6a06eae1 100644 --- a/src-frontend/src/components/ColorFilter.jsx +++ b/src-frontend/src/components/ColorFilter.jsx @@ -1,18 +1,21 @@ import Slider from './Slider.jsx' -export default function ColorFilter({ filter, onChange }) { +export default function ColorFilter({ filter, onChange, alwaysOn = false }) { function set(patch) { onChange({ ...filter, ...patch }) } + const showSliders = alwaysOn || filter.enabled return (
- + {!alwaysOn && ( + + )} - {filter.enabled && ( + {showSliders && (
Hue (0–360°)
e.from === cur).forEach(e => queue.push(e.to)) + } + return true +} + // ── Component ────────────────────────────────────────────────────────────────── -export default function NodeGraph({ graph, onChange, nodePreviews, sourceImageB64 }) { +export default function NodeGraph({ graph, onChange, nodePreviews, sourceImageB64, nodeWidth = 220 }) { const canvasRef = useRef(null) - const [pan, setPan] = useState({ x: 40, y: 40 }) - const [zoom, setZoom] = useState(1) + const worldRef = useRef(null) const [wire, setWire] = useState(null) // { fromId, fromX, fromY, mouseX, mouseY } - // Refs mirror current state so event handlers don't need to be recreated + // Pan/zoom live only in refs — no React re-render on scroll/drag const panRef = useRef({ x: 40, y: 40 }) const zoomRef = useRef(1) const graphRef = useRef(graph) const onChangeRef = useRef(onChange) - panRef.current = pan - zoomRef.current = zoom graphRef.current = graph onChangeRef.current = onChange + function applyTransform() { + if (!worldRef.current) return + const { x, y } = panRef.current + worldRef.current.style.transform = `translate(${x}px, ${y}px) scale(${zoomRef.current})` + } + // Drag state stored in refs so handlers remain stable const panDragRef = useRef(null) // { startX, startY } — canvas-space origin const nodeDragRef = useRef(null) // { nodeId, startNodeX/Y, startClientX/Y } - const wireRef = useRef(null) // same shape as wire state + const wireRef = useRef(null) // same shape as wire state (drag-initiated) + const clickWireRef = useRef(null) // same shape as wire state (click-initiated) + + // Eyedropper sample mode: which ColorIsolate node is waiting for a pixel pick + const [sampleNodeId, setSampleNodeId] = useState(null) + const sampleNodeIdRef = useRef(null) + sampleNodeIdRef.current = sampleNodeId + + // Offscreen canvas holding decoded source image pixels for eyedropper sampling + const sourceSampleCanvasRef = useRef(null) + useEffect(() => { + if (!sourceImageB64) { 
sourceSampleCanvasRef.current = null; return } + const img = new Image() + img.onload = () => { + const c = document.createElement('canvas') + c.width = img.naturalWidth; c.height = img.naturalHeight + c.getContext('2d').drawImage(img, 0, 0) + sourceSampleCanvasRef.current = c + } + img.src = `data:image/jpeg;base64,${sourceImageB64}` + }, [sourceImageB64]) // ── Wheel zoom — zoom to cursor, computed from refs to handle rapid scroll ── const onWheel = useCallback(e => { @@ -70,16 +122,14 @@ export default function NodeGraph({ graph, onChange, nodePreviews, sourceImageB6 const factor = e.deltaY < 0 ? 1.1 : 1 / 1.1 const z = zoomRef.current const p = panRef.current - const nz = Math.min(Math.max(z * factor, 0.15), 5) + const nz = Math.min(Math.max(z * factor, 0.05), 30) const np = { x: cx - (cx - p.x) * (nz / z), y: cy - (cy - p.y) * (nz / z), } - // Update refs immediately so back-to-back wheel events are coherent zoomRef.current = nz panRef.current = np - setZoom(nz) - setPan(np) + applyTransform() }, []) // stable — reads from refs useEffect(() => { @@ -92,9 +142,8 @@ export default function NodeGraph({ graph, onChange, nodePreviews, sourceImageB6 useEffect(() => { function onMove(e) { if (panDragRef.current) { - const np = { x: e.clientX - panDragRef.current.startX, y: e.clientY - panDragRef.current.startY } - panRef.current = np - setPan(np) + panRef.current = { x: e.clientX - panDragRef.current.startX, y: e.clientY - panDragRef.current.startY } + applyTransform() } if (nodeDragRef.current) { const { nodeId, startNodeX, startNodeY, startClientX, startClientY } = nodeDragRef.current @@ -113,14 +162,30 @@ export default function NodeGraph({ graph, onChange, nodePreviews, sourceImageB6 wireRef.current = { ...wireRef.current, mouseX: mx, mouseY: my } setWire(w => w ? 
{ ...w, mouseX: mx, mouseY: my } : w) } + // Click wire follows mouse too (for visual feedback) + if (clickWireRef.current) { + const r = canvasRef.current.getBoundingClientRect() + const p = panRef.current + const z = zoomRef.current + const mx = (e.clientX - r.left - p.x) / z + const my = (e.clientY - r.top - p.y) / z + clickWireRef.current = { ...clickWireRef.current, mouseX: mx, mouseY: my } + setWire(w => w ? { ...w, mouseX: mx, mouseY: my } : w) + } } function onUp() { panDragRef.current = null nodeDragRef.current = null - if (wireRef.current) { wireRef.current = null; setWire(null) } + // Only clear drag wire on mouse up — click wire persists until click-complete or cancel + if (wireRef.current) { wireRef.current = null; setWire(clickWireRef.current) } } function onKey(e) { - if (e.key === 'Escape') { wireRef.current = null; setWire(null) } + if (e.key === 'Escape') { + wireRef.current = null + clickWireRef.current = null + setWire(null) + setSampleNodeId(null) + } } window.addEventListener('mousemove', onMove) window.addEventListener('mouseup', onUp) @@ -140,42 +205,139 @@ export default function NodeGraph({ graph, onChange, nodePreviews, sourceImageB6 panDragRef.current = { startX: e.clientX - panRef.current.x, startY: e.clientY - panRef.current.y } } + // ── Canvas background click → cancel click-wire / sample mode ─────────── + function onCanvasClick(e) { + if (e.target !== canvasRef.current && !e.target.dataset.canvas) return + if (clickWireRef.current) { clickWireRef.current = null; setWire(null) } + if (sampleNodeIdRef.current) setSampleNodeId(null) + } + // ── Wire ─────────────────────────────────────────────────────────────────── function startWire(e, fromId) { e.stopPropagation() e.preventDefault() const node = graphRef.current.nodes.find(n => n.id === fromId) - const p = outPort(node) + const p = outPort(node, nodeWidth) const state = { fromId, fromX: p.x, fromY: p.y, mouseX: p.x, mouseY: p.y } wireRef.current = state setWire(state) } + // 
Click-to-connect: output port onClick starts a click wire + function startClickWire(e, fromId) { + e.stopPropagation() + // Don't start if a drag wire is active + if (wireRef.current) return + // If a click wire is already active, cancel it (toggle off) + if (clickWireRef.current) { + clickWireRef.current = null + setWire(null) + return + } + const node = graphRef.current.nodes.find(n => n.id === fromId) + const p = outPort(node, nodeWidth) + const state = { fromId, fromX: p.x, fromY: p.y, mouseX: p.x, mouseY: p.y } + clickWireRef.current = state + setWire(state) + } + + // Shared logic for completing a wire connection (used by both drag and click) + function completeConnection(fromId, toId, port) { + const g = graphRef.current + const fromNode = g.nodes.find(n => n.id === fromId) + const toNode = g.nodes.find(n => n.id === toId) + if (!fromNode || !toNode) return + if (!isCompatible(fromNode.kind, toNode.kind, g.edges, fromId, toId)) return + const filtered = g.edges.filter(ed => !(ed.to === toId && ed.port === port)) + if (!filtered.some(ed => ed.from === fromId && ed.to === toId && ed.port === port)) { + onChangeRef.current({ ...g, edges: [...filtered, { from: fromId, to: toId, port }] }) + } + } + function endWire(e, toId, port) { e.stopPropagation() if (!wireRef.current) return const { fromId } = wireRef.current if (fromId !== toId) { - const g = graphRef.current - const filtered = g.edges.filter(ed => !(ed.to === toId && ed.port === port)) - if (!filtered.some(ed => ed.from === fromId && ed.to === toId && ed.port === port)) { - onChangeRef.current({ ...g, edges: [...filtered, { from: fromId, to: toId, port }] }) - } + completeConnection(fromId, toId, port) } wireRef.current = null setWire(null) } + // Click-to-connect: input port onClick completes a click wire + function endClickWire(e, toId, port) { + e.stopPropagation() + if (!clickWireRef.current) return + const { fromId } = clickWireRef.current + if (fromId !== toId) { + completeConnection(fromId, toId, 
port) + } + clickWireRef.current = null + setWire(null) + } + function removeEdge(idx) { const g = graphRef.current onChangeRef.current({ ...g, edges: g.edges.filter((_, i) => i !== idx) }) } + // ── Double-click to disconnect all edges on a port ───────────────────────── + function disconnectOutputPort(e, nodeId) { + e.stopPropagation() + e.preventDefault() + const g = graphRef.current + onChangeRef.current({ ...g, edges: g.edges.filter(ed => ed.from !== nodeId) }) + } + + function disconnectInputPort(e, nodeId, portIdx) { + e.stopPropagation() + e.preventDefault() + const g = graphRef.current + onChangeRef.current({ ...g, edges: g.edges.filter(ed => !(ed.to === nodeId && ed.port === portIdx)) }) + } + // ── Node mutations ───────────────────────────────────────────────────────── function updateNode(id, patch) { const g = graphRef.current onChangeRef.current({ ...g, nodes: g.nodes.map(n => n.id === id ? { ...n, ...patch } : n) }) } + + // Update ColorIsolate helpers and rebuild the color_filter sent to backend. + function updateColorIsolate(id, patch) { + const node = graphRef.current.nodes.find(n => n.id === id) + if (!node) return + const merged = { ...node, ...patch } + const color_filter = buildColorIsolateFilter( + merged.ci_color ?? '#e63946', + merged.ci_hue_tolerance ?? 20, + merged.ci_sat_min ?? 0.2, + merged.ci_val_min ?? 0.15, + ) + updateNode(id, { ...patch, color_filter }) + } + + // Sample a pixel from the source image at the given click position (screen coords). 
+ function sampleColorAt(e, sourceNode) { + const c = sourceSampleCanvasRef.current + if (!c) return + const r = canvasRef.current.getBoundingClientRect() + const wx = (e.clientX - r.left - panRef.current.x) / zoomRef.current + const wy = (e.clientY - r.top - panRef.current.y) / zoomRef.current + // Node body: 8px h-padding, 36px header, 6px v-padding before image + const imgW = nodeWidth - 16 + const imgH = imgW * (c.height / c.width) + const fracX = (wx - sourceNode.x - 8) / imgW + const fracY = (wy - sourceNode.y - 36 - 6) / imgH + if (fracX < 0 || fracX > 1 || fracY < 0 || fracY > 1) return + const px = Math.floor(fracX * c.width) + const py = Math.floor(fracY * c.height) + const d = c.getContext('2d').getImageData(px, py, 1, 1).data + const hex = '#' + [d[0], d[1], d[2]].map(v => v.toString(16).padStart(2, '0')).join('') + const nodeId = sampleNodeIdRef.current + if (nodeId) updateColorIsolate(nodeId, { ci_color: hex }) + setSampleNodeId(null) + } function deleteNode(id) { const g = graphRef.current onChangeRef.current({ @@ -186,12 +348,13 @@ export default function NodeGraph({ graph, onChange, nodePreviews, sourceImageB6 function addNode(kind) { const r = canvasRef.current.getBoundingClientRect() const p = panRef.current; const z = zoomRef.current - const x = (r.width / 2 - p.x) / z - NODE_W / 2 + const x = (r.width / 2 - p.x) / z - nodeWidth / 2 const y = (r.height / 2 - p.y) / z - 60 const id = newNodeId(kind) - const node = kind === 'Kernel' ? { id, kind, x, y, ...defaultKernelProps() } - : kind === 'Hull' ? { id, kind, x, y, ...defaultHullParams() } - : kind === 'Fill' ? { id, kind, x, y, ...defaultFillParams() } + const node = kind === 'Kernel' ? { id, kind, x, y, ...defaultKernelProps() } + : kind === 'Hull' ? { id, kind, x, y, ...defaultHullParams() } + : kind === 'Fill' ? { id, kind, x, y, ...defaultFillParams() } + : kind === 'PenOutput' ? 
{ id, kind, x, y, ...defaultPenOutputParams() } : { id, kind, x, y, blend_mode: 'Average', inputCount: 2 } const g = graphRef.current onChangeRef.current({ ...g, nodes: [...g.nodes, node] }) @@ -225,33 +388,40 @@ export default function NodeGraph({ graph, onChange, nodePreviews, sourceImageB6 function renderNode(node) { const isFixed = node.kind === 'Source' const inputCnt = node.kind === 'Combine' ? (node.inputCount ?? 2) - : (node.kind === 'Kernel' || node.kind === 'Output' || node.kind === 'Hull' || node.kind === 'Fill') ? 1 : 0 - const hasOut = node.kind !== 'Output' && node.kind !== 'Hull' && node.kind !== 'Fill' + : (node.kind === 'Kernel' || node.kind === 'Output' || node.kind === 'Hull' || node.kind === 'Fill' || node.kind === 'PenOutput') ? 1 : 0 + const hasOut = node.kind !== 'Output' && node.kind !== 'PenOutput' const preview = node.kind === 'Source' ? sourceImageB64 : nodePreviews?.[node.id] - const accentColor = node.kind === 'Source' ? '#7c3aed' - : node.kind === 'Hull' ? '#0d9488' - : node.kind === 'Fill' ? '#9333ea' + const accentColor = node.kind === 'Source' ? '#7c3aed' + : node.kind === 'Hull' ? '#0d9488' + : node.kind === 'Fill' ? '#9333ea' + : node.kind === 'PenOutput' ? '#d97706' : '#374151' - const headerBg = node.kind === 'Source' ? '#2e1065' - : node.kind === 'Hull' ? '#042f2e' - : node.kind === 'Fill' ? '#3b0764' + const headerBg = node.kind === 'Source' ? '#2e1065' + : node.kind === 'Hull' ? '#042f2e' + : node.kind === 'Fill' ? '#3b0764' + : node.kind === 'PenOutput' ? '#451a03' : '#1e293b' + // Determine if a pending wire (drag or click) is active + const wireActive = !!(wireRef.current || clickWireRef.current) || !!wire + return ( -
+
{/* Input ports */} {Array.from({ length: inputCnt }, (_, i) => (
endWire(e, node.id, i)} + onClick={e => endClickWire(e, node.id, i)} + onDoubleClick={e => disconnectInputPort(e, node.id, i)} style={{ position: 'absolute', left: -PORT_R, top: PORT_TOP + i * PORT_STRIDE - PORT_R, width: PORT_R * 2, height: PORT_R * 2, borderRadius: '50%', zIndex: 10, - background: wire ? '#14b8a6' : '#1e3a3a', + background: wireActive ? '#14b8a6' : '#1e3a3a', border: `2px solid #14b8a6`, cursor: 'crosshair', - boxShadow: wire ? '0 0 8px #14b8a6aa' : 'none', + boxShadow: wireActive ? '0 0 8px #14b8a6aa' : 'none', transition: 'background 0.12s, box-shadow 0.12s', }} /> @@ -259,7 +429,10 @@ export default function NodeGraph({ graph, onChange, nodePreviews, sourceImageB6 {/* Output port */} {hasOut && ( -
startWire(e, node.id)} +
startWire(e, node.id)} + onClick={e => startClickWire(e, node.id)} + onDoubleClick={e => disconnectOutputPort(e, node.id)} style={{ position: 'absolute', right: -PORT_R, top: PORT_TOP - PORT_R, width: PORT_R * 2, height: PORT_R * 2, borderRadius: '50%', zIndex: 10, @@ -281,10 +454,11 @@ export default function NodeGraph({ graph, onChange, nodePreviews, sourceImageB6 }} > - {node.kind === 'Source' ? 'Source' - : node.kind === 'Hull' ? 'Hull' - : node.kind === 'Fill' ? (node.strategy ?? 'Fill') - : node.kind === 'Kernel' ? (node.kernel ?? 'Kernel') + {node.kind === 'Source' ? 'Source' + : node.kind === 'Hull' ? 'Hull' + : node.kind === 'Fill' ? (node.strategy ?? 'Fill') + : node.kind === 'PenOutput' ? (node.pen_label || 'Pen') + : node.kind === 'Kernel' ? (node.kernel ?? 'Kernel') : node.kind === 'Combine' ? 'Combine' : 'Output'} @@ -310,20 +484,54 @@ export default function NodeGraph({ graph, onChange, nodePreviews, sourceImageB6 >{k} ))}
- updateNode(node.id, { weight: v })} /> - {(KERNEL_PARAMS[node.kernel] ?? []).map(p => { - const m = PARAM_META[p] - return updateNode(node.id, { [p]: v })} /> - })} - + {node.kernel === 'ColorIsolate' ? ( +
e.stopPropagation()} style={{ display: 'flex', flexDirection: 'column', gap: 4 }}> + {/* Color target + eyedropper */} +
+ updateColorIsolate(node.id, { ci_color: e.target.value })} + style={{ width: 28, height: 28, border: 'none', cursor: 'pointer', background: 'transparent', padding: 0, borderRadius: 4 }} + /> + Target color + +
+ {sampleNodeId === node.id && ( +
+ Click a pixel on the Source image +
+ )} + updateColorIsolate(node.id, { ci_hue_tolerance: v })} /> + updateColorIsolate(node.id, { ci_sat_min: v })} /> + updateColorIsolate(node.id, { ci_val_min: v })} /> +
+ ) : (<> + updateNode(node.id, { weight: v })} /> + {(KERNEL_PARAMS[node.kernel] ?? []).map(p => { + const m = PARAM_META[p] + return updateNode(node.id, { [p]: v })} /> + })} + + )} )} {node.kind === 'Combine' && (<> @@ -368,10 +576,7 @@ export default function NodeGraph({ graph, onChange, nodePreviews, sourceImageB6 ))}
e.stopPropagation()}> - updateNode(node.id, { color_filter: cf })} - /> + {/* Color isolation is now handled by a ColorIsolate Kernel node upstream */}
)} @@ -412,11 +617,54 @@ export default function NodeGraph({ graph, onChange, nodePreviews, sourceImageB6 onChange={v => updateNode(node.id, { smooth_iters: v })} /> )} - {/* Preview thumbnail */} + {node.kind === 'PenOutput' && (<> +
+ c.toString(16).padStart(2,'0')).join('')} + onMouseDown={e => e.stopPropagation()} + onChange={e => { + const h = e.target.value.slice(1) + updateNode(node.id, { pen_color: [parseInt(h.slice(0,2),16), parseInt(h.slice(2,4),16), parseInt(h.slice(4,6),16)] }) + }} + style={{ width: 24, height: 24, border: 'none', cursor: 'pointer', background: 'transparent', borderRadius: 4, padding: 0 }} + /> + e.stopPropagation()} + onChange={e => updateNode(node.id, { pen_label: e.target.value })} + style={{ flex: 1, background: 'transparent', border: 'none', borderBottom: '1px solid #374151', color: '#e2e8f0', fontSize: 11, outline: 'none', padding: '1px 0' }} + /> +
+
+ Export order + e.stopPropagation()} + onChange={e => updateNode(node.id, { pen_order: Math.max(0, parseInt(e.target.value) || 0) })} + style={{ width: 44, background: '#1e293b', border: '1px solid #374151', borderRadius: 3, color: '#e2e8f0', fontSize: 10, padding: '2px 4px', outline: 'none' }} + /> +
+ )} + + {/* Preview thumbnail — Source node gets eyedropper overlay in sample mode */} {preview && ( - +
+ + {node.kind === 'Source' && sampleNodeId && ( +
{ e.stopPropagation(); sampleColorAt(e, node) }} + onMouseDown={e => e.stopPropagation()} + style={{ + position: 'absolute', inset: 0, borderRadius: 4, cursor: 'crosshair', zIndex: 20, + background: 'rgba(99,102,241,0.12)', + boxShadow: 'inset 0 0 0 2px #6366f1', + }} + /> + )} +
)}
@@ -431,12 +679,13 @@ export default function NodeGraph({ graph, onChange, nodePreviews, sourceImageB6 return (
{/* Toolbar */} @@ -445,22 +694,23 @@ export default function NodeGraph({ graph, onChange, nodePreviews, sourceImageB6 ['Kernel', '#374151', '#94a3b8'], ['Combine', '#374151', '#94a3b8'], ['Hull', '#0d9488', '#5eead4'], - ['Fill', '#7c3aed', '#c4b5fd'], + ['Fill', '#7c3aed', '#c4b5fd'], + ['PenOutput', '#d97706', '#fcd34d'], ].map(([kind, border, color]) => ( + >{kind === 'PenOutput' ? '+ Pen' : `+ ${kind}`} ))} scroll=zoom · drag=pan · click wire=delete
- {/* World transform */} -
@@ -472,7 +722,7 @@ export default function NodeGraph({ graph, onChange, nodePreviews, sourceImageB6 if (!fn_ || !tn) return null return ( removeEdge(idx)} diff --git a/src-frontend/src/components/PassPanel.jsx b/src-frontend/src/components/PassPanel.jsx index bb12ad7e..f79b444d 100644 --- a/src-frontend/src/components/PassPanel.jsx +++ b/src-frontend/src/components/PassPanel.jsx @@ -1,36 +1,6 @@ -export default function PassPanel({ - pass, onChange, - onDetectionChange, -}) { - function set(patch) { onChange({ ...pass, ...patch }) } - function setDetection(patch) { onChange({ ...pass, ...patch }); onDetectionChange?.() } - - const colorHex = '#' + pass.penColor.map(c => c.toString(16).padStart(2, '0')).join('') - const isProcessing = pass.status === 'Processing…' - const isFilling = pass.status === 'Generating fill…' - +export default function PassPanel({ pass }) { return (
- {/* Pass header */} -
- { - const h = e.target.value.slice(1) - set({ penColor: [parseInt(h.slice(0,2),16), parseInt(h.slice(2,4),16), parseInt(h.slice(4,6),16)] }) - }} - className="w-6 h-6 rounded cursor-pointer border-0 bg-transparent" - /> - set({ label: e.target.value })} - className="flex-1 bg-transparent text-neutral-200 text-sm font-medium outline-none border-b border-transparent focus:border-neutral-600" - /> - {(isProcessing || isFilling) && ( - - {isProcessing ? 'detecting…' : 'filling…'} - - )} -
- {/* Status */}

diff --git a/src-frontend/src/components/Viewport.jsx b/src-frontend/src/components/Viewport.jsx index b52badab..a95e00f3 100644 --- a/src-frontend/src/components/Viewport.jsx +++ b/src-frontend/src/components/Viewport.jsx @@ -19,8 +19,11 @@ export default function Viewport({ imageB64, strokes, imgSize, viewMode, gcodeCo const { zoom, pan } = stateRef.current const W = canvas.width const H = canvas.height - const iw = imgSize?.width ?? 512 - const ih = imgSize?.height ?? 512 + // gcode/fill views use the scaled pipeline dimensions from strokes payload; + // all other views use the original loaded image dimensions. + const useStrokeDims = strokes && (viewMode === 'gcode' || viewMode === 'fill') + const iw = useStrokeDims ? (strokes.img_width ?? imgSize?.width ?? 512) : (imgSize?.width ?? 512) + const ih = useStrokeDims ? (strokes.img_height ?? imgSize?.height ?? 512) : (imgSize?.height ?? 512) const fit = Math.min(W / iw, H / ih) * 0.92 const scale = fit * zoom const ox = W / 2 - iw * scale / 2 + pan.x @@ -64,7 +67,7 @@ export default function Viewport({ imageB64, strokes, imgSize, viewMode, gcodeCo } drawPaperOutline(ctx, iw, ih, scale, ox, oy) } else { - // All raster views (detection=JPEG, hulls/contours=SVG) go through ctx.drawImage + // All raster views (source=JPEG, detection=JPEG, contours=SVG) go through ctx.drawImage // so that negative ox/oy when zoomed in are handled correctly by the canvas, // not clipped by the parent's overflow:hidden. 
if (svgImg) svgImg.style.display = 'none' @@ -89,8 +92,8 @@ export default function Viewport({ imageB64, strokes, imgSize, viewMode, gcodeCo ctx.fillStyle = '#1a1a1a' ctx.fillRect(ox, oy, iw * scale, ih * scale) } - drawPaperOutline(ctx, iw, ih, scale, ox, oy) } + drawPaperOutline(ctx, iw, ih, scale, ox, oy) } function drawPaperOutline(ctx, iw, ih, scale, ox, oy) { @@ -133,11 +136,12 @@ export default function Viewport({ imageB64, strokes, imgSize, viewMode, gcodeCo p.strokes.map(s => ({ color: p.color, points: s })) ) - // Create offscreen canvas at 4× image resolution so zooming in stays sharp. - // octx.scale(4,4) keeps all stroke coordinates in image-pixel space unchanged. + // Offscreen canvas sized to stroke coordinate space (pipeline dims, after DPI scaling). + const sw = strokes.img_width ?? imgSize.width + const sh = strokes.img_height ?? imgSize.height const off = document.createElement('canvas') - off.width = imgSize.width * 4 - off.height = imgSize.height * 4 + off.width = sw * 4 + off.height = sh * 4 const octx = off.getContext('2d') octx.fillStyle = '#f5f0e8' octx.fillRect(0, 0, off.width, off.height) diff --git a/src-frontend/src/hooks/useTauri.js b/src-frontend/src/hooks/useTauri.js index c852730f..6c239f4f 100644 --- a/src-frontend/src/hooks/useTauri.js +++ b/src-frontend/src/hooks/useTauri.js @@ -1,5 +1,6 @@ import { invoke } from '@tauri-apps/api/core' import { open as openDialog, save as saveDialog } from '@tauri-apps/plugin-dialog' +import { FILE_EXT } from '../project.js' // Wraps invoke with response-size logging so IPC payload bloat is visible in console async function tracedInvoke(name, args) { @@ -21,24 +22,20 @@ export async function processPass(payload) { return tracedInvoke('process_pass', { payload }) } -export async function getAllStrokes(passColors) { - return tracedInvoke('get_all_strokes', { passColors }) +export async function getAllStrokes() { + return tracedInvoke('get_all_strokes', {}) } -export async function 
getGcodeViz(passColors) { - return tracedInvoke('get_gcode_viz', { passColors }) +export async function getGcodeViz() { + return tracedInvoke('get_gcode_viz', {}) } export async function getPassViz(passIndex, mode) { return tracedInvoke('get_pass_viz', { passIndex, mode }) } -export async function exportGcode(passIndex, gcodeConfig) { - return tracedInvoke('export_gcode', { passIndex, gcodeConfig }) -} - -export async function exportAllGcode(passColors, gcodeConfig, outDir) { - return tracedInvoke('export_all_gcode', { passColors, gcodeConfig, outDir }) +export async function exportAllGcode(gcodeConfig, outDir) { + return tracedInvoke('export_all_gcode', { gcodeConfig, outDir }) } export async function exportDebugState(passConfigs) { @@ -64,3 +61,27 @@ export async function pickSaveFile(defaultName) { export async function pickFolder() { return openDialog({ directory: true }) } + +// ── Project file I/O ─────────────────────────────────────────────────────────── + +export async function pickProjectOpenPath() { + return openDialog({ + multiple: false, + filters: [{ name: 'Trac3r Project', extensions: [FILE_EXT] }], + }) +} + +export async function pickProjectSavePath(suggestedName) { + return saveDialog({ + defaultPath: suggestedName ?? `project.${FILE_EXT}`, + filters: [{ name: 'Trac3r Project', extensions: [FILE_EXT] }], + }) +} + +export async function writeProjectFile(path, content) { + return invoke('write_project_file', { path, content }) +} + +export async function readProjectFile(path) { + return invoke('read_project_file', { path }) +} diff --git a/src-frontend/src/project.js b/src-frontend/src/project.js new file mode 100644 index 00000000..3956f853 --- /dev/null +++ b/src-frontend/src/project.js @@ -0,0 +1,78 @@ +// Project serialization — versioned JSON. +// +// Schema history: +// v1: image_path, dpi, node_width, graph, gcode +// +// Adding a new version: +// 1. Bump CURRENT_VERSION. +// 2. Push a migrate(doc) function onto MIGRATIONS. 
+// It receives the previous-version doc and returns the next-version doc. +// Spread unknown fields (`...doc`) so future additions survive round-trips. + +import { bumpNodeSeq } from './store.js' + +export const CURRENT_VERSION = 1 +export const FILE_EXT = 'trac3r' + +// ── Migrations ───────────────────────────────────────────────────────────────── +// migrations[n-1]: doc at version n → doc at version n+1 +const MIGRATIONS = [ + // placeholder — v1 → v2 would go here +] + +// ── Serialize ────────────────────────────────────────────────────────────────── +export function serialize({ imagePath, dpi, nodeWidth, graph, gcodeConfig }) { + return JSON.stringify({ + version: CURRENT_VERSION, + app: 'trac3r', + saved_at: new Date().toISOString(), + image_path: imagePath ?? null, + dpi, + node_width: nodeWidth, + graph, + gcode: gcodeConfig, + }, null, 2) +} + +// ── Deserialize ──────────────────────────────────────────────────────────────── +// Returns { imagePath, dpi, nodeWidth, graph, gcodeConfig } or throws. +// Unknown future-version files are loaded with a console warning (forward compat). 
+// +// The second argument is for testing only: +// migrations — inject a custom migration chain +// currentVersion — pretend the app is at this version (lets tests exercise +// the migration engine without bumping the real constant) +export function deserialize(json, { migrations: migs = MIGRATIONS, currentVersion: cv = CURRENT_VERSION } = {}) { + let doc + try { doc = JSON.parse(json) } catch { + throw new Error('Invalid project file (not valid JSON)') + } + + if (doc == null || typeof doc !== 'object' || Array.isArray(doc)) { + throw new Error('Not a Trac3r project file') + } + if (doc.app !== 'trac3r') throw new Error('Not a Trac3r project file') + if (typeof doc.version !== 'number') throw new Error('Missing version field') + + let v = doc.version + while (v < cv) { + const migrate = migs[v - 1] // migs[n-1] upgrades version n → n+1 + if (!migrate) throw new Error(`No migration from version ${v} (app too old?)`) + doc = migrate(doc) + v++ + } + if (v > cv) { + console.warn(`[project] file is version ${v}, app knows version ${cv} — loading anyway`) + } + + // Advance node ID counter so new nodes added after load don't collide + if (doc.graph?.nodes) bumpNodeSeq(doc.graph.nodes) + + return { + imagePath: doc.image_path ?? null, + dpi: doc.dpi ?? 150, + nodeWidth: doc.node_width ?? 450, + graph: doc.graph ?? null, + gcodeConfig: doc.gcode ?? 
null, + } +} diff --git a/src-frontend/src/project.test.js b/src-frontend/src/project.test.js new file mode 100644 index 00000000..d6c501b9 --- /dev/null +++ b/src-frontend/src/project.test.js @@ -0,0 +1,605 @@ +import { describe, it, expect, beforeEach, vi } from 'vitest' +import { serialize, deserialize, CURRENT_VERSION, FILE_EXT } from './project.js' +import { newNodeId, bumpNodeSeq, _resetNodeSeq } from './store.js' + +// ── Fixtures ─────────────────────────────────────────────────────────────────── + +const MINIMAL_GRAPH = { + nodes: [ + { id: 'source', kind: 'Source', x: 60, y: 160 }, + { id: 'kernel_1', kind: 'Kernel', x: 310, y: 100, kernel: 'Luminance' }, + { id: 'hull_2', kind: 'Hull', x: 560, y: 160, threshold: 128 }, + { id: 'fill_3', kind: 'Fill', x: 840, y: 160, strategy: 'hatch' }, + { id: 'pen_4', kind: 'PenOutput', x: 1110, y: 160 }, + ], + edges: [ + { from: 'source', to: 'kernel_1', port: 0 }, + { from: 'kernel_1', to: 'hull_2', port: 0 }, + { from: 'hull_2', to: 'fill_3', port: 0 }, + { from: 'fill_3', to: 'pen_4', port: 0 }, + ], +} + +const MINIMAL_GCODE = { + paper_w_mm: 594, paper_h_mm: 841, + img_w_mm: 540, offset_x_mm: 27, offset_y_mm: 27, + feed_draw: 1000, feed_travel: 3000, + pen_down: 'M3 S1000', pen_up: 'M5', +} + +const FULL_STATE = { + imagePath: '/home/user/photos/test.jpg', + dpi: 300, + nodeWidth: 500, + graph: MINIMAL_GRAPH, + gcodeConfig: MINIMAL_GCODE, +} + +// Build a v1 JSON doc string directly (bypassing serialize) so tests can control +// every field independently and don't depend on serialize's implementation. 
+function makeV1Doc(overrides = {}) { + return JSON.stringify({ + version: 1, + app: 'trac3r', + saved_at: '2026-01-01T00:00:00.000Z', + image_path: '/some/image.jpg', + dpi: 150, + node_width: 450, + graph: MINIMAL_GRAPH, + gcode: MINIMAL_GCODE, + ...overrides, + }) +} + +// ── Setup ────────────────────────────────────────────────────────────────────── + +beforeEach(() => { + _resetNodeSeq() +}) + +// ── serialize ────────────────────────────────────────────────────────────────── + +describe('serialize', () => { + it('produces valid JSON', () => { + expect(() => JSON.parse(serialize(FULL_STATE))).not.toThrow() + }) + + it('sets version to CURRENT_VERSION', () => { + const doc = JSON.parse(serialize(FULL_STATE)) + expect(doc.version).toBe(CURRENT_VERSION) + }) + + it('sets app discriminator to "trac3r"', () => { + const doc = JSON.parse(serialize(FULL_STATE)) + expect(doc.app).toBe('trac3r') + }) + + it('includes a valid ISO 8601 saved_at timestamp', () => { + const doc = JSON.parse(serialize(FULL_STATE)) + expect(doc.saved_at).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/) + expect(new Date(doc.saved_at).getTime()).not.toBeNaN() + }) + + it('includes image_path', () => { + const doc = JSON.parse(serialize(FULL_STATE)) + expect(doc.image_path).toBe(FULL_STATE.imagePath) + }) + + it('serializes null imagePath as null', () => { + const doc = JSON.parse(serialize({ ...FULL_STATE, imagePath: null })) + expect(doc.image_path).toBeNull() + }) + + it('serializes undefined imagePath as null', () => { + const { imagePath: _, ...rest } = FULL_STATE + const doc = JSON.parse(serialize(rest)) + expect(doc.image_path).toBeNull() + }) + + it('includes dpi', () => { + const doc = JSON.parse(serialize(FULL_STATE)) + expect(doc.dpi).toBe(300) + }) + + it('includes node_width', () => { + const doc = JSON.parse(serialize(FULL_STATE)) + expect(doc.node_width).toBe(500) + }) + + it('includes the full graph', () => { + const doc = JSON.parse(serialize(FULL_STATE)) + 
expect(doc.graph.nodes).toHaveLength(MINIMAL_GRAPH.nodes.length) + expect(doc.graph.edges).toHaveLength(MINIMAL_GRAPH.edges.length) + }) + + it('includes gcode config', () => { + const doc = JSON.parse(serialize(FULL_STATE)) + expect(doc.gcode.paper_w_mm).toBe(594) + expect(doc.gcode.pen_down).toBe('M3 S1000') + }) + + it('does NOT include runtime fields (vizB64, hullCount, etc.)', () => { + const doc = JSON.parse(serialize(FULL_STATE)) + expect(doc.vizB64).toBeUndefined() + expect(doc.hullCount).toBeUndefined() + expect(doc.strokeCount).toBeUndefined() + expect(doc.nodePreviews).toBeUndefined() + }) + + it('output is pretty-printed (human-readable)', () => { + const json = serialize(FULL_STATE) + expect(json).toContain('\n') + }) +}) + +// ── deserialize — happy path ─────────────────────────────────────────────────── + +describe('deserialize — happy path', () => { + it('loads a well-formed v1 document', () => { + const result = deserialize(makeV1Doc()) + expect(result.imagePath).toBe('/some/image.jpg') + expect(result.dpi).toBe(150) + expect(result.nodeWidth).toBe(450) + expect(result.graph.nodes).toHaveLength(MINIMAL_GRAPH.nodes.length) + expect(result.graph.edges).toHaveLength(MINIMAL_GRAPH.edges.length) + expect(result.gcodeConfig.paper_w_mm).toBe(594) + }) + + it('maps image_path → imagePath', () => { + const result = deserialize(makeV1Doc({ image_path: '/custom/path.png' })) + expect(result.imagePath).toBe('/custom/path.png') + }) + + it('maps node_width → nodeWidth', () => { + const result = deserialize(makeV1Doc({ node_width: 600 })) + expect(result.nodeWidth).toBe(600) + }) + + it('maps gcode → gcodeConfig', () => { + const result = deserialize(makeV1Doc()) + expect(result.gcodeConfig).toEqual(MINIMAL_GCODE) + }) + + it('preserves all graph node fields', () => { + const result = deserialize(makeV1Doc()) + const hull = result.graph.nodes.find(n => n.kind === 'Hull') + expect(hull.threshold).toBe(128) + }) + + it('preserves all graph edge fields', () => { + 
const result = deserialize(makeV1Doc()) + expect(result.graph.edges[0]).toEqual({ from: 'source', to: 'kernel_1', port: 0 }) + }) +}) + +// ── deserialize — missing optional fields use defaults ───────────────────────── + +describe('deserialize — missing optional fields', () => { + it('defaults dpi to 150 when missing', () => { + const { dpi: _, ...doc } = JSON.parse(makeV1Doc()) + const result = deserialize(JSON.stringify(doc)) + expect(result.dpi).toBe(150) + }) + + it('defaults node_width to 450 when missing', () => { + const doc = JSON.parse(makeV1Doc()) + delete doc.node_width + const result = deserialize(JSON.stringify(doc)) + expect(result.nodeWidth).toBe(450) + }) + + it('defaults imagePath to null when image_path is missing', () => { + const doc = JSON.parse(makeV1Doc()) + delete doc.image_path + const result = deserialize(JSON.stringify(doc)) + expect(result.imagePath).toBeNull() + }) + + it('defaults imagePath to null when image_path is null', () => { + const result = deserialize(makeV1Doc({ image_path: null })) + expect(result.imagePath).toBeNull() + }) + + it('defaults graph to null when missing', () => { + const doc = JSON.parse(makeV1Doc()) + delete doc.graph + const result = deserialize(JSON.stringify(doc)) + expect(result.graph).toBeNull() + }) + + it('defaults gcodeConfig to null when gcode is missing', () => { + const doc = JSON.parse(makeV1Doc()) + delete doc.gcode + const result = deserialize(JSON.stringify(doc)) + expect(result.gcodeConfig).toBeNull() + }) + + it('applies all defaults simultaneously when all optional fields are absent', () => { + const minimalDoc = JSON.stringify({ version: 1, app: 'trac3r' }) + const result = deserialize(minimalDoc) + expect(result).toEqual({ + imagePath: null, dpi: 150, nodeWidth: 450, graph: null, gcodeConfig: null, + }) + }) +}) + +// ── deserialize — validation errors ─────────────────────────────────────────── + +describe('deserialize — validation errors', () => { + it('throws on non-JSON input', () => { 
+ expect(() => deserialize('not json {{{')).toThrow('Invalid project file (not valid JSON)') + }) + + it('throws on empty string', () => { + expect(() => deserialize('')).toThrow('Invalid project file (not valid JSON)') + }) + + it('throws on JSON null', () => { + expect(() => deserialize('null')).toThrow('Not a Trac3r project file') + }) + + it('throws on JSON array instead of object', () => { + expect(() => deserialize('[]')).toThrow('Not a Trac3r project file') + }) + + it('throws on JSON number', () => { + expect(() => deserialize('42')).toThrow('Not a Trac3r project file') + }) + + it('throws when app field is missing', () => { + const doc = JSON.parse(makeV1Doc()) + delete doc.app + expect(() => deserialize(JSON.stringify(doc))).toThrow('Not a Trac3r project file') + }) + + it('throws when app field is a different value', () => { + expect(() => deserialize(makeV1Doc({ app: 'inkscape' }))).toThrow('Not a Trac3r project file') + }) + + it('throws when app field is empty string', () => { + expect(() => deserialize(makeV1Doc({ app: '' }))).toThrow('Not a Trac3r project file') + }) + + it('throws when version field is missing', () => { + const doc = JSON.parse(makeV1Doc()) + delete doc.version + expect(() => deserialize(JSON.stringify(doc))).toThrow('Missing version field') + }) + + it('throws when version is a string', () => { + expect(() => deserialize(makeV1Doc({ version: '1' }))).toThrow('Missing version field') + }) + + it('throws when version is null', () => { + expect(() => deserialize(makeV1Doc({ version: null }))).toThrow('Missing version field') + }) + + it('does not throw when version is a non-integer float', () => { + // floats ARE numbers — the check is typeof === 'number', so 1.5 passes the + // type check. Here 1.5 > CURRENT_VERSION = 1, so it takes the future-version + // warn-and-load path rather than the migration loop. + // The important thing: it doesn't silently corrupt the data.
+ // Document the actual behavior rather than asserting a specific error: + const result = deserialize(makeV1Doc({ version: 1.5 })) + // 1.5 > CURRENT_VERSION=1, so it warns and loads as-is + expect(result).toBeDefined() + }) + + it('throws when a v1 file targets v2 but no migration is defined', () => { + expect(() => deserialize(makeV1Doc({ version: 1 }), { migrations: [], currentVersion: 2 })) + .toThrow('No migration from version 1') + }) +}) + +// ── deserialize — version handling ──────────────────────────────────────────── + +describe('deserialize — version handling', () => { + it('loads v1 without running any migrations when CURRENT_VERSION is 1', () => { + const spy = vi.fn(d => d) + // Inject a migration that should NOT run (file is already at current version) + const result = deserialize(makeV1Doc(), { migrations: [spy] }) + expect(spy).not.toHaveBeenCalled() + expect(result.dpi).toBe(150) + }) + + it('warns (not throws) when file version is ahead of the app', () => { + const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}) + const futureDoc = makeV1Doc({ version: CURRENT_VERSION + 5 }) + expect(() => deserialize(futureDoc)).not.toThrow() + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining('loading anyway')) + warnSpy.mockRestore() + }) + + it('future version warn message includes both file version and app version', () => { + const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}) + deserialize(makeV1Doc({ version: 99 })) + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining('99')) + expect(warnSpy).toHaveBeenCalledWith(expect.stringContaining(String(CURRENT_VERSION))) + warnSpy.mockRestore() + }) + + // Migration tests inject `currentVersion: 2` to simulate "the app has been + // upgraded to v2 but the file on disk is still v1." migs[v-1] means + // migs[0] is the v1→v2 migration. 
+ + it('runs a single migration when upgrading from v1 to v2', () => { + const v1_to_v2 = vi.fn(doc => ({ ...doc, version: 2 })) + deserialize(makeV1Doc({ version: 1 }), { migrations: [v1_to_v2], currentVersion: 2 }) + expect(v1_to_v2).toHaveBeenCalledTimes(1) + }) + + it('runs migrations in order for a multi-version upgrade chain (v1 → v2 → v3)', () => { + const callOrder = [] + const v1_to_v2 = vi.fn(doc => { callOrder.push('1→2'); return { ...doc, version: 2 } }) + const v2_to_v3 = vi.fn(doc => { callOrder.push('2→3'); return { ...doc, version: 3 } }) + deserialize(makeV1Doc({ version: 1 }), { migrations: [v1_to_v2, v2_to_v3], currentVersion: 3 }) + expect(callOrder).toEqual(['1→2', '2→3']) + }) + + it('each migration receives the output of the previous one (chaining)', () => { + const v1_to_v2 = doc => ({ ...doc, version: 2, marker: 'added_at_v2' }) + const v2_to_v3 = vi.fn(doc => ({ ...doc, version: 3 })) + deserialize(makeV1Doc({ version: 1 }), { migrations: [v1_to_v2, v2_to_v3], currentVersion: 3 }) + // v2_to_v3 should have received the output of v1_to_v2 (which added marker) + expect(v2_to_v3).toHaveBeenCalledWith(expect.objectContaining({ marker: 'added_at_v2', version: 2 })) + }) + + it('migration can transform field names (example: node_width renamed from card_width)', () => { + // Simulate a v1 schema that used "card_width" → v2 renames it to "node_width" + const oldDoc = JSON.stringify({ version: 1, app: 'trac3r', card_width: 300 }) + const v1_to_v2 = doc => ({ ...doc, version: 2, node_width: doc.card_width ?? 450 }) + const result = deserialize(oldDoc, { migrations: [v1_to_v2], currentVersion: 2 }) + expect(result.nodeWidth).toBe(300) + }) + + it('migration can add a new required field with a default', () => { + const oldDoc = JSON.stringify({ version: 1, app: 'trac3r', dpi: 150 }) + const v1_to_v2 = doc => ({ ...doc, version: 2, node_width: doc.node_width ?? 
450 }) + const result = deserialize(oldDoc, { migrations: [v1_to_v2], currentVersion: 2 }) + expect(result.nodeWidth).toBe(450) + }) + + it('throws with a clear message when no migration is defined for the version gap', () => { + // App at v2 but MIGRATIONS is empty: no upgrade path from v1 → v2 + expect(() => deserialize(makeV1Doc({ version: 1 }), { migrations: [], currentVersion: 2 })) + .toThrow(/No migration from version 1/) + }) +}) + +// ── round-trip ───────────────────────────────────────────────────────────────── + +describe('round-trip: serialize → deserialize', () => { + it('round-trips the full state', () => { + const json = serialize(FULL_STATE) + const result = deserialize(json) + expect(result.imagePath).toBe(FULL_STATE.imagePath) + expect(result.dpi).toBe(FULL_STATE.dpi) + expect(result.nodeWidth).toBe(FULL_STATE.nodeWidth) + expect(result.gcodeConfig).toEqual(FULL_STATE.gcodeConfig) + }) + + it('round-trips graph nodes with all their fields', () => { + const json = serialize(FULL_STATE) + const result = deserialize(json) + expect(result.graph.nodes).toHaveLength(FULL_STATE.graph.nodes.length) + for (const original of FULL_STATE.graph.nodes) { + const restored = result.graph.nodes.find(n => n.id === original.id) + expect(restored).toEqual(original) + } + }) + + it('round-trips graph edges exactly', () => { + const json = serialize(FULL_STATE) + const result = deserialize(json) + expect(result.graph.edges).toEqual(FULL_STATE.graph.edges) + }) + + it('round-trips null imagePath', () => { + const json = serialize({ ...FULL_STATE, imagePath: null }) + const result = deserialize(json) + expect(result.imagePath).toBeNull() + }) + + it('round-trips a graph with a Combine node and multiple input ports', () => { + const combineGraph = { + nodes: [ + { id: 'source', kind: 'Source', x: 0, y: 0 }, + { id: 'kernel_10', kind: 'Kernel', x: 200, y: 0, kernel: 'Sobel' }, + { id: 'kernel_11', kind: 'Kernel', x: 200, y: 150, kernel: 'Canny' }, + { id: 'combine_12', 
kind: 'Combine', x: 400, y: 75, blend_mode: 'Max', inputCount: 2 }, + { id: 'hull_13', kind: 'Hull', x: 600, y: 75, threshold: 100 }, + ], + edges: [ + { from: 'source', to: 'kernel_10', port: 0 }, + { from: 'source', to: 'kernel_11', port: 0 }, + { from: 'kernel_10', to: 'combine_12', port: 0 }, + { from: 'kernel_11', to: 'combine_12', port: 1 }, + { from: 'combine_12', to: 'hull_13', port: 0 }, + ], + } + const json = serialize({ ...FULL_STATE, graph: combineGraph }) + const result = deserialize(json) + expect(result.graph.edges).toHaveLength(5) + const combineEdges = result.graph.edges.filter(e => e.to === 'combine_12') + expect(combineEdges.map(e => e.port).sort()).toEqual([0, 1]) + }) + + it('round-trips ColorIsolate node with all color filter fields', () => { + const ciNode = { + id: 'kernel_20', kind: 'Kernel', x: 300, y: 100, kernel: 'ColorIsolate', + ci_color: '#e63946', ci_hue_tolerance: 25, ci_sat_min: 0.3, ci_val_min: 0.2, + color_filter: { enabled: true, hue_min: 10, hue_max: 60, sat_min: 0.3, sat_max: 1.0, val_min: 0.2, val_max: 1.0 }, + } + const graph = { nodes: [{ id: 'source', kind: 'Source', x: 0, y: 0 }, ciNode], edges: [] } + const result = deserialize(serialize({ ...FULL_STATE, graph })) + const restored = result.graph.nodes.find(n => n.id === 'kernel_20') + expect(restored.ci_color).toBe('#e63946') + expect(restored.color_filter.hue_min).toBe(10) + }) + + it('round-trips XDoG kernel parameters', () => { + const xdogNode = { + id: 'kernel_30', kind: 'Kernel', x: 300, y: 100, kernel: 'XDoG', + blur_radius: 1.2, xdog_sigma2: 3.4, xdog_tau: 0.97, xdog_phi: 50.0, + } + const graph = { nodes: [{ id: 'source', kind: 'Source', x: 0, y: 0 }, xdogNode], edges: [] } + const result = deserialize(serialize({ ...FULL_STATE, graph })) + const restored = result.graph.nodes.find(n => n.id === 'kernel_30') + expect(restored.xdog_sigma2).toBe(3.4) + expect(restored.xdog_tau).toBe(0.97) + }) +}) + +// ── forward compatibility: unknown fields 
────────────────────────────────────── + +describe('forward compatibility — unknown fields', () => { + it('does not throw when the document has extra unknown top-level fields', () => { + const doc = makeV1Doc({ future_feature: { enabled: true, value: 42 } }) + expect(() => deserialize(doc)).not.toThrow() + }) + + it('does not throw when a node has extra unknown fields', () => { + const graph = { + ...MINIMAL_GRAPH, + nodes: MINIMAL_GRAPH.nodes.map(n => + n.kind === 'Hull' ? { ...n, future_hull_param: 99 } : n + ), + } + const json = makeV1Doc({ graph }) + expect(() => deserialize(json)).not.toThrow() + }) + + it('unknown node fields survive in the loaded graph', () => { + const graph = { + ...MINIMAL_GRAPH, + nodes: MINIMAL_GRAPH.nodes.map(n => + n.kind === 'Fill' ? { ...n, mystery_param: 'hello' } : n + ), + } + const result = deserialize(makeV1Doc({ graph })) + const fill = result.graph.nodes.find(n => n.kind === 'Fill') + expect(fill.mystery_param).toBe('hello') + }) + + it('a migration preserves unknown fields it does not know about', () => { + const oldDoc = JSON.stringify({ + version: 1, app: 'trac3r', dpi: 100, future_field: 'keep me', + }) + // Spread the whole doc so the migration doesn't drop unknown fields + const v1_to_v2 = doc => ({ ...doc, version: 2 }) + expect(() => deserialize(oldDoc, { migrations: [v1_to_v2], currentVersion: 2 })).not.toThrow() + }) +}) + +// ── bumpNodeSeq ──────────────────────────────────────────────────────────────── + +describe('bumpNodeSeq', () => { + it('has no effect on an empty array', () => { + _resetNodeSeq(0) + bumpNodeSeq([]) + const id = newNodeId('kernel') + expect(id).toBe('kernel_1') + }) + + it('advances counter past the highest numeric suffix found', () => { + _resetNodeSeq(0) + bumpNodeSeq([ + { id: 'kernel_5' }, + { id: 'hull_3' }, + { id: 'fill_12' }, + ]) + const id = newNodeId('kernel') + const suffix = parseInt(id.split('_').pop(), 10) + expect(suffix).toBeGreaterThan(12) + }) + + it('does not affect the 
counter for non-matching IDs', () => { + _resetNodeSeq(0) + bumpNodeSeq([ + { id: 'source' }, // no underscore-number suffix + { id: 'hull' }, + ]) + const id = newNodeId('kernel') + expect(id).toBe('kernel_1') // counter was not advanced + }) + + it('handles fixed IDs like "source" alongside numbered IDs', () => { + _resetNodeSeq(0) + bumpNodeSeq([ + { id: 'source' }, + { id: 'kernel_7' }, + ]) + const id = newNodeId('fill') + const suffix = parseInt(id.split('_').pop(), 10) + expect(suffix).toBeGreaterThan(7) + }) + + it('handles nodes with no id field gracefully', () => { + _resetNodeSeq(0) + expect(() => bumpNodeSeq([{ kind: 'Source' }, { id: 'kernel_3' }])).not.toThrow() + const suffix = parseInt(newNodeId('fill').split('_').pop(), 10) + expect(suffix).toBeGreaterThan(3) + }) + + it('handles large ID numbers', () => { + _resetNodeSeq(0) + bumpNodeSeq([{ id: 'kernel_9999' }]) + const suffix = parseInt(newNodeId('hull').split('_').pop(), 10) + expect(suffix).toBeGreaterThan(9999) + }) + + it('calling bumpNodeSeq twice does not reset progress from the first call', () => { + _resetNodeSeq(0) + bumpNodeSeq([{ id: 'kernel_10' }]) + bumpNodeSeq([{ id: 'hull_5' }]) // lower — should not decrease counter + const suffix = parseInt(newNodeId('fill').split('_').pop(), 10) + expect(suffix).toBeGreaterThan(10) + }) + + it('two successive newNodeId calls after a bump produce distinct IDs', () => { + _resetNodeSeq(0) + bumpNodeSeq([{ id: 'kernel_20' }]) + const id1 = newNodeId('fill') + const id2 = newNodeId('hull') + expect(id1).not.toBe(id2) + expect(parseInt(id1.split('_').pop(), 10)).toBeGreaterThan(20) + expect(parseInt(id2.split('_').pop(), 10)).toBeGreaterThan(20) + }) +}) + +// ── FILE_EXT constant ───────────────────────────────────────────────────────── + +describe('FILE_EXT', () => { + it('is a non-empty string', () => { + expect(typeof FILE_EXT).toBe('string') + expect(FILE_EXT.length).toBeGreaterThan(0) + }) + + it('does not contain a leading dot', () => { + 
expect(FILE_EXT.startsWith('.')).toBe(false) + }) +}) + +// ── CURRENT_VERSION constant ─────────────────────────────────────────────────── + +describe('CURRENT_VERSION', () => { + it('is a positive integer', () => { + expect(Number.isInteger(CURRENT_VERSION)).toBe(true) + expect(CURRENT_VERSION).toBeGreaterThan(0) + }) + + it('matches what serialize writes into the doc', () => { + const doc = JSON.parse(serialize(FULL_STATE)) + expect(doc.version).toBe(CURRENT_VERSION) + }) + + it('matches what deserialize expects as the current version', () => { + // A file at CURRENT_VERSION should load without migrations or warnings + const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}) + const migrationSpy = vi.fn(d => d) + deserialize(makeV1Doc({ version: CURRENT_VERSION }), { migrations: [migrationSpy] }) + expect(migrationSpy).not.toHaveBeenCalled() + expect(warnSpy).not.toHaveBeenCalled() + warnSpy.mockRestore() + }) +}) diff --git a/src-frontend/src/store.js b/src-frontend/src/store.js index 8a5cd033..b10ef5dd 100644 --- a/src-frontend/src/store.js +++ b/src-frontend/src/store.js @@ -1,10 +1,10 @@ // Central app state — plain React state lifted to App.jsx. // This file defines the shapes / defaults. -export const KERNELS = ['Luminance','Sobel','ColorGradient','Laplacian','Canny','Saturation','XDoG'] +export const KERNELS = ['Luminance','Sobel','ColorGradient','Laplacian','Canny','Saturation','XDoG','ColorIsolate'] export const BLEND_MODES = ['Average','Min','Max','Multiply','Screen','Difference'] -export const FILL_STRATEGIES = ['hatch','zigzag','offset','spiral','outline','circles','voronoi','hilbert','waves','flow','gradient_hatch'] +export const FILL_STRATEGIES = ['hatch','zigzag','offset','spiral','outline','circles','voronoi','hilbert','waves','flow','gradient_hatch','gradient_cross_hatch'] // Per-strategy secondary parameter exposed as a slider. // Strategies not listed here have no secondary parameter. 
@@ -15,25 +15,68 @@ export const FILL_STRATEGY_PARAMS = { hint: 'Number of concentric ring emitters' }, flow: { label: 'Bend', min: 0.0, max: 2.0, step: 0.1, default: 1.0, hint: '0 = straight lines · 1 = default ±45° · 2 = wild curves' }, - gradient_hatch: { label: 'Min Scale', min: 0.05, max: 1.0, step: 0.05, default: 0.25, - hint: '1.0 = uniform · 0.05 = 20× denser at darkest ink' }, + gradient_hatch: { label: 'Min Scale', min: 0.05, max: 1.0, step: 0.05, default: 0.25, + hint: '1.0 = uniform · 0.05 = 20× denser at darkest ink' }, + gradient_cross_hatch: { label: 'Min Scale', min: 0.05, max: 1.0, step: 0.05, default: 0.25, + hint: '1.0 = uniform · 0.05 = 20× denser at darkest ink' }, } // Strategies that use the angle slider -export const FILL_USES_ANGLE = new Set(['hatch', 'zigzag', 'flow', 'gradient_hatch']) +export const FILL_USES_ANGLE = new Set(['hatch', 'zigzag', 'flow', 'gradient_hatch', 'gradient_cross_hatch']) + +export function rgbToHsv(r, g, b) { + const max = Math.max(r, g, b), min = Math.min(r, g, b), d = max - min + let h = 0 + if (d > 0) { + if (max === r) h = 60 * (((g - b) / d) % 6) + else if (max === g) h = 60 * ((b - r) / d + 2) + else h = 60 * ((r - g) / d + 4) + } + if (h < 0) h += 360 + return { h, s: max === 0 ? 
0 : d / max, v: max } +} + +export function buildColorIsolateFilter(ciColor, hueTol, satMin, valMin) { + const r = parseInt(ciColor.slice(1, 3), 16) / 255 + const g = parseInt(ciColor.slice(3, 5), 16) / 255 + const b = parseInt(ciColor.slice(5, 7), 16) / 255 + const { h } = rgbToHsv(r, g, b) + return { + enabled: true, + hue_min: (h - hueTol + 360) % 360, + hue_max: (h + hueTol) % 360, + sat_min: satMin, sat_max: 1.0, + val_min: valMin, val_max: 1.0, + } +} export function defaultKernelProps() { + const ci_color = '#e63946', ci_hue_tolerance = 20, ci_sat_min = 0.2, ci_val_min = 0.15 return { kernel: 'Luminance', weight: 1.0, invert: false, blur_radius: 0.0, sat_min_value: 0.1, canny_low: 50.0, canny_high: 150.0, xdog_sigma2: 1.6, xdog_tau: 0.98, xdog_phi: 10.0, + color_filter: buildColorIsolateFilter(ci_color, ci_hue_tolerance, ci_sat_min, ci_val_min), + ci_color, ci_hue_tolerance, ci_sat_min, ci_val_min, } } let _nodeSeq = 0 export function newNodeId(kind) { return `${kind.toLowerCase()}_${++_nodeSeq}` } +// After loading a saved graph, advance _nodeSeq past any IDs already in use +// so newly-created nodes don't collide with loaded ones. +export function bumpNodeSeq(nodes) { + for (const n of nodes) { + const m = n.id?.match(/_(\d+)$/) + if (m) _nodeSeq = Math.max(_nodeSeq, parseInt(m[1], 10)) + } +} + +// Reset the counter to a known value. Test-only — not for production use. 
+export function _resetNodeSeq(to = 0) { _nodeSeq = to } + export function defaultColorFilter() { return { enabled: false, hue_min: 0, hue_max: 360, sat_min: 0, sat_max: 1, val_min: 0, val_max: 1 } } @@ -45,6 +88,10 @@ export function defaultFillParams() { } } +export function defaultPenOutputParams() { + return { pen_color: [20, 20, 20], pen_label: 'Pen 1', pen_order: 0 } +} + export function defaultHullParams() { return { threshold: 128, min_area: 4, rdp_epsilon: 1.5, connectivity: 'four', @@ -59,21 +106,20 @@ export function defaultGraph() { { id: 'source', kind: 'Source', x: 60, y: 160 }, { id: kId, kind: 'Kernel', x: 310, y: 100, ...defaultKernelProps() }, { id: 'hull', kind: 'Hull', x: 560, y: 160, ...defaultHullParams() }, - { id: 'fill', kind: 'Fill', x: 840, y: 160, ...defaultFillParams() }, + { id: 'fill', kind: 'Fill', x: 840, y: 160, ...defaultFillParams() }, + { id: 'pen1', kind: 'PenOutput', x: 1110, y: 160, ...defaultPenOutputParams() }, ], edges: [ { from: 'source', to: kId, port: 0 }, { from: kId, to: 'hull', port: 0 }, { from: 'hull', to: 'fill', port: 0 }, + { from: 'fill', to: 'pen1', port: 0 }, ], } } export function defaultPass(index) { - const colors = [[20,20,20],[60,100,220],[200,60,60]] return { - label: `Pass ${index + 1}`, - penColor: colors[index] ?? 
[128,128,128], graph: defaultGraph(), nodePreviews: {}, // runtime diff --git a/src-frontend/vite.config.js b/src-frontend/vite.config.js index b64b7a0a..3c8ed442 100644 --- a/src-frontend/vite.config.js +++ b/src-frontend/vite.config.js @@ -4,6 +4,10 @@ import tailwindcss from '@tailwindcss/vite' export default defineConfig({ plugins: [react(), tailwindcss()], + test: { + environment: 'node', + include: ['src/**/*.test.js'], + }, // Tauri expects a fixed port and doesn't use HTTPS in dev server: { port: 1420, diff --git a/src/detect.rs b/src/detect.rs index 84835dd1..4c0d9e3b 100644 --- a/src/detect.rs +++ b/src/detect.rs @@ -12,6 +12,7 @@ pub enum DetectionKernel { Canny, // thinned 1-px edges via Gaussian + Sobel NMS + hysteresis Saturation, // HSV saturation (vivid-colour regions as ink) XDoG, // Extended Difference of Gaussians — coherent edges from photos + ColorIsolate, // HSV range selection: 0=selected colour, 255=not selected } impl DetectionKernel { @@ -24,12 +25,13 @@ impl DetectionKernel { DetectionKernel::Canny => "Canny", DetectionKernel::Saturation => "Saturation", DetectionKernel::XDoG => "XDoG", + DetectionKernel::ColorIsolate => "ColorIsolate", } } pub fn all() -> &'static [DetectionKernel] { use DetectionKernel::*; - &[Luminance, Sobel, ColorGradient, Laplacian, Canny, Saturation, XDoG] + &[Luminance, Sobel, ColorGradient, Laplacian, Canny, Saturation, XDoG, ColorIsolate] } } @@ -50,6 +52,10 @@ pub struct DetectionLayer { pub xdog_sigma2: f32, pub xdog_tau: f32, pub xdog_phi: f32, + // ColorIsolate: HSV range selection (full range = select everything) + pub ci_hue_min: f32, pub ci_hue_max: f32, + pub ci_sat_min: f32, pub ci_sat_max: f32, + pub ci_val_min: f32, pub ci_val_max: f32, } impl Default for DetectionLayer { @@ -65,6 +71,9 @@ impl Default for DetectionLayer { xdog_sigma2: 1.6, xdog_tau: 0.98, xdog_phi: 10.0, + ci_hue_min: 0.0, ci_hue_max: 360.0, + ci_sat_min: 0.0, ci_sat_max: 1.0, + ci_val_min: 0.0, ci_val_max: 1.0, } } } @@ -92,6 +101,7 
@@ pub fn apply_layer(rgb: &RgbImage, layer: &DetectionLayer) -> Vec { DetectionKernel::Canny => canny(rgb, layer.canny_low, layer.canny_high), DetectionKernel::Saturation => invert(saturation_filtered(rgb, sigma, layer.sat_min_value)), DetectionKernel::XDoG => xdog(rgb, sigma.max(0.1), layer.xdog_sigma2.max(sigma + 0.1), layer.xdog_tau, layer.xdog_phi), + DetectionKernel::ColorIsolate => color_isolate(rgb, layer), }; if layer.invert { for v in &mut response { *v = 255 - *v; } @@ -276,6 +286,42 @@ fn saturation_filtered(rgb: &RgbImage, sigma: f32, min_value: f32) -> Vec { blurred.into_iter().map(|v| (v * 255.0) as u8).collect() } +// ── ColorIsolate ─────────────────────────────────────────────────────────────── + +/// Select pixels whose HSV values fall within the specified ranges. +/// Output: 0 = selected (ink), 255 = not selected (background). +/// Hue wraps: if hue_min > hue_max the range crosses 0° (e.g. 330–30 captures reds). +fn color_isolate(rgb: &RgbImage, layer: &DetectionLayer) -> Vec { + rgb.pixels().map(|p| { + let r = p[0] as f32 / 255.0; + let g = p[1] as f32 / 255.0; + let b = p[2] as f32 / 255.0; + let max = r.max(g).max(b); + let min = r.min(g).min(b); + let delta = max - min; + let h = if delta < 1e-6 { + 0.0_f32 + } else if (max - r).abs() < 1e-6 { + 60.0 * ((g - b) / delta).rem_euclid(6.0) + } else if (max - g).abs() < 1e-6 { + 60.0 * ((b - r) / delta + 2.0) + } else { + 60.0 * ((r - g) / delta + 4.0) + }; + let s = if max < 1e-6 { 0.0 } else { delta / max }; + let v = max; + let in_hue = if layer.ci_hue_min <= layer.ci_hue_max { + h >= layer.ci_hue_min && h <= layer.ci_hue_max + } else { + h >= layer.ci_hue_min || h <= layer.ci_hue_max + }; + let selected = in_hue + && s >= layer.ci_sat_min && s <= layer.ci_sat_max + && v >= layer.ci_val_min && v <= layer.ci_val_max; + if selected { 0u8 } else { 255u8 } + }).collect() +} + // ── XDoG ─────────────────────────────────────────────────────────────────────── /// Extended Difference of Gaussians. 
@@ -442,6 +488,11 @@ pub enum NodeKind { smooth_rdp: f32, smooth_iters: u32, }, + PenOutput { + color: [u8; 3], + label: String, + order: u32, + }, } #[derive(Debug, Clone)] @@ -530,9 +581,22 @@ pub fn evaluate_graph( let result: Option> = match &node.kind { NodeKind::Source => None, NodeKind::Kernel(layer) => { - let raw = apply_layer(rgb, layer); + // If an upstream response map exists (e.g. from a Combine node), + // convert it to a grayscale RgbImage and apply the kernel to that + // instead of the original source. This lets you chain transforms: + // Luminance → Combine → Sobel finds edges in the blended map. + let upstream = incoming[id].iter() + .find_map(|(fid, _)| outputs.get(fid)); + let raw = if let Some(up) = upstream { + let gray_rgb = RgbImage::from_fn(rgb.width(), rgb.height(), |x, y| { + let v = up[(y * rgb.width() + x) as usize]; + image::Rgb([v, v, v]) + }); + apply_layer(&gray_rgb, layer) + } else { + apply_layer(rgb, layer) + }; let w = layer.weight; - // w=0 → full background, w=1 → identity, w>1 → amplify toward ink Some(if (w - 1.0).abs() < 1e-6 { raw } else { @@ -562,6 +626,8 @@ pub fn evaluate_graph( } // Fill nodes are processed in lib.rs after hull extraction. NodeKind::Fill { .. } => None, + // PenOutput nodes are processed in lib.rs — they own a fill result and carry pen metadata. + NodeKind::PenOutput { .. } => None, }; if let Some(map) = result { outputs.insert(id, map); @@ -577,7 +643,7 @@ pub fn evaluate_graph( // Final response: prefer an explicit Output node; fall back to the upstream // map of the first Hull node (which was stored under the Hull node's id). - // Fill nodes produce no output here. + // Fill and PenOutput nodes produce no output here. 
let response = graph.nodes.iter() .find(|n| matches!(n.kind, NodeKind::Output)) .and_then(|n| raw_maps.get(&n.id).cloned()) diff --git a/src/fill.rs b/src/fill.rs index b1d384af..6806a911 100644 --- a/src/fill.rs +++ b/src/fill.rs @@ -167,6 +167,24 @@ pub fn gradient_hatch( FillResult { hull_id: hull.id, strokes } } +/// Two perpendicular gradient-hatch passes combined. +/// Dark areas get dense lines in both directions; light areas get sparse lines in both. +pub fn gradient_cross_hatch( + hull: &Hull, + response: &[u8], + img_width: u32, + spacing_px: f32, + angle_deg: f32, + min_scale: f32, +) -> FillResult { + let pass1 = gradient_hatch(hull, response, img_width, spacing_px, angle_deg, min_scale); + let pass2 = gradient_hatch(hull, response, img_width, spacing_px, angle_deg + 90.0, min_scale); + FillResult { + hull_id: hull.id, + strokes: pass1.strokes.into_iter().chain(pass2.strokes).collect(), + } +} + // ── Outline ──────────────────────────────────────────────────────────────────── /// The simplified contour as a single closed stroke. @@ -1599,6 +1617,7 @@ mod tests { xdog_sigma2: lj["xdog_sigma2"].as_f64().unwrap_or(1.6) as f32, xdog_tau: lj["xdog_tau"].as_f64().unwrap_or(0.98) as f32, xdog_phi: lj["xdog_phi"].as_f64().unwrap_or(10.0) as f32, + ..Default::default() }; let response = crate::detect::apply_stack(&img, &crate::detect::DetectionParams { layers: vec![layer] }); let hull_params = crate::hulls::HullParams { diff --git a/src/lib.rs b/src/lib.rs index 53102aa3..6178056f 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -14,10 +14,167 @@ macro_rules! lap { } use std::sync::Mutex; +use std::collections::hash_map::DefaultHasher; +use std::hash::Hasher; use base64::{engine::general_purpose::STANDARD as B64, Engine}; use serde::{Deserialize, Serialize}; use tauri::State; +// ── Node result cache ────────────────────────────────────────────────────────── + +/// Three-level content-addressed cache for the pipeline. 
+/// Stored in PassState and threaded through process_pass_work. +#[derive(Clone, Default)] +struct NodeCache { + /// Detect phase: all Kernel/Combine response maps cached under one fingerprint. + detect_fp: u64, + detect_maps: std::collections::HashMap>, + detect_response: Vec, + + /// Per-Hull-node cache. + hull_entries: std::collections::HashMap, + + /// Per-Fill-node cache. + fill_entries: std::collections::HashMap, + + /// Per-node JPEG preview cache: node_id → (fingerprint, b64_jpeg). + preview_cache: std::collections::HashMap, +} + +#[derive(Clone)] +struct HullCacheEntry { + fp: u64, + hulls: Vec, + resp_map: Vec, +} + +#[derive(Clone)] +struct FillCacheEntry { + fp: u64, + fill: fill::FillResult, +} + +// ── Fingerprint helpers ──────────────────────────────────────────────────────── + +fn fp_source(orig_w: u32, orig_h: u32, dpi: Option, img_w_mm: Option) -> u64 { + let mut h = DefaultHasher::new(); + h.write_u32(orig_w); h.write_u32(orig_h); + h.write_u32(dpi.unwrap_or(0)); + h.write_u32(img_w_mm.map(|v| v.to_bits()).unwrap_or(0)); + h.finish() +} +fn fp_kernel(node: &GraphNodePayload, upstream_fp: u64) -> u64 { + let mut h = DefaultHasher::new(); + h.write_u64(upstream_fp); + h.write(node.kernel.as_deref().unwrap_or("Luminance").as_bytes()); + h.write_u32(node.weight.unwrap_or(1.0).to_bits()); + h.write_u8(node.invert.unwrap_or(false) as u8); + h.write_u32(node.blur_radius.unwrap_or(0.0).to_bits()); + h.write_u32(node.sat_min_value.unwrap_or(0.1).to_bits()); + h.write_u32(node.canny_low.unwrap_or(50.0).to_bits()); + h.write_u32(node.canny_high.unwrap_or(150.0).to_bits()); + h.write_u32(node.xdog_sigma2.unwrap_or(1.6).to_bits()); + h.write_u32(node.xdog_tau.unwrap_or(0.98).to_bits()); + h.write_u32(node.xdog_phi.unwrap_or(10.0).to_bits()); + if let Some(cf) = &node.color_filter { + h.write_u32(cf.hue_min.to_bits()); h.write_u32(cf.hue_max.to_bits()); + h.write_u32(cf.sat_min.to_bits()); h.write_u32(cf.sat_max.to_bits()); + 
h.write_u32(cf.val_min.to_bits()); h.write_u32(cf.val_max.to_bits()); + } + h.finish() +} +fn fp_combine(node: &GraphNodePayload, upstream_fps: &[u64]) -> u64 { + let mut h = DefaultHasher::new(); + h.write(node.blend_mode.as_deref().unwrap_or("Average").as_bytes()); + for &fp in upstream_fps { h.write_u64(fp); } + h.finish() +} +fn fp_hull(node: &GraphNodePayload, upstream_fp: u64) -> u64 { + let mut h = DefaultHasher::new(); + h.write_u64(upstream_fp); + h.write_u8(node.threshold.unwrap_or(128)); + h.write_u32(node.min_area.unwrap_or(4)); + h.write_u32(node.rdp_epsilon.unwrap_or(1.5).to_bits()); + h.write(node.connectivity.as_deref().unwrap_or("four").as_bytes()); + if let Some(cf) = &node.color_filter { + h.write_u8(cf.enabled as u8); + h.write_u32(cf.hue_min.to_bits()); h.write_u32(cf.hue_max.to_bits()); + h.write_u32(cf.sat_min.to_bits()); h.write_u32(cf.sat_max.to_bits()); + h.write_u32(cf.val_min.to_bits()); h.write_u32(cf.val_max.to_bits()); + } + h.finish() +} +fn fp_fill(node: &GraphNodePayload, upstream_fp: u64) -> u64 { + let mut h = DefaultHasher::new(); + h.write_u64(upstream_fp); + h.write(node.strategy.as_deref().unwrap_or("hatch").as_bytes()); + h.write_u32(node.spacing.unwrap_or(5.0).to_bits()); + h.write_u32(node.angle.unwrap_or(0.0).to_bits()); + h.write_u32(node.param.unwrap_or(1.0).to_bits()); + h.write_u32(node.smooth_rdp.unwrap_or(1.0).to_bits()); + h.write_u32(node.smooth_iters.unwrap_or(2)); + h.finish() +} +fn fp_pen(node: &GraphNodePayload, upstream_fp: u64) -> u64 { + let mut h = DefaultHasher::new(); + h.write_u64(upstream_fp); + for &v in node.pen_color.as_deref().unwrap_or(&[20, 20, 20]) { h.write_u8(v); } + h.write(node.pen_label.as_deref().unwrap_or("").as_bytes()); + h.write_u32(node.pen_order.unwrap_or(0)); + h.finish() +} + +/// Compute a fingerprint for every node in topological order. +/// Fingerprints cascade: downstream nodes include upstream fps so any upstream +/// change propagates automatically. 
+fn compute_node_fingerprints(payload: &ProcessPassPayload, source_fp: u64) + -> std::collections::HashMap +{ + use std::collections::{HashMap, VecDeque}; + let nodes = &payload.graph.nodes; + let edges = &payload.graph.edges; + + let mut incoming: HashMap<&str, Vec<(&str, usize)>> = HashMap::new(); + let mut out_edges: HashMap<&str, Vec<&str>> = HashMap::new(); + for n in nodes { incoming.entry(&n.id).or_default(); out_edges.entry(&n.id).or_default(); } + for e in edges { + incoming.entry(&e.to).or_default().push((&e.from, e.port)); + out_edges.entry(&e.from).or_default().push(&e.to); + } + let mut in_deg: HashMap<&str, usize> = nodes.iter() + .map(|n| (n.id.as_str(), incoming[n.id.as_str()].len())).collect(); + let mut queue: VecDeque<&str> = in_deg.iter() + .filter(|(_, &d)| d == 0).map(|(&id, _)| id).collect(); + let node_map: HashMap<&str, &GraphNodePayload> = nodes.iter() + .map(|n| (n.id.as_str(), n)).collect(); + let mut fps: HashMap = HashMap::new(); + + while let Some(id) = queue.pop_front() { + let node = node_map[id]; + let mut ins = incoming[id].clone(); + ins.sort_by_key(|&(_, p)| p); + let up_fps: Vec = ins.iter() + .map(|(fid, _)| fps.get(*fid).copied().unwrap_or(source_fp)).collect(); + let first = up_fps.first().copied().unwrap_or(source_fp); + let fp = match node.kind.as_str() { + "Source" => source_fp, + "Kernel" => fp_kernel(node, first), + "Combine" => fp_combine(node, &up_fps), + "Hull" => fp_hull(node, first), + "Fill" => fp_fill(node, first), + "PenOutput" => fp_pen(node, first), + _ => 0, + }; + fps.insert(id.to_string(), fp); + for &next in out_edges.get(id).into_iter().flatten() { + let d = in_deg.get_mut(next).unwrap(); + *d -= 1; + if *d == 0 { queue.push_back(next); } + } + } + fps +} + // ── Shared app state ─────────────────────────────────────────────────────────── struct AppState { @@ -29,8 +186,11 @@ struct AppState { #[derive(Default)] struct PassState { hulls: Vec, - fill_results: Vec, - response_map: Vec, // raw detect output; 
kept so gradient fills can query it + pen_results: Vec, + response_map: Vec, + img_w: u32, + img_h: u32, + node_cache: NodeCache, } impl Default for AppState { @@ -81,6 +241,10 @@ pub struct GraphNodePayload { pub param: Option, pub smooth_rdp: Option, pub smooth_iters: Option, + // PenOutput params (optional — only for kind="PenOutput") + pub pen_color: Option>, // [r, g, b] + pub pen_label: Option, + pub pen_order: Option, } #[derive(Deserialize, Clone, Debug)] @@ -111,6 +275,8 @@ pub struct ColorFilterPayload { pub struct ProcessPassPayload { pub pass_index: usize, pub graph: DetectionGraphPayload, + pub dpi: Option, + pub img_w_mm: Option, } #[derive(Serialize, Clone, Default)] @@ -119,14 +285,37 @@ pub struct StepTime { pub ms: u64, } +/// Owns one pen's worth of fill strokes plus metadata from a PenOutput graph node. +#[derive(Clone)] +pub struct PenResult { + pub node_id: String, + pub color: [u8; 3], + pub label: String, + pub order: u32, + pub fill: fill::FillResult, +} + +/// Per-pen summary returned to the frontend after a full pipeline run. 
+#[derive(Serialize)] +pub struct PenOutputResult { + pub node_id: String, + pub color: Vec, + pub label: String, + pub order: u32, + pub stroke_count: usize, +} + #[derive(Serialize)] pub struct ProcessResult { pub hull_count: usize, pub coverage_pct: usize, pub stroke_count: usize, pub viz_b64: String, + pub pen_outputs: Vec, pub node_previews: std::collections::HashMap, pub timings: Vec, + pub img_w: u32, + pub img_h: u32, } #[derive(Deserialize, Clone, Debug)] @@ -172,8 +361,10 @@ fn to_detection_graph(payload: &DetectionGraphPayload) -> detect::DetectionGraph "Canny" => Canny, "Saturation" => Saturation, "XDoG" => XDoG, + "ColorIsolate" => ColorIsolate, _ => Luminance, }; + let cf = n.color_filter.as_ref(); detect::NodeKind::Kernel(detect::DetectionLayer { kernel, weight: n.weight.unwrap_or(1.0), @@ -185,6 +376,12 @@ fn to_detection_graph(payload: &DetectionGraphPayload) -> detect::DetectionGraph xdog_sigma2: n.xdog_sigma2.unwrap_or(1.6), xdog_tau: n.xdog_tau.unwrap_or(0.98), xdog_phi: n.xdog_phi.unwrap_or(10.0), + ci_hue_min: cf.map(|f| f.hue_min).unwrap_or(0.0), + ci_hue_max: cf.map(|f| f.hue_max).unwrap_or(360.0), + ci_sat_min: cf.map(|f| f.sat_min).unwrap_or(0.0), + ci_sat_max: cf.map(|f| f.sat_max).unwrap_or(1.0), + ci_val_min: cf.map(|f| f.val_min).unwrap_or(0.0), + ci_val_max: cf.map(|f| f.val_max).unwrap_or(1.0), }) } "Combine" => { @@ -193,6 +390,16 @@ fn to_detection_graph(payload: &DetectionGraphPayload) -> detect::DetectionGraph ); detect::NodeKind::Combine(mode) } + "PenOutput" => detect::NodeKind::PenOutput { + color: { + let c = n.pen_color.as_deref().unwrap_or(&[20, 20, 20]); + [c.get(0).copied().unwrap_or(20), + c.get(1).copied().unwrap_or(20), + c.get(2).copied().unwrap_or(20)] + }, + label: n.pen_label.clone().unwrap_or_else(|| "Pen".into()), + order: n.pen_order.unwrap_or(0), + }, "Fill" => detect::NodeKind::Fill { strategy: n.strategy.clone().unwrap_or_else(|| "hatch".into()), spacing: n.spacing.unwrap_or(5.0), @@ -240,81 +447,50 @@ fn 
to_color_filter(p: &ColorFilterPayload) -> hulls::ColorFilter { } } -/// Encode an RGBA viz buffer as JPEG, scaling down to max 1024px on the long side. -/// JPEG is ~10× faster to encode than PNG and ~5× smaller over IPC. fn rgba_to_b64_png(rgba: &[u8], w: u32, h: u32) -> String { - const MAX_DIM: u32 = 1024; let img = image::RgbaImage::from_raw(w, h, rgba.to_vec()) .expect("bad rgba buffer"); let rgb = image::DynamicImage::ImageRgba8(img).to_rgb8(); - - let out = if w > MAX_DIM || h > MAX_DIM { - let scale = MAX_DIM as f32 / w.max(h) as f32; - let (sw, sh) = ((w as f32 * scale) as u32, (h as f32 * scale) as u32); - image::imageops::resize(&rgb, sw, sh, image::imageops::FilterType::Nearest) - } else { - rgb - }; - let mut buf = std::io::Cursor::new(Vec::new()); - out.write_to(&mut buf, image::ImageFormat::Jpeg).unwrap(); + rgb.write_to(&mut buf, image::ImageFormat::Jpeg).unwrap(); B64.encode(buf.into_inner()) } fn map_to_b64_small(map: &[u8], w: u32, h: u32) -> String { - const MAX_DIM: u32 = 512; let gray = image::GrayImage::from_raw(w, h, map.to_vec()) .expect("bad map buffer"); - let out = if w > MAX_DIM || h > MAX_DIM { - let scale = MAX_DIM as f32 / w.max(h) as f32; - let (sw, sh) = ((w as f32 * scale) as u32, (h as f32 * scale) as u32); - image::imageops::resize(&gray, sw, sh, image::imageops::FilterType::Nearest) - } else { - gray - }; let mut buf = std::io::Cursor::new(Vec::new()); - image::DynamicImage::ImageLuma8(out) + image::DynamicImage::ImageLuma8(gray) .write_to(&mut buf, image::ImageFormat::Jpeg) .unwrap(); B64.encode(buf.into_inner()) } fn rgb_to_b64_jpeg(rgb: &image::RgbImage) -> String { - const MAX_DIM: u32 = 1024; - let (w, h) = rgb.dimensions(); - let out; - let to_encode: &image::RgbImage = if w > MAX_DIM || h > MAX_DIM { - let scale = MAX_DIM as f32 / w.max(h) as f32; - let (sw, sh) = ((w as f32 * scale) as u32, (h as f32 * scale) as u32); - out = image::imageops::resize(rgb, sw, sh, image::imageops::FilterType::Nearest); - &out - } else { 
- rgb - }; + use image::codecs::jpeg::JpegEncoder; let mut buf = std::io::Cursor::new(Vec::new()); - to_encode.write_to(&mut buf, image::ImageFormat::Jpeg).unwrap(); + JpegEncoder::new_with_quality(&mut buf, 92) + .encode_image(rgb) + .unwrap(); B64.encode(buf.into_inner()) } // ── Pipeline inner functions (no Tauri, no mutex) ───────────────────────────── -/// Rasterize fill strokes into a small JPEG preview (256×256). -fn render_fill_preview(result: &fill::FillResult, img_w: u32, img_h: u32) -> String { - const P: u32 = 256; - let sx = P as f32 / img_w.max(1) as f32; - let sy = P as f32 / img_h.max(1) as f32; - let mut pix = vec![20u8; (P * P) as usize]; +/// Rasterize fill strokes in the pen's color on a light background at full image resolution. +fn render_pen_preview(color: [u8; 3], fill: &fill::FillResult, img_w: u32, img_h: u32) -> String { + let mut pix = vec![[235u8, 235u8, 235u8]; (img_w * img_h) as usize]; - for stroke in &result.strokes { + for stroke in &fill.strokes { for pair in stroke.windows(2) { - let (mut x, mut y) = ((pair[0].0 * sx).round() as i32, (pair[0].1 * sy).round() as i32); - let (x1, y1) = ((pair[1].0 * sx).round() as i32, (pair[1].1 * sy).round() as i32); + let (mut x, mut y) = (pair[0].0.round() as i32, pair[0].1.round() as i32); + let (x1, y1) = (pair[1].0.round() as i32, pair[1].1.round() as i32); let dx = (x1 - x).abs(); let sx_ = if x < x1 { 1i32 } else { -1 }; let dy = -(y1 - y).abs(); let sy_ = if y < y1 { 1i32 } else { -1 }; let mut err = dx + dy; loop { - if x >= 0 && y >= 0 && (x as u32) < P && (y as u32) < P { - pix[(y as u32 * P + x as u32) as usize] = 210; + if x >= 0 && y >= 0 && (x as u32) < img_w && (y as u32) < img_h { + pix[(y as u32 * img_w + x as u32) as usize] = color; } if x == x1 && y == y1 { break; } let e2 = 2 * err; @@ -324,7 +500,37 @@ fn render_fill_preview(result: &fill::FillResult, img_w: u32, img_h: u32) -> Str } } - let img = image::GrayImage::from_raw(P, P, pix).expect("fill preview buffer"); + let 
flat: Vec = pix.into_iter().flat_map(|[r, g, b]| [r, g, b]).collect(); + let img = image::RgbImage::from_raw(img_w, img_h, flat).expect("pen preview buffer"); + let mut buf = std::io::Cursor::new(Vec::new()); + img.write_to(&mut buf, image::ImageFormat::Jpeg).unwrap(); + B64.encode(buf.into_inner()) +} + +/// Rasterize fill strokes into a JPEG preview at full image resolution. +fn render_fill_preview(result: &fill::FillResult, img_w: u32, img_h: u32) -> String { + let mut pix = vec![20u8; (img_w * img_h) as usize]; + + for stroke in &result.strokes { + for pair in stroke.windows(2) { + let (mut x, mut y) = (pair[0].0.round() as i32, pair[0].1.round() as i32); + let (x1, y1) = (pair[1].0.round() as i32, pair[1].1.round() as i32); + let dx = (x1 - x).abs(); let sx_ = if x < x1 { 1i32 } else { -1 }; + let dy = -(y1 - y).abs(); let sy_ = if y < y1 { 1i32 } else { -1 }; + let mut err = dx + dy; + loop { + if x >= 0 && y >= 0 && (x as u32) < img_w && (y as u32) < img_h { + pix[(y as u32 * img_w + x as u32) as usize] = 210; + } + if x == x1 && y == y1 { break; } + let e2 = 2 * err; + if e2 >= dy { err += dy; x += sx_; } + if e2 <= dx { err += dx; y += sy_; } + } + } + } + + let img = image::GrayImage::from_raw(img_w, img_h, pix).expect("fill preview buffer"); let mut buf = std::io::Cursor::new(Vec::new()); image::DynamicImage::ImageLuma8(img) .write_to(&mut buf, image::ImageFormat::Jpeg) @@ -352,35 +558,112 @@ fn render_hull_preview(response: &[u8], hulls_list: &[hulls::Hull], w: u32, h: u fn process_pass_work( rgb: &image::RgbImage, payload: ProcessPassPayload, -) -> (Vec, Vec, Vec, ProcessResult) { + mut cache: NodeCache, +) -> (Vec, Vec, Vec, ProcessResult, NodeCache) { use rayon::prelude::*; let t0 = Instant::now(); let mut steps: Vec = Vec::new(); + let mut cache_hits = 0u32; + let mut cache_misses = 0u32; + + // ── DPI scale ───────────────────────────────────────────────────────────── + let mut t = Instant::now(); + let (orig_w, orig_h) = rgb.dimensions(); + let 
scaled_opt: Option = match (payload.dpi, payload.img_w_mm) { + (Some(dpi), Some(img_w_mm)) if dpi > 0 && img_w_mm > 0.0 => { + let target_w = ((img_w_mm * dpi as f32 / 25.4).round() as u32).max(1); + let target_h = ((orig_h as f32 * target_w as f32 / orig_w as f32).round() as u32).max(1); + if target_w != orig_w || target_h != orig_h { + Some(image::DynamicImage::ImageRgb8(rgb.clone()) + .resize_exact(target_w, target_h, image::imageops::FilterType::CatmullRom) + .to_rgb8()) + } else { None } + } + _ => None, + }; + let rgb: &image::RgbImage = scaled_opt.as_ref().unwrap_or(rgb); + t = lap!(steps, "dpi_scale", t); + let (w, h) = rgb.dimensions(); + // ── Fingerprints ────────────────────────────────────────────────────────── + let source_fp = fp_source(orig_w, orig_h, payload.dpi, payload.img_w_mm); + let node_fps = compute_node_fingerprints(&payload, source_fp); + + // Detect-phase fingerprint: combines all Kernel/Combine/Source node fps. + let detect_fp = { + let mut h = DefaultHasher::new(); + let mut fps: Vec = payload.graph.nodes.iter() + .filter(|n| matches!(n.kind.as_str(), "Source" | "Kernel" | "Combine")) + .filter_map(|n| node_fps.get(&n.id)).copied().collect(); + fps.sort_unstable(); + for fp in &fps { h.write_u64(*fp); } + h.finish() + }; + let det_graph = to_detection_graph(&payload.graph); - let mut t = Instant::now(); - let graph_maps = detect::evaluate_graph(rgb, &det_graph); + // ── Detect phase (cached) ───────────────────────────────────────────────── + let graph_maps = if detect_fp != 0 && cache.detect_fp == detect_fp + && !cache.detect_maps.is_empty() + { + cache_hits += 1; + detect::GraphMaps { + response: cache.detect_response.clone(), + raw_maps: cache.detect_maps.clone(), + } + } else { + cache_misses += 1; + let maps = detect::evaluate_graph(rgb, &det_graph); + cache.detect_fp = detect_fp; + cache.detect_response = maps.response.clone(); + cache.detect_maps = maps.raw_maps.clone(); + // Previews for detect nodes are now stale; clear them. 
+ cache.preview_cache.retain(|id, _| { + payload.graph.nodes.iter() + .find(|n| &n.id == id) + .map_or(true, |n| !matches!(n.kind.as_str(), "Kernel" | "Combine")) + }); + maps + }; t = lap!(steps, "detect", t); - // JPEG thumbnails for detection nodes (Kernel, Combine, Output) - let mut node_previews: std::collections::HashMap = - graph_maps.raw_maps.iter() - .filter(|(id, _)| { - det_graph.nodes.iter().find(|n| &n.id == *id) - .map_or(false, |n| !matches!( - n.kind, - detect::NodeKind::Source | detect::NodeKind::Hull { .. } | detect::NodeKind::Fill { .. } - )) - }) - .map(|(id, map)| (id.clone(), map_to_b64_small(map, w, h))) - .collect(); + // ── Detect node previews (cached per-node) ──────────────────────────────── + let mut node_previews: std::collections::HashMap = Default::default(); + for (id, map) in &graph_maps.raw_maps { + let is_detect_node = det_graph.nodes.iter().find(|n| &n.id == id) + .map_or(false, |n| !matches!( + n.kind, + detect::NodeKind::Source + | detect::NodeKind::Hull { .. } + | detect::NodeKind::Fill { .. } + | detect::NodeKind::PenOutput { .. 
} + )); + if !is_detect_node { continue; } + let node_fp = node_fps.get(id).copied().unwrap_or(0); + let preview = if let Some((cached_fp, cached_prev)) = cache.preview_cache.get(id) { + if *cached_fp == node_fp && node_fp != 0 { + cache_hits += 1; + cached_prev.clone() + } else { + cache_misses += 1; + let p = map_to_b64_small(map, w, h); + cache.preview_cache.insert(id.clone(), (node_fp, p.clone())); + p + } + } else { + cache_misses += 1; + let p = map_to_b64_small(map, w, h); + cache.preview_cache.insert(id.clone(), (node_fp, p.clone())); + p + }; + node_previews.insert(id.clone(), preview); + } t = lap!(steps, "detect previews", t); - // ── Hull nodes ───────────────────────────────────────────────────────────── - let mut all_hulls: Vec = Vec::new(); + // ── Hull nodes (cached per-node) ────────────────────────────────────────── + let mut all_hulls: Vec = Vec::new(); let mut hull_outputs: std::collections::HashMap> = Default::default(); let mut hull_resp_maps: std::collections::HashMap> = Default::default(); let mut first_hull_response: Option> = None; @@ -388,30 +671,71 @@ fn process_pass_work( for node in &det_graph.nodes { if let detect::NodeKind::Hull { - threshold, min_area, rdp_epsilon, eight_conn, - cf_enabled, cf_hue_min, cf_hue_max, - cf_sat_min, cf_sat_max, cf_val_min, cf_val_max, + threshold, min_area, rdp_epsilon, eight_conn, .. 
} = &node.kind { let response = match graph_maps.raw_maps.get(&node.id) { Some(m) => m, None => continue, }; - let hull_params = hulls::HullParams { - threshold: *threshold, - min_area: *min_area, - rdp_epsilon: *rdp_epsilon, - connectivity: if *eight_conn { hulls::Connectivity::Eight } - else { hulls::Connectivity::Four }, + let hull_fp = node_fps.get(&node.id).copied().unwrap_or(0); + + let (filtered, preview) = if hull_fp != 0 { + if let Some(entry) = cache.hull_entries.get(&node.id) { + if entry.fp == hull_fp { + // Cache hit — reuse hulls and preview + let preview = cache.preview_cache.get(&node.id) + .filter(|(fp, _)| *fp == hull_fp) + .map(|(_, p)| p.clone()); + cache_hits += 1; + hull_outputs.insert(node.id.clone(), entry.hulls.clone()); + hull_resp_maps.insert(node.id.clone(), entry.resp_map.clone()); + if first_hull_response.is_none() { + first_hull_response = Some(entry.resp_map.clone()); + first_hull_threshold = *threshold; + } + all_hulls.extend(entry.hulls.clone()); + let p = preview.unwrap_or_else(|| { + let p = render_hull_preview(response, &entry.hulls, w, h); + cache.preview_cache.insert(node.id.clone(), (hull_fp, p.clone())); + p + }); + node_previews.insert(node.id.clone(), p); + continue; + } + } + cache_misses += 1; + // Cache miss — compute + let hull_params = hulls::HullParams { + threshold: *threshold, + min_area: *min_area, + rdp_epsilon: *rdp_epsilon, + connectivity: if *eight_conn { hulls::Connectivity::Eight } + else { hulls::Connectivity::Four }, + }; + let extracted = hulls::extract_hulls(response, rgb, w, h, &hull_params); + let preview = render_hull_preview(response, &extracted, w, h); + cache.hull_entries.insert(node.id.clone(), HullCacheEntry { + fp: hull_fp, + hulls: extracted.clone(), + resp_map: response.clone(), + }); + cache.preview_cache.insert(node.id.clone(), (hull_fp, preview.clone())); + (extracted, preview) + } else { + // No fingerprint — always compute, never cache + let hull_params = hulls::HullParams { + threshold: 
*threshold, + min_area: *min_area, + rdp_epsilon: *rdp_epsilon, + connectivity: if *eight_conn { hulls::Connectivity::Eight } + else { hulls::Connectivity::Four }, + }; + let extracted = hulls::extract_hulls(response, rgb, w, h, &hull_params); + let preview = render_hull_preview(response, &extracted, w, h); + (extracted, preview) }; - let color_filter = hulls::ColorFilter { - enabled: *cf_enabled, - hue_min: *cf_hue_min, hue_max: *cf_hue_max, - sat_min: *cf_sat_min, sat_max: *cf_sat_max, - val_min: *cf_val_min, val_max: *cf_val_max, - }; - let extracted = hulls::extract_hulls(response, rgb, w, h, &hull_params); - let filtered = hulls::filter_hulls_by_color(extracted, &color_filter); - node_previews.insert(node.id.clone(), render_hull_preview(response, &filtered, w, h)); + + node_previews.insert(node.id.clone(), preview); if first_hull_response.is_none() { first_hull_response = Some(response.clone()); first_hull_threshold = *threshold; @@ -423,15 +747,14 @@ fn process_pass_work( } t = lap!(steps, "hull extract", t); - // ── Fill nodes ───────────────────────────────────────────────────────────── - let mut all_fill_results: Vec = Vec::new(); + // ── Fill nodes (cached per-node) ────────────────────────────────────────── + let mut fill_outputs: std::collections::HashMap = Default::default(); for node in &det_graph.nodes { if let detect::NodeKind::Fill { strategy, spacing, angle, param, smooth_rdp, smooth_iters } = &node.kind { - let upstream = det_graph.edges.iter() - .find(|e| e.to == node.id && e.port == 0); + let upstream = det_graph.edges.iter().find(|e| e.to == node.id && e.port == 0); let (hulls_for_fill, resp_for_fill) = match upstream { Some(e) => ( hull_outputs.get(&e.from).cloned().unwrap_or_default(), @@ -441,38 +764,135 @@ fn process_pass_work( }; if hulls_for_fill.is_empty() { continue; } - let response_arc: std::sync::Arc<[u8]> = resp_for_fill.into(); - let (strategy, spacing, angle, param, smooth_rdp, smooth_iters) = - (strategy.clone(), *spacing, 
*angle, *param, *smooth_rdp, *smooth_iters); - let img_w = w; + let fill_fp = node_fps.get(&node.id).copied().unwrap_or(0); - let raw: Vec = hulls_for_fill.par_iter().map(|hull| { - match strategy.as_str() { - "outline" => fill::outline(hull), - "zigzag" => fill::zigzag_hatch(hull, spacing, angle), - "offset" => fill::contour_offset(hull, spacing), - "spiral" => fill::spiral(hull, spacing), - "circles" => fill::circle_pack(hull, spacing, param.max(0.1)), - "voronoi" => fill::voronoi_fill(hull, spacing), - "hilbert" => fill::hilbert_fill(hull, spacing), - "waves" => fill::wave_interference(hull, spacing, param.round().max(1.0) as usize), - "flow" => fill::flow_field(hull, spacing, angle, param.max(0.0)), - "gradient_hatch" => fill::gradient_hatch(hull, &response_arc, img_w, spacing, angle, param.clamp(0.05, 1.0)), - _ => fill::parallel_hatch(hull, spacing, angle), + let (optimised, preview) = if fill_fp != 0 { + if let Some(entry) = cache.fill_entries.get(&node.id) { + if entry.fp == fill_fp { + // Cache hit + let preview = cache.preview_cache.get(&node.id) + .filter(|(fp, _)| *fp == fill_fp) + .map(|(_, p)| p.clone()); + cache_hits += 1; + let p = preview.unwrap_or_else(|| { + let p = render_fill_preview(&entry.fill, w, h); + cache.preview_cache.insert(node.id.clone(), (fill_fp, p.clone())); + p + }); + node_previews.insert(node.id.clone(), p); + fill_outputs.insert(node.id.clone(), entry.fill.clone()); + continue; + } } - }).collect(); + cache_misses += 1; + // Cache miss — compute + let response_arc: std::sync::Arc<[u8]> = resp_for_fill.into(); + let (strategy, spacing, angle, param, smooth_rdp, smooth_iters) = + (strategy.clone(), *spacing, *angle, *param, *smooth_rdp, *smooth_iters); + let img_w = w; + let raw: Vec = hulls_for_fill.par_iter().map(|hull| { + match strategy.as_str() { + "outline" => fill::outline(hull), + "zigzag" => fill::zigzag_hatch(hull, spacing, angle), + "offset" => fill::contour_offset(hull, spacing), + "spiral" => fill::spiral(hull, 
spacing), + "circles" => fill::circle_pack(hull, spacing, param.max(0.1)), + "voronoi" => fill::voronoi_fill(hull, spacing), + "hilbert" => fill::hilbert_fill(hull, spacing), + "waves" => fill::wave_interference(hull, spacing, param.round().max(1.0) as usize), + "flow" => fill::flow_field(hull, spacing, angle, param.max(0.0)), + "gradient_hatch" => fill::gradient_hatch(hull, &response_arc, img_w, spacing, angle, param.clamp(0.05, 1.0)), + "gradient_cross_hatch" => fill::gradient_cross_hatch(hull, &response_arc, img_w, spacing, angle, param.clamp(0.05, 1.0)), + _ => fill::parallel_hatch(hull, spacing, angle), + } + }).collect(); + let smoothed: Vec = raw.iter() + .map(|r| fill::smooth_fill_result(r, smooth_rdp, smooth_iters)).collect(); + let opt = fill::optimize_travel(&smoothed); + let preview = render_fill_preview(&opt, w, h); + cache.fill_entries.insert(node.id.clone(), FillCacheEntry { fp: fill_fp, fill: opt.clone() }); + cache.preview_cache.insert(node.id.clone(), (fill_fp, preview.clone())); + (opt, preview) + } else { + let response_arc: std::sync::Arc<[u8]> = resp_for_fill.into(); + let (strategy, spacing, angle, param, smooth_rdp, smooth_iters) = + (strategy.clone(), *spacing, *angle, *param, *smooth_rdp, *smooth_iters); + let img_w = w; + let raw: Vec = hulls_for_fill.par_iter().map(|hull| { + match strategy.as_str() { + "outline" => fill::outline(hull), + "zigzag" => fill::zigzag_hatch(hull, spacing, angle), + "offset" => fill::contour_offset(hull, spacing), + "spiral" => fill::spiral(hull, spacing), + "circles" => fill::circle_pack(hull, spacing, param.max(0.1)), + "voronoi" => fill::voronoi_fill(hull, spacing), + "hilbert" => fill::hilbert_fill(hull, spacing), + "waves" => fill::wave_interference(hull, spacing, param.round().max(1.0) as usize), + "flow" => fill::flow_field(hull, spacing, angle, param.max(0.0)), + "gradient_hatch" => fill::gradient_hatch(hull, &response_arc, img_w, spacing, angle, param.clamp(0.05, 1.0)), + "gradient_cross_hatch" => 
fill::gradient_cross_hatch(hull, &response_arc, img_w, spacing, angle, param.clamp(0.05, 1.0)), + _ => fill::parallel_hatch(hull, spacing, angle), + } + }).collect(); + let smoothed: Vec = raw.iter() + .map(|r| fill::smooth_fill_result(r, smooth_rdp, smooth_iters)).collect(); + let opt = fill::optimize_travel(&smoothed); + let preview = render_fill_preview(&opt, w, h); + (opt, preview) + }; - let smoothed: Vec = raw.iter() - .map(|r| fill::smooth_fill_result(r, smooth_rdp, smooth_iters)) - .collect(); - let optimised = fill::optimize_travel(&smoothed); - - node_previews.insert(node.id.clone(), render_fill_preview(&optimised, w, h)); - all_fill_results.push(optimised); + node_previews.insert(node.id.clone(), preview); + fill_outputs.insert(node.id.clone(), optimised); } } t = lap!(steps, "fill", t); + // ── PenOutput nodes ──────────────────────────────────────────────────────── + let mut pen_results: Vec = Vec::new(); + let mut pen_output_results: Vec = Vec::new(); + + for node in &det_graph.nodes { + if let detect::NodeKind::PenOutput { color, label, order } = &node.kind { + let upstream = det_graph.edges.iter().find(|e| e.to == node.id && e.port == 0); + let fill = match upstream.and_then(|e| fill_outputs.get(&e.from)) { + Some(f) => f.clone(), + None => continue, + }; + let pen_fp = node_fps.get(&node.id).copied().unwrap_or(0); + let stroke_count = fill.strokes.len(); + let preview = if pen_fp != 0 { + if let Some((cached_fp, cached_p)) = cache.preview_cache.get(&node.id) { + if *cached_fp == pen_fp { + cache_hits += 1; + cached_p.clone() + } else { + cache_misses += 1; + let p = render_pen_preview(*color, &fill, w, h); + cache.preview_cache.insert(node.id.clone(), (pen_fp, p.clone())); + p + } + } else { + cache_misses += 1; + let p = render_pen_preview(*color, &fill, w, h); + cache.preview_cache.insert(node.id.clone(), (pen_fp, p.clone())); + p + } + } else { + render_pen_preview(*color, &fill, w, h) + }; + node_previews.insert(node.id.clone(), preview); + 
pen_output_results.push(PenOutputResult { + node_id: node.id.clone(), color: color.to_vec(), + label: label.clone(), order: *order, stroke_count, + }); + pen_results.push(PenResult { + node_id: node.id.clone(), color: *color, + label: label.clone(), order: *order, fill, + }); + } + } + t = lap!(steps, "pen output", t); + // ── Coverage + binary viz ────────────────────────────────────────────────── let response_for_viz = first_hull_response.as_deref().unwrap_or(&graph_maps.response); let threshold = first_hull_threshold; @@ -480,7 +900,7 @@ fn process_pass_work( let total_dark = response_for_viz.iter().filter(|&&p| p < threshold).count(); let hull_px: usize = all_hulls.iter().map(|h| h.pixels.len()).sum(); let coverage_pct = if total_dark > 0 { hull_px * 100 / total_dark } else { 0 }; - let stroke_count: usize = all_fill_results.iter().map(|r| r.strokes.len()).sum(); + let stroke_count: usize = pen_results.iter().map(|pr| pr.fill.strokes.len()).sum(); let mut rgba = vec![0u8; (w * h * 4) as usize]; for (i, &r) in response_for_viz.iter().enumerate() { @@ -491,13 +911,19 @@ fn process_pass_work( let viz_b64 = rgba_to_b64_png(&rgba, w, h); lap!(steps, "png encode", t); - steps.push(StepTime { label: "total".into(), ms: t0.elapsed().as_millis() as u64 }); + steps.push(StepTime { + label: format!("total (cache {cache_hits}hit/{cache_misses}miss)"), + ms: t0.elapsed().as_millis() as u64, + }); let hull_count = all_hulls.len(); let response_map = first_hull_response.unwrap_or_else(|| graph_maps.response); - (all_hulls, all_fill_results, response_map, - ProcessResult { hull_count, coverage_pct, stroke_count, viz_b64, node_previews, timings: steps }) + (all_hulls, pen_results, response_map, + ProcessResult { hull_count, coverage_pct, stroke_count, viz_b64, + pen_outputs: pen_output_results, node_previews, timings: steps, + img_w: w, img_h: h }, + cache) } // ── Tauri commands ───────────────────────────────────────────────────────────── @@ -512,10 +938,11 @@ fn 
load_image(path: String, state: State>) -> Result>, -) -> Result { - let st = state.lock().unwrap(); - let ps = st.passes.get(pass_index).ok_or("Invalid pass index")?; - if ps.fill_results.is_empty() { return Err("Generate fill first".into()); } - let (iw, ih) = st.image_rgb.as_ref().map(|r| r.dimensions()).unwrap_or((512, 512)); - let cfg = to_gcode_config(&gcode_config); - Ok(gcode::to_gcode(&ps.fill_results, iw, ih, &cfg)) -} - #[tauri::command] fn export_all_gcode( - pass_colors: Vec<[u8; 3]>, gcode_config: GcodeConfigPayload, out_dir: String, state: State>, @@ -581,18 +1003,27 @@ fn export_all_gcode( let cfg = to_gcode_config(&gcode_config); let mut saved = Vec::new(); - for (i, ps) in st.passes.iter().enumerate() { - if ps.fill_results.is_empty() { continue; } - let code = gcode::to_gcode(&ps.fill_results, iw, ih, &cfg); - let color = pass_colors.get(i).copied().unwrap_or([128, 128, 128]); - let fname = format!("pass{}_{:02x}{:02x}{:02x}.gcode", - i + 1, color[0], color[1], color[2]); - let path = std::path::Path::new(&out_dir).join(&fname); - std::fs::write(&path, &code).map_err(|e| e.to_string())?; - saved.push(path.to_string_lossy().into_owned()); + for ps in st.passes.iter() { + let mut pens = ps.pen_results.clone(); + pens.sort_by_key(|pr| pr.order); + for pr in &pens { + if pr.fill.strokes.is_empty() { continue; } + let code = gcode::to_gcode(&[pr.fill.clone()], iw, ih, &cfg); + let slug = if pr.label.is_empty() { + format!("{:02x}{:02x}{:02x}", pr.color[0], pr.color[1], pr.color[2]) + } else { + pr.label.chars() + .map(|c| if c.is_alphanumeric() || c == '-' || c == '_' { c } else { '_' }) + .collect() + }; + let fname = format!("{:02}_{slug}.gcode", pr.order + 1); + let path = std::path::Path::new(&out_dir).join(&fname); + std::fs::write(&path, &code).map_err(|e| e.to_string())?; + saved.push(path.to_string_lossy().into_owned()); + } } - if saved.is_empty() { Err("No passes with fill data to export".into()) } else { Ok(saved) } + if saved.is_empty() { 
Err("No pens with fill data to export".into()) } else { Ok(saved) } } fn to_gcode_config(p: &GcodeConfigPayload) -> gcode::GcodeConfig { @@ -763,42 +1194,51 @@ fn get_pass_viz(pass_index: usize, mode: String, state: State>) /// Generate an SVG preview of all passes' fill strokes. #[tauri::command] fn get_all_strokes( - pass_colors: Vec<[u8; 3]>, state: State>, ) -> Result { let st = state.lock().unwrap(); - let (img_width, img_height) = st.image_rgb.as_ref() - .map(|r| r.dimensions()).unwrap_or((1, 1)); - let passes = st.passes.iter().enumerate().map(|(i, ps)| { - let color = pass_colors.get(i).copied().unwrap_or([200, 200, 200]); - let strokes = ps.fill_results.iter() - .flat_map(|fr| fr.strokes.iter()) - .map(|s| s.iter().map(|&(x, y)| [x, y]).collect()) - .collect(); - PassStrokesPayload { pass_index: i, color, strokes } - }).collect(); - Ok(AllStrokesPayload { passes, img_width, img_height }) + // Use the scaled pipeline dimensions (stored after process_pass) so the + // viewport offscreen canvas matches the coordinate space of the strokes. + let (img_width, img_height) = st.passes.first() + .filter(|p| p.img_w > 0) + .map(|p| (p.img_w, p.img_h)) + .unwrap_or_else(|| st.image_rgb.as_ref().map(|r| r.dimensions()).unwrap_or((1, 1))); + let mut all: Vec = Vec::new(); + for ps in st.passes.iter() { + let mut pens = ps.pen_results.clone(); + pens.sort_by_key(|pr| pr.order); + for (i, pr) in pens.iter().enumerate() { + let strokes = pr.fill.strokes.iter() + .map(|s| s.iter().map(|&(x, y)| [x, y]).collect()) + .collect(); + all.push(PassStrokesPayload { pass_index: i, color: pr.color, strokes }); + } + } + Ok(AllStrokesPayload { passes: all, img_width, img_height }) } -/// Returns base64-encoded SVG — one per pass with subsampled points. +/// Returns base64-encoded SVG — one per pen with subsampled points. /// SVG is vector so the frontend can zoom without pixelation. -/// Strokes are subsampled to ≤12 points for preview (G-code export uses full data). 
+/// Strokes are subsampled to ≤10 points for preview (G-code export uses full data). #[tauri::command] -fn get_gcode_viz(pass_colors: Vec<[u8; 3]>, state: State>) -> Result { +fn get_gcode_viz(state: State>) -> Result { let st = state.lock().unwrap(); let rgb = st.image_rgb.as_ref().ok_or("No image loaded")?; - let (w, h) = rgb.dimensions(); + let (w, h) = st.passes.first() + .filter(|p| p.img_w > 0) + .map(|p| (p.img_w, p.img_h)) + .unwrap_or_else(|| rgb.dimensions()); let mut svg = format!( r##""## ); - // Travel moves: single grey path for all passes combined + // Travel moves across all pens let mut travel_d = String::new(); - for pass in st.passes.iter() { - let mut pen: Option<(f32, f32)> = None; - for fr in &pass.fill_results { - for stroke in &fr.strokes { + for ps in st.passes.iter() { + for pr in ps.pen_results.iter() { + let mut pen: Option<(f32, f32)> = None; + for stroke in &pr.fill.strokes { if let (Some(prev), Some(&first)) = (pen, stroke.first()) { travel_d.push_str(&format!("M{:.1},{:.1}L{:.1},{:.1}", prev.0, prev.1, first.0, first.1)); } @@ -812,32 +1252,32 @@ fn get_gcode_viz(pass_colors: Vec<[u8; 3]>, state: State>) -> Re )); } - // Draw strokes per pass, subsampled for preview - for (pi, pass) in st.passes.iter().enumerate() { - let [r, g, b] = pass_colors.get(pi).copied().unwrap_or([0, 0, 0]); - let mut path_d = String::new(); - for fr in &pass.fill_results { - for stroke in &fr.strokes { + // Draw strokes per pen sorted by order + for ps in st.passes.iter() { + let mut pens = ps.pen_results.clone(); + pens.sort_by_key(|pr| pr.order); + for pr in &pens { + let [r, g, b] = pr.color; + let mut path_d = String::new(); + for stroke in &pr.fill.strokes { if stroke.len() < 2 { continue; } let step = (stroke.len() / 10).max(1); let pts: Vec<&(f32, f32)> = stroke.iter().step_by(step).collect(); let first = stroke.first().unwrap(); let last = stroke.last().unwrap(); path_d.push_str(&format!("M{:.1},{:.1}", first.0, first.1)); - for p in &pts[1..] 
{ - path_d.push_str(&format!("L{:.1},{:.1}", p.0, p.1)); - } + for p in &pts[1..] { path_d.push_str(&format!("L{:.1},{:.1}", p.0, p.1)); } if (last.0 - pts.last().unwrap().0).abs() > 0.5 || (last.1 - pts.last().unwrap().1).abs() > 0.5 { path_d.push_str(&format!("L{:.1},{:.1}", last.0, last.1)); } } - } - if !path_d.is_empty() { - svg.push_str(&format!( - r#""# - )); + if !path_d.is_empty() { + svg.push_str(&format!( + r#""# + )); + } } } @@ -864,11 +1304,11 @@ fn export_debug_state( let passes: Vec = st.passes.iter().enumerate().map(|(i, ps)| { let total_pixels: usize = ps.hulls.iter().map(|h| h.pixels.len()).sum(); - let stroke_count: usize = ps.fill_results.iter().map(|fr| fr.strokes.len()).sum(); - let total_points: usize = ps.fill_results.iter() - .flat_map(|fr| fr.strokes.iter()).map(|s| s.len()).sum(); - let strokes: Vec> = ps.fill_results.iter() - .flat_map(|fr| fr.strokes.iter()) + let stroke_count: usize = ps.pen_results.iter().map(|pr| pr.fill.strokes.len()).sum(); + let total_points: usize = ps.pen_results.iter() + .flat_map(|pr| pr.fill.strokes.iter()).map(|s| s.len()).sum(); + let strokes: Vec> = ps.pen_results.iter() + .flat_map(|pr| pr.fill.strokes.iter()) .map(|s| s.iter().map(|&(x, y)| [x, y]).collect()) .collect(); serde_json::json!({ @@ -901,6 +1341,16 @@ fn export_debug_state( Ok(out_path.to_string_lossy().into_owned()) } +#[tauri::command] +fn write_project_file(path: String, content: String) -> Result<(), String> { + std::fs::write(&path, content).map_err(|e| format!("write_project_file: {e}")) +} + +#[tauri::command] +fn read_project_file(path: String) -> Result { + std::fs::read_to_string(&path).map_err(|e| format!("read_project_file: {e}")) +} + fn hash_color(id: u32) -> (u8, u8, u8) { let h = id.wrapping_mul(2654435761).wrapping_add(id.wrapping_mul(0x9e3779b9)); // Clamp each channel to 80–255 so no hull ever renders as near-black, @@ -941,6 +1391,7 @@ mod blocking_tests { connectivity: None, color_filter: None, strategy: None, spacing: 
None, angle: None, param: None, smooth_rdp: None, smooth_iters: None, + pen_color: None, pen_label: None, pen_order: None, } } @@ -968,15 +1419,22 @@ mod blocking_tests { fill_node.param = Some(1.0); fill_node.smooth_rdp = Some(1.0); fill_node.smooth_iters = Some(2); + let mut pen_node = node("pen1", "PenOutput"); + pen_node.pen_color = Some(vec![20, 20, 20]); + pen_node.pen_label = Some("Pen 1".into()); + pen_node.pen_order = Some(0); ProcessPassPayload { pass_index: 0, + dpi: None, + img_w_mm: None, graph: DetectionGraphPayload { - nodes: vec![node("source", "Source"), k1, hull, fill_node], + nodes: vec![node("source", "Source"), k1, hull, fill_node, pen_node], edges: vec![ GraphEdgePayload { from: "source".into(), to: "k1".into(), port: 0 }, GraphEdgePayload { from: "k1".into(), to: "hull".into(), port: 0 }, GraphEdgePayload { from: "hull".into(), to: "fill".into(), port: 0 }, + GraphEdgePayload { from: "fill".into(), to: "pen1".into(), port: 0 }, ], }, } @@ -1004,7 +1462,7 @@ mod blocking_tests { let payload = default_process_payload(); let work = tokio::task::spawn_blocking(move || { - process_pass_work(&work_rgb, payload) + process_pass_work(&work_rgb, payload, NodeCache::default()) }); // Give the blocking thread a moment to start, then try to grab the mutex. 
@@ -1017,8 +1475,8 @@ mod blocking_tests { "mutex was blocked during heavy processing" ); - let (hulls, _, _, result) = work.await.unwrap(); - assert!(result.timings.iter().any(|t| t.label == "total")); + let (hulls, _, _, result, _) = work.await.unwrap(); + assert!(result.timings.iter().any(|t| t.label.starts_with("total"))); assert!(!hulls.is_empty(), "expected hulls from checkerboard image"); } } @@ -1285,6 +1743,7 @@ mod viz_tests { xdog_sigma2: lj["xdog_sigma2"].as_f64().unwrap_or(1.6) as f32, xdog_tau: lj["xdog_tau"].as_f64().unwrap_or(0.98) as f32, xdog_phi: lj["xdog_phi"].as_f64().unwrap_or(10.0) as f32, + ..Default::default() }; let graph = detect::DetectionGraph { nodes: vec![ @@ -1465,9 +1924,10 @@ pub fn run() { get_all_strokes, get_gcode_viz, get_pass_viz, - export_gcode, export_all_gcode, export_debug_state, + write_project_file, + read_project_file, ]) .run(tauri::generate_context!()) .expect("error running Tauri application"); diff --git a/src/pipeline_bench.rs b/src/pipeline_bench.rs index a7458fe1..f8f43d18 100644 --- a/src/pipeline_bench.rs +++ b/src/pipeline_bench.rs @@ -39,6 +39,7 @@ fn main() { blur_radius: 0.0, sat_min_value: 0.1, canny_low: 50.0, canny_high: 150.0, xdog_sigma2: 1.6, xdog_tau: 0.98, xdog_phi: 10.0, + ..Default::default() }], }; let response = apply_stack(&rgb, ¶ms);