diff --git a/bun.lock b/bun.lock
index 3f0291c..57f24b2 100644
--- a/bun.lock
+++ b/bun.lock
@@ -88,7 +88,7 @@
         "@workshop/shared": "workspace:*",
         "discord.js": "^14.19.3",
         "luxon": "^3.6.1",
-        "zod": "3.25.67",
+        "zod": "catalog:",
       },
       "devDependencies": {
         "@types/bun": "latest",
@@ -147,7 +147,8 @@
         "hono": "catalog:",
         "luxon": "^3.7.1",
         "pngjs": "^7.0.0",
-        "zod": "3.25.67",
+        "tailwind": "^4.0.0",
+        "zod": "catalog:",
       },
       "devDependencies": {
         "@types/bun": "latest",
@@ -156,6 +157,7 @@
   },
   "catalog": {
     "hono": "^4.8.0",
+    "zod": "3.25.67",
   },
   "packages": {
     "@babel/runtime": ["@babel/runtime@7.3.4", "", { "dependencies": { "regenerator-runtime": "^0.12.0" } }, "sha512-IvfvnMdSaLBateu0jfsYIpZTxAc2cKEXEMiezGGN75QcBcecDUKd3PgLAncT0oOgxKy8dd8hrJKj9MfzgfZd6g=="],
@@ -836,8 +838,16 @@
     "@sapphire/shapeshift/lodash": ["lodash@4.17.21", "", {}, "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="],
 
+    "@workshop/nano-remix/@types/bun": ["@types/bun@1.2.19", "", { "dependencies": { "bun-types": "1.2.19" } }, "sha512-d9ZCmrH3CJ2uYKXQIUuZ/pUnTqIvLDS0SK7pFmbx8ma+ziH/FRMoAq5bYpRG7y+w1gl+HgyNZbtqgMq4W4e2Lg=="],
+
+    "@workshop/shared/@types/bun": ["@types/bun@1.2.19", "", { "dependencies": { "bun-types": "1.2.19" } }, "sha512-d9ZCmrH3CJ2uYKXQIUuZ/pUnTqIvLDS0SK7pFmbx8ma+ziH/FRMoAq5bYpRG7y+w1gl+HgyNZbtqgMq4W4e2Lg=="],
+
+    "@workshop/spike/@types/bun": ["@types/bun@1.2.19", "", { "dependencies": { "bun-types": "1.2.19" } }, "sha512-d9ZCmrH3CJ2uYKXQIUuZ/pUnTqIvLDS0SK7pFmbx8ma+ziH/FRMoAq5bYpRG7y+w1gl+HgyNZbtqgMq4W4e2Lg=="],
+
     "@workshop/whiteboard/@openai/agents": ["@openai/agents@0.0.11", "", { "dependencies": { "@openai/agents-core": "0.0.11", "@openai/agents-openai": "0.0.11", "@openai/agents-realtime": "0.0.11", "debug": "^4.4.0", "openai": "^5.0.1" } }, "sha512-MYSuQ0PptjryTb/BzrqoZB+cajv/p31uF42uXeqkI3s9PihqRttnQBJ1YCTJS/xQCl4f5R9cIradh/o5PpbDkA=="],
 
+    "@workshop/whiteboard/@types/bun": ["@types/bun@1.2.19", "", { "dependencies": { "bun-types": "1.2.19" } }, "sha512-d9ZCmrH3CJ2uYKXQIUuZ/pUnTqIvLDS0SK7pFmbx8ma+ziH/FRMoAq5bYpRG7y+w1gl+HgyNZbtqgMq4W4e2Lg=="],
+
     "ajv/fast-deep-equal": ["fast-deep-equal@2.0.1", "", {}, "sha512-bCK/2Z4zLidyB4ReuIsvALH6w31YfAQDmXMqMx6FyfHqvBxtjC0eRumeSu4Bs3XtXwpyIywtSTrVT99BxY1f9w=="],
 
     "amqplib/readable-stream": ["readable-stream@1.1.14", "", { "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.1", "isarray": "0.0.1", "string_decoder": "~0.10.x" } }, "sha512-+MeVjFf4L44XUkhM1eYbD8fyEsxcV81pqMSR5gblfcLCHfZvbrqy4/qYHE+/R5HoBUT11WV5O08Cr1n3YXkWVQ=="],
 
@@ -926,12 +936,20 @@
     "@modelcontextprotocol/sdk/raw-body/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="],
 
+    "@workshop/nano-remix/@types/bun/bun-types": ["bun-types@1.2.19", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-uAOTaZSPuYsWIXRpj7o56Let0g/wjihKCkeRqUBhlLVM/Bt+Fj9xTo+LhC1OV1XDaGkz4hNC80et5xgy+9KTHQ=="],
+
+    "@workshop/shared/@types/bun/bun-types": ["bun-types@1.2.19", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-uAOTaZSPuYsWIXRpj7o56Let0g/wjihKCkeRqUBhlLVM/Bt+Fj9xTo+LhC1OV1XDaGkz4hNC80et5xgy+9KTHQ=="],
+
+    "@workshop/spike/@types/bun/bun-types": ["bun-types@1.2.19", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-uAOTaZSPuYsWIXRpj7o56Let0g/wjihKCkeRqUBhlLVM/Bt+Fj9xTo+LhC1OV1XDaGkz4hNC80et5xgy+9KTHQ=="],
+
     "@workshop/whiteboard/@openai/agents/@openai/agents-core": ["@openai/agents-core@0.0.11", "", { "dependencies": { "@openai/zod": "npm:zod@3.25.40 - 3.25.67", "debug": "^4.4.0", "openai": "^5.0.1" }, "optionalDependencies": { "@modelcontextprotocol/sdk": "^1.12.0" }, "peerDependencies": { "zod": "3.25.40 - 3.25.67" }, "optionalPeers": ["zod"] }, "sha512-kMG/B620fsFAwUe/ounmXty4FuAmWbMWgql4z/gCoER3S6h5tBqNTxffN0MAOFHV3EuPLiqTxA0kGiSdTpDwyA=="],
 
     "@workshop/whiteboard/@openai/agents/@openai/agents-openai": ["@openai/agents-openai@0.0.11", "", { "dependencies": { "@openai/agents-core": "0.0.11", "@openai/zod": "npm:zod@3.25.40 - 3.25.67", "debug": "^4.4.0", "openai": "^5.0.1" } }, "sha512-gqVVDfyD0UYYBkc4kPJgbWzFzayKCKQBHMKHnbMsReZ8/nqHKGEd/hjBiqAZGqDW0BTKNaGfzGB8XAiLWWipnw=="],
 
     "@workshop/whiteboard/@openai/agents/@openai/agents-realtime": ["@openai/agents-realtime@0.0.11", "", { "dependencies": { "@openai/agents-core": "0.0.11", "@openai/zod": "npm:zod@3.25.40 - 3.25.67", "@types/ws": "^8.18.1", "debug": "^4.4.0", "ws": "^8.18.1" } }, "sha512-gVdrKri0dPBOJfsQR6m9rdpBscRZK/efc1zLKqOA2mfmaL0RxI2/LvnyXbwrDGHQ6GEbovULkbWWQ9D4nUafow=="],
 
+    "@workshop/whiteboard/@types/bun/bun-types": ["bun-types@1.2.19", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-uAOTaZSPuYsWIXRpj7o56Let0g/wjihKCkeRqUBhlLVM/Bt+Fj9xTo+LhC1OV1XDaGkz4hNC80et5xgy+9KTHQ=="],
+
     "amqplib/readable-stream/string_decoder": ["string_decoder@0.10.31", "", {}, "sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ=="],
 
     "body-parser/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="],
diff --git a/package.json b/package.json
index 62f1867..af2d70c 100644
--- a/package.json
+++ b/package.json
@@ -6,7 +6,8 @@
       "packages/*"
     ],
     "catalog": {
-      "hono": "^4.8.0"
+      "hono": "^4.8.0",
+      "zod": "3.25.67"
     }
   },
   "prettier": {
diff --git a/packages/spike/package.json b/packages/spike/package.json
index 871611a..168661c 100644
--- a/packages/spike/package.json
+++ b/packages/spike/package.json
@@ -17,7 +17,7 @@
     "@workshop/shared": "workspace:*",
     "discord.js": "^14.19.3",
     "luxon": "^3.6.1",
-    "zod": "3.25.67"
+    "zod": "catalog:"
   },
   "devDependencies": {
     "@types/luxon": "^3.6.2",
diff --git a/packages/whiteboard/package.json b/packages/whiteboard/package.json
index 52905fa..7924266 100644
--- a/packages/whiteboard/package.json
+++ b/packages/whiteboard/package.json
@@ -18,7 +18,8 @@
     "hono": "catalog:",
     "luxon": "^3.7.1",
     "pngjs": "^7.0.0",
-    "zod": "3.25.67"
+    "tailwind": "^4.0.0",
+    "zod": "catalog:"
   },
   "devDependencies": {
     "@types/bun": "latest"
diff --git a/packages/whiteboard/public/whiteboard.png b/packages/whiteboard/public/whiteboard.png
index 2c1c5e7..0bdf311 100644
Binary files a/packages/whiteboard/public/whiteboard.png and b/packages/whiteboard/public/whiteboard.png differ
diff --git a/packages/whiteboard/src/ai.ts b/packages/whiteboard/src/ai.ts
index 730c07d..1042e64 100644
--- a/packages/whiteboard/src/ai.ts
+++ b/packages/whiteboard/src/ai.ts
@@ -21,9 +21,13 @@ export const getGeminiResponse = async (
   ]
 
   const response = await ai.models.generateContent({
-    model: "gemini-2.5-pro",
+    model: "gemini-2.5-flash",
+    // model: "gemini-2.5-flash-lite-preview-06-17",
     contents: contents,
     config: {
+      thinkingConfig: {
+        thinkingBudget: 0,
+      },
       responseMimeType: "application/json",
       responseSchema: whiteboardSchema,
     },
diff --git a/packages/whiteboard/src/index.css b/packages/whiteboard/src/index.css
new file mode 100644
index 0000000..4a5c8e5
--- /dev/null
+++ b/packages/whiteboard/src/index.css
@@ -0,0 +1 @@
+@import "tailwindcss"
\ No newline at end of file
diff --git a/packages/whiteboard/src/routes/index.tsx b/packages/whiteboard/src/routes/index.tsx
index aba564f..4b3a5a2 100644
--- a/packages/whiteboard/src/routes/index.tsx
+++ b/packages/whiteboard/src/routes/index.tsx
@@ -16,11 +16,12 @@ const categories = [
 const prompts = {
   default: `Detect all of the of the following objects: ${categories}. The box_2d should be an object with ymin, xmin, ymax, xmax properties normalized to 0-1000.`,
   simple: `Detect the 2d bounding boxes of the following objects: ${categories}.`,
+  specific: `Detect 2d inscribed box for the green circle?`,
 }
 
 export const action = async (req: Request, params: {}) => {
   const imageBuffer = await Bun.file("public/whiteboard.png").arrayBuffer()
-  const response = await getGeminiResponse(imageBuffer, prompts.default)
+  const response = await getGeminiResponse(imageBuffer, prompts.specific)
   // return { elements: response?.elements || [] }
 
   // const response = await detectShapes(imageBuffer)
diff --git a/packages/whiteboard/src/routes/upload.tsx b/packages/whiteboard/src/routes/upload.tsx
index 5e0376b..c389e1f 100644
--- a/packages/whiteboard/src/routes/upload.tsx
+++ b/packages/whiteboard/src/routes/upload.tsx
@@ -1,7 +1,9 @@
-import { ensure } from "@workshop/shared/utils"
 import { useRef, useState, useEffect } from "hono/jsx"
 import { useAction, submitAction } from "@workshop/nano-remix"
 import { join } from "path"
+import { useVideo } from "../useVideo"
+import { VideoOverlay, type OverlayItem } from "../videoOverlay"
+import "../index.css"
 
 export const action = async (req: Request, params: {}) => {
   const formData = await req.formData()
@@ -30,89 +32,35 @@ export const action = async (req: Request, params: {}) => {
 
 export default function Camera() {
   const videoRef = useRef(null)
-  const canvasRef = useRef(null)
-  const [stream, setStream] = useState(null)
-  const [error, setError] = useState(null)
-  const [capturedImage, setCapturedImage] = useState(null)
   const { data, error: uploadError, loading } = useAction()
+  const [overlays, setOverlays] = useState([
+    {
+      type: "text",
+      x: 50,
+      y: 50,
+      text: "Camera Feed",
+      fontSize: 24,
+      color: "yellow",
+      strokeColor: "black",
+    },
+    {
+      type: "image",
+      x: 100,
+      y: 100,
+      src: "https://picsum.photos/seed/wow/200/300",
+    },
+  ])
 
-  const captureImage = () => {
-    ensure(videoRef.current, "Video ref must be set before capturing image")
-    ensure(canvasRef.current, "Canvas ref must be set before capturing image")
+  const { isRecording, error, toggleRecording } = useVideo(videoRef, {
+    onCapture: (dataURL) => {
+      const formData = new FormData()
+      formData.append("imageData", dataURL)
+      submitAction(formData)
+    },
+  })
 
-    const canvas = canvasRef.current
-    const video = videoRef.current
-
-    const maxWidth = 1000
-    const maxHeight = 1000
-    const aspectRatio = video.videoWidth / video.videoHeight
-
-    let newWidth = maxWidth
-    let newHeight = maxHeight
-
-    if (aspectRatio > 1) {
-      newHeight = maxWidth / aspectRatio
-    } else {
-      newWidth = maxHeight * aspectRatio
-    }
-
-    canvas.width = newWidth
-    canvas.height = newHeight
-
-    const ctx = canvas.getContext("2d")
-    if (!ctx) return
-    ctx.clearRect(0, 0, canvas.width, canvas.height)
-
-    ctx.drawImage(video, 0, 0, newWidth, newHeight)
-    const dataURL = canvas.toDataURL("image/png")
-    setCapturedImage(dataURL)
-
-    // Upload the image
-    const formData = new FormData()
-    formData.append("imageData", dataURL)
-    submitAction(formData)
-  }
-
-  const startCamera = async () => {
-    try {
-      const mediaStream = await navigator.mediaDevices.getUserMedia({
-        video: { facingMode: "environment" },
-      })
-
-      if (videoRef.current) {
-        videoRef.current.srcObject = mediaStream
-        videoRef.current.onloadedmetadata = () => {
-          setTimeout(captureImage, 100)
-        }
-      }
-
-      setStream(mediaStream)
-      setError(null)
-    } catch (err) {
-      setError("Failed to access camera")
-    }
-  }
-
-  const stopCamera = () => {
-    if (!stream) return
-
-    stream.getTracks().forEach((track) => track.stop())
-    setStream(null)
-    setCapturedImage(null)
-    if (videoRef.current) {
-      videoRef.current.srcObject = null
-    }
-  }
-
-  useEffect(() => {
-    if (!stream) return
-
-    const interval = setInterval(() => {
-      captureImage()
-    }, 1000)
-
-    return () => clearInterval(interval)
-  }, [stream])
+  // Update overlays when camera state changes
+  useEffect(() => {}, [isRecording])
 
   return (
@@ -123,33 +71,23 @@ export default function Camera() {
       )}
 
-      {!stream ? (
-
-      ) : (
-
-      )}
+
 
       {uploadError && (
           {uploadError}
       )}
 
-      {data?.success && (
-
-          Upload successful! File: {data.filename}
-
-      )}
-
-      {capturedImage && (
-        Captured
-      )}
-
-