Revert "updated website copy, installed locked-view function (coordinates break when locked tho), trying to get video transcripts working"

This reverts commit d7b1e348e9.
Shawn Anderson 2025-04-16 13:05:57 -07:00
parent 33f1aa4e90
commit bb144428d0
10 changed files with 177 additions and 448 deletions

View File

@ -13,21 +13,21 @@
<!-- Social Meta Tags -->
<meta name="description"
content="Mycelial experimentation in the digital realm.">
content="My research investigates the intersection of computing, human-system interfaces, and emancipatory politics. I am interested in the potential of computing as a medium for thought, as a tool for collective action, and as a means of emancipation.">
<meta property="og:url" content="https://jeffemmett.com">
<meta property="og:type" content="website">
<meta property="og:title" content="A MycoPunk Website">
<meta property="og:title" content="Jeff Emmett">
<meta property="og:description"
content="Mycelial knowledge and economic experimentation in the digital realm.">
content="My research doesn't investigate the intersection of computing, human-system interfaces, and emancipatory politics. I am interested in the potential of computing as a medium for thought, as a tool for collective action, and as a means of emancipation.">
<meta property="og:image" content="/website-embed.png">
<meta name="twitter:card" content="summary_large_image">
<meta property="twitter:domain" content="jeffemmett.com">
<meta property="twitter:url" content="https://jeffemmett.com">
<meta name="twitter:title" content="A MycoPunk Website">
<meta name="twitter:title" content="Jeff Emmett">
<meta name="twitter:description"
content="Mycelial knowledge and economic experimentation in the digital realm.">
content="My research doesn't investigate the intersection of computing, human-system interfaces, and emancipatory politics. I am interested in the potential of computing as a medium for thought, as a tool for collective action, and as a means of emancipation.">
<meta name="twitter:image" content="/website-embed.png">
<!-- Analytics -->

View File

@ -64,9 +64,9 @@ export function useCameraControls(editor: Editor | null) {
if (!editor) return
const camera = editor.getCamera()
const url = new URL(window.location.href)
url.searchParams.set("x", camera.x.toFixed(2))
url.searchParams.set("y", camera.y.toFixed(2))
url.searchParams.set("zoom", camera.z.toFixed(2))
url.searchParams.set("x", Math.round(camera.x).toString())
url.searchParams.set("y", Math.round(camera.y).toString())
url.searchParams.set("zoom", Math.round(camera.z).toString())
navigator.clipboard.writeText(url.toString())
},

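For context, the x/y/zoom parameters written above are read back on page load by the URL-parsing code further down in this diff. A minimal sketch of that read side, using only standard URL APIs (the helper name is illustrative, not part of the codebase):

// Illustrative helper: parse the camera params produced by useCameraControls.
// Both Math.round() integers and toFixed(2) strings survive this round trip,
// since parseFloat accepts either form.
function readCameraFromUrl(href: string): { x: number; y: number; z: number } | null {
  const params = new URL(href).searchParams
  const x = params.get("x")
  const y = params.get("y")
  const zoom = params.get("zoom")
  if (!x || !y || !zoom) return null
  return { x: parseFloat(x), y: parseFloat(y), z: parseFloat(zoom) }
}

// e.g. readCameraFromUrl("https://jeffemmett.com/?x=120&y=-340&zoom=1")
//      -> { x: 120, y: -340, z: 1 }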
View File

@ -33,10 +33,9 @@ import { llm } from "@/utils/llmUtils"
import {
lockElement,
unlockElement,
//setInitialCameraFromUrl,
setInitialCameraFromUrl,
initLockIndicators,
watchForLockedShapes,
zoomToSelection,
} from "@/ui/cameraUtils"
// Default to production URL if env var isn't available
@ -78,8 +77,6 @@ export function Board() {
const store = useSync(storeConfig)
const [editor, setEditor] = useState<Editor | null>(null)
const [isCameraLocked, setIsCameraLocked] = useState(false)
useEffect(() => {
const value = localStorage.getItem("makereal_settings_2")
if (value) {
@ -100,71 +97,6 @@ export function Board() {
watchForLockedShapes(editor)
}, [editor])
useEffect(() => {
if (!editor) return
// First set the camera position
const url = new URL(window.location.href)
const x = url.searchParams.get("x")
const y = url.searchParams.get("y")
const zoom = url.searchParams.get("zoom")
const shapeId = url.searchParams.get("shapeId")
const frameId = url.searchParams.get("frameId")
const isLocked = url.searchParams.get("isLocked") === "true"
const initializeCamera = async () => {
// Start with camera unlocked
setIsCameraLocked(false)
if (x && y && zoom) {
editor.stopCameraAnimation()
// Set camera position immediately when editor is available
editor.setCamera(
{
x: parseFloat(parseFloat(x).toFixed(2)),
y: parseFloat(parseFloat(y).toFixed(2)),
z: parseFloat(parseFloat(zoom).toFixed(2))
},
{ animation: { duration: 0 } }
)
// Ensure camera update is applied
editor.updateInstanceState({ ...editor.getInstanceState() })
}
// Handle shape/frame selection after camera position is set
if (shapeId) {
editor.select(shapeId as TLShapeId)
const bounds = editor.getSelectionPageBounds()
if (bounds && !x && !y && !zoom) {
zoomToSelection(editor)
}
} else if (frameId) {
editor.select(frameId as TLShapeId)
const frame = editor.getShape(frameId as TLShapeId)
if (frame && !x && !y && !zoom) {
const bounds = editor.getShapePageBounds(frame)
if (bounds) {
editor.zoomToBounds(bounds, {
targetZoom: 1,
animation: { duration: 0 },
})
}
}
}
// Lock camera after all initialization is complete
if (isLocked) {
requestAnimationFrame(() => {
setIsCameraLocked(true)
})
}
}
initializeCamera()
}, [editor])
return (
<div style={{ position: "fixed", inset: 0 }}>
<Tldraw
@ -183,7 +115,6 @@ export function Board() {
}
}}
cameraOptions={{
isLocked: isCameraLocked,
zoomSteps: [
0.001, // Min zoom
0.0025,
@ -207,7 +138,8 @@ export function Board() {
setEditor(editor)
editor.registerExternalAssetHandler("url", unfurlBookmarkUrl)
editor.setCurrentTool("hand")
setInitialCameraFromUrl(editor)
handleInitialPageLoad(editor)
registerPropagators(editor, [
TickPropagator,
ChangePropagator,

View File

@ -4,38 +4,34 @@ export function Default() {
<header>Jeff Emmett</header>
<h2>Hello! 👋🍄</h2>
<p>
My research investigates the intersection of mycelial patterns and emancipatory
My research investigates the intersection of mycelium and emancipatory
technologies. I am interested in the potential of new convivial tooling
as a medium for group consensus building and collective action, in order
to empower communities of practice to address their local challenges in an
age of ecological and institutional collapse.
to empower communities of practice to address their own challenges.
</p>
<p>
I let my curiosity about mushrooms guide me, taking inspiration from their
willingness to playfully experiment and adapt, even in the most chaotic environments.
I am fascinated by the potential of mycelial networks to create new forms of bottom-up
sensing, collective cohering around sensible directions, and emergent dynamic action
towards addressing local challenges.
My current focus is basic research into the nature of digital
organisation, developing prototype toolkits to improve shared
infrastructure, and applying this research to the design of new systems
and protocols which support the self-organisation of knowledge and
emergent response to local needs.
</p>
<h2>My work</h2>
<p>
I am fortunate enough to collaborate with some pretty incredible groups of
researchers and builders. I am a research communicator at
<a href="https://block.science/">Block Science</a>, an
advisor to the <a href= "https://activeinference.org/">Active Inference Lab</a>,
co-founder of <a href="https://commonsstack.org/">Commons Stack</a>, and
board member of the <a href="https://trustedseed.org/">Trusted Seed</a>. I am also
a collaborator with <a href="https://economicspace.agency/">The Economic Space Agency</a>.
Alongside my independent work, I am a researcher and engineering
communicator at <a href="https://block.science/">Block Science</a>, an
advisor to the Active Inference Lab, Commons Stack, and the Trusted
Seed. I am also an occasional collaborator with{" "}
<a href="https://economicspace.agency/">ECSA</a>.
</p>
<h2>Get in Touch to Collaborate</h2>
<h2>Get in touch</h2>
<p>
I am on Substack <a href="https://allthingsdecent.substack.com/">@All Things Decent</a>,
Bluesky <a href="https://bsky.app/profile/jeffemmett.com">@jeffemmett</a>,
Twitter <a href="https://x.com/jeffemmett">@jeffemmett</a>,
Mastodon <a href="https://social.coop/@jeffemmett">@jeffemmett@social.coop</a>,
I am on Twitter <a href="https://twitter.com/jeffemmett">@jeffemmett</a>
, Mastodon{" "}
<a href="https://social.coop/@jeffemmett">@jeffemmett@social.coop</a>{" "}
and GitHub <a href="https://github.com/Jeff-Emmett">@Jeff-Emmett</a>.
</p>
@ -46,29 +42,34 @@ export function Default() {
<li>
<a href="https://www.teamhuman.fm/episodes/238-jeff-emmett">
MycoPunk Futures on Team Human with Douglas Rushkoff
</a>
</a>{" "}
(<a href="artifact/tft-rocks-integration-domain.pdf">slides</a>)
</li>
<li>
<a href="https://www.youtube.com/watch?v=AFJFDajuCSg">
Exploring MycoFi on the Greenpill Network with Kevin Owocki
</a>
</a>{" "}
(<a href="artifact/tft-rocks-integration-domain.pdf">slides</a>)
</li>
<li>
<a href="https://youtu.be/9ad2EJhMbZ8">
Re-imagining Human Value on the Telos Podcast with Rieki &
Brandon from SEEDS
</a>
</a>{" "}
(<a href="artifact/tft-rocks-integration-domain.pdf">slides</a>)
</li>
<li>
<a href="https://www.youtube.com/watch?v=i8qcg7FfpLM&t=1348s">
Move Slow & Fix Things: Design Patterns from Nature
</a>
</a>{" "}
(<a href="artifact/tft-rocks-integration-domain.pdf">slides</a>)
</li>
<li>
<a href="https://podcasters.spotify.com/pod/show/theownershipeconomy/episodes/Episode-009---Localized-Democracy-and-Public-Goods-with-Token-Engineering--with-Jeff-Emmett-of-The-Commons-Stack--BlockScience-Labs-e1ggkqo">
Localized Democracy and Public Goods with Token Engineering on the
Ownership Economy
</a>
</a>{" "}
(<a href="artifact/tft-rocks-integration-domain.pdf">slides</a>)
</li>
<li>
<a href="https://youtu.be/kxcat-XBWas">

View File

@ -5,11 +5,8 @@ interface DailyApiResponse {
url: string;
}
interface DailyTranscriptResponse {
interface DailyRecordingResponse {
id: string;
transcriptionId: string;
text?: string;
link?: string;
}
export type IVideoChatShape = TLBaseShape<
@ -20,9 +17,8 @@ export type IVideoChatShape = TLBaseShape<
roomUrl: string | null
allowCamera: boolean
allowMicrophone: boolean
enableTranscription: boolean
transcriptionId: string | null
isTranscribing: boolean
enableRecording: boolean
recordingId: string | null // Track active recording
}
>
@ -40,9 +36,8 @@ export class VideoChatShape extends BaseBoxShapeUtil<IVideoChatShape> {
h: 600,
allowCamera: false,
allowMicrophone: false,
enableTranscription: true,
transcriptionId: null,
isTranscribing: false
enableRecording: true,
recordingId: null
}
}
@ -150,196 +145,80 @@ export class VideoChatShape extends BaseBoxShapeUtil<IVideoChatShape> {
}
}
async startTranscription(shape: IVideoChatShape) {
async startRecording(shape: IVideoChatShape) {
if (!shape.props.roomUrl) return;
const workerUrl = import.meta.env.VITE_TLDRAW_WORKER_URL;
const apiKey = import.meta.env.VITE_DAILY_API_KEY;
if (!apiKey) {
throw new Error('Daily.co API key not configured');
}
try {
// Extract room name from the room URL
const roomName = new URL(shape.props.roomUrl).pathname.split('/').pop();
const response = await fetch(`${workerUrl}/daily/rooms/${roomName}/start-transcription`, {
const response = await fetch(`${workerUrl}/daily/recordings/start`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${apiKey}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
room_name: shape.id,
layout: {
preset: "active-speaker"
}
})
});
if (!response.ok) {
const error = await response.json();
throw new Error(`Failed to start transcription: ${JSON.stringify(error)}`);
}
if (!response.ok) throw new Error('Failed to start recording');
const data = await response.json() as DailyTranscriptResponse;
const data = await response.json() as DailyRecordingResponse;
await this.editor.updateShape<IVideoChatShape>({
id: shape.id,
type: shape.type,
props: {
...shape.props,
transcriptionId: data.transcriptionId || data.id,
isTranscribing: true
recordingId: data.id
}
});
} catch (error) {
console.error('Error starting transcription:', error);
console.error('Error starting recording:', error);
throw error;
}
}
async stopTranscription(shape: IVideoChatShape) {
if (!shape.props.roomUrl) return;
async stopRecording(shape: IVideoChatShape) {
if (!shape.props.recordingId) return;
const workerUrl = import.meta.env.VITE_TLDRAW_WORKER_URL;
const apiKey = import.meta.env.VITE_DAILY_API_KEY;
if (!apiKey) {
throw new Error('Daily.co API key not configured');
}
try {
// Extract room name from the room URL
const roomName = new URL(shape.props.roomUrl).pathname.split('/').pop();
const response = await fetch(`${workerUrl}/daily/rooms/${roomName}/stop-transcription`, {
await fetch(`${workerUrl}/daily/recordings/${shape.props.recordingId}/stop`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${apiKey}`,
'Content-Type': 'application/json'
'Authorization': `Bearer ${apiKey}`
}
});
if (!response.ok) {
const error = await response.json();
throw new Error(`Failed to stop transcription: ${JSON.stringify(error)}`);
}
const data = await response.json() as DailyTranscriptResponse;
console.log('Stop transcription response:', data);
// Update both transcriptionId and isTranscribing state
await this.editor.updateShape<IVideoChatShape>({
id: shape.id,
type: shape.type,
props: {
...shape.props,
transcriptionId: data.transcriptionId || data.id || 'completed',
isTranscribing: false
recordingId: null
}
});
} catch (error) {
console.error('Error stopping transcription:', error);
console.error('Error stopping recording:', error);
throw error;
}
}
async getTranscriptionText(transcriptId: string): Promise<string> {
const workerUrl = import.meta.env.VITE_TLDRAW_WORKER_URL;
const apiKey = import.meta.env.VITE_DAILY_API_KEY;
if (!apiKey) {
throw new Error('Daily.co API key not configured');
}
console.log('Fetching transcript for ID:', transcriptId); // Debug log
const response = await fetch(`${workerUrl}/transcript/${transcriptId}`, { // Remove 'daily' from path
headers: {
'Authorization': `Bearer ${apiKey}`,
'Content-Type': 'application/json'
}
});
if (!response.ok) {
const error = await response.json();
console.error('Transcript API response:', error); // Debug log
throw new Error(`Failed to get transcription: ${JSON.stringify(error)}`);
}
const data = await response.json() as DailyTranscriptResponse;
console.log('Transcript data received:', data); // Debug log
return data.text || 'No transcription available';
}
async getTranscriptAccessLink(transcriptId: string): Promise<string> {
const workerUrl = import.meta.env.VITE_TLDRAW_WORKER_URL;
const apiKey = import.meta.env.VITE_DAILY_API_KEY;
if (!apiKey) {
throw new Error('Daily.co API key not configured');
}
console.log('Fetching transcript access link for ID:', transcriptId); // Debug log
const response = await fetch(`${workerUrl}/transcript/${transcriptId}/access-link`, { // Remove 'daily' from path
headers: {
'Authorization': `Bearer ${apiKey}`,
'Content-Type': 'application/json'
}
});
if (!response.ok) {
const error = await response.json();
console.error('Transcript link API response:', error); // Debug log
throw new Error(`Failed to get transcript access link: ${JSON.stringify(error)}`);
}
const data = await response.json() as DailyTranscriptResponse;
console.log('Transcript link data received:', data); // Debug log
return data.link || 'No transcript link available';
}
component(shape: IVideoChatShape) {
const [hasPermissions, setHasPermissions] = useState(false)
const [error, setError] = useState<Error | null>(null)
const [isLoading, setIsLoading] = useState(true)
const [roomUrl, setRoomUrl] = useState<string | null>(shape.props.roomUrl)
const [isCallActive, setIsCallActive] = useState(false)
const handleIframeMessage = (event: MessageEvent) => {
// Check if message is from Daily.co
if (!event.origin.includes('daily.co')) return;
console.log('Daily message received:', event.data);
// Check for call state updates
if (event.data?.action === 'daily-method-result') {
// Handle join success
if (event.data.method === 'join' && !event.data.error) {
console.log('Join successful - setting call as active');
setIsCallActive(true);
}
}
// Also check for participant events
if (event.data?.action === 'participant-joined') {
console.log('Participant joined - setting call as active');
setIsCallActive(true);
}
// Check for call ended
if (event.data?.action === 'left-meeting' ||
event.data?.action === 'participant-left') {
console.log('Call ended - setting call as inactive');
setIsCallActive(false);
}
};
useEffect(() => {
window.addEventListener('message', handleIframeMessage);
return () => {
window.removeEventListener('message', handleIframeMessage);
};
}, []);
useEffect(() => {
let mounted = true;
@ -371,7 +250,7 @@ export class VideoChatShape extends BaseBoxShapeUtil<IVideoChatShape> {
return () => {
mounted = false;
};
}, [shape.id]);
}, [shape.id]); // Only re-run if shape.id changes
useEffect(() => {
let mounted = true;
@ -403,28 +282,6 @@ export class VideoChatShape extends BaseBoxShapeUtil<IVideoChatShape> {
}
}, [shape.props.allowCamera, shape.props.allowMicrophone])
const handleTranscriptionClick = async (e: React.MouseEvent) => {
e.preventDefault();
e.stopPropagation();
if (!isCallActive) {
console.log('Cannot control transcription when call is not active');
return;
}
try {
if (shape.props.isTranscribing) {
console.log('Stopping transcription');
await this.stopTranscription(shape);
} else {
console.log('Starting transcription');
await this.startTranscription(shape);
}
} catch (err) {
console.error('Transcription error:', err);
}
};
if (error) {
return <div>Error creating room: {error.message}</div>
}
@ -460,16 +317,6 @@ export class VideoChatShape extends BaseBoxShapeUtil<IVideoChatShape> {
console.log(roomUrl)
// Debug log for render
console.log('Current call state:', { isCallActive, roomUrl });
// Add debug log before render
console.log('Rendering component with states:', {
isCallActive,
isTranscribing: shape.props.isTranscribing,
roomUrl
});
return (
<div
style={{
@ -477,7 +324,7 @@ export class VideoChatShape extends BaseBoxShapeUtil<IVideoChatShape> {
height: `${shape.props.h}px`,
position: "relative",
pointerEvents: "all",
overflow: "visible",
overflow: "hidden",
}}
>
<iframe
@ -492,80 +339,58 @@ export class VideoChatShape extends BaseBoxShapeUtil<IVideoChatShape> {
right: 0,
bottom: 0,
}}
allow="camera *; microphone *; display-capture *; clipboard-read; clipboard-write"
sandbox="allow-scripts allow-same-origin allow-forms allow-popups allow-popups-to-escape-sandbox allow-downloads allow-modals"
/>
allow={`camera ${shape.props.allowCamera ? "self" : ""}; microphone ${
shape.props.allowMicrophone ? "self" : ""
}`}
></iframe>
{/* Add data-testid to help debug iframe messages */}
<div data-testid="call-status">
Call Active: {isCallActive ? 'Yes' : 'No'}
</div>
<div
{shape.props.enableRecording && (
<button
onClick={async () => {
try {
if (shape.props.recordingId) {
await this.stopRecording(shape);
} else {
await this.startRecording(shape);
}
} catch (err) {
console.error('Recording error:', err);
}
}}
style={{
position: "absolute",
bottom: -48,
left: 0,
right: 0,
margin: "8px",
padding: "8px 12px",
background: "rgba(255, 255, 255, 0.95)",
borderRadius: "6px",
fontSize: "12px",
pointerEvents: "all",
touchAction: "manipulation",
display: "flex",
alignItems: "center",
justifyContent: "space-between",
zIndex: 999,
border: "1px solid #ccc",
boxShadow: "0 2px 4px rgba(0,0,0,0.1)",
userSelect: "none",
}}
>
<span style={{
cursor: "text",
userSelect: "text",
maxWidth: "60%",
overflow: "hidden",
textOverflow: "ellipsis",
whiteSpace: "nowrap",
pointerEvents: "all",
touchAction: "auto"
}}>
url: {roomUrl}
</span>
<button
onClick={handleTranscriptionClick}
disabled={!isCallActive}
style={{
marginLeft: "12px",
padding: "6px 12px",
background: shape.props.isTranscribing ? "#ff4444" : "#ffffff",
top: "8px",
right: "8px",
padding: "4px 8px",
background: shape.props.recordingId ? "#ff4444" : "#ffffff",
border: "1px solid #ccc",
borderRadius: "4px",
cursor: isCallActive ? "pointer" : "not-allowed",
whiteSpace: "nowrap",
flexShrink: 0,
pointerEvents: isCallActive ? "all" : "none", // Add explicit pointer-events control
touchAction: "manipulation",
WebkitTapHighlightColor: "transparent",
userSelect: "none",
minHeight: "32px",
minWidth: "44px",
zIndex: 1000,
position: "relative",
opacity: isCallActive ? 1 : 0.5
cursor: "pointer",
zIndex: 1,
}}
>
{!isCallActive
? "Join call to enable transcription"
: shape.props.isTranscribing
? "Stop Transcription"
: "Start Transcription"
}
{shape.props.recordingId ? "Stop Recording" : "Start Recording"}
</button>
</div>
)}
<p
style={{
position: "absolute",
bottom: 0,
left: 0,
margin: "8px",
padding: "4px 8px",
background: "rgba(255, 255, 255, 0.9)",
borderRadius: "4px",
fontSize: "12px",
pointerEvents: "all",
cursor: "text",
userSelect: "text",
zIndex: 1,
}}
>
url: {roomUrl}
</p>
</div>
)
}

View File

@ -12,7 +12,6 @@ import { DefaultContextMenu, DefaultContextMenuContent } from "tldraw"
import { TLUiContextMenuProps, useEditor } from "tldraw"
import {
cameraHistory,
copyLinkToLockedView,
} from "./cameraUtils"
import { useState, useEffect } from "react"
import { saveToPdf } from "../utils/pdfUtils"
@ -96,13 +95,11 @@ export function CustomContextMenu(props: TLUiContextMenuProps) {
<TldrawUiMenuGroup id="camera-controls">
<TldrawUiMenuItem {...customActions.zoomToSelection} disabled={!hasSelection} />
<TldrawUiMenuItem {...customActions.copyLinkToCurrentView} />
<TldrawUiMenuItem {...customActions.copyLockedLink} />
<TldrawUiMenuItem {...customActions.revertCamera} disabled={!hasCameraHistory} />
<TldrawUiMenuItem {...customActions.lockElement} disabled={!hasSelection} />
<TldrawUiMenuItem {...customActions.unlockElement} disabled={!hasSelection} />
<TldrawUiMenuItem {...customActions.saveToPdf} disabled={!hasSelection} />
<TldrawUiMenuItem {...customActions.llm} disabled={!hasSelection} />
</TldrawUiMenuGroup>
{/* Creation Tools Group */}

View File

@ -81,9 +81,9 @@ export const zoomToSelection = (editor: Editor) => {
const newCamera = editor.getCamera()
const url = new URL(window.location.href)
url.searchParams.set("shapeId", selectedIds[0].toString())
url.searchParams.set("x", newCamera.x.toFixed(2))
url.searchParams.set("y", newCamera.y.toFixed(2))
url.searchParams.set("zoom", newCamera.z.toFixed(2))
url.searchParams.set("x", Math.round(newCamera.x).toString())
url.searchParams.set("y", Math.round(newCamera.y).toString())
url.searchParams.set("zoom", Math.round(newCamera.z).toString())
window.history.replaceState(null, "", url.toString())
}
@ -119,49 +119,41 @@ export const revertCamera = (editor: Editor) => {
}
export const copyLinkToCurrentView = async (editor: Editor) => {
if (!editor.store.serialize()) return
if (!editor.store.serialize()) {
//console.warn("Store not ready")
return
}
try {
const baseUrl = `${window.location.origin}${window.location.pathname}`
const url = new URL(baseUrl)
const camera = editor.getCamera()
// Round camera values to 2 decimal places
url.searchParams.set("x", camera.x.toFixed(2))
url.searchParams.set("y", camera.y.toFixed(2))
url.searchParams.set("zoom", camera.z.toFixed(2))
// Round camera values to integers
url.searchParams.set("x", Math.round(camera.x).toString())
url.searchParams.set("y", Math.round(camera.y).toString())
url.searchParams.set("zoom", Math.round(camera.z).toString())
const selectedIds = editor.getSelectedShapeIds()
if (selectedIds.length > 0) {
url.searchParams.set("shapeId", selectedIds[0].toString())
}
await navigator.clipboard.writeText(url.toString())
} catch (error) {
alert("Failed to copy link. Please check clipboard permissions.")
}
}
export const copyLinkToLockedView = async (editor: Editor) => {
if (!editor.store.serialize()) return
const finalUrl = url.toString()
if (navigator.clipboard && window.isSecureContext) {
await navigator.clipboard.writeText(finalUrl)
} else {
const textArea = document.createElement("textarea")
textArea.value = finalUrl
document.body.appendChild(textArea)
try {
const baseUrl = `${window.location.origin}${window.location.pathname}`
const url = new URL(baseUrl)
const camera = editor.getCamera()
// Round camera values to 2 decimal places
url.searchParams.set("x", camera.x.toFixed(2))
url.searchParams.set("y", camera.y.toFixed(2))
url.searchParams.set("zoom", camera.z.toFixed(2))
url.searchParams.set("isLocked", "true")
const selectedIds = editor.getSelectedShapeIds()
if (selectedIds.length > 0) {
url.searchParams.set("shapeId", selectedIds[0].toString())
await navigator.clipboard.writeText(textArea.value)
} catch (err) {
}
document.body.removeChild(textArea)
}
await navigator.clipboard.writeText(url.toString())
} catch (error) {
alert("Failed to copy link. Please check clipboard permissions.")
}
@ -291,55 +283,47 @@ export const initLockIndicators = (editor: Editor) => {
})
}
// export const setInitialCameraFromUrl = (editor: Editor) => {
// const url = new URL(window.location.href)
// const x = url.searchParams.get("x")
// const y = url.searchParams.get("y")
// const zoom = url.searchParams.get("zoom")
// const shapeId = url.searchParams.get("shapeId")
// const frameId = url.searchParams.get("frameId")
// const isLocked = url.searchParams.get("isLocked") === "true"
export const setInitialCameraFromUrl = (editor: Editor) => {
const url = new URL(window.location.href)
const x = url.searchParams.get("x")
const y = url.searchParams.get("y")
const zoom = url.searchParams.get("zoom")
const shapeId = url.searchParams.get("shapeId")
const frameId = url.searchParams.get("frameId")
// // Always set camera position first if coordinates exist
// if (x && y && zoom) {
// editor.stopCameraAnimation()
// // Force camera position update
// editor.setCamera(
// {
// x: Math.round(parseFloat(x)),
// y: Math.round(parseFloat(y)),
// z: Math.round(parseFloat(zoom))
// },
// { animation: { duration: 0 } }
// )
if (x && y && zoom) {
editor.stopCameraAnimation()
editor.setCamera(
{
x: Math.round(parseFloat(x)),
y: Math.round(parseFloat(y)),
z: Math.round(parseFloat(zoom))
},
{ animation: { duration: 0 } }
)
}
// // Ensure camera update is applied
// editor.updateInstanceState({ ...editor.getInstanceState() })
// }
// // Handle other camera operations after position is set
// if (shapeId) {
// editor.select(shapeId as TLShapeId)
// const bounds = editor.getSelectionPageBounds()
// if (bounds && !x && !y && !zoom) {
// zoomToSelection(editor)
// }
// } else if (frameId) {
// editor.select(frameId as TLShapeId)
// const frame = editor.getShape(frameId as TLShapeId)
// if (frame && !x && !y && !zoom) {
// const bounds = editor.getShapePageBounds(frame as TLShape)
// if (bounds) {
// editor.zoomToBounds(bounds, {
// targetZoom: 1,
// animation: { duration: 0 },
// })
// }
// }
// }
// return isLocked
// }
// Handle shape/frame selection and zoom
if (shapeId) {
editor.select(shapeId as TLShapeId)
const bounds = editor.getSelectionPageBounds()
if (bounds && !x && !y && !zoom) {
zoomToSelection(editor)
}
} else if (frameId) {
editor.select(frameId as TLShapeId)
const frame = editor.getShape(frameId as TLShapeId)
if (frame && !x && !y && !zoom) {
const bounds = editor.getShapePageBounds(frame as TLShape)
if (bounds) {
editor.zoomToBounds(bounds, {
targetZoom: 1,
animation: { duration: 0 },
})
}
}
}
}
export const zoomToFrame = (editor: Editor, frameId: string) => {
if (!editor) return

View File

@ -12,7 +12,6 @@ import {
revertCamera,
unlockElement,
zoomToSelection,
copyLinkToLockedView,
} from "./cameraUtils"
import { saveToPdf } from "../utils/pdfUtils"
import { searchText } from "../utils/searchUtils"
@ -349,16 +348,6 @@ export const overrides: TLUiOverrides = {
}
},
},
//TODO: FIX COPY LOCKED LINK
copyLockedLink: {
id: "copy-locked-link",
label: "Copy Locked View Link",
kbd: "alt+shift+c",
onSelect() {
copyLinkToLockedView(editor)
},
readonlyOk: true,
},
//TODO: FIX PREV & NEXT SLIDE KEYBOARD COMMANDS
// "next-slide": {
// id: "next-slide",

View File

@ -77,9 +77,9 @@ export const searchText = (editor: Editor) => {
const newCamera = editor.getCamera()
const url = new URL(window.location.href)
url.searchParams.set("shapeId", matchingShapes[0].id)
url.searchParams.set("x", newCamera.x.toFixed(2))
url.searchParams.set("y", newCamera.y.toFixed(2))
url.searchParams.set("zoom", newCamera.z.toFixed(2))
url.searchParams.set("x", newCamera.x.toString())
url.searchParams.set("y", newCamera.y.toString())
url.searchParams.set("zoom", newCamera.z.toString())
window.history.replaceState(null, "", url.toString())
} else {
alert("No matches found")

View File

@ -9,12 +9,13 @@ export { TldrawDurableObject } from "./TldrawDurableObject"
// Define security headers
const securityHeaders = {
"Content-Security-Policy":
"default-src 'self'; connect-src 'self' wss: https:; img-src 'self' data: blob: https:; script-src 'self' 'unsafe-inline' 'unsafe-eval'; style-src 'self' 'unsafe-inline'; frame-src 'self' https://*.daily.co; child-src 'self' https://*.daily.co;",
"default-src 'self'; connect-src 'self' wss: https:; img-src 'self' data: blob: https:; script-src 'self' 'unsafe-inline' 'unsafe-eval'; style-src 'self' 'unsafe-inline';",
"X-Content-Type-Options": "nosniff",
"X-Frame-Options": "DENY",
"X-XSS-Protection": "1; mode=block",
"Strict-Transport-Security": "max-age=31536000; includeSubDomains",
"Referrer-Policy": "strict-origin-when-cross-origin",
"Permissions-Policy": "camera=*, microphone=*, geolocation=()",
"Permissions-Policy": "camera=(), microphone=(), geolocation=()",
}
// we use itty-router (https://itty.dev/) to handle routing. in this example we turn on CORS because
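A minimal sketch of how the securityHeaders defined above can be layered onto every routed response. Assumptions: itty-router's v4-style Router().handle entry point and an illustrative /health route; this is not the worker's actual routing table.

import { Router } from "itty-router"

const router = Router()

// Illustrative route only; the real worker registers its own endpoints.
router.get("/health", () => new Response("ok"))
router.all("*", () => new Response("Not found", { status: 404 }))

export default {
  async fetch(request: Request, env: unknown, ctx: unknown): Promise<Response> {
    const response = await router.handle(request, env, ctx)
    // Copy the routed response and apply the security headers to it.
    const headers = new Headers(response.headers)
    for (const [key, value] of Object.entries(securityHeaders)) {
      headers.set(key, value)
    }
    return new Response(response.body, { status: response.status, headers })
  },
}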