Add GitHub to Gitea mirror workflow

🤖 Generated with Claude Code
Co-Authored-By: Claude <noreply@anthropic.com>
Jeff Emmett committed 2025-11-22 18:01:25 -08:00
parent 8c318607e0
commit c70f8ec9f8
8 changed files with 244 additions and 5 deletions

.env:Zone.Identifier (0 lines changed)

.github/workflows/mirror-to-gitea.yml (new file, +28)

@@ -0,0 +1,28 @@
name: Mirror to Gitea

on:
  push:
    branches:
      - main
      - master
  workflow_dispatch:

jobs:
  mirror:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0  # full history so all branches and tags can be mirrored

      - name: Mirror to Gitea
        env:
          GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
          GITEA_USERNAME: ${{ secrets.GITEA_USERNAME }}
        run: |
          REPO_NAME=$(basename "$GITHUB_REPOSITORY")
          # Add the Gitea remote (ignore the error if it already exists), then force-push everything
          git remote add gitea "https://${GITEA_USERNAME}:${GITEA_TOKEN}@gitea.jeffemmett.com/jeffemmett/${REPO_NAME}.git" || true
          git push gitea --all --force
          git push gitea --tags --force
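
The mirror step reads two repository secrets on the GitHub side, GITEA_TOKEN and GITEA_USERNAME; the token needs write access to the target repository on gitea.jeffemmett.com. The workflow pushes into jeffemmett/<repo-name> and does not create that repository, so the target is expected to exist already (unless the Gitea instance has push-to-create enabled).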

package-lock.json (generated, 39 lines changed)

@@ -9,6 +9,7 @@
"version": "0.1.0",
"license": "MIT",
"dependencies": {
"@fal-ai/client": "^1.4.0",
"@fal-ai/serverless-client": "^0.6.0",
"@fal-ai/serverless-proxy": "^0.6.0",
"@tldraw/tldraw": "3.1.0",
@@ -107,6 +108,20 @@
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
}
},
"node_modules/@fal-ai/client": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/@fal-ai/client/-/client-1.4.0.tgz",
"integrity": "sha512-fi+FUg/uiZu95KiI6F+vnTla7WwJo2dN/oX8H2Fjme/gg96EYeUVFm6ddR40tLhz0HQ39RdnCXxxiOQP9AX2fA==",
"license": "MIT",
"dependencies": {
"@msgpack/msgpack": "^3.0.0-beta2",
"eventsource-parser": "^1.1.2",
"robot3": "^0.4.1"
},
"engines": {
"node": ">=18.0.0"
}
},
"node_modules/@fal-ai/serverless-client": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/@fal-ai/serverless-client/-/serverless-client-0.6.1.tgz",
@@ -204,6 +219,15 @@
"integrity": "sha512-6EwiSjwWYP7pTckG6I5eyFANjPhmPjUX9JRLUSfNPC7FX7zK9gyZAfUEaECL6ALTpGX5AjnBq3C9XmVWPitNpw==",
"dev": true
},
"node_modules/@msgpack/msgpack": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/@msgpack/msgpack/-/msgpack-3.1.1.tgz",
"integrity": "sha512-DnBpqkMOUGayNVKyTLlkM6ILmU/m/+VUxGkuQlPQVAcvreLz5jn1OlQnWd8uHKL/ZSiljpM12rjRhr51VtvJUQ==",
"license": "ISC",
"engines": {
"node": ">= 18"
}
},
"node_modules/@next/env": {
"version": "14.0.3",
"resolved": "https://registry.npmjs.org/@next/env/-/env-14.0.3.tgz",
@@ -3180,6 +3204,15 @@
"resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz",
"integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw=="
},
"node_modules/eventsource-parser": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-1.1.2.tgz",
"integrity": "sha512-v0eOBUbiaFojBu2s2NPBfYUoRR9GjcDNvCXVaqEf5vVfpIAh9f8RCo4vXTP8c63QRKCFwoLpMpTdPwwhEKVgzA==",
"license": "MIT",
"engines": {
"node": ">=14.18"
}
},
"node_modules/fast-deep-equal": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
@@ -4945,6 +4978,12 @@
"url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/robot3": {
"version": "0.4.1",
"resolved": "https://registry.npmjs.org/robot3/-/robot3-0.4.1.tgz",
"integrity": "sha512-hzjy826lrxzx8eRgv80idkf8ua1JAepRc9Efdtj03N3KNJuznQCPlyCJ7gnUmDFwZCLQjxy567mQVKmdv2BsXQ==",
"license": "BSD-2-Clause"
},
"node_modules/run-parallel": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",

package.json

@@ -11,6 +11,7 @@
"format": "prettier --write ."
},
"dependencies": {
"@fal-ai/client": "^1.4.0",
"@fal-ai/serverless-client": "^0.6.0",
"@fal-ai/serverless-proxy": "^0.6.0",
"@tldraw/tldraw": "3.1.0",

src/App.tsx (new file, +15)

@@ -0,0 +1,15 @@
import { useState } from 'react';
import { ImageGenerator } from './components/ImageGenerator';
import { GenerationTypeSelector } from './components/GenerationTypeSelector';

export default function App() {
  const [generationType, setGenerationType] = useState<'sketch-to-image' | 'text-to-image' | 'image-to-video'>('sketch-to-image');

  return (
    <div className="container mx-auto p-4">
      <h1 className="text-2xl font-bold mb-4">AI Image Generator</h1>
      <GenerationTypeSelector value={generationType} onChange={setGenerationType} />
      <ImageGenerator generationType={generationType} />
    </div>
  );
}


@@ -35,6 +35,26 @@ const overrides: TLUiOverrides = {
editor.setCurrentTool('live-image')
},
}
tools.textToImage = {
id: 'text-to-image',
icon: 'text',
label: 'Text to Image',
kbd: 't',
readonlyOk: false,
onSelect: () => {
editor.setCurrentTool('text-to-image')
},
}
tools.imageToVideo = {
id: 'image-to-video',
icon: 'video',
label: 'Image to Video',
kbd: 'v',
readonlyOk: false,
onSelect: () => {
editor.setCurrentTool('image-to-video')
},
}
return tools
},
// toolbar(_app, toolbar, { tools }) {
@@ -55,11 +75,7 @@ const shapeUtils = [LiveImageShapeUtil]
const tools = [LiveImageTool]
export default function Home() {
// Server-side rendering check
if (typeof window === 'undefined') {
// Return a minimal placeholder for SSR
return <div className="tldraw-wrapper">Loading editor...</div>
}
const [generationType, setGenerationType] = useState<'sketch-to-image' | 'text-to-image' | 'image-to-video'>('sketch-to-image')
const onEditorMount = (editor: Editor) => {
// We need the editor to think that the live image shape is a frame
@@ -87,12 +103,21 @@ export default function Home() {
}
editor.setStyleForNextShapes(DefaultSizeStyle, 'xl')
// Add support for new generation types
editor.store.registerShapeUtils(
TextToImageShapeUtil,
ImageToVideoShapeUtil
)
}
return (
<LiveImageProvider appId="110602490-lcm-sd15-i2i">
<main className="tldraw-wrapper">
<div className="tldraw-wrapper__inner">
<div className="absolute top-4 left-4 z-50">
<GenerationTypeSelector value={generationType} onChange={setGenerationType} />
</div>
<Tldraw
persistenceKey="draw-fast"
onMount={onEditorMount}
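
A note on the registerShapeUtils call above: @tldraw/tldraw 3.1.0 (the version pinned in package.json) does not appear to expose a store-level registerShapeUtils method; custom shape utils and tools are normally handed to the <Tldraw> component as props when the editor is created. A minimal sketch of that wiring, assuming TextToImageShapeUtil, ImageToVideoShapeUtil and matching tool classes exist elsewhere in the app (none of them are defined in this diff):

```tsx
// Sketch only — TextToImageShapeUtil, ImageToVideoShapeUtil and the two extra tool
// classes are assumptions; only LiveImageShapeUtil/LiveImageTool appear in this diff.
const shapeUtils = [LiveImageShapeUtil, TextToImageShapeUtil, ImageToVideoShapeUtil]
const tools = [LiveImageTool, TextToImageTool, ImageToVideoTool]

export default function Home() {
  // ...same state, overrides and onEditorMount as above...
  return (
    <Tldraw
      persistenceKey="draw-fast"
      shapeUtils={shapeUtils} // registered at editor creation instead of editor.store.registerShapeUtils
      tools={tools}
      overrides={overrides}
      onMount={onEditorMount}
    />
  )
}
```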

src/components/GenerationTypeSelector.tsx (new file, +35)

@@ -0,0 +1,35 @@
interface GenerationTypeSelectorProps {
  value: 'sketch-to-image' | 'text-to-image' | 'image-to-video';
  onChange: (type: 'sketch-to-image' | 'text-to-image' | 'image-to-video') => void;
}

export function GenerationTypeSelector({ value, onChange }: GenerationTypeSelectorProps) {
  return (
    <div className="flex gap-2 mb-4">
      <button
        className={`px-4 py-2 rounded ${
          value === 'sketch-to-image' ? 'bg-blue-500 text-white' : 'bg-gray-200'
        }`}
        onClick={() => onChange('sketch-to-image')}
      >
        Sketch to Image
      </button>
      <button
        className={`px-4 py-2 rounded ${
          value === 'text-to-image' ? 'bg-blue-500 text-white' : 'bg-gray-200'
        }`}
        onClick={() => onChange('text-to-image')}
      >
        Text to Image
      </button>
      <button
        className={`px-4 py-2 rounded ${
          value === 'image-to-video' ? 'bg-blue-500 text-white' : 'bg-gray-200'
        }`}
        onClick={() => onChange('image-to-video')}
      >
        Image to Video
      </button>
    </div>
  );
}
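
Since the same three-way union appears in App.tsx, the page component and both new components, one optional tidy-up (a sketch, not part of this commit) is to centralize the type and render the buttons from a list:

```tsx
// Sketch: shared GenerationType union plus a data-driven selector.
// Where the type lives (e.g. a src/types.ts module) is a suggestion, not something in this diff.
export type GenerationType = 'sketch-to-image' | 'text-to-image' | 'image-to-video';

const OPTIONS: { value: GenerationType; label: string }[] = [
  { value: 'sketch-to-image', label: 'Sketch to Image' },
  { value: 'text-to-image', label: 'Text to Image' },
  { value: 'image-to-video', label: 'Image to Video' },
];

export function GenerationTypeSelector({
  value,
  onChange,
}: {
  value: GenerationType;
  onChange: (type: GenerationType) => void;
}) {
  return (
    <div className="flex gap-2 mb-4">
      {OPTIONS.map((opt) => (
        <button
          key={opt.value}
          className={`px-4 py-2 rounded ${value === opt.value ? 'bg-blue-500 text-white' : 'bg-gray-200'}`}
          onClick={() => onChange(opt.value)}
        >
          {opt.label}
        </button>
      ))}
    </div>
  );
}
```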

src/components/ImageGenerator.tsx (new file, +96)

@@ -0,0 +1,96 @@
import { fal } from "@fal-ai/client";
import { useState } from "react";

interface ImageGeneratorProps {
  generationType: 'sketch-to-image' | 'text-to-image' | 'image-to-video';
}

export function ImageGenerator({ generationType, ...props }: ImageGeneratorProps) {
  const [prompt, setPrompt] = useState('');
  const [inputImage, setInputImage] = useState<string | null>(null);
  const [isLoading, setIsLoading] = useState(false);
  const [error, setError] = useState<string | null>(null);
  const [result, setResult] = useState<any | null>(null);

  const generateOutput = async () => {
    setIsLoading(true);
    setError(null); // clear any error from a previous attempt
    try {
      let result;
      switch (generationType) {
        case 'text-to-image':
          result = await fal.subscribe("fal-ai/flux/dev", {
            input: {
              prompt: prompt,
            },
          });
          break;
        case 'image-to-video':
          if (!inputImage) {
            throw new Error('Input image is required for image-to-video');
          }
          result = await fal.subscribe("fal-ai/minimax-video/image-to-video", {
            input: {
              prompt: prompt,
              image_url: inputImage
            },
          });
          break;
        case 'sketch-to-image':
          // ... existing sketch-to-image logic ...
          break;
      }
      setResult(result);
    } catch (error) {
      console.error('Generation failed:', error);
      setError('Failed to generate output');
    } finally {
      setIsLoading(false);
    }
  };

  return (
    <div className="flex flex-col gap-4">
      {/* Input Section */}
      <div className="flex flex-col gap-2">
        <textarea
          className="w-full p-2 border rounded"
          placeholder="Enter your prompt..."
          value={prompt}
          onChange={(e) => setPrompt(e.target.value)}
        />
        {(generationType === 'sketch-to-image' || generationType === 'image-to-video') && (
          <div className="border-2 border-dashed p-4 text-center">
            {/* Existing image upload/sketch component */}
          </div>
        )}
        <button
          className="bg-blue-500 text-white px-4 py-2 rounded"
          onClick={generateOutput}
          disabled={isLoading}
        >
          {isLoading ? 'Generating...' : `Generate ${generationType.replace(/-/g, ' ')}`}
        </button>
      </div>

      {/* Result Section */}
      {error && <div className="text-red-500">{error}</div>}
      {result && (
        <div className="border rounded p-4">
          {generationType === 'image-to-video' ? (
            <video controls className="w-full">
              <source src={result.url} type="video/mp4" />
            </video>
          ) : (
            <img src={result.url} alt="Generated output" className="w-full" />
          )}
        </div>
      )}
    </div>
  );
}
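
ImageGenerator calls fal.subscribe directly from the browser, but the diff does not show how the fal client is configured. A sketch of the missing wiring, under two assumptions: the app routes fal traffic through a proxy (the existing @fal-ai/serverless-proxy dependency suggests this; "/api/fal/proxy" is a conventional but hypothetical route here), and the @fal-ai/client 1.x result wraps model output in a data field, so the result.url reads above would likely need to go through it:

```tsx
// Sketch, not part of this commit. "/api/fal/proxy" and the output shapes below are
// assumptions based on fal's published client/model docs, not code found in this repo.
import { fal } from "@fal-ai/client";

// Route browser calls through a server-side proxy so no FAL key is shipped to clients.
fal.config({ proxyUrl: "/api/fal/proxy" });

// subscribe() resolves to { data, requestId }; the generated asset URLs live under data.
export async function generateOutputUrl(prompt: string, imageUrl?: string) {
  if (imageUrl) {
    const res = await fal.subscribe("fal-ai/minimax-video/image-to-video", {
      input: { prompt, image_url: imageUrl },
    });
    return res.data?.video?.url;
  }
  const res = await fal.subscribe("fal-ai/flux/dev", { input: { prompt } });
  return res.data?.images?.[0]?.url;
}
```

If a proxy route is not set up, fal.config({ credentials: ... }) is the documented alternative, at the cost of exposing the key to the browser.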