Skip to content

Commit 44f91fa

Browse files
Merge pull request #12 from OneFineStarstuff/genspark_ai_developer
AGI/ASI MVP scaffold: SSE chat, orchestrator, circuit breaker, provenance
2 parents b5926d5 + 8e903d8 commit 44f91fa

File tree

12 files changed

+271
-0
lines changed

12 files changed

+271
-0
lines changed
Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
1+
import { NextRequest } from 'next/server';
2+
3+
export const runtime = 'nodejs';
4+
5+
function* fakeStream(text: string) {
6+
for (const ch of text) {
7+
yield { delta: ch };
8+
}
9+
}
10+
11+
function streamForMessage(message: string) {
12+
const ctrl = new AbortController();
13+
const stream = new ReadableStream<Uint8Array>({
14+
async start(controller) {
15+
try {
16+
const reply = `Echo: ${message}`;
17+
const meta = { layer: 'surface', model: 'mock', version: '0.0.1', latencyMs: 42 };
18+
controller.enqueue(encode(`event: meta\ndata: ${JSON.stringify(meta)}\n\n`));
19+
for (const chunk of fakeStream(reply)) {
20+
await new Promise(r => setTimeout(r, 10));
21+
controller.enqueue(encode(`event: token\ndata: ${JSON.stringify(chunk)}\n\n`));
22+
}
23+
controller.enqueue(encode(`event: done\n\n`));
24+
controller.close();
25+
} catch (e) {
26+
controller.enqueue(encode(`event: error\ndata: {"message":"stream_failed"}\n\n`));
27+
controller.close();
28+
}
29+
},
30+
cancel() { ctrl.abort(); }
31+
});
32+
return new Response(stream, { headers: { 'Content-Type': 'text/event-stream', 'Cache-Control': 'no-cache', Connection: 'keep-alive' } });
33+
}
34+
35+
export async function POST(req: NextRequest) {
36+
const { message } = await req.json();
37+
return streamForMessage(message);
38+
}
39+
40+
export async function GET(req: NextRequest) {
41+
const { searchParams } = new URL(req.url);
42+
const message = searchParams.get('q') ?? '';
43+
return streamForMessage(message);
44+
}
45+
46+
// Hoisted: the original allocated a new TextEncoder on every call; the encoder
// is stateless for UTF-8, so a single module-level instance suffices.
const utf8Encoder = new TextEncoder();

/** Encodes an SSE frame string to UTF-8 bytes for ReadableStream enqueue. */
function encode(s: string) { return utf8Encoder.encode(s); }

next-app/app/api/intent/route.ts

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
export const runtime = 'edge';
2+
export async function POST(req: Request) {
3+
const { message } = await req.json();
4+
const intent = /simulate|prove|optimize|model/i.test(message) ? 'analytical' : 'casual';
5+
return new Response(JSON.stringify({ intent }), { headers: { 'content-type': 'application/json' } });
6+
}

next-app/app/chat/page.tsx

Lines changed: 74 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,74 @@
1+
"use client";
2+
import { useEffect, useRef, useState } from 'react';
3+
import { ProvenanceBadge } from '@/components/ProvenanceBadge';
4+
5+
export default function ChatPage() {
6+
const [input, setInput] = useState("");
7+
const [messages, setMessages] = useState<{ role: 'user'|'assistant'; content: string; meta?: any }[]>([]);
8+
const [streaming, setStreaming] = useState(false);
9+
const [fallback, setFallback] = useState(false);
10+
const eventSrc = useRef<EventSource | null>(null);
11+
12+
const send = async () => {
13+
if (!input.trim() || streaming) return;
14+
const userMsg = { role: 'user' as const, content: input };
15+
setMessages(m => [...m, userMsg, { role: 'assistant', content: '' }]);
16+
setInput("");
17+
setStreaming(true);
18+
const es = new EventSource(`/api/chat/stream?q=${encodeURIComponent(userMsg.content)}&s=${Date.now()}` , { withCredentials: false });
19+
eventSrc.current = es;
20+
21+
es.addEventListener('token', (e: MessageEvent) => {
22+
const data = JSON.parse(e.data);
23+
setMessages(m => {
24+
const copy = [...m];
25+
const idx = copy.length - 1; // last assistant
26+
copy[idx] = { ...copy[idx], content: (copy[idx].content || '') + data.delta };
27+
return copy;
28+
});
29+
});
30+
31+
es.addEventListener('meta', (e: MessageEvent) => {
32+
const meta = JSON.parse(e.data);
33+
if (meta.fallback) setFallback(true);
34+
setMessages(m => {
35+
const copy = [...m];
36+
const idx = copy.length - 1;
37+
copy[idx] = { ...copy[idx], meta };
38+
return copy;
39+
});
40+
});
41+
42+
es.addEventListener('done', () => { setStreaming(false); es.close(); eventSrc.current = null; });
43+
es.addEventListener('error', () => { setStreaming(false); es.close(); eventSrc.current = null; });
44+
45+
// Using GET-only SSE with query payload; no POST body needed
46+
};
47+
48+
useEffect(() => () => { eventSrc.current?.close(); }, []);
49+
50+
return (
51+
<div className="space-y-4">
52+
<h1 className="text-2xl font-semibold">Chat</h1>
53+
<div className="rounded border bg-white p-3">
54+
<div className="space-y-3" role="log" aria-live="polite">
55+
{messages.map((m, i) => (
56+
<div key={i} className={m.role === 'user' ? 'text-right' : 'text-left'}>
57+
<div className={"inline-block max-w-[80%] rounded px-3 py-2 " + (m.role==='user'?'bg-amber-100':'bg-slate-100')}>
58+
<div className="whitespace-pre-wrap">{m.content}</div>
59+
{m.role==='assistant' && m.meta && (
60+
<div className="mt-1"><ProvenanceBadge meta={m.meta} /></div>
61+
)}
62+
</div>
63+
</div>
64+
))}
65+
</div>
66+
<div className="mt-3 flex gap-2">
67+
<input value={input} onChange={e=>setInput(e.target.value)} className="flex-1 rounded border px-3 py-2" placeholder="Type a message..." />
68+
<button onClick={send} disabled={streaming} className="rounded bg-amber-600 px-4 py-2 text-white disabled:opacity-50">Send</button>
69+
{fallback && <span className="text-xs text-slate-500">Fallback in use</span>}
70+
</div>
71+
</div>
72+
</div>
73+
);
74+
}

next-app/app/layout.tsx

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
export const metadata = { title: 'AGI/ASI Interface', description: 'AI Readiness MVP' } satisfies import('next').Metadata;
2+
export default function RootLayout({ children }: { children: React.ReactNode }) {
3+
return (
4+
<html lang="en">
5+
<body className="min-h-screen bg-gray-50 text-slate-900 antialiased" suppressHydrationWarning>
6+
<div className="mx-auto max-w-5xl p-4">{children}</div>
7+
</body>
8+
</html>
9+
);
10+
}

next-app/app/page.tsx

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
export default function Home() {
2+
return (
3+
<main className="space-y-4">
4+
<h1 className="text-2xl font-semibold">AGI/ASI Interface MVP</h1>
5+
<p>Go to the chat to try streaming and provenance badges.</p>
6+
<a className="text-amber-700 underline" href="/chat">Open Chat</a>
7+
</main>
8+
);
9+
}
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
"use client";
2+
export function ProvenanceBadge({ meta }: { meta: { name?: string; model?: string; version?: string; layer?: string; latencyMs?: number } }) {
3+
const label = `${meta.layer ?? 'surface'}${meta.name ?? meta.model ?? 'model'} ${meta.version ?? ''}`;
4+
const color = (meta.layer ?? 'surface') === 'surface' ? '#38A169' : '#1A237E';
5+
return (
6+
<span role="status" aria-label={`Model ${label}`} className="inline-flex items-center gap-1 rounded border px-2 py-0.5 text-xs text-slate-700">
7+
<span className="h-2 w-2 rounded-full" style={{ background: color }} />
8+
{label}
9+
{meta.latencyMs != null && <span className="text-slate-500">{meta.latencyMs}ms</span>}
10+
</span>
11+
);
12+
}

next-app/lib/ai/circuitBreaker.ts

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
type State = 'closed' | 'open' | 'half-open';
2+
export class CircuitBreaker {
3+
private failures = 0; private state: State = 'closed'; private openedAt = 0;
4+
constructor(private failureThreshold = 3, private recoveryMs = 15000) {}
5+
canPass(): boolean {
6+
if (this.state === 'open' && Date.now() - this.openedAt > this.recoveryMs) { this.state = 'half-open'; return true; }
7+
return this.state !== 'open';
8+
}
9+
recordSuccess() { this.failures = 0; this.state = 'closed'; }
10+
recordFailure() { this.failures++; if (this.failures >= this.failureThreshold) { this.state = 'open'; this.openedAt = Date.now(); } }
11+
isOpen() { return this.state === 'open'; }
12+
}

next-app/lib/ai/orchestrator.ts

Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,42 @@
1+
import { CircuitBreaker } from './circuitBreaker';
2+
import type { ModelProvider, ModelResponse } from './types';
3+
4+
type Intent = 'casual' | 'actionable' | 'analytical' | 'sensitive';
5+
export type RouteDecision = { intent: Intent; target: 'surface' | 'depth'; reason: string };
6+
7+
export class Orchestrator {
8+
private breakerDepth = new CircuitBreaker(3, 15000);
9+
constructor(private surface: ModelProvider, private depth: ModelProvider, private intentDetect: (msg: string) => Intent) {}
10+
11+
route(input: string, override?: 'surface' | 'depth'): RouteDecision {
12+
if (override) return { intent: this.intentDetect(input), target: override, reason: 'user_override' };
13+
const intent = this.intentDetect(input);
14+
const target = intent === 'analytical' ? 'depth' : 'surface';
15+
return { intent, target, reason: 'policy' };
16+
}
17+
18+
async respond(input: string, stream = true): Promise<ModelResponse> {
19+
const decision = this.route(input);
20+
const primary = decision.target === 'depth' ? this.depth : this.surface;
21+
const fallback = decision.target === 'depth' ? this.surface : this.depth;
22+
23+
if (decision.target === 'depth' && !this.breakerDepth.canPass()) {
24+
return this.surface.invoke(this.decorate(input, { fallback: 'depth_breaker_open' }));
25+
}
26+
27+
try {
28+
const res = stream && primary.supportsStreaming
29+
? await primary.stream(this.decorate(input, decision))
30+
: await primary.invoke(this.decorate(input, decision));
31+
if (decision.target === 'depth') this.breakerDepth.recordSuccess();
32+
return res;
33+
} catch (e) {
34+
if (decision.target === 'depth') this.breakerDepth.recordFailure();
35+
return fallback.invoke(this.decorate(input, { fallback: 'primary_failed' }));
36+
}
37+
}
38+
39+
private decorate(input: string, meta: Record<string, unknown>): string {
40+
return `<!-- orchestration:${JSON.stringify(meta)} -->\n${input}`;
41+
}
42+
}

next-app/lib/ai/types.ts

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
/** Per-call generation knobs forwarded to a model backend. */
export type ModelConfig = { temperature?: number; maxTokens?: number };
/** One incremental piece of a streamed completion. */
export type StreamChunk = { id?: string; delta: string; done?: boolean };
/** Provenance/telemetry attached to responses (rendered by ProvenanceBadge). */
export type ProviderMeta = { name?: string; model?: string; layer?: 'surface' | 'depth'; version?: string; tokensIn?: number; tokensOut?: number; latencyMs?: number };
/** A completed (`text`) or streaming (`chunks`) reply, plus its provenance. */
export interface ModelResponse { text?: string; chunks?: AsyncIterable<StreamChunk>; meta: ProviderMeta }
/** Contract both the surface and depth model backends implement. */
export interface ModelProvider { id: string; supportsStreaming: boolean; invoke(prompt: string): Promise<ModelResponse>; stream(prompt: string): Promise<ModelResponse> }

next-app/next.config.js

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
/** @type {import('next').NextConfig} */
const nextConfig = {
  // NOTE(review): allowedOrigins ["*"] disables server-action origin checking —
  // acceptable for a local MVP, but tighten to explicit hosts before deploying.
  experimental: { serverActions: { allowedOrigins: ["*"] } },
  reactStrictMode: true
};
module.exports = nextConfig;

0 commit comments

Comments
 (0)