
Commit b52afe7

Merge pull request #1 from appwrite/feat/sdk-specific-reference-docs
feat: sdk specific reference docs
2 parents 1a85477 + 8fc120a commit b52afe7

16 files changed: +684 -86 lines changed

bun.lock

Lines changed: 3 additions & 0 deletions
@@ -11,6 +11,7 @@
     "@mastra/rag": "^1.1.0",
     "@modelcontextprotocol/sdk": "^1.17.4",
     "ai": "^5.0.23",
+    "dedent": "^1.6.0",
     "dotenv": "^17.2.1",
     "front-matter": "^4.0.2",
     "giget": "^2.0.0",
@@ -820,6 +821,8 @@

    "debug": ["[email protected]", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ=="],

+   "dedent": ["[email protected]", "", { "peerDependencies": { "babel-plugin-macros": "^3.1.0" }, "optionalPeers": ["babel-plugin-macros"] }, "sha512-F1Z+5UCFpmQUzJa11agbyPVMbpgT/qA3/SKyJ1jyBgm7dUcUEa8v9JwDkerSQXfakBwFljIxhOJqGkjUwZ9FSA=="],
+
    "deep-is": ["[email protected]", "", {}, "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ=="],

    "deepmerge": ["[email protected]", "", {}, "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A=="],

package.json

Lines changed: 2 additions & 1 deletion
@@ -7,7 +7,7 @@
     "watch": "tsc --watch",
     "start": "node dist/index.js",
     "mcp-inspect": "mcp-inspector --transport http --server-url http://localhost:1234",
-    "download-content": "bun run scripts/download-content.js",
+    "download-content": "rimraf content && bun run scripts/download-content.js",
     "init-vector-store": "rimraf tmp && mkdir tmp && bun run scripts/init-vector-store.ts",
     "init": "bun run download-content && bun run init-vector-store",
     "generate-deeplinks": "bun run scripts/generate-deeplinks.ts"
@@ -28,6 +28,7 @@
     "@mastra/rag": "^1.1.0",
     "@modelcontextprotocol/sdk": "^1.17.4",
     "ai": "^5.0.23",
+    "dedent": "^1.6.0",
     "dotenv": "^17.2.1",
     "front-matter": "^4.0.2",
     "giget": "^2.0.0",
scripts/create-table-of-contents.ts

Lines changed: 33 additions & 0 deletions
@@ -0,0 +1,33 @@
+import path from "path";
+import { contentRoot, getContent } from "../src/lib/utils/content";
+import fs from "fs";
+
+export async function createTableOfContents() {
+  const excludedPaths = [
+    "/docs/references"
+  ]
+
+  console.log(`Creating table of contents for docs`, { excludedPaths });
+
+  const result = getContent();
+
+  // Create docs/toc.json
+  const toc = result
+    .sort((a, b) => a.webPath.localeCompare(b.webPath))
+    .map((item) => ({
+      path: `/${item.webPath}`,
+      title: item.attributes.title,
+    }))
+    // .filter((item) => excludedPaths.some((path) => !item.path.startsWith(path)));
+
+  console.log(`Found ${toc.length} Table of Contents items`);
+
+  console.log("Writing docs/toc.json");
+  // console.log("Writing docs/toc.json");
+  fs.writeFileSync(
+    path.join(contentRoot, "docs", "toc.json"),
+    JSON.stringify(toc, null, 2)
+  );
+}
+
+// await createTableOfContents();
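
The exclusion filter above is committed commented out. If it were enabled, one plausible formulation (a sketch only, with an illustrative predicate rather than the author's) keeps an entry only when no excluded prefix matches it:

// Sketch: drop every ToC entry whose path falls under an excluded prefix.
// `toc` and `excludedPaths` refer to the values built in createTableOfContents above.
const filteredToc = toc.filter(
  (item) => !excludedPaths.some((prefix) => item.path.startsWith(prefix))
);
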

scripts/download-content.ts

Lines changed: 39 additions & 32 deletions
@@ -1,47 +1,54 @@
-import { downloadTemplate} from "giget";
-import path from 'node:path';
-import { contentRoot, getContent } from "../src/lib/utils/content";
-import fs from "fs";
-const __dirname = path.dirname(new URL(import.meta.url).pathname);
-const downloadDir = path.join(__dirname, '..', 'content', 'docs');
+import { downloadTemplate } from "giget";
+import { createTableOfContents } from "./create-table-of-contents";
+import { appwriteExamplesBranch, docsTargetDir, examplesTargetDir } from "../src/lib/constants";
+import { processLibraries, writeLirbariesExamplesToDisk } from "../src/lib/utils/process-libraries";

 export async function downloadDocs() {
   const owner = "appwrite";
   const repo = "website";
-  const path = "src/routes/docs";
+  const repoSubdir = "src/routes/docs";

-  console.log(`Downloading docs from ${owner}/${repo}/${path} to ${downloadDir}`);
-  const downloadResult = await downloadTemplate(`gh:${owner}/${repo}/${path}#main`, {
-    dir: downloadDir,
+  console.log(`Downloading docs from ${owner}/${repo}/${repoSubdir} to ${docsTargetDir}`);
+  const downloadResult = await downloadTemplate(`gh:${owner}/${repo}/${repoSubdir}#main`, {
+    dir: docsTargetDir,
     forceClean: true,
   });

   console.log(`Creating table of contents`);
-  createTableOfContents();

   return {
     docsDir: downloadResult.dir
   }
 }

-downloadDocs();
-
-export async function createTableOfContents() {
-  const result = getContent();
-
-  // Create docs/toc.json
-  const toc = result
-    .sort((a, b) => a.webPath.localeCompare(b.webPath))
-    .map((item) => ({
-      path: `/${item.webPath}`,
-      title: item.attributes.title,
-      description: item.attributes.description,
-    }));
-
-  console.log("Writing docs/toc.json");
-  // console.log("Writing docs/toc.json");
-  fs.writeFileSync(
-    path.join(contentRoot, "docs", "toc.json"),
-    JSON.stringify(toc, null, 2)
-  );
-}
+export async function downloadExamples() {
+  console.log(`Downloading examples from appwrite/appwrite (branch: ${appwriteExamplesBranch})`);
+
+  const owner = "appwrite";
+  const repo = "appwrite";
+  const docsSubdirPath = `docs/examples/${appwriteExamplesBranch}`;
+
+  console.log(`Downloading examples from ${owner}/${repo}/${docsSubdirPath} to ${examplesTargetDir}`);
+  const downloadResult = await downloadTemplate(`gh:${owner}/${repo}/${docsSubdirPath}#${appwriteExamplesBranch}`, {
+    dir: examplesTargetDir,
+    forceClean: true,
+  });
+
+  return {
+    examplesDir: downloadResult.dir
+  }
+}
+
+async function main() {
+
+  await Promise.all([
+    downloadDocs(),
+    downloadExamples(),
+  ]);
+
+  const librariesWithFeatures = await processLibraries();
+  await writeLirbariesExamplesToDisk({ librariesWithFeatures });
+  await createTableOfContents();
+}
+
+await main();
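
For reference, giget's downloadTemplate (as used above) takes a "gh:<owner>/<repo>/<subdir>#<ref>" source string and returns the directory it wrote to. A minimal standalone sketch, with an illustrative target directory:

import { downloadTemplate } from "giget";

// Fetch one subdirectory of a GitHub repo at a given ref into a local folder.
// "tmp/appwrite-docs" is illustrative and not part of this commit.
const { dir } = await downloadTemplate("gh:appwrite/website/src/routes/docs#main", {
  dir: "tmp/appwrite-docs",
  forceClean: true, // wipe the target directory before extracting
});

console.log(`Docs downloaded to ${dir}`);
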

scripts/init-vector-store.ts

Lines changed: 95 additions & 38 deletions
@@ -1,10 +1,13 @@
-import { MDocument } from "@mastra/rag"
-import { vectorStore, VectorStoreMetadata } from "../src/lib/vector-store";
+import { MDocument } from "@mastra/rag";
+import { vectorStore, VectorStoreMetadata } from "../src/lib/vector-store";
 import { getContent } from "../src/lib/utils/content";
 import { embedMany } from "ai";
 import { openai } from "@ai-sdk/openai";
+import { librariesWithFeatures } from "../src/lib/utils/process-libraries";

-const BATCH_SIZE = process.env.BATCH_SIZE ? parseInt(process.env.BATCH_SIZE) : 50;
+const BATCH_SIZE = process.env.BATCH_SIZE
+  ? parseInt(process.env.BATCH_SIZE)
+  : 50;

 async function initVectorStore() {
   // Delete index content
@@ -16,9 +19,8 @@ async function initVectorStore() {
     console.log("Index contents deleted");
   } catch (error) {}

-
   // Wait 3 seconds
-  await new Promise(resolve => setTimeout(resolve, 3000));
+  await new Promise((resolve) => setTimeout(resolve, 3000));

   // Create index
   console.log("Creating index...");
@@ -29,6 +31,12 @@

   console.log("Index created");

+  await addDocsToVectorStore();
+  console.log("Done");
+}
+
+
+async function addDocsToVectorStore() {
   // Add documents
   const docsPages = getContent();

@@ -37,32 +45,63 @@
   // Process in batches
   for (let i = 0; i < docsPages.length; i += BATCH_SIZE) {
     const batch = docsPages.slice(i, i + BATCH_SIZE);
-    console.log(`Processing batch ${Math.floor(i / BATCH_SIZE) + 1} of ${Math.ceil(docsPages.length / BATCH_SIZE)}`);
-    await Promise.all(batch.map(async page => {
-      try {
-        console.log(`Processing page: ${page.webPath}`);
-        await processPage(page);
-      } catch (error) {
-        console.error(`Error processing document ${page.filePath}:`, error);
-      }
-    }));
+    console.log(
+      `Processing batch ${Math.floor(i / BATCH_SIZE) + 1} of ${Math.ceil(docsPages.length / BATCH_SIZE)}`
+    );
+    await Promise.all(
+      batch.map(async (page) => {
+        try {
+          console.log(`Processing page: ${page.webPath}`);
+          await processPage({
+            markdown: page.body,
+            webPath: page.webPath,
+            layout: page.attributes.layout,
+            title: page.attributes.title,
+            description: page.attributes.description,
+          });
+        } catch (error) {
+          console.error(`Error processing document ${page.filePath}:`, error);
+        }
+      })
+    );
   }
-
-  console.log("Done");
 }

-export async function processPage(page: ReturnType<typeof getContent>[0]) {
-  const chunks = await getChunks(page);
+export async function processPage({
+  markdown,
+  webPath,
+  layout,
+  title,
+  description,
+  library,
+}: {
+  markdown: string;
+  webPath: string;
+  layout: string;
+  title: string;
+  description: string;
+  library?: string;
+}) {
+  const chunks = await getChunks({ markdown });
   const embeddings = await embedDocsPage(chunks);
-  await upsertDocsPageEmbeddings({ page, chunks, embeddings });
-  return { chunks, embeddings, page };
+  await upsertDocsPageEmbeddings({
+    webPath,
+    layout,
+    title,
+    description,
+    embeddings,
+    chunks,
+    library,
+    content: markdown,
+  });
+  return { chunks, embeddings };
 }

-export async function getChunks(page: ReturnType<typeof getContent>[0]) {
-  const doc = MDocument.fromMarkdown(page.body);
+export async function getChunks({ markdown }: { markdown: string }) {
+  const doc = MDocument.fromMarkdown(markdown);
   const chunks = await doc.chunk({
     strategy: "markdown",
-    extract: {}
+    extract: {},
   });

   return chunks;
@@ -71,30 +110,48 @@ export async function getChunks(page: ReturnType<typeof getContent>[0]) {
 async function embedDocsPage(chunks: Awaited<ReturnType<MDocument["chunk"]>>) {
   const embeddingsResult = await embedMany({
     model: openai.embedding("text-embedding-3-small"),
-    values: chunks.map(chunk => chunk.text),
+    values: chunks.map((chunk) => chunk.text),
     maxRetries: 3,
   });
   return embeddingsResult.embeddings;
 }

-async function upsertDocsPageEmbeddings({ page, chunks, embeddings }: {
-  page: ReturnType<typeof getContent>[0],
-  chunks: Awaited<ReturnType<MDocument["chunk"]>>,
-  embeddings: Awaited<ReturnType<typeof embedMany>>["embeddings"]
+async function upsertDocsPageEmbeddings({
+  webPath,
+  layout,
+  title,
+  description,
+  embeddings,
+  chunks,
+  content,
+  library,
+}: {
+  webPath: string;
+  layout: string;
+  title: string;
+  description: string;
+  library?: string;
+  content: string;
+  chunks: Awaited<ReturnType<MDocument["chunk"]>>;
+  embeddings: Awaited<ReturnType<typeof embedMany>>["embeddings"];
 }) {
   await vectorStore.upsert({
     indexName: "docs",
     vectors: embeddings,
-    metadata: chunks.map((chunk, index) => ({
-      text: chunk.text,
-      id: `${page.webPath}_c_${index}`,
-      layout: page.attributes.layout,
-      title: page.attributes.title,
-      description: page.attributes.description,
-      createdAt: new Date().toISOString(),
-      filePath: page.filePath,
-      webPath: page.webPath,
-    }) satisfies VectorStoreMetadata),
+    metadata: chunks.map(
+      (chunk, index) =>
+        ({
+          text: chunk.text,
+          id: `${webPath}_c_${index}`,
+          layout: layout,
+          title: title,
+          description: description,
+          createdAt: new Date().toISOString(),
+          webPath: webPath,
+          library: library,
+          content: content,
+        }) satisfies VectorStoreMetadata
+    ),
   });
 }
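
processPage is now exported with an object-style signature, so a single markdown page can be ingested without going through getContent. A minimal sketch with illustrative values (the import path assumes a caller living next to init-vector-store.ts):

import { processPage } from "./init-vector-store";

// Chunk, embed, and upsert one standalone markdown page into the "docs" index.
// All field values below are illustrative, not taken from the commit.
await processPage({
  markdown: "# Quick start\n\nInstall the SDK and create a client.",
  webPath: "docs/quickstarts/example",
  layout: "article",
  title: "Quick start",
  description: "An illustrative quick start page.",
  library: "web", // optional: tags the stored chunks with an SDK/library name
});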

src/index.ts

Lines changed: 3 additions & 0 deletions
@@ -1,11 +1,14 @@
 import { MCPServer } from "mcp-framework";
 import { config } from "dotenv";
 import path from "path";
+import { initLibrariesWithFeatures } from "./lib/utils/process-libraries.js";

 config({ path: path.join(process.cwd(), ".env") });

 const port = parseInt(process.env.PORT ?? "1234");

+await initLibrariesWithFeatures();
+
 const server = new MCPServer({
   transport: {
     type: "http-stream",

src/lib/constants.ts

Lines changed: 7 additions & 0 deletions
@@ -0,0 +1,7 @@
+import path from 'node:path';
+
+const __dirname = path.dirname(new URL(import.meta.url).pathname);
+export const docsTargetDir = path.join(__dirname, '../../', 'content', 'docs');
+export const examplesTargetDir = path.join(__dirname, '../../', 'content', 'examples');
+export const processedReferencesTargetDir = path.join(__dirname, '../../', 'content', 'temp-references');
+export const appwriteExamplesBranch = process.env.APPWRITE_REPO_BRANCH || "1.8.x";
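
The examples branch defaults to "1.8.x" but reads APPWRITE_REPO_BRANCH from the environment, and the value is captured when the module is first imported. A minimal sketch with an illustrative branch name and import path:

// Illustrative: pick a different appwrite/appwrite branch for a download run,
// e.g. `APPWRITE_REPO_BRANCH=<branch> bun run download-content`.
process.env.APPWRITE_REPO_BRANCH = "1.7.x"; // hypothetical branch name

// Import after setting the variable so the constant picks it up.
const { appwriteExamplesBranch, examplesTargetDir } = await import("../src/lib/constants");

console.log(appwriteExamplesBranch); // "1.7.x"
console.log(examplesTargetDir);      // <repo>/content/examples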
