From 76623341294768f82009cbd1f5a9911b21906dfa Mon Sep 17 00:00:00 2001 From: neilblaze Date: Thu, 25 Sep 2025 17:05:35 +0530 Subject: [PATCH 1/5] code: mcp-server (working / WIP) Signed-off-by: neilblaze --- .cursor/mcp.json | 13 + .gitignore | 10 - LICENSE | 201 ++++++++++ README.md | 96 ++++- index.ts | 147 ------- mcp-server/.env.example | 29 ++ mcp-server/.gitignore | 29 ++ mcp-server/package.json | 86 ++++ mcp-server/src/analysis.ts | 560 +++++++++++++++++++++++++++ mcp-server/src/index.ts | 775 +++++++++++++++++++++++++++++++++++++ mcp-server/src/utils.ts | 173 +++++++++ mcp-server/src/version.ts | 21 + mcp-server/tsconfig.json | 54 +++ package-lock.json | 519 ------------------------- package.json | 32 -- tsconfig.json | 20 - utils.ts | 16 - version.ts | 1 - 18 files changed, 2035 insertions(+), 747 deletions(-) create mode 100644 .cursor/mcp.json delete mode 100644 .gitignore create mode 100644 LICENSE delete mode 100644 index.ts create mode 100644 mcp-server/.env.example create mode 100644 mcp-server/.gitignore create mode 100644 mcp-server/package.json create mode 100644 mcp-server/src/analysis.ts create mode 100644 mcp-server/src/index.ts create mode 100644 mcp-server/src/utils.ts create mode 100644 mcp-server/src/version.ts create mode 100644 mcp-server/tsconfig.json delete mode 100644 package-lock.json delete mode 100644 package.json delete mode 100644 tsconfig.json delete mode 100644 utils.ts delete mode 100644 version.ts diff --git a/.cursor/mcp.json b/.cursor/mcp.json new file mode 100644 index 0000000..2da8f61 --- /dev/null +++ b/.cursor/mcp.json @@ -0,0 +1,13 @@ +{ + "mcpServers": { + "open-digger-mcp": { + "command": "node", + "args": ["./mcp-server/dist/index.js"], + "cwd": "./mcp-server/", + "env": { + "CACHE_TTL_SECONDS": "300", + "NODE_ENV": "production" + } + } + } +} diff --git a/.gitignore b/.gitignore deleted file mode 100644 index 2631b39..0000000 --- a/.gitignore +++ /dev/null @@ -1,10 +0,0 @@ -.vscode -.idea -.DS_Store -*.swp -*.lock - 
-*/**/node_modules -lib -dist -node_modules diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..f49a4e1 --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/README.md b/README.md index af8c44d..654b009 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,97 @@ # OpenDigger MCP Server -This is a simple OpenDigger MCP server to get LLMs access to OpenDigger data by MCP tools. +A Model Context Protocol (MCP) server for OpenDigger, enabling LLMs to interact with repository metrics and analytics. -With this server installed, you can interact with it in LLMs to get online data of OpenDigger and get insights of the data. +> [!NOTE] +> The README.md is still under development. I'll also add an INSTRUCTION_GUIDE.md later which provides detailed usage instructions and examples. 
+ + +## Quick Start + +```bash +# Setup +git clone https://github.com/X-lab2017/open-digger-mcp-server.git +cd open-digger-mcp-server && cd mcp-server +npm install +npm run build + +# Configure Cursor (update path in .cursor/mcp.json) +# Start server +npm start +``` + +## Features + +### Tools (6) +1. **get_open_digger_metric** - Single metric fetching +2. **get_open_digger_metrics_batch** - Batch operations +3. **compare_repositories** - Multi-repo analysis +4. **analyze_trends** - Growth trend analysis +5. **get_ecosystem_insights** - Ecosystem analytics +6. **server_health** - System diagnostics + +### Prompts (3) +1. **repo_health_analysis** - Comprehensive repo health reports +2. **repo_comparison** - Competitive repository analysis +3. **developer_insights** - Developer activity analysis + +### Metrics +Core: `openrank`, `stars`, `forks`, `contributors`, `participants`, `issues_new`, `issues_closed`, `pull_requests`, `commits`, `activity` + +Extended: `technical_fork`, `bus_factor`, `releases`, `inactive_contributors`, `maintainer_count`, `community_activity` + +## Usage Examples + +### Repository Comparison +``` +Compare microsoft/vscode and facebook/react using the compare_repositories tool +``` + +### Health Analysis +``` +Generate a health report for microsoft/vscode using the repo_health_analysis prompt +``` + +### Trend Analysis +``` +Analyze the growth trends for contributors in microsoft/vscode over 2 years +``` + +## Configuration + +### Environment Variables (.env) +```bash +CACHE_TTL_SECONDS=300 +# SSE_PORT=3001 # Optional +``` + +### Cursor MCP (.cursor/mcp.json) +```json +{ + "mcpServers": { + "open-digger": { + "command": "node", + "args": ["/full/path/to/dist/index.js"], + "cwd": "/full/path/to/project", + "env": { + "CACHE_TTL_SECONDS": "300" + } + } + } +} +``` + + +## Development + +```bash +npm run watch # Development mode +npm run build # Compile TypeScript +npm run clean # Clean build files +npm run sse:test # Test SSE server +``` + + + 
+## License +Apache-2.0 License \ No newline at end of file diff --git a/index.ts b/index.ts deleted file mode 100644 index 5701a6f..0000000 --- a/index.ts +++ /dev/null @@ -1,147 +0,0 @@ -#!/usr/bin/env node -import { Server } from '@modelcontextprotocol/sdk/server/index.js'; -import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'; -import { - CallToolRequestSchema, - GetPromptRequestSchema, - ListPromptsRequestSchema, - ListToolsRequestSchema, -} from '@modelcontextprotocol/sdk/types.js'; -import { z } from 'zod'; -import { zodToJsonSchema } from 'zod-to-json-schema'; - -import { fetchData } from './utils.js'; -import { VERSION } from './version.js'; - -const server = new Server( - { - name: 'open-digger-mcp-server', - version: VERSION, - }, - { - capabilities: { - tools: {}, - prompts: {}, - }, - } -); - -const BASE_URL = 'https://oss.open-digger.cn/'; - -const inputSchema = z.object({ - platform: z.enum(['GitHub', 'Gitee']).describe('Platform of the repo or user (GitHub, Gitee).'), - entityType: z.enum(['Repo', 'User']).describe('What is the entity of the metric (Repo, User).'), - owner: z.string().optional().describe('The owner name of the repo to get a metric data.'), - repo: z.string().optional().describe('The repo name of the repo to get a metric data.'), - login: z.string().optional().describe('The user login to get a metric data of a user.'), - metricName: z.enum([ - 'openrank', - 'stars', - 'participants', - 'contributors', - 'issues_new', - 'change_requests', - 'issue_comments', - ]).describe('The metric name to get the data.'), -}); - -server.setRequestHandler(ListToolsRequestSchema, async () => ({ - tools: [ - { - name: 'get_open_digger_metric', - description: 'Get metric data of OpenDigger', - inputSchema: zodToJsonSchema(inputSchema), - }, - ], -})); - -server.setRequestHandler(CallToolRequestSchema, async (request) => { - try { - if (!request.params.arguments) { - throw new Error("Arguments are required"); - } - - switch 
(request.params.name) { - case 'get_open_digger_metric': { - const args = inputSchema.parse(request.params.arguments); - const platform = args.platform.toString().toLowerCase(); - let url = ''; - if (args.entityType === 'Repo') { - url = `${BASE_URL}${platform}/${args.owner}/${args.repo}/${args.metricName}.json`; - } else { - url = `${BASE_URL}${platform}/${args.login}/${args.metricName}.json`; - } - - const data = await fetchData(url); - return { - content: [{ type: 'text', text: JSON.stringify(data, null, 2) }], - }; - } - - default: - throw new Error(`Unknown tool: ${request.params.name}`); - } - } catch (e) { - if (e instanceof z.ZodError) { - throw new Error(`Invalid input: ${JSON.stringify(e.errors)}`); - } - throw e; - } -}); - -server.setRequestHandler(ListPromptsRequestSchema, async () => ({ - prompts: Object.values({ - name: 'open_source_repo_analysis', - description: 'Comprehensive analysis of open source repo with OpenDigger data', - arguments: [ - { name: 'platform', description: 'The platform of the repo to analysis', required: true }, - { name: 'owner', description: 'The owner of the repo to analysis', required: true }, - { name: 'repo', description: 'The name of the repo to analysis', required: true } - ] - }), -})); - -server.setRequestHandler(GetPromptRequestSchema, async (request) => { - if (request.params.name === 'open_source_repo_analysis') { - const platform = request.params.arguments?.platform; - const owner = request.params.arguments?.owner; - const repo = request.params.arguments?.repo; - - return { - messages: [ - { - role: 'user', - content: { - type: 'text', - text: ` -Generate a comprehensive report of ${owner}/${repo} repo on ${platform}, the OpenRank, stars, participants and contributors metrics are most important to give a report of the repo. - -Notice that: - -- OpenRank metric shows a general influence level of the repo, more influential developers with more activities in the repo leads to higher OpenRank value. 
-- Participants metric means developers count that has issues or pull requests activity like open new issue or PR, any issue comment or PR review in the repo. -- Contributors metrics means how many developers has merged PR in the repo for given period. -- Stars metric shows the popular level of the repo, which is how many developers give a star to the repo. -- If the repo was created more than 3 year ago, use yearly data; If the repo was created more than 1 year ago, use quarterly data; Else, use monthly data. - -Generate the report in HTML format that can be directly open by browser and has quite beatiful visulization, make sure that give comprehensive insights for each metric above along with the visulization charts like how's the data trending in the time period. - ` - } - } - ] - }; - } - - throw new Error("Prompt implementation not found"); -}); - -async function main() { - const transport = new StdioServerTransport(); - await server.connect(transport); - console.error("OpenDigger MCP Server running on stdio"); -} - -main().catch((error) => { - console.error("Fatal error in main():", error); - process.exit(1); -}); diff --git a/mcp-server/.env.example b/mcp-server/.env.example new file mode 100644 index 0000000..aa56deb --- /dev/null +++ b/mcp-server/.env.example @@ -0,0 +1,29 @@ +# OpenDigger MCP Server Configuration (rename to .env for actual use) + +# Cache Configuration +CACHE_TTL_SECONDS=300 + +# SSE Server Configuration (Optional) +SSE_PORT=3001 +SSE_HOST=0.0.0.0 + +# Development Settings +NODE_ENV=production + +# Server Configuration +# PORT=3000 +# NODE_ENV=development + +# Cache Configuration +# CACHE_TTL=300000 +# MAX_CACHE_SIZE=1000 + +# Rate Limiting +# RATE_LIMIT_REQUESTS=1000 +# RATE_LIMIT_WINDOW=3600000 + +# OpenDigger API Configuration +OPENDIGGER_BASE_URL=https://oss.open-digger.cn/ + +# DEEPSEEK_API_KEY= + diff --git a/mcp-server/.gitignore b/mcp-server/.gitignore new file mode 100644 index 0000000..c8df7f2 --- /dev/null +++ 
b/mcp-server/.gitignore @@ -0,0 +1,29 @@ +.vscode +.idea +.DS_Store +*.swp +*.lock + +node_modules/ +*/**/node_modules +lib +dist +node_modules + + +# Environment variables +.env +.env.local +.env.development +.env.production + +# Build output +dist/ +build/ + +# IDE files +.vscode/ +.cursor/ +.idea/ +*.swp +*.swo \ No newline at end of file diff --git a/mcp-server/package.json b/mcp-server/package.json new file mode 100644 index 0000000..f69fa24 --- /dev/null +++ b/mcp-server/package.json @@ -0,0 +1,86 @@ +{ + "name": "open-digger-mcp-server", + "version": "1.0.0", + "description": "MCP server for OpenDigger data with comprehensive analysis tools, trend analysis, and repository comparison capabilities", + "license": "Apache-2.0", + "author": "Frank Zhao", + "contributors": [ + { + "name": "Pratyay Banerjee" + } + ], + "homepage": "https://open-digger.cn/", + "bugs": "https://github.com/X-lab2017/open-digger-mcp-server/issues", + "repository": { + "type": "git", + "url": "https://github.com/X-lab2017/open-digger-mcp-server.git" + }, + "type": "module", + "bin": { + "open-digger-mcp-server": "dist/index.js" + }, + "main": "dist/index.js", + "files": [ + "dist", + "README.md", + "LICENSE" + ], + "scripts": { + "build": "npm run clean && tsc && chmod +x dist/index.js", + "dev": "npm run build && node dist/index.js", + "start": "node dist/index.js", + "watch": "tsc --watch", + "clean": "rm -rf dist", + "prepare": "npm run build", + "cache:clear": "node -e \"console.log('Cache management available via server_health tool')\"", + "sse:test": "SSE_PORT=3001 npm start", + "debug": "NODE_OPTIONS='--inspect' npm start" + }, + "keywords": [ + "mcp", + "model-context-protocol", + "opendigger", + "open-source", + "analytics", + "github", + "repository-metrics", + "developer-insights", + "trend-analysis", + "data-analysis", + "software-metrics", + "repository-health", + "developer-productivity", + "open-source-intelligence", + "community-analytics", + "code-metrics" + ], + 
 "dependencies": { + "@modelcontextprotocol/sdk": "1.0.1", + "zod": "^3.22.4", + "zod-to-json-schema": "^3.23.5" + }, + "devDependencies": { + "@types/node": "^22", + "typescript": "^5.6.2" + }, + "peerDependencies": { + "node": ">=18.0.0" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "os": [ + "darwin", + "linux", + "win32" + ], + "config": { + "cache_ttl_default": 300, + "batch_size_default": 5, + "max_concurrent_requests": 100 + }, + "optionalDependencies": {}, + "bundleDependencies": false, + "preferGlobal": false +} \ No newline at end of file diff --git a/mcp-server/src/analysis.ts b/mcp-server/src/analysis.ts new file mode 100644 index 0000000..f6cef36 --- /dev/null +++ b/mcp-server/src/analysis.ts @@ -0,0 +1,560 @@ +/** + * @file analysis.ts + * @description + * This module provides utilities for analyzing repository metrics, generating comparison reports, + * detecting trends, and calculating health scores. It is designed to work with OpenDigger's + * repository and user metrics, offering insights into performance, growth, and overall health. + * + * Key Features: + * - Comparison analysis across multiple repositories and metrics + * - Trend detection and statistical analysis of time-series data + * - Health score calculation based on weighted metrics + * - Error handling and suggestion generation + */ + + +/** + * Represents the result of a comparison between repositories for a set of metrics. + */ +export interface ComparisonResult { + repository: string; + platform: string; + metrics: Array<{ + metric: string; + data?: any; + success: boolean; + error?: string; + }>; +} + +/** + * Represents the analysis result of comparing multiple repositories across various metrics. + */ +export interface ComparisonAnalysis { + summary: Record<string, any>; + winners: Record<string, string>; + insights: string[]; + rankings: Record<string, Array<{ repo: string; value: number; rank: number }>>; + healthScores: Record<string, number>; +} + +/** + * Represents the analysis of a time-series trend for a specific metric. 
+ */ +export interface TrendAnalysis { + dataPoints: number; + timeRange: { + start?: string; + end?: string; + }; + values: { + first: number; + last: number; + peak: number; + lowest: number; + average: number; + median: number; + }; + trend: { + direction: 'increasing' | 'decreasing' | 'stable' | 'volatile'; + totalGrowth: number; + growthRate: string; + momentum: 'accelerating' | 'decelerating' | 'stable' | 'insufficient_data'; + volatility: 'low' | 'medium' | 'high'; + }; + patterns: { + hasSeasonality: boolean; + growthPhases: Array<{ + phase: string; + startDate: string; + endDate: string; + growth: number; + }>; + }; +} + +/** + * Extracts the latest value from a metric data object. + * Supports both flat and time-series data structures. + * + * @param data - The metric data object, which can be a number, a flat object, or a time-series object. + * @returns The latest numeric value found, or 0 if none is found. + */ +export function extractLatestValue(data: any): number { + if (typeof data === 'number') return data; + + if (typeof data === 'object' && data !== null) { + const keys = Object.keys(data) + .filter(key => key.match(/^\d{4}-\d{2}(-\d{2})?$/)) // Match YYYY-MM or YYYY-MM-DD + .sort(); + + if (keys.length === 0) { + const numericValues = Object.values(data).filter(val => typeof val === 'number'); + return numericValues.length > 0 ? Math.max(...numericValues) : 0; + } + + const latestKey = keys[keys.length - 1] as string; + const value = (data as Record<string, unknown>)[latestKey]; + return typeof value === 'number' ? value : 0; + } + + return 0; +} + +/** + * Generates a comparison analysis across multiple repositories and metrics. + * + * @param results - Array of comparison results for each repository. + * @param metrics - Array of metric names to analyze. + * @returns A comprehensive analysis object with summaries, rankings, and insights. 
+ */ +export function generateComparisonAnalysis(results: ComparisonResult[], metrics: string[]): ComparisonAnalysis { + const analysis: ComparisonAnalysis = { + summary: {}, + winners: {}, + insights: [], + rankings: {}, + healthScores: {} + }; + + // Analyze each metric across repositories + metrics.forEach(metric => { + const metricData = results.map(repo => { + const metricResult = repo.metrics.find(m => m.metric === metric && m.success); + const value = metricResult ? extractLatestValue(metricResult.data) : null; + return { + repo: repo.repository, + platform: repo.platform, + value: value + }; + }).filter((item): item is { repo: string; platform: string; value: number } => item.value !== null && item.value > 0); + + if (metricData.length > 0) { + const values = metricData.map(item => item.value); + const sortedData = metricData.sort((a, b) => b.value - a.value); + + const winner = sortedData[0]!; + const highest = Math.max(...values); + const lowest = Math.min(...values); + const average = values.reduce((sum, val) => sum + val, 0) / values.length; + + analysis.winners[metric] = winner.repo; + analysis.summary[metric] = { + highest, + average, + range: [lowest, highest], + winner: winner.repo + }; + + // Create rankings for this metric + analysis.rankings[metric] = sortedData.map((item, index) => ({ + repo: item.repo, + value: item.value, + rank: index + 1 + })); + } + }); + + // Calculate health scores for each repository + results.forEach(repo => { + const scores: number[] = []; + metrics.forEach(metric => { + const metricResult = repo.metrics.find(m => m.metric === metric && m.success); + if (metricResult && analysis.summary[metric]) { + const value = extractLatestValue(metricResult.data); + const maxValue = analysis.summary[metric].highest; + const normalizedScore = maxValue > 0 ? (value / maxValue) * 100 : 0; + scores.push(normalizedScore); + } + }); + + analysis.healthScores[repo.repository] = scores.length > 0 + ? 
scores.reduce((sum, score) => sum + score, 0) / scores.length + : 0; + }); + + // Generate insights + const topPerformer = Object.entries(analysis.healthScores) + .sort(([,a], [,b]) => b - a)[0]; + + const mostCompetitive = Object.entries(analysis.summary) + .sort(([,a], [,b]) => (b.highest - b.range[0]) - (a.highest - a.range[0]))[0]; + + analysis.insights.push( + `Repository comparison across ${metrics.length} metrics for ${results.length} repositories`, + `Top performer overall: ${topPerformer ? topPerformer[0] : 'N/A'} (${topPerformer ? topPerformer[1].toFixed(1) : 0}% health score)`, + `Most competitive metric: ${mostCompetitive ? mostCompetitive[0] : 'N/A'}`, + `Analysis completed at ${new Date().toISOString()}` + ); + + // Add metric-specific insights + Object.entries(analysis.winners).forEach(([metric, winner]) => { + const summary = analysis.summary[metric]; + if (summary && summary.highest > 0) { + const margin = summary.highest - summary.average; + const dominanceLevel = margin > summary.average * 0.5 ? 'dominates' : 'leads'; + analysis.insights.push(`${winner} ${dominanceLevel} in ${metric} with ${summary.highest.toLocaleString()}`); + } + }); + + return analysis; +} + +/** + * Processes time-series data to generate a trend analysis. + * + * @param data - The time-series data object. + * @param timeRange - Optional time range string (not currently used). + * @returns A trend analysis object with statistics, trend direction, and patterns. 
+ */ +export function processTrendData(data: any, timeRange: string): TrendAnalysis { + if (!data || typeof data !== 'object') { + return createEmptyTrendAnalysis(); + } + + // Extract time-based data points + const timeSeriesKeys = Object.keys(data) + .filter(key => key.match(/^\d{4}-\d{2}(-\d{2})?$/)) // Match YYYY-MM or YYYY-MM-DD + .sort(); + + if (timeSeriesKeys.length === 0) { + return createEmptyTrendAnalysis(); + } + + const values = timeSeriesKeys.map(key => ({ + date: key, + value: typeof data[key] === 'number' ? data[key] : 0 + })).filter(item => item.value >= 0); // Filter out negative values + + if (values.length === 0) { + return createEmptyTrendAnalysis(); + } + + // Calculate basic statistics + const numericValues = values.map(v => v.value); + const firstValue = values[0]?.value || 0; + const lastValue = values[values.length - 1]?.value || 0; + const peakValue = Math.max(...numericValues); + const lowestValue = Math.min(...numericValues); + const averageValue = numericValues.reduce((sum, val) => sum + val, 0) / numericValues.length; + const sortedValues = [...numericValues].sort((a, b) => a - b); + const medianValue = sortedValues.length % 2 === 0 && sortedValues.length >= 2 + ? ((sortedValues[sortedValues.length / 2 - 1] as number) + (sortedValues[sortedValues.length / 2] as number)) / 2 + : sortedValues[Math.floor(sortedValues.length / 2)] as number; + + // Calculate trend metrics + const totalGrowth = lastValue - firstValue; + const growthRate = firstValue > 0 ? 
((lastValue - firstValue) / firstValue) * 100 : 0; + + // Determine trend direction + let direction: 'increasing' | 'decreasing' | 'stable' | 'volatile' = 'stable'; + if (Math.abs(growthRate) < 5) { + direction = 'stable'; + } else if (growthRate > 0) { + direction = 'increasing'; + } else { + direction = 'decreasing'; + } + + // Calculate volatility + const variance = numericValues.reduce((sum, val) => sum + Math.pow(val - averageValue, 2), 0) / numericValues.length; + const standardDeviation = Math.sqrt(variance); + const coefficientOfVariation = averageValue > 0 ? standardDeviation / averageValue : 0; + + let volatility: 'low' | 'medium' | 'high' = 'low'; + if (coefficientOfVariation > 0.3) volatility = 'high'; + else if (coefficientOfVariation > 0.15) volatility = 'medium'; + if (volatility === 'high') direction = 'volatile'; + + // Calculate momentum + const momentum = calculateMomentum(values); + + // Detect growth phases + const growthPhases = detectGrowthPhases(values); + + // Simple seasonality detection + const hasSeasonality = detectSeasonality(values); + + const timeRangeObj: { start?: string; end?: string } = {}; + const firstValueEntry = values[0]; + if (firstValueEntry) timeRangeObj.start = firstValueEntry.date; + const lastValueEntry = values[values.length - 1]; + if (lastValueEntry) timeRangeObj.end = lastValueEntry.date; + + return { + dataPoints: values.length, + timeRange: timeRangeObj, + values: { + first: firstValue, + last: lastValue, + peak: peakValue, + lowest: lowestValue, + average: averageValue, + median: medianValue + }, + trend: { + direction, + totalGrowth, + growthRate: `${growthRate.toFixed(2)}%`, + momentum, + volatility + }, + patterns: { + hasSeasonality, + growthPhases + } + }; +} + +/** + * Creates an empty trend analysis object with default values. + * + * @returns An empty TrendAnalysis object. 
+ */ +function createEmptyTrendAnalysis(): TrendAnalysis { + return { + dataPoints: 0, + timeRange: {}, + values: { first: 0, last: 0, peak: 0, lowest: 0, average: 0, median: 0 }, + trend: { + direction: 'stable', + totalGrowth: 0, + growthRate: '0%', + momentum: 'insufficient_data', + volatility: 'low' + }, + patterns: { hasSeasonality: false, growthPhases: [] } + }; +} + +/** + * Calculates the momentum of a time-series dataset. + * + * @param values - Array of time-series data points. + * @returns The momentum classification: 'accelerating', 'decelerating', 'stable', or 'insufficient_data'. + */ +export function calculateMomentum(values: Array<{date: string; value: number}>): 'accelerating' | 'decelerating' | 'stable' | 'insufficient_data' { + if (values.length < 6) return 'insufficient_data'; + + const midpoint = Math.floor(values.length / 2); + const firstHalf = values.slice(0, midpoint); + const secondHalf = values.slice(midpoint); + + const firstHalfGrowth = firstHalf.length > 1 + ? ((firstHalf[firstHalf.length - 1]!.value - firstHalf[0]!.value) / firstHalf.length) + : 0; + + const secondHalfGrowth = secondHalf.length > 1 + ? ((secondHalf[secondHalf.length - 1]!.value - secondHalf[0]!.value) / secondHalf.length) + : 0; + + const momentumDifference = secondHalfGrowth - firstHalfGrowth; + const threshold = Math.max(0.1, (firstHalf[0]?.value ?? 0) * 0.01); // Dynamic threshold based on scale + + if (Math.abs(momentumDifference) < threshold) return 'stable'; + return momentumDifference > 0 ? 'accelerating' : 'decelerating'; +} + +/** + * Detects growth phases in a time-series dataset. + * + * @param values - Array of time-series data points. + * @returns Array of growth phase objects, each with start/end dates and growth value. 
+ */ +function detectGrowthPhases(values: Array<{date: string; value: number}>): Array<{phase: string; startDate: string; endDate: string; growth: number}> { + if (values.length < 3) return []; + + const phases = []; + let currentPhaseStart = 0; + let currentPhaseType = 'stable'; + + for (let i = 1; i < values.length; i++) { + const prevValue = values[i - 1]!.value; + const currentValue = values[i]!.value; + const growth = currentValue - prevValue; + const growthRate = prevValue > 0 ? growth / prevValue : 0; + + let phaseType = 'stable'; + if (growthRate > 0.05) phaseType = 'growth'; + else if (growthRate < -0.05) phaseType = 'decline'; + + if (phaseType !== currentPhaseType && i - currentPhaseStart >= 2) { + const phaseGrowth = values[i - 1]!.value - values[currentPhaseStart]!.value; + phases.push({ + phase: currentPhaseType, + startDate: values[currentPhaseStart]!.date, + endDate: values[i - 1]!.date, + growth: phaseGrowth + }); + currentPhaseStart = i - 1; + currentPhaseType = phaseType; + } + } + + if (values.length - currentPhaseStart >= 2) { + const phaseGrowth = values[values.length - 1]!.value - values[currentPhaseStart]!.value; + phases.push({ + phase: currentPhaseType, + startDate: values[currentPhaseStart]!.date, + endDate: values[values.length - 1]!.date, + growth: phaseGrowth + }); + } + + return phases; +} + +/** + * Detects seasonality in a time-series dataset. + * + * @param values - Array of time-series data points. + * @returns True if seasonality is detected, false otherwise. 
+ */
+function detectSeasonality(values: Array<{date: string; value: number}>): boolean {
+  if (values.length < 12) return false; // Need at least a year of data
+
+  const monthlyAverages: Record<string, number[]> = {};
+
+  values.forEach(item => {
+    const month = item.date.substring(5, 7); // Extract MM from YYYY-MM
+    if (!monthlyAverages[month]) monthlyAverages[month] = [];
+    monthlyAverages[month].push(item.value);
+  });
+
+  const monthlyMeans = Object.entries(monthlyAverages)
+    .map(([month, vals]) => ({
+      month,
+      mean: vals.reduce((sum, val) => sum + val, 0) / vals.length
+    }));
+
+  if (monthlyMeans.length < 6) return false; // Data from multiple months
+
+  const overallMean = monthlyMeans.reduce((sum, m) => sum + m.mean, 0) / monthlyMeans.length;
+  const variance = monthlyMeans.reduce((sum, m) => sum + Math.pow(m.mean - overallMean, 2), 0) / monthlyMeans.length;
+  const coefficientOfVariation = overallMean > 0 ? Math.sqrt(variance) / overallMean : 0;
+
+  // If monthly variations are significant, there might be seasonality
+  return coefficientOfVariation > 0.2;
+}
+
+/**
+ * Generates suggestions for handling specific error messages.
+ * NOTE: The suggestions have been generated with AI, so I'd request @frank-zsy / @birdflyi to help enhance this further.
+ *
+ * @param errorMessage - The error message to analyze.
+ * @returns Array of suggestions for resolving the error.
+ */ +export function generateErrorSuggestions(errorMessage: string): string[] { + const suggestions = []; + const message = errorMessage.toLowerCase(); + + if (message.includes('validation') || message.includes('invalid input')) { + suggestions.push('Check that all required parameters are provided'); + suggestions.push('Verify parameter types match the expected schema'); + suggestions.push('Ensure enum values are exactly as specified (case-sensitive)'); + suggestions.push('Review the tool documentation for parameter requirements'); + } + + if (message.includes('404') || message.includes('not found')) { + suggestions.push('Verify the repository owner and name are correct'); + suggestions.push('Check if the repository exists on the specified platform'); + suggestions.push('Ensure the metric name is supported for this repository type'); + suggestions.push('Try a different metric or check OpenDigger documentation'); + } + + if (message.includes('network') || message.includes('timeout') || message.includes('fetch')) { + suggestions.push('Check your internet connection'); + suggestions.push('Try again in a few moments - the API might be temporarily unavailable'); + suggestions.push('Consider using cached data if available'); + suggestions.push('Verify the OpenDigger API is accessible'); + } + + if (message.includes('rate limit') || message.includes('too many requests')) { + suggestions.push('Wait before making additional requests'); + suggestions.push('Use batch operations to reduce the number of API calls'); + suggestions.push('Consider implementing exponential backoff'); + suggestions.push('Check if you are making too many concurrent requests'); + } + + if (message.includes('missing required field')) { + suggestions.push('For repository metrics: provide both owner and repo parameters'); + suggestions.push('For user metrics: provide the login parameter'); + suggestions.push('Check the entityType matches your provided parameters'); + suggestions.push('Ensure all 
required fields are specified and not empty'); + } + + if (message.includes('parse') || message.includes('json')) { + suggestions.push('Check the data format returned by the API'); + suggestions.push('Verify the metric data is in the expected format'); + suggestions.push('Try a different metric or repository'); + suggestions.push('Report this issue if it persists across multiple requests'); + } + + if (suggestions.length === 0) { + suggestions.push('Check the server logs for more detailed error information'); + suggestions.push('Verify all input parameters are correct'); + suggestions.push('Try a simpler request to isolate the issue'); + suggestions.push('Contact support if the problem persists'); + } + + return suggestions; +} + +/** + * Calculates a health score for a repository based on weighted metrics. + * + * @param metrics - Object containing metric names and their values. + * @returns A health score between 0 and 100. + */ +export function calculateHealthScore(metrics: Record): number { + const weights = { + openrank: 0.25, + stars: 0.20, + contributors: 0.20, + participants: 0.15, + forks: 0.10, + commits: 0.10 + }; + + let totalScore = 0; + let totalWeight = 0; + + Object.entries(weights).forEach(([metric, weight]) => { + if (metrics[metric] !== undefined) { + const value = extractLatestValue(metrics[metric]); + // Normalize score based on typical ranges with logarithmic scaling for better distribution + let normalizedScore = 0; + + switch (metric) { + case 'openrank': + normalizedScore = Math.min(Math.log10(Math.max(1, value)) / Math.log10(1000), 1) * 100; + break; + case 'stars': + normalizedScore = Math.min(Math.log10(Math.max(1, value)) / Math.log10(50000), 1) * 100; + break; + case 'contributors': + normalizedScore = Math.min(Math.log10(Math.max(1, value)) / Math.log10(1000), 1) * 100; + break; + case 'participants': + normalizedScore = Math.min(Math.log10(Math.max(1, value)) / Math.log10(5000), 1) * 100; + break; + case 'forks': + normalizedScore = 
Math.min(Math.log10(Math.max(1, value)) / Math.log10(20000), 1) * 100; + break; + case 'commits': + normalizedScore = Math.min(Math.log10(Math.max(1, value)) / Math.log10(100000), 1) * 100; + break; + } + + totalScore += normalizedScore * weight; + totalWeight += weight; + } + }); + + return totalWeight > 0 ? Math.round(totalScore / totalWeight) : 0; +} diff --git a/mcp-server/src/index.ts b/mcp-server/src/index.ts new file mode 100644 index 0000000..918f1cb --- /dev/null +++ b/mcp-server/src/index.ts @@ -0,0 +1,775 @@ +#!/usr/bin/env node +/** + * @file index.ts + * @description OpenDigger MCP Server - A server for fetching, analyzing, and comparing open-source metrics. + * + * This server provides tools and prompts for: + * - Fetching single or batch metrics from OpenDigger + * - Comparing repositories across key metrics + * - Analyzing trends over time + * - Generating ecosystem insights + * - Providing server health and cache statistics + * + * Features: + * - Enhanced error handling and suggestions + * - Batch processing with rate limiting + * - Caching with TTL support + * - SSE (Server-Sent Events) for real-time updates + * - Comprehensive prompts for analysis and visualization + * - Health monitoring and performance metrics + * + * Environment Variables: + * - CACHE_TTL_SECONDS: Cache time-to-live in seconds (default: 300) + * - SSE_PORT: Port for SSE HTTP server (optional) + * - SSE_HOST: Host for SSE HTTP server (default: 0.0.0.0) + * + * Dependencies: + * - @modelcontextprotocol/sdk + * - zod, zod-to-json-schema + * - node:http, node:url + * - Custom utilities: utils.js, version.js, analysis.js + */ + + +import { Server } from '@modelcontextprotocol/sdk/server/index.js'; +import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'; +import { CallToolRequestSchema, GetPromptRequestSchema, ListPromptsRequestSchema, ListToolsRequestSchema, } from '@modelcontextprotocol/sdk/types.js'; +import { z } from 'zod'; +import { zodToJsonSchema } 
from 'zod-to-json-schema'; +import http, { IncomingMessage, ServerResponse } from 'node:http'; +import { URL } from 'node:url'; +import { fetchWithCache, getCacheStats, clearExpiredCache } from './utils.js'; +import { VERSION } from './version.js'; +import { generateComparisonAnalysis, processTrendData, extractLatestValue, generateErrorSuggestions, calculateHealthScore } from './analysis.js'; + + +const server = new Server( + { + name: 'open-digger-mcp-server', + version: VERSION, + }, + { + capabilities: { + tools: {}, + prompts: {}, + }, + } +); + +const BASE_URL = 'https://oss.open-digger.cn/'; +const DEFAULT_TTL_SECONDS = Number(process.env.CACHE_TTL_SECONDS || 300); +const BATCH_SIZE = 5; + + + +function buildUrl(args: z.infer): string { + const platform = args.platform.toString().toLowerCase(); + if (args.entityType === 'Repo') { + if (!args.owner || !args.repo) throw new Error('Missing required fields: owner, repo'); + return `${BASE_URL}${platform}/${args.owner}/${args.repo}/${args.metricName}.json`; + } + if (!args.login) throw new Error('Missing required field: login'); + return `${BASE_URL}${platform}/${args.login}/${args.metricName}.json`; +} + + +// Input schema (with metrics) +const inputSchema = z.object({ + platform: z.enum(['GitHub', 'Gitee']).describe('Platform of the repo or user (GitHub, Gitee).'), + entityType: z.enum(['Repo', 'User']).describe('What is the entity of the metric (Repo, User).'), + owner: z.string().optional().describe('The owner name of the repo to get a metric data.'), + repo: z.string().optional().describe('The repo name of the repo to get a metric data.'), + login: z.string().optional().describe('The user login to get a metric data of a user.'), + metricName: z.enum([ + 'openrank', + 'stars', + 'forks', + 'participants', + 'contributors', + 'issues_new', + 'issues_closed', + 'change_requests', + 'pull_requests', + 'pull_requests_accepted', + 'issue_comments', + 'commits', + 'activity', + 'technical_fork', + 'bus_factor', + 
'releases', + 'inactive_contributors', + 'pull_requests_merged', + 'issue_response_time', + 'maintainer_count', + 'code_change_lines', + 'community_activity', + 'developer_network' + ]).describe('The metric name to get the data.'), +}); + + +const batchInputSchema = z.object({ + requests: z.array(inputSchema).min(1).max(20).describe('Batch of up to 20 requests'), +}).describe('Batch request payload for fetching multiple metrics in one call'); + +const compareReposSchema = z.object({ + repositories: z.array(z.object({ + platform: z.enum(['GitHub', 'Gitee']), + owner: z.string(), + repo: z.string() + })).min(2).max(5).describe('2-5 repositories to compare'), + metrics: z.array(z.enum([ + 'openrank', 'stars', 'forks', 'participants', 'contributors', + 'issues_new', 'issues_closed', 'pull_requests', 'commits', + 'technical_fork', 'bus_factor', 'releases' + ])).optional().describe('Metrics to compare (default: openrank, stars, contributors)') +}); + +const trendAnalysisSchema = z.object({ + platform: z.enum(['GitHub', 'Gitee']), + entityType: z.enum(['Repo', 'User']), + owner: z.string().optional(), + repo: z.string().optional(), + login: z.string().optional(), + metricName: z.enum(['openrank', 'stars', 'forks', 'contributors', 'participants']), + timeRange: z.enum(['6m', '1y', '2y', '3y']).optional().describe('Time range for trend analysis') +}); + +const ecosystemInsightsSchema = z.object({ + platform: z.enum(['GitHub', 'Gitee']), + category: z.enum(['language', 'topic', 'organization']).describe('Type of ecosystem analysis'), + value: z.string().describe('Language name, topic, or organization to analyze'), + limit: z.number().optional().describe('Number of top results to return (default: 10)') +}); + +const healthCheckSchema = z.object({ + includeCache: z.boolean().optional().describe('Include cache statistics in response'), + includePerfMetrics: z.boolean().optional().describe('Include performance metrics') +}); + + +// Tools list 
+server.setRequestHandler(ListToolsRequestSchema, async () => ({ + tools: [ + { + name: 'get_open_digger_metric', + description: 'Get single metric data from OpenDigger with enhanced error handling', + inputSchema: zodToJsonSchema(inputSchema), + }, + { + name: 'get_open_digger_metrics_batch', + description: 'Batch fetch multiple OpenDigger metrics with intelligent processing', + inputSchema: zodToJsonSchema(batchInputSchema), + }, + { + name: 'compare_repositories', + description: 'Compare multiple repositories across key metrics with intelligent analysis', + inputSchema: zodToJsonSchema(compareReposSchema), + }, + { + name: 'analyze_trends', + description: 'Perform comprehensive trend analysis on metrics over time', + inputSchema: zodToJsonSchema(trendAnalysisSchema), + }, + { + name: 'get_ecosystem_insights', + description: 'Get ecosystem-level insights for languages, topics, or organizations', + inputSchema: zodToJsonSchema(ecosystemInsightsSchema), + }, + { + name: 'server_health', + description: 'Get server health status, cache statistics, and performance metrics', + inputSchema: zodToJsonSchema(healthCheckSchema), + } + ], +})); + + +server.setRequestHandler(CallToolRequestSchema, async (request: any) => { + try { + if (!request.params.arguments) { + throw new Error("Arguments are required"); + } + + switch (request.params.name) { + case 'get_open_digger_metric': { + const args = inputSchema.parse(request.params.arguments); + const url = buildUrl(args); + const data = await fetchWithCache(url, DEFAULT_TTL_SECONDS); + + return { + content: [{ + type: 'text', + text: JSON.stringify({ + data, + metadata: { + metric: args.metricName, + entity: `${args.owner || args.login}${args.repo ? 
'/' + args.repo : ''}`, + platform: args.platform, + timestamp: new Date().toISOString(), + cached: true // Will be determined by fetchWithCache + } + }, null, 2) + }] + }; + } + + case 'get_open_digger_metrics_batch': { + const args = batchInputSchema.parse(request.params.arguments); + const results = []; + + // Process in batches to respect API limits + for (let i = 0; i < args.requests.length; i += BATCH_SIZE) { + const chunk = args.requests.slice(i, i + BATCH_SIZE); + const chunkResults = await Promise.all(chunk.map(async (r: z.infer) => { + try { + const url = buildUrl(r); + const data = await fetchWithCache(url, DEFAULT_TTL_SECONDS); + return { ok: true, request: r, data }; + } catch (err) { + return { ok: false, request: r, error: (err as Error).message }; + } + })); + + results.push(...chunkResults); + + // NOTE: Add delay between batches to avoid rate limits + // TODO: We need to test & confirm if this is necessary | cc: @birdflyi, @frank-zsy + if (i + BATCH_SIZE < args.requests.length) { + await new Promise(resolve => setTimeout(resolve, 100)); + } + } + + return { + content: [{ + type: 'text', + text: JSON.stringify({ + results, + summary: { + total: args.requests.length, + successful: results.filter(r => r.ok).length, + failed: results.filter(r => !r.ok).length, + processingTime: Date.now() + } + }, null, 2) + }] + }; + } + + case 'compare_repositories': { + const args = compareReposSchema.parse(request.params.arguments); + const metrics = args.metrics || ['openrank', 'stars', 'contributors']; + + const results = await Promise.all( + args.repositories.map(async (repo) => { + const repoMetrics = await Promise.all( + metrics.map(async (metric) => { + try { + const url = buildUrl({ + platform: repo.platform, + entityType: 'Repo', + owner: repo.owner, + repo: repo.repo, + metricName: metric + }); + const data = await fetchWithCache(url, DEFAULT_TTL_SECONDS); + return { metric, data, success: true }; + } catch (error) { + return { + metric, + error: (error 
as Error).message, + success: false + }; + } + }) + ); + + return { + repository: `${repo.owner}/${repo.repo}`, + platform: repo.platform, + metrics: repoMetrics + }; + }) + ); + + const analysis = generateComparisonAnalysis(results, metrics); + + return { + content: [{ + type: 'text', + text: JSON.stringify({ + comparison: results, + analysis, + metadata: { + repositoryCount: args.repositories.length, + metricsCompared: metrics, + timestamp: new Date().toISOString() + } + }, null, 2) + }] + }; + } + + case 'analyze_trends': { + const args = trendAnalysisSchema.parse(request.params.arguments); + const url = buildUrl(args); + const data = await fetchWithCache(url, DEFAULT_TTL_SECONDS); + + const trendAnalysis = processTrendData(data, args.timeRange || '1y'); + + return { + content: [{ + type: 'text', + text: JSON.stringify({ + rawData: data, + trendAnalysis, + metadata: { + metric: args.metricName, + entity: `${args.owner || args.login}${args.repo ? '/' + args.repo : ''}`, + timeRange: args.timeRange || '1y', + platform: args.platform, + timestamp: new Date().toISOString() + } + }, null, 2) + }] + }; + } + + case 'get_ecosystem_insights': { + const args = ecosystemInsightsSchema.parse(request.params.arguments); + + // TODO: For now I've kept a placeholder for ecosystem insights - would need specialized endpoints + return { + content: [{ + type: 'text', + text: JSON.stringify({ + status: "ecosystem_insights_placeholder", + message: "Ecosystem insights feature requires specialized OpenDigger endpoints", + category: args.category, + value: args.value, + platform: args.platform, + limit: args.limit || 10, + suggestion: "Use compare_repositories for multi-repo analysis", + timestamp: new Date().toISOString() + }, null, 2) + }] + }; + } + + case 'server_health': { + const args = healthCheckSchema.parse(request.params.arguments); + const health: any = { + status: 'healthy', + version: VERSION, + uptime: process.uptime(), + timestamp: new Date().toISOString(), + memory: 
process.memoryUsage() + }; + + if (args.includeCache) { + health.cache = getCacheStats(); + } + + if (args.includePerfMetrics) { + health.performance = { + eventLoopDelay: process.hrtime(), + cpuUsage: process.cpuUsage() + }; + } + + // Clean expired cache entries + clearExpiredCache(); + + return { + content: [{ + type: 'text', + text: JSON.stringify(health, null, 2) + }] + }; + } + + default: + throw new Error(`Unknown tool: ${request.params.name}`); + } + } catch (error: unknown) { + if (error instanceof z.ZodError) { + return { + content: [{ + type: 'text', + text: JSON.stringify({ + error: 'Invalid input parameters', + details: error.errors, + suggestions: generateErrorSuggestions('validation_error'), + timestamp: new Date().toISOString() + }, null, 2) + }] + }; + } + + const message = error instanceof Error ? error.message : String(error); + return { + content: [{ + type: 'text', + text: JSON.stringify({ + error: message, + context: { + tool: request.params.name, + arguments: request.params.arguments + }, + suggestions: generateErrorSuggestions(message), + timestamp: new Date().toISOString() + }, null, 2) + }] + }; + } +}); + + + +// Prompts ... 
+// TODO: We can update and enhance the prompts further based on feedback | cc: @birdflyi, @frank-zsy +server.setRequestHandler(ListPromptsRequestSchema, async () => ({ + prompts: [ + { + name: 'repo_health_analysis', + description: 'Comprehensive repository health analysis with visualizations and actionable insights', + arguments: [ + { name: 'platform', description: 'Platform (GitHub/Gitee)', required: true }, + { name: 'owner', description: 'Repository owner', required: true }, + { name: 'repo', description: 'Repository name', required: true }, + { name: 'timeframe', description: 'Analysis timeframe (monthly/quarterly/yearly)', required: false } + ] + }, + { + name: 'repo_comparison', + description: 'Side-by-side comparison of multiple repositories with competitive analysis', + arguments: [ + { name: 'repositories', description: 'JSON array of repos: [{"platform":"GitHub","owner":"user","repo":"name"}]', required: true }, + { name: 'metrics', description: 'Comma-separated metrics to compare (default: openrank,stars,contributors)', required: false } + ] + }, + { + name: 'developer_insights', + description: 'Analyze developer activity patterns and contribution insights', + arguments: [ + { name: 'platform', description: 'Platform (GitHub/Gitee)', required: true }, + { name: 'login', description: 'Developer username', required: true }, + { name: 'analysis_type', description: 'Type: activity, influence, or comprehensive', required: false } + ] + } + ] +})); + +server.setRequestHandler(GetPromptRequestSchema, async (request: any) => { + const { name, arguments: args } = request.params; + + switch (name) { + case 'repo_health_analysis': { + const { platform, owner, repo, timeframe = 'auto' } = args || {}; + + // TODO: We need to finetune the `VISUALIZATION REQUIREMENTS` section further based on feedback | cc: @birdflyi, @frank-zsy + return { + messages: [{ + role: 'user', + content: { + type: 'text', + text: `Analyze the health and activity of ${owner}/${repo} on 
${platform}. + + ANALYSIS REQUIREMENTS: + 1. Use the compare_repositories and analyze_trends tools to fetch comprehensive data + 2. Fetch metrics: openrank, stars, forks, contributors, participants, issues_new, pull_requests, commits + 3. Determine timeframe: ${timeframe === 'auto' ? 'auto-detect based on repo age (>3y: yearly, >1y: quarterly, else: monthly)' : timeframe} + 4. Calculate health scores and identify trends + + OUTPUT FORMAT: + Create a beautiful HTML report with: + - Executive summary with key findings and health score + - Repository health indicators with color coding (green/yellow/red) + - Comparative analysis against similar projects in the ecosystem + - Developer activity patterns and contribution insights + - Recent activity highlights and momentum analysis + - Actionable recommendations for maintainers + + VISUALIZATION REQUIREMENTS: + - Responsive HTML that works on desktop and mobile + - Bar charts for comparative analysis + - Radar chart for overall health assessment + - Progress indicators for health scores + - Export-friendly styling (print/PDF ready) + + HEALTH METRICS TO CALCULATE: + - Community engagement score (based on participants, contributors) + - Project momentum (based on recent activity trends) + - Maintenance health (based on issue resolution, PR acceptance) + - Popularity index (stars, forks growth rate) + - Developer satisfaction (contributor retention, activity patterns) + + Make the analysis practical and actionable for repository maintainers and potential contributors.` + } + }] + }; + } + + case 'repo_comparison': { + const { repositories, metrics = 'openrank,stars,contributors,forks' } = args || {}; + + return { + messages: [{ + role: 'user', + content: { + type: 'text', + text: `Compare multiple repositories side-by-side with competitive analysis. + + REPOSITORIES TO ANALYZE: ${repositories} + METRICS TO COMPARE: ${metrics} + + ANALYSIS STEPS: + 1. Use compare_repositories tool to fetch data for all repositories + 2. 
Use analyze_trends tool for each repository to understand growth patterns + 3. Calculate normalized scores for fair comparison across different scales + 4. Identify market leaders and growth champions in each category + 5. Generate strategic insights and recommendations + + OUTPUT FORMAT: + Create an interactive HTML dashboard with: + - Executive summary table with rankings and key insights + - Side-by-side metric comparison with visual indicators + - Growth trajectory analysis with trend lines + - Market positioning matrix (performance vs growth) + - Competitive gap analysis and opportunities + - Strategic recommendations for each repository + + ANALYSIS DIMENSIONS: + - Current Performance: Latest metric values and rankings + - Growth Momentum: Recent trend analysis and acceleration + - Community Health: Developer engagement and activity patterns + - Market Position: Competitive standing and differentiation + - Future Potential: Growth projections and opportunity areas + + INSIGHTS TO PROVIDE: + - Who leads in each metric and why + - Growth momentum leaders vs performance leaders + - Community engagement comparison (developer satisfaction) + - Market opportunities and competitive gaps + - Strategic recommendations for improvement + - Potential collaboration or learning opportunities + + Make this useful for strategic decision-making and competitive positioning.` + } + }] + }; + } + + case 'developer_insights': { + const { platform, login, analysis_type = 'comprehensive' } = args || {}; + + + // TODO: Update the prompt further based on feedback | cc: @birdflyi, @frank-zsy + // Also, we need to update it to support visualization requirements + return { + messages: [{ + role: 'user', + content: { + type: 'text', + text: `Analyze developer ${login} on ${platform} platform. + + ANALYSIS TYPE: ${analysis_type} + ANALYSIS FOCUS: ${analysis_type === 'activity' ? 'Activity patterns and productivity' : + analysis_type === 'influence' ? 
'Community influence and impact' : + 'Comprehensive developer profile'} + + DATA COLLECTION: + 1. Use get_open_digger_metric for developer-specific metrics + 2. Use analyze_trends to understand activity patterns over time + 3. Gather metrics: openrank, activity, contributions, influence patterns + + OUTPUT FORMAT: + Generate a professional developer profile report with: + - Developer summary with key achievements and influence score + - Activity timeline showing contribution patterns over time + - Skills and expertise analysis based on project involvement + - Community impact metrics and influence assessment + - Activity heatmap showing when and how they contribute + - Collaboration network analysis (if available) + - Career development insights and growth recommendations + + INSIGHTS TO PROVIDE: + - Peak activity times and productivity patterns + - Expertise areas and technical strengths + - Community influence and leadership indicators + - Collaboration style and team interaction patterns + - Career development trajectory and growth areas + - Recommendations for skill development and community engagement + + CAREER DEVELOPMENT FOCUS: + - Identify areas of expertise and influence + - Suggest opportunities for increased impact + - Highlight collaboration and leadership potential + - Provide actionable recommendations for professional growth + + Make this valuable for career development, team building, and understanding developer contributions to the open source ecosystem.` + } + }] + }; + } + + default: + throw new Error(`Prompt '${name}' not implemented`); + } +}); + + +async function main() { + const transport = new StdioServerTransport(); + await server.connect(transport); + console.error("OpenDigger MCP Server running (on stdio)..."); + + const ssePortEnv = process.env.SSE_PORT; + if (ssePortEnv) { + const ssePort = Number(ssePortEnv); + const sseHost = process.env.SSE_HOST || '0.0.0.0'; + + const serverHttp = http.createServer(async (req: IncomingMessage, res: 
ServerResponse) => { + try { + if (!req.url) { + res.statusCode = 400; + res.end('Bad Request'); + return; + } + const urlObj = new URL(req.url, `http://${req.headers.host}`); + const pathname = urlObj.pathname; + + // CORS headers + res.setHeader('Access-Control-Allow-Origin', '*'); + res.setHeader('Access-Control-Allow-Methods', 'GET, OPTIONS'); + res.setHeader('Access-Control-Allow-Headers', 'Content-Type'); + + if (req.method === 'OPTIONS') { + res.statusCode = 204; + res.end(); + return; + } + + if (pathname === '/health') { + res.statusCode = 200; + res.setHeader('Content-Type', 'application/json'); + res.end(JSON.stringify({ + status: 'ok', + version: VERSION, + uptime: process.uptime(), + cache: getCacheStats() + })); + return; + } + + function startSse() { + res.writeHead(200, { + 'Content-Type': 'text/event-stream', + 'Cache-Control': 'no-cache, no-transform', + 'Connection': 'keep-alive', + 'Access-Control-Allow-Origin': '*' + }); + res.write(`event: start\n`); + res.write(`data: {"status":"connected","timestamp":"${new Date().toISOString()}"}\n\n`); + } + + function sendEvent(event: string, data: unknown) { + res.write(`event: ${event}\n`); + res.write(`data: ${JSON.stringify(data)}\n\n`); + } + + function endSse() { + res.write(`event: end\n`); + res.write(`data: {"status":"completed","timestamp":"${new Date().toISOString()}"}\n\n`); + res.end(); + } + + if (pathname === '/sse') { + if (req.method !== 'GET') { + res.statusCode = 405; + res.end('Method Not Allowed'); + return; + } + startSse(); + try { + const query = urlObj.searchParams; + const singleArgs = inputSchema.parse({ + platform: query.get('platform'), + entityType: query.get('entityType'), + owner: query.get('owner') || undefined, + repo: query.get('repo') || undefined, + login: query.get('login') || undefined, + metricName: query.get('metricName'), + }); + const url = buildUrl(singleArgs); + const data = await fetchWithCache(url, DEFAULT_TTL_SECONDS); + sendEvent('data', { request: 
singleArgs, data }); + endSse(); + } catch (err) { + sendEvent('error', { error: (err as Error).message }); + endSse(); + } + return; + } + + if (pathname === '/sse/batch') { + if (req.method !== 'GET') { + res.statusCode = 405; + res.end('Method Not Allowed!'); + return; + } + startSse(); + try { + const raw = urlObj.searchParams.get('requests'); + if (!raw) throw new Error('Missing requests query param!'); + let parsed: unknown; + try { + parsed = JSON.parse(raw); + } catch { + throw new Error('Invalid JSON in requests param!'); + } + const args = batchInputSchema.parse({ requests: parsed }); + + sendEvent('progress', { total: args.requests.length, processed: 0 }); + + for (let i = 0; i < args.requests.length; i++) { + const r = args.requests[i]!; + try { + const url = buildUrl(r); + const data = await fetchWithCache(url, DEFAULT_TTL_SECONDS); + sendEvent('data', { index: i, ok: true, request: r, data }); + } catch (e) { + sendEvent('data', { index: i, ok: false, request: r, error: (e as Error).message }); + } + sendEvent('progress', { total: args.requests.length, processed: i + 1 }); + } + endSse(); + } catch (err) { + sendEvent('error', { error: (err as Error).message }); + endSse(); + } + return; + } + + res.statusCode = 404; + res.end('Not Found'); + } catch (e) { + try { + res.statusCode = 500; + res.end('Internal Server Error!'); + } catch { + // ignore + } + } + }); + + serverHttp.listen(ssePort, sseHost, () => { + console.error(`SSE HTTP server listening on http://${sseHost}:${ssePort}`); + }); + } +} + +main().catch((error) => { + console.error("Fatal error in main():", error); + process.exit(1); +}); \ No newline at end of file diff --git a/mcp-server/src/utils.ts b/mcp-server/src/utils.ts new file mode 100644 index 0000000..89aa8ec --- /dev/null +++ b/mcp-server/src/utils.ts @@ -0,0 +1,173 @@ +/** + * @file utils.ts + * @description + * A utility module for managing an in-memory cache system with TTL (Time-To-Live) support. 
+ * This module provides functions to fetch data with caching, track cache statistics, and manage cache entries. + * It is designed to optimize performance by reducing redundant network requests and providing insights into cache usage. + * + * Features: + * - In-memory caching with TTL support. + * - Cache hit/miss tracking and statistics. + * - Automatic cache expiration and cleanup. + * - Fetching data with cache fallback. + * - Retrieving popular cache entries and memory usage statistics. + * + * Usage: + * - Use `fetchWithCache` to fetch data with caching. + * - Use `getCacheStats` to retrieve cache performance metrics. + * - Use `clearExpiredCache` or `clearCache` to manage cache entries. + * - Use `getPopularCacheEntries` to analyze frequently accessed cache entries. + */ + + +import { performance } from 'perf_hooks'; + +interface CacheEntry { + value: unknown; + expiresAt: number; + createdAt: number; + hitCount: number; +} + +interface CacheStats { + totalEntries: number; + totalHits: number; + totalMisses: number; + hitRate: number; + memoryUsage: { + heapUsed: number; + heapTotal: number; + external: number; + }; + oldestEntry?: number | undefined; + newestEntry?: number | undefined; +} + +const inMemoryCache: Map = new Map(); +let cacheHits = 0; +let cacheMisses = 0; + +async function parseResponseBody(response: Response): Promise { + const contentType = response.headers.get("content-type"); + if (contentType?.includes("application/json")) { + return response.json(); + } + return response.text(); +} + +function getCached(url: string): unknown | undefined { + const entry = inMemoryCache.get(url); + if (!entry) { + cacheMisses++; + return undefined; + } + + if (Date.now() > entry.expiresAt) { + inMemoryCache.delete(url); + cacheMisses++; + return undefined; + } + + entry.hitCount++; + cacheHits++; + return entry.value; +} + +function setCached(url: string, value: unknown, ttlSeconds: number): void { + const now = Date.now(); + inMemoryCache.set(url, { + 
value, + expiresAt: now + ttlSeconds * 1000, + createdAt: now, + hitCount: 0 + }); +} + +export async function fetchWithCache(url: string, ttlSeconds: number): Promise { + const startTime = performance.now(); + + try { + const cached = getCached(url); + if (cached !== undefined) { + console.error(`Cache hit for ${url} (${(performance.now() - startTime).toFixed(2)}ms)`); + return cached; + } + + console.error(`Fetching ${url}...`); + const response = await fetch(url); + const responseBody = await parseResponseBody(response); + + if (!response.ok) { + const errorMessage = `HTTP ${response.status}: ${response.statusText}`; + throw new Error(`Error fetching data from ${url} - ${errorMessage}`); + } + + setCached(url, responseBody, ttlSeconds); + console.error(`Cached ${url} (${(performance.now() - startTime).toFixed(2)}ms)`); + return responseBody; + + } catch (error) { + const duration = (performance.now() - startTime).toFixed(2); + if (error instanceof Error) { + throw new Error(`Failed to fetch ${url} after ${duration}ms: ${error.message}`); + } + throw new Error(`Failed to fetch ${url} after ${duration}ms: Unknown error`); + } +} + +export function getCacheStats(): CacheStats { + const entries = Array.from(inMemoryCache.values()); + const memUsage = process.memoryUsage(); + + return { + totalEntries: inMemoryCache.size, + totalHits: cacheHits, + totalMisses: cacheMisses, + hitRate: cacheHits + cacheMisses > 0 ? cacheHits / (cacheHits + cacheMisses) : 0, + memoryUsage: { + heapUsed: memUsage.heapUsed, + heapTotal: memUsage.heapTotal, + external: memUsage.external + }, + oldestEntry: entries.length > 0 ? Math.min(...entries.map(e => e.createdAt)) : undefined, + newestEntry: entries.length > 0 ? 
Math.max(...entries.map(e => e.createdAt)) : undefined + }; +} + +export function clearExpiredCache(): number { + const now = Date.now(); + let cleared = 0; + + for (const [key, entry] of inMemoryCache.entries()) { + if (now > entry.expiresAt) { + inMemoryCache.delete(key); + cleared++; + } + } + + if (cleared > 0) { + console.error(`Cleared ${cleared} expired cache entries`); + } + + return cleared; +} + +export function clearCache(): void { + const size = inMemoryCache.size; + inMemoryCache.clear(); + cacheHits = 0; + cacheMisses = 0; + console.error(`Cleared all cache entries (${size} items)`); +} + +export function getPopularCacheEntries(limit: number = 10): Array<{url: string, hitCount: number, age: number}> { + const now = Date.now(); + return Array.from(inMemoryCache.entries()) + .map(([url, entry]) => ({ + url, + hitCount: entry.hitCount, + age: now - entry.createdAt + })) + .sort((a, b) => b.hitCount - a.hitCount) + .slice(0, limit); +} \ No newline at end of file diff --git a/mcp-server/src/version.ts b/mcp-server/src/version.ts new file mode 100644 index 0000000..da54422 --- /dev/null +++ b/mcp-server/src/version.ts @@ -0,0 +1,21 @@ +/** + * @file version.ts + * @description + * This module exports version and build information for the application. + * It includes the current version, build date, and a list of supported features. + * These constants are used to track the application's version, build metadata, + * and feature flags for conditional logic and debugging. 
+ */ + + +export const VERSION = '1.0.0'; +export const BUILD_DATE = new Date().toISOString(); +export const FEATURES = { + ENHANCED_METRICS: true, + SSE_SUPPORT: true, + BATCH_PROCESSING: true, + TREND_ANALYSIS: true, + COMPARISON_TOOLS: true, + HEALTH_MONITORING: true, + ADVANCED_CACHING: true +}; diff --git a/mcp-server/tsconfig.json b/mcp-server/tsconfig.json new file mode 100644 index 0000000..4c255c0 --- /dev/null +++ b/mcp-server/tsconfig.json @@ -0,0 +1,54 @@ +/** + * TypeScript Configuration for OpenDigger's MCP Server + * + * This configuration is optimized for a Node.js project using ES Modules, + * strict type checking, and modern JavaScript features. + * + * Key Features: + * - Targets ES2022 and Node16 module system + * - Enables strict type checking and modern best practices + * - Outputs to `dist` directory, sources from `src` + * - Generates declaration files and source maps for debugging + * - Excludes test files and node_modules from compilation + * - Configures ts-node for ES Module support + */ + +{ + "compilerOptions": { + "target": "ES2022", + "module": "Node16", + "moduleResolution": "Node16", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "outDir": "./dist", + "rootDir": "./src", + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "removeComments": false, + "noEmitOnError": true, + "exactOptionalPropertyTypes": true, + "noImplicitReturns": true, + "noFallthroughCasesInSwitch": true, + "noUncheckedIndexedAccess": true, + "allowUnusedLabels": false, + "allowUnreachableCode": false, + "experimentalDecorators": false, + "emitDecoratorMetadata": false + }, + "include": [ + "src/**/*" + ], + "exclude": [ + "node_modules", + "dist", + "**/*.test.ts", + "**/*.spec.ts" + ], + "ts-node": { + "esm": true + } + } \ No newline at end of file diff --git a/package-lock.json b/package-lock.json deleted file mode 100644 index 9b01031..0000000 
--- a/package-lock.json +++ /dev/null @@ -1,519 +0,0 @@ -{ - "name": "open-digger-mcp-server", - "version": "0.0.1", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "open-digger-mcp-server", - "version": "0.0.1", - "license": "MIT", - "dependencies": { - "@modelcontextprotocol/sdk": "1.0.1", - "node-fetch": "^3.3.2", - "zod": "^3.22.4", - "zod-to-json-schema": "^3.23.5" - }, - "bin": { - "mcp-server-open-digger": "dist/index.js" - }, - "devDependencies": { - "@types/node": "^22", - "@types/node-fetch": "^2.6.12", - "typescript": "^5.6.2" - } - }, - "node_modules/@modelcontextprotocol/sdk": { - "version": "1.0.1", - "resolved": "https://registry.npmmirror.com/@modelcontextprotocol/sdk/-/sdk-1.0.1.tgz", - "integrity": "sha512-slLdFaxQJ9AlRg+hw28iiTtGvShAOgOKXcD0F91nUcRYiOMuS9ZBYjcdNZRXW9G5JQ511GRTdUy1zQVZDpJ+4w==", - "dependencies": { - "content-type": "^1.0.5", - "raw-body": "^3.0.0", - "zod": "^3.23.8" - } - }, - "node_modules/@types/node": { - "version": "22.13.10", - "resolved": "https://registry.npmmirror.com/@types/node/-/node-22.13.10.tgz", - "integrity": "sha512-I6LPUvlRH+O6VRUqYOcMudhaIdUVWfsjnZavnsraHvpBwaEyMN29ry+0UVJhImYL16xsscu0aske3yA+uPOWfw==", - "dev": true, - "dependencies": { - "undici-types": "~6.20.0" - } - }, - "node_modules/@types/node-fetch": { - "version": "2.6.12", - "resolved": "https://registry.npmmirror.com/@types/node-fetch/-/node-fetch-2.6.12.tgz", - "integrity": "sha512-8nneRWKCg3rMtF69nLQJnOYUcbafYeFSjqkw3jCRLsqkWFlHaoQrr5mXmofFGOx3DKn7UfmBMyov8ySvLRVldA==", - "dev": true, - "dependencies": { - "@types/node": "*", - "form-data": "^4.0.0" - } - }, - "node_modules/asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmmirror.com/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", - "dev": true - }, - "node_modules/bytes": { - "version": "3.1.2", - "resolved": 
"https://registry.npmmirror.com/bytes/-/bytes-3.1.2.tgz", - "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/call-bind-apply-helpers": { - "version": "1.0.2", - "resolved": "https://registry.npmmirror.com/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", - "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", - "dev": true, - "dependencies": { - "es-errors": "^1.3.0", - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/combined-stream": { - "version": "1.0.8", - "resolved": "https://registry.npmmirror.com/combined-stream/-/combined-stream-1.0.8.tgz", - "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "dev": true, - "dependencies": { - "delayed-stream": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/content-type": { - "version": "1.0.5", - "resolved": "https://registry.npmmirror.com/content-type/-/content-type-1.0.5.tgz", - "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/data-uri-to-buffer": { - "version": "4.0.1", - "resolved": "https://registry.npmmirror.com/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", - "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==", - "engines": { - "node": ">= 12" - } - }, - "node_modules/delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmmirror.com/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", - "dev": true, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/depd": { - "version": "2.0.0", - 
"resolved": "https://registry.npmmirror.com/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/dunder-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmmirror.com/dunder-proto/-/dunder-proto-1.0.1.tgz", - "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", - "dev": true, - "dependencies": { - "call-bind-apply-helpers": "^1.0.1", - "es-errors": "^1.3.0", - "gopd": "^1.2.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-define-property": { - "version": "1.0.1", - "resolved": "https://registry.npmmirror.com/es-define-property/-/es-define-property-1.0.1.tgz", - "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", - "dev": true, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-errors": { - "version": "1.3.0", - "resolved": "https://registry.npmmirror.com/es-errors/-/es-errors-1.3.0.tgz", - "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", - "dev": true, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-object-atoms": { - "version": "1.1.1", - "resolved": "https://registry.npmmirror.com/es-object-atoms/-/es-object-atoms-1.1.1.tgz", - "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", - "dev": true, - "dependencies": { - "es-errors": "^1.3.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-set-tostringtag": { - "version": "2.1.0", - "resolved": "https://registry.npmmirror.com/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", - "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", - "dev": true, - "dependencies": { - "es-errors": "^1.3.0", - 
"get-intrinsic": "^1.2.6", - "has-tostringtag": "^1.0.2", - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/fetch-blob": { - "version": "3.2.0", - "resolved": "https://registry.npmmirror.com/fetch-blob/-/fetch-blob-3.2.0.tgz", - "integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/jimmywarting" - }, - { - "type": "paypal", - "url": "https://paypal.me/jimmywarting" - } - ], - "dependencies": { - "node-domexception": "^1.0.0", - "web-streams-polyfill": "^3.0.3" - }, - "engines": { - "node": "^12.20 || >= 14.13" - } - }, - "node_modules/form-data": { - "version": "4.0.2", - "resolved": "https://registry.npmmirror.com/form-data/-/form-data-4.0.2.tgz", - "integrity": "sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==", - "dev": true, - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "es-set-tostringtag": "^2.1.0", - "mime-types": "^2.1.12" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/formdata-polyfill": { - "version": "4.0.10", - "resolved": "https://registry.npmmirror.com/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", - "integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==", - "dependencies": { - "fetch-blob": "^3.1.2" - }, - "engines": { - "node": ">=12.20.0" - } - }, - "node_modules/function-bind": { - "version": "1.1.2", - "resolved": "https://registry.npmmirror.com/function-bind/-/function-bind-1.1.2.tgz", - "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "dev": true, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/get-intrinsic": { - "version": "1.3.0", - "resolved": "https://registry.npmmirror.com/get-intrinsic/-/get-intrinsic-1.3.0.tgz", - 
"integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", - "dev": true, - "dependencies": { - "call-bind-apply-helpers": "^1.0.2", - "es-define-property": "^1.0.1", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.1.1", - "function-bind": "^1.1.2", - "get-proto": "^1.0.1", - "gopd": "^1.2.0", - "has-symbols": "^1.1.0", - "hasown": "^2.0.2", - "math-intrinsics": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/get-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmmirror.com/get-proto/-/get-proto-1.0.1.tgz", - "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", - "dev": true, - "dependencies": { - "dunder-proto": "^1.0.1", - "es-object-atoms": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/gopd": { - "version": "1.2.0", - "resolved": "https://registry.npmmirror.com/gopd/-/gopd-1.2.0.tgz", - "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-symbols": { - "version": "1.1.0", - "resolved": "https://registry.npmmirror.com/has-symbols/-/has-symbols-1.1.0.tgz", - "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-tostringtag": { - "version": "1.0.2", - "resolved": "https://registry.npmmirror.com/has-tostringtag/-/has-tostringtag-1.0.2.tgz", - "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", - "dev": true, - "dependencies": { - "has-symbols": "^1.0.3" - }, - 
"engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/hasown": { - "version": "2.0.2", - "resolved": "https://registry.npmmirror.com/hasown/-/hasown-2.0.2.tgz", - "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", - "dev": true, - "dependencies": { - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/http-errors": { - "version": "2.0.0", - "resolved": "https://registry.npmmirror.com/http-errors/-/http-errors-2.0.0.tgz", - "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", - "dependencies": { - "depd": "2.0.0", - "inherits": "2.0.4", - "setprototypeof": "1.2.0", - "statuses": "2.0.1", - "toidentifier": "1.0.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/iconv-lite": { - "version": "0.6.3", - "resolved": "https://registry.npmmirror.com/iconv-lite/-/iconv-lite-0.6.3.tgz", - "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmmirror.com/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "node_modules/math-intrinsics": { - "version": "1.1.0", - "resolved": "https://registry.npmmirror.com/math-intrinsics/-/math-intrinsics-1.1.0.tgz", - "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", - "dev": true, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmmirror.com/mime-db/-/mime-db-1.52.0.tgz", - "integrity": 
"sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "dev": true, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmmirror.com/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "dev": true, - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/node-domexception": { - "version": "1.0.0", - "resolved": "https://registry.npmmirror.com/node-domexception/-/node-domexception-1.0.0.tgz", - "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/jimmywarting" - }, - { - "type": "github", - "url": "https://paypal.me/jimmywarting" - } - ], - "engines": { - "node": ">=10.5.0" - } - }, - "node_modules/node-fetch": { - "version": "3.3.2", - "resolved": "https://registry.npmmirror.com/node-fetch/-/node-fetch-3.3.2.tgz", - "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", - "dependencies": { - "data-uri-to-buffer": "^4.0.0", - "fetch-blob": "^3.1.4", - "formdata-polyfill": "^4.0.10" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/node-fetch" - } - }, - "node_modules/raw-body": { - "version": "3.0.0", - "resolved": "https://registry.npmmirror.com/raw-body/-/raw-body-3.0.0.tgz", - "integrity": "sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g==", - "dependencies": { - "bytes": "3.1.2", - "http-errors": "2.0.0", - "iconv-lite": "0.6.3", - "unpipe": "1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/safer-buffer": { - "version": "2.1.2", - 
"resolved": "https://registry.npmmirror.com/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" - }, - "node_modules/setprototypeof": { - "version": "1.2.0", - "resolved": "https://registry.npmmirror.com/setprototypeof/-/setprototypeof-1.2.0.tgz", - "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" - }, - "node_modules/statuses": { - "version": "2.0.1", - "resolved": "https://registry.npmmirror.com/statuses/-/statuses-2.0.1.tgz", - "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/toidentifier": { - "version": "1.0.1", - "resolved": "https://registry.npmmirror.com/toidentifier/-/toidentifier-1.0.1.tgz", - "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", - "engines": { - "node": ">=0.6" - } - }, - "node_modules/typescript": { - "version": "5.8.2", - "resolved": "https://registry.npmmirror.com/typescript/-/typescript-5.8.2.tgz", - "integrity": "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ==", - "dev": true, - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=14.17" - } - }, - "node_modules/undici-types": { - "version": "6.20.0", - "resolved": "https://registry.npmmirror.com/undici-types/-/undici-types-6.20.0.tgz", - "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==", - "dev": true - }, - "node_modules/unpipe": { - "version": "1.0.0", - "resolved": "https://registry.npmmirror.com/unpipe/-/unpipe-1.0.0.tgz", - "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", - "engines": { - "node": ">= 0.8" - } - }, - 
"node_modules/web-streams-polyfill": { - "version": "3.3.3", - "resolved": "https://registry.npmmirror.com/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz", - "integrity": "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw==", - "engines": { - "node": ">= 8" - } - }, - "node_modules/zod": { - "version": "3.24.2", - "resolved": "https://registry.npmmirror.com/zod/-/zod-3.24.2.tgz", - "integrity": "sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ==", - "funding": { - "url": "https://github.com/sponsors/colinhacks" - } - }, - "node_modules/zod-to-json-schema": { - "version": "3.24.4", - "resolved": "https://registry.npmmirror.com/zod-to-json-schema/-/zod-to-json-schema-3.24.4.tgz", - "integrity": "sha512-0uNlcvgabyrni9Ag8Vghj21drk7+7tp7VTwwR7KxxXXc/3pbXz2PHlDgj3cICahgF1kHm4dExBFj7BXrZJXzig==", - "peerDependencies": { - "zod": "^3.24.1" - } - } - } -} diff --git a/package.json b/package.json deleted file mode 100644 index d10cd4a..0000000 --- a/package.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "name": "open-digger-mcp-server", - "version": "0.0.1", - "description": "MCP server for using OpenDigger data", - "license": "MIT", - "author": "Frank Zhao", - "homepage": "https://open-digger.cn/", - "bugs": "https://github.com/X-lab2017/open-digger-mcp-server/issues", - "type": "module", - "bin": { - "mcp-server-open-digger": "dist/index.js" - }, - "files": [ - "dist" - ], - "scripts": { - "build": "tsc && chmod +x dist/*.js", - "prepare": "npm run build", - "watch": "tsc --watch" - }, - "dependencies": { - "@modelcontextprotocol/sdk": "1.0.1", - "node-fetch": "^3.3.2", - "zod": "^3.22.4", - "zod-to-json-schema": "^3.23.5" - }, - "devDependencies": { - "typescript": "^5.6.2", - "@types/node": "^22", - "@types/node-fetch": "^2.6.12" - } -} diff --git a/tsconfig.json b/tsconfig.json deleted file mode 100644 index 7a5f10c..0000000 --- a/tsconfig.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - 
"compilerOptions": { - "target": "ES2022", - "module": "Node16", - "moduleResolution": "Node16", - "strict": true, - "esModuleInterop": true, - "skipLibCheck": true, - "forceConsistentCasingInFileNames": true, - "resolveJsonModule": true, - "outDir": "./dist", - "rootDir": "." - }, - "exclude": [ - "node_modules" - ], - "include": [ - "./**/*.ts" - ] -} diff --git a/utils.ts b/utils.ts deleted file mode 100644 index e97bb4c..0000000 --- a/utils.ts +++ /dev/null @@ -1,16 +0,0 @@ -async function parseResponseBody(response: Response): Promise { - const contentType = response.headers.get("content-type"); - if (contentType?.includes("application/json")) { - return response.json(); - } - return response.text(); -} - -export async function fetchData(url: string): Promise { - const response = await fetch(url); - const responseBody = await parseResponseBody(response); - if (!response.ok) { - throw new Error(`Error on fetching data: status ${response.status}, body: ${responseBody}`); - } - return responseBody; -} diff --git a/version.ts b/version.ts deleted file mode 100644 index b560152..0000000 --- a/version.ts +++ /dev/null @@ -1 +0,0 @@ -export const VERSION='0.0.1'; From 1406ef3d5f49221df5745b7873137a99976de7c1 Mon Sep 17 00:00:00 2001 From: neilblaze Date: Fri, 26 Sep 2025 03:15:04 +0530 Subject: [PATCH 2/5] docs: updated README Signed-off-by: neilblaze --- .github/PULL_REQUEST_TEMPLATE.md | 15 +++ INSTALLATION.md | 127 +++++++++++++++++++++++++ README.md | 156 ++++++++++++++++++++++++++----- mcp-server/.env.example | 7 +- mcp-server/.npmignore | 31 ++++++ mcp-server/package.json | 8 +- 6 files changed, 309 insertions(+), 35 deletions(-) create mode 100644 .github/PULL_REQUEST_TEMPLATE.md create mode 100644 INSTALLATION.md create mode 100644 mcp-server/.npmignore diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000..4b10a91 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,15 @@ +## 
Description + +Describe your PR here. + +## Resolved issues + +Closes #1 + +### Before submitting the PR, please take the following into consideration +- [ ] It's really useful if your PR references an issue where it is discussed ahead of time. In many cases, features are absent for a reason. If you don't have an issue, please create one. +- [ ] Prefix your PR title with `feat: `, `fix: `, `chore: `, `docs:`, or `refactor:`. +- [ ] The description should clearly illustrate what problems it solves. +- [ ] Ensure that the commit messages follow our guidelines. +- [ ] Resolve merge conflicts (if any). +- [ ] Make sure that the current branch is upto date with the `main` branch. \ No newline at end of file diff --git a/INSTALLATION.md b/INSTALLATION.md new file mode 100644 index 0000000..aed6d00 --- /dev/null +++ b/INSTALLATION.md @@ -0,0 +1,127 @@ +# MCP Server Installation Guide +## Prerequisites + +Before installation, ensure you have: +- Node.js installed on your system +- The MCP server files located in `./mcp-server/` directory +- Built server files available in `./mcp-server/dist/index.js` + +--- + +## Installation + +### Cursor + +To add this server to Cursor IDE: + +1. Navigate to `Cursor Settings` > `MCP` +2. Click `+ Add new Global MCP Server` +3. Add the following configuration to your global `.cursor/mcp.json` file: + +```json +{ + "mcpServers": { + "open-digger-mcp": { + "command": "node", + "args": ["./mcp-server/dist/index.js"], + "cwd": "./mcp-server/", + "env": { + "CACHE_TTL_SECONDS": "300", + "NODE_ENV": "production" + } + } + } +} +``` + +**Note**: You can also add this to your project-specific Cursor configuration (supported in Cursor 0.46+). + +Refer to the [Cursor documentation](https://docs.cursor.com/context/model-context-protocol) for additional details. + +### Windsurf + +To set up MCP with Cascade: + +1. Open Windsurf and navigate to `Settings` > `Advanced Settings` or use the Command Palette > `Open Windsurf Settings Page` +2. 
Scroll to the Cascade section to add a new server, view existing servers, or access the raw JSON config file at `mcp_config.json` +3. Click "Add custom server +" to include the open-digger MCP server directly in `mcp_config.json`: + +```json +{ + "mcpServers": { + "open-digger-mcp": { + "command": "node", + "args": ["./mcp-server/dist/index.js"], + "cwd": "./mcp-server/", + "env": { + "CACHE_TTL_SECONDS": "300", + "NODE_ENV": "production" + } + } + } +} +``` + +Refer to the [Windsurf documentation](https://docs.codeium.com/windsurf/mcp) for more information. + +### VS Code + +To install the open-digger MCP server in VS Code, you can use the VS Code CLI: + +```bash +# For VS Code +code --add-mcp '{"name":"open-digger-mcp","command":"node","args":["./mcp-server/dist/index.js"],"cwd":"./mcp-server/","env":{"CACHE_TTL_SECONDS":"300","NODE_ENV":"production"}}' +``` + +```bash +# For VS Code Insiders +code-insiders --add-mcp '{"name":"open-digger-mcp","command":"node","args":["./mcp-server/dist/index.js"],"cwd":"./mcp-server/","env":{"CACHE_TTL_SECONDS":"300","NODE_ENV":"production"}}' +``` + +Alternatively, you can manually add the configuration to your MCP settings file through the VS Code interface. + +After installation, the open-digger MCP server will be available for use with your GitHub Copilot agent in VS Code. + +### Claude Desktop + +Add the following configuration to your `claude_desktop_config.json` file: + +```json +{ + "mcpServers": { + "open-digger-mcp": { + "command": "node", + "args": ["./mcp-server/dist/index.js"], + "cwd": "./mcp-server/", + "env": { + "CACHE_TTL_SECONDS": "300", + "NODE_ENV": "production" + } + } + } +} +``` + +**Configuration file locations:** +- **macOS**: `~/Library/Application Support/Claude/claude_desktop_config.json` +- **Windows**: `%APPDATA%\Claude\claude_desktop_config.json` + +Refer to the [Claude Desktop documentation](https://modelcontextprotocol.io/quickstart/user) for more details. 
+ +## Configuration Options + +The MCP server supports the following environment variables: + +- `CACHE_TTL_SECONDS`: Cache time-to-live in seconds (default: 300) +- `NODE_ENV`: Node environment setting (production/development) + +## Troubleshooting + +1. **Server not starting**: Ensure the built files exist in `./mcp-server/dist/index.js` +2. **Path issues**: Verify that the `cwd` path is correct relative to your project root +3. **Node.js not found**: Make sure Node.js is installed and available in your system PATH +4. **Permission issues**: Check that the MCP server files have appropriate read/execute permissions + +## Verification + +After installation, you should see the open-digger MCP server listed in your IDE's MCP server configuration and it should be available for use with AI assistants that support MCP. \ No newline at end of file diff --git a/README.md b/README.md index 654b009..1aee9ec 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,6 @@ # OpenDigger MCP Server -A Model Context Protocol (MCP) server for OpenDigger, enabling LLMs to interact with repository metrics and analytics. - -> [!NOTE] -> The README.md is still under development. I'll also add an INSTRUCTION_GUIDE.md later which provides detailed usage instructions and examples. +A Model Context Protocol (MCP) server for OpenDigger enabling advanced repository analytics and insights through tools and prompts. ## Quick Start @@ -15,54 +12,120 @@ cd open-digger-mcp-server && cd mcp-server npm install npm run build -# Configure Cursor (update path in .cursor/mcp.json) # Start server npm start ``` +> [!IMPORTANT] +> Don't forget to configure Cursor (update path in .cursor/mcp.json) + + +Expected output: + +``` +OpenDigger MCP Server running (on stdio)... +``` + +
+ +> [!IMPORTANT] +> If you are using Cursor AI IDE, you should see the MCP server (**`open-digger-mcp`**) toast in the bottom-left corner. You should _**enable**_ it, & now you can start using the tools and prompts provided by the MCP server. To verify, open Cursor Settings and check the MCP Servers section → you should see `open-digger-mcp` listed there. + + +![opendigger-mcp-img](https://res.cloudinary.com/dmlwye965/image/upload/v1758830133/open-digger-mcp-cursorSettings-snap_eewkz6.png) + + +To further confirm that the server is functioning correctly, you can check the following indicators in Cursor: + +- ✅ **Green dot** next to "open-digger-mcp" title +- ✅ **"6 tools"** displayed in server status +- ✅ **"3 prompts"** displayed in server status +- ✅ **No error messages** or red indicators + +
+
+
+ demo-mcp-opendigger +
+
+
+ + +> [!TIP] +> Please refer to [`Installation.md`](./INSTALLATION.md) for detailed installation instructions and configuration options for different IDEs. + +--- + ## Features -### Tools (6) -1. **get_open_digger_metric** - Single metric fetching -2. **get_open_digger_metrics_batch** - Batch operations -3. **compare_repositories** - Multi-repo analysis -4. **analyze_trends** - Growth trend analysis -5. **get_ecosystem_insights** - Ecosystem analytics -6. **server_health** - System diagnostics +### Tools (6 Available) + +| No. | Tool | Description | +|-----|-------------------------------------|--------------------------------------------------| +| 1 | **`get_open_digger_metric`** | Fetch single repository metrics | +| 2 | **`get_open_digger_metrics_batch`** | Batch operations for multiple metrics | +| 3 | **`compare_repositories`** | Multi-repository comparative analysis | +| 4 | **`analyze_trends`** | Growth trend analysis over time periods | +| 5 | **`get_ecosystem_insights`** | Ecosystem analytics & insights | +| 6 | **`server_health`** | System diagnostics and health monitoring (Beta) |d + + +### Prompts (3 Available) +1. **`repo_health_analysis`** - Comprehensive repository health reports +2. **`repo_comparison`** - Competitive repository analysis +3. **`developer_insights`** - Developer activity and contribution analysis -### Prompts (3) -1. **repo_health_analysis** - Comprehensive repo health reports -2. **repo_comparison** - Competitive repository analysis -3. 
**developer_insights** - Developer activity analysis ### Metrics -Core: `openrank`, `stars`, `forks`, `contributors`, `participants`, `issues_new`, `issues_closed`, `pull_requests`, `commits`, `activity` +**Core Metrics**: `openrank`, `stars`, `forks`, `contributors`, `participants`, `issues_new`, `issues_closed`, `pull_requests`, `commits`, `activity` + +**Extended Metrics**: `technical_fork`, `bus_factor`, `releases`, `inactive_contributors`, `maintainer_count`, `community_activity` -Extended: `technical_fork`, `bus_factor`, `releases`, `inactive_contributors`, `maintainer_count`, `community_activity` +--- -## Usage Examples +## 💡 Usage Examples -### Repository Comparison +### 💠 Repository Comparison ``` Compare microsoft/vscode and facebook/react using the compare_repositories tool ``` -### Health Analysis +### 💠 Health Analysis ``` Generate a health report for microsoft/vscode using the repo_health_analysis prompt ``` -### Trend Analysis +### 💠 Trend Analysis ``` Analyze the growth trends for contributors in microsoft/vscode over 2 years ``` +--- + +
+ +## Server Status Check +After starting the server, verify it's working: +```bash +# In a new terminal +echo '{"jsonrpc":"2.0","id":1,"method":"tools/list","params":{}}' | node dist/index.js +``` + +Expected response should list all 6 tools. + + +
+ ## Configuration ### Environment Variables (.env) ```bash +# Cache configuration (recommended) CACHE_TTL_SECONDS=300 -# SSE_PORT=3001 # Optional + +# Optional SSE server +SSE_PORT=3001 +SSE_HOST=127.0.0.1 ``` ### Cursor MCP (.cursor/mcp.json) @@ -81,6 +144,8 @@ CACHE_TTL_SECONDS=300 } ``` +> [!TIP] +> Replace `/full/path/to/open-digger-mcp-server` with your actual project directory path. Use `pwd` to get the current directory path. ## Development @@ -91,7 +156,50 @@ npm run clean # Clean build files npm run sse:test # Test SSE server ``` +> [!TIP] +> Beside Cursor, you can also use other MCP clients like VS Code, Claude Chat, or the official [MCP Inspector](https://modelcontextprotocol.io/docs/tools/inspector). + + +## Troubleshooting + +### Common Issues + +**Server not appearing in Cursor:** +1. Verify absolute paths in `.cursor/mcp.json` +2. Restart Cursor completely (Cmd+Q / Alt+F4) +3. Check MCP Settings section for error messages + +**Permission errors:** +```bash +chmod +x dist/index.js +``` + +**Build errors:** +```bash +npm run clean +npm install +npm run build +``` + +**Cache issues:** +```bash +# Clear npm cache +npm cache clean --force + +# Rebuild +npm run clean && npm run build +``` + +--- + +## Contributing + +1. Fork the repository +2. Create your feature branch (`git checkout -b feature/amazing-feature`) +3. Commit changes (`git commit -m 'Add amazing feature'`) +4. Push to branch (`git push origin feature/amazing-feature`) +5. Open a Pull Request ## License -Apache-2.0 License \ No newline at end of file +Apache-2.0 License - see [LICENSE](LICENSE) file for details. 
\ No newline at end of file diff --git a/mcp-server/.env.example b/mcp-server/.env.example index aa56deb..3c6750c 100644 --- a/mcp-server/.env.example +++ b/mcp-server/.env.example @@ -1,6 +1,8 @@ # OpenDigger MCP Server Configuration (rename to .env for actual use) -# Cache Configuration +# DEEPSEEK_API_KEY= + +# Cache Configuration (Recommended, but update as needed) CACHE_TTL_SECONDS=300 # SSE Server Configuration (Optional) @@ -24,6 +26,3 @@ NODE_ENV=production # OpenDigger API Configuration OPENDIGGER_BASE_URL=https://oss.open-digger.cn/ - -# DEEPSEEK_API_KEY= - diff --git a/mcp-server/.npmignore b/mcp-server/.npmignore new file mode 100644 index 0000000..8217e01 --- /dev/null +++ b/mcp-server/.npmignore @@ -0,0 +1,31 @@ +# Node modules +node_modules/ + +# Build output +dist/ +build/ +out/ + +# Environment variables +.env + +# Logs +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* + +# OS files +.DS_Store +Thumbs.db + +# IDE files +.vscode/ +.idea/ + +# TypeScript cache +*.tsbuildinfo + +# Misc +coverage/ +*.log \ No newline at end of file diff --git a/mcp-server/package.json b/mcp-server/package.json index f69fa24..462c728 100644 --- a/mcp-server/package.json +++ b/mcp-server/package.json @@ -1,7 +1,7 @@ { "name": "open-digger-mcp-server", "version": "1.0.0", - "description": "MCP server for OpenDigger data with comprehensive analysis tools, trend analysis, and repository comparison capabilities", + "description": "MCP server for OpenDigger data with comprehensive analysis tools, trend analysis, repository comparison capabilities and more.", "license": "Apache-2.0", "author": "Frank Zhao", "contributors": [ @@ -32,7 +32,6 @@ "watch": "tsc --watch", "clean": "rm -rf dist", "prepare": "npm run build", - "cache:clear": "node -e \"console.log('Cache management available via server_health tool')\"", "sse:test": "SSE_PORT=3001 npm start", "debug": "NODE_OPTIONS='--inspect' npm start" }, @@ -70,11 +69,6 @@ "node": ">=18.0.0", "npm": ">=8.0.0" }, - 
"os": [ - "darwin", - "linux", - "win32" - ], "config": { "cache_ttl_default": 300, "batch_size_default": 5, From b571165ab7e8548a27ac0d5ee189f29fd5be48d1 Mon Sep 17 00:00:00 2001 From: neilblaze Date: Sat, 27 Sep 2025 14:00:53 +0530 Subject: [PATCH 3/5] feat: reactored code + docs Signed-off-by: neilblaze Co-Authored-By: cs_zhlou --- README.md | 11 +++++++++-- mcp-server/src/analysis.ts | 21 +++++++++++++-------- mcp-server/src/index.ts | 18 +++++++++--------- mcp-server/src/utils.ts | 3 ++- 4 files changed, 33 insertions(+), 20 deletions(-) diff --git a/README.md b/README.md index 1aee9ec..cc78997 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,10 @@ A Model Context Protocol (MCP) server for OpenDigger enabling advanced repositor ```bash # Setup git clone https://github.com/X-lab2017/open-digger-mcp-server.git + +# (Optional) Ensure you're on the master branch +git checkout master + cd open-digger-mcp-server && cd mcp-server npm install npm run build @@ -77,9 +81,12 @@ To further confirm that the server is functioning correctly, you can check the f ### Metrics -**Core Metrics**: `openrank`, `stars`, `forks`, `contributors`, `participants`, `issues_new`, `issues_closed`, `pull_requests`, `commits`, `activity` -**Extended Metrics**: `technical_fork`, `bus_factor`, `releases`, `inactive_contributors`, `maintainer_count`, `community_activity` +- **Core Metrics**: `openrank`, `stars`, `forks`, `contributors`, `participants`, `issues_new`, `issues_closed`, `pull_requests`, `commits`, `activity` + +- **Extended Metrics**: `technical_fork`, `bus_factor`, `releases`, `inactive_contributors`, `maintainer_count`, `community_activity` + +- **Additional Metrics**: `change_requests`, `pull_requests_accepted`, `pull_requests_merged`, `issue_comments`, `issue_response_time`, `code_change_lines`, `developer_network` --- diff --git a/mcp-server/src/analysis.ts b/mcp-server/src/analysis.ts index f6cef36..01fe808 100644 --- a/mcp-server/src/analysis.ts +++ 
b/mcp-server/src/analysis.ts @@ -333,10 +333,15 @@ function createEmptyTrendAnalysis(): TrendAnalysis { } /** - * Calculates the momentum of a time-series dataset. + * Calculates the momentum of a time-series dataset by comparing growth rates between periods. * - * @param values - Array of time-series data points. + * @param values - Array of time-series data points with date and value properties. * @returns The momentum classification: 'accelerating', 'decelerating', 'stable', or 'insufficient_data'. + * + * The function splits the data into two halves and calculates the percentage growth rate for each half. + * It then compares these rates to determine if growth is accelerating (second half growing faster), + * decelerating (second half growing slower), or stable (similar growth rates). A 5% threshold is used + * to determine significance of the difference in growth rates. */ export function calculateMomentum(values: Array<{date: string; value: number}>): 'accelerating' | 'decelerating' | 'stable' | 'insufficient_data' { if (values.length < 6) return 'insufficient_data'; @@ -345,16 +350,16 @@ export function calculateMomentum(values: Array<{date: string; value: number}>): const firstHalf = values.slice(0, midpoint); const secondHalf = values.slice(midpoint); - const firstHalfGrowth = firstHalf.length > 1 - ? ((firstHalf[firstHalf.length - 1]!.value - firstHalf[0]!.value) / firstHalf.length) + const firstHalfGrowthRate = firstHalf.length > 1 && firstHalf[0]!.value > 0 + ? ((firstHalf[firstHalf.length - 1]!.value - firstHalf[0]!.value) / firstHalf[0]!.value) : 0; - const secondHalfGrowth = secondHalf.length > 1 - ? ((secondHalf[secondHalf.length - 1]!.value - secondHalf[0]!.value) / secondHalf.length) + const secondHalfGrowthRate = secondHalf.length > 1 && secondHalf[0]!.value > 0 + ? 
((secondHalf[secondHalf.length - 1]!.value - secondHalf[0]!.value) / secondHalf[0]!.value) : 0; - const momentumDifference = secondHalfGrowth - firstHalfGrowth; - const threshold = Math.max(0.1, (firstHalf[0]?.value ?? 0) * 0.01); // Dynamic threshold based on scale + const momentumDifference = secondHalfGrowthRate - firstHalfGrowthRate; + const threshold = 0.05; // 5% difference in growth rates | cc: @birdflyi, @frank-zsy if (Math.abs(momentumDifference) < threshold) return 'stable'; return momentumDifference > 0 ? 'accelerating' : 'decelerating'; diff --git a/mcp-server/src/index.ts b/mcp-server/src/index.ts index 918f1cb..cfcd6b4 100644 --- a/mcp-server/src/index.ts +++ b/mcp-server/src/index.ts @@ -220,6 +220,11 @@ server.setRequestHandler(CallToolRequestSchema, async (request: any) => { // Process in batches to respect API limits for (let i = 0; i < args.requests.length; i += BATCH_SIZE) { + // Add delay between batches to avoid rate limits (except first batch) + if (i > 0) { + await new Promise(resolve => setTimeout(resolve, 100)); + } + const chunk = args.requests.slice(i, i + BATCH_SIZE); const chunkResults = await Promise.all(chunk.map(async (r: z.infer) => { try { @@ -232,12 +237,6 @@ server.setRequestHandler(CallToolRequestSchema, async (request: any) => { })); results.push(...chunkResults); - - // NOTE: Add delay between batches to avoid rate limits - // TODO: We need to test & confirm if this is necessary | cc: @birdflyi, @frank-zsy - if (i + BATCH_SIZE < args.requests.length) { - await new Promise(resolve => setTimeout(resolve, 100)); - } } return { @@ -622,7 +621,8 @@ server.setRequestHandler(GetPromptRequestSchema, async (request: any) => { async function main() { const transport = new StdioServerTransport(); await server.connect(transport); - console.error("OpenDigger MCP Server running (on stdio)..."); + // console.error("OpenDigger MCP Server running (on stdio)..."); + console.log("OpenDigger MCP Server running (on stdio)..."); const ssePortEnv = 
process.env.SSE_PORT; if (ssePortEnv) { @@ -764,12 +764,12 @@ async function main() { }); serverHttp.listen(ssePort, sseHost, () => { - console.error(`SSE HTTP server listening on http://${sseHost}:${ssePort}`); + console.log(`SSE HTTP server listening on http://${sseHost}:${ssePort}`); }); } } main().catch((error) => { - console.error("Fatal error in main():", error); + console.log("Fatal error in main():", error); process.exit(1); }); \ No newline at end of file diff --git a/mcp-server/src/utils.ts b/mcp-server/src/utils.ts index 89aa8ec..524d02c 100644 --- a/mcp-server/src/utils.ts +++ b/mcp-server/src/utils.ts @@ -89,7 +89,8 @@ export async function fetchWithCache(url: string, ttlSeconds: number): Promise Date: Sat, 27 Sep 2025 20:28:31 +0530 Subject: [PATCH 4/5] resolve cross-platform environment variable issue in sse:test script Signed-off-by: neilblaze --- mcp-server/package.json | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/mcp-server/package.json b/mcp-server/package.json index 462c728..038af0e 100644 --- a/mcp-server/package.json +++ b/mcp-server/package.json @@ -32,7 +32,7 @@ "watch": "tsc --watch", "clean": "rm -rf dist", "prepare": "npm run build", - "sse:test": "SSE_PORT=3001 npm start", + "sse:test": "cross-env SSE_PORT=3001 npm start", "debug": "NODE_OPTIONS='--inspect' npm start" }, "keywords": [ @@ -60,7 +60,8 @@ }, "devDependencies": { "@types/node": "^22", - "typescript": "^5.6.2" + "typescript": "^5.6.2", + "cross-env": "^10.0.0" }, "peerDependencies": { "node": ">=18.0.0" From 2997217065d2e6bd9d366a7b3dc40e047ced0abc Mon Sep 17 00:00:00 2001 From: neilblaze Date: Sun, 28 Sep 2025 21:21:07 +0530 Subject: [PATCH 5/5] fix: add cross-env to debug script for cross-OS compatibility Signed-off-by: neilblaze --- mcp-server/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mcp-server/package.json b/mcp-server/package.json index 038af0e..c7f5940 100644 --- a/mcp-server/package.json +++ 
b/mcp-server/package.json @@ -33,7 +33,7 @@ "clean": "rm -rf dist", "prepare": "npm run build", "sse:test": "cross-env SSE_PORT=3001 npm start", - "debug": "NODE_OPTIONS='--inspect' npm start" + "debug": "cross-env NODE_OPTIONS='--inspect' npm start" }, "keywords": [ "mcp",