published package

This commit is contained in:
Pratik Narola 2025-07-11 18:21:50 +05:30
parent 885bd1075b
commit 762c91f9f3
4 changed files with 233 additions and 73 deletions

108
.npmignore Normal file
View file

@@ -0,0 +1,108 @@
# Source files
src/
*.ts
!*.d.ts
# Development files
.roo/
.workflows/
.test-workflows/
test-prompt-for-llm.md
# Build artifacts
.tsbuildinfo
tsconfig.json
# Development dependencies
node_modules/
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# IDE files
.vscode/
.idea/
*.swp
*.swo
*~
# OS files
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db
# Git files
.git/
.gitignore
# Logs
*.log
logs/
# Runtime data
pids/
*.pid
*.seed
*.pid.lock
# Coverage directory used by tools like istanbul
coverage/
.nyc_output/
# Dependency directories
jspm_packages/
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
.env.test
.env.production
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# next.js build output
.next
# nuxt.js build output
.nuxt
# vuepress build output
.vuepress/dist
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port

152
README.md
View file

@@ -20,12 +20,96 @@ This server helps LLMs follow any structured workflow without losing context or
## Installation
### From npm (Recommended)
```bash
npm install -g dfa-mcp-server
```
Or install locally in your project:
```bash
npm install dfa-mcp-server
```
### From Source
```bash
git clone https://github.com/your-username/dfa-mcp-server.git
cd dfa-mcp-server
npm install
npm run build
```
## Configuration
### Claude Desktop Setup
Add to your Claude Desktop MCP configuration:
**Basic Configuration:**
```json
{
"mcpServers": {
"dfa-workflow": {
"command": "npx",
"args": ["dfa-mcp-server"]
}
}
}
```
**With OpenAI:**
```json
{
"mcpServers": {
"dfa-workflow": {
"command": "npx",
"args": ["dfa-mcp-server"],
"env": {
"LLM_BASE_URL": "https://api.openai.com",
"LLM_JUDGE_MODEL": "gpt-4",
"LLM_API_KEY": "sk-your-openai-key"
}
}
}
}
```
**With Custom Endpoint (Gemini via Veronica):**
```json
{
"mcpServers": {
"dfa-workflow": {
"command": "npx",
"args": ["dfa-mcp-server"],
"env": {
"LLM_BASE_URL": "https://your-llm-api-endpoint.com",
"LLM_JUDGE_MODEL": "gemini-2.5-pro",
"LLM_API_KEY": "sk-your-api-key"
}
}
}
}
```
## Quick Start
### Using as Global Command
After installing globally:
```bash
dfa-mcp-server
```
### Using as Node Module
```javascript
const { McpServer } = require('dfa-mcp-server');
// Server will start automatically when imported
```
## Running the Server
For development:
@@ -530,71 +614,3 @@ Suggestions: ["Add detailed approval comments", "Include review findings",
The LLM judge provides richer, context-aware feedback!
## Adding to Claude Desktop
Add to your Claude Desktop configuration:
```json
{
"mcpServers": {
"dfa-workflow": {
"command": "node",
"args": ["/path/to/dfa-mcp-server/dist/index.js"],
"env": {
"LLM_BASE_URL": "https://api.openai.com",
"LLM_JUDGE_MODEL": "gpt-4",
"LLM_API_KEY": "sk-your-api-key-here",
"LLM_JUDGE_THINKING_MODE": "high"
}
}
}
}
```
### Configuration Examples
**Without LLM Judge (default):**
```json
{
"mcpServers": {
"dfa-workflow": {
"command": "node",
"args": ["/path/to/dfa-mcp-server/dist/index.js"]
}
}
}
```
**With OpenAI:**
```json
{
"mcpServers": {
"dfa-workflow": {
"command": "node",
"args": ["/path/to/dfa-mcp-server/dist/index.js"],
"env": {
"LLM_BASE_URL": "https://api.openai.com",
"LLM_JUDGE_MODEL": "gpt-4",
"LLM_API_KEY": "sk-your-openai-key"
}
}
}
}
```
**With Custom Endpoint (Gemini via Veronica):**
```json
{
"mcpServers": {
"dfa-workflow": {
"command": "node",
"args": ["/path/to/dfa-mcp-server/dist/index.js"],
"env": {
"LLM_BASE_URL": "https://your-llm-api-endpoint.com",
"LLM_JUDGE_MODEL": "gemini-2.5-pro",
"LLM_API_KEY": "sk-your-api-key"
}
}
}
}
```

View file

@@ -3,15 +3,47 @@
"version": "1.0.0",
"description": "DFA-based workflow MCP server for guiding LLM task completion",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"bin": {
"dfa-mcp-server": "dist/index.js"
},
"files": [
"dist/**/*",
"README.md",
"LICENSE"
],
"scripts": {
"build": "tsc",
"dev": "tsx src/index.ts",
"start": "node dist/index.js",
"test": "echo \"No tests yet\""
"test": "echo \"No tests yet\"",
"prepublishOnly": "npm run build"
},
"keywords": ["mcp", "workflow", "state-machine", "dfa"],
"author": "",
"keywords": [
"mcp",
"workflow",
"state-machine",
"dfa",
"model-context-protocol",
"llm",
"ai",
"automation",
"finite-automata",
"workflow-engine"
],
"author": "DFA MCP Server Contributors",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/your-username/dfa-mcp-server.git"
},
"homepage": "https://github.com/your-username/dfa-mcp-server#readme",
"bugs": {
"url": "https://github.com/your-username/dfa-mcp-server/issues"
},
"engines": {
"node": ">=18.0.0"
},
"dependencies": {
"@modelcontextprotocol/sdk": "^1.0.0",
"zod": "^3.0.0"

View file

@@ -13,8 +13,12 @@
"moduleResolution": "node",
"declaration": true,
"declarationMap": true,
"sourceMap": true
"sourceMap": true,
"removeComments": false,
"preserveConstEnums": true,
"incremental": true,
"tsBuildInfoFile": "./dist/.tsbuildinfo"
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"]
"exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts"]
}