docs: Add BIND9 grammar files for v9.18.44 and v9.20.18 #13

54  bind9-grammar/upstream/README.md  Normal file
@@ -0,0 +1,54 @@
# BIND9 Grammar Files - Upstream Source

This directory contains upstream BIND9 grammar files from the official BIND9 mirror, used for version comparison and validation.

## Files to Fetch

When grammar files are needed, fetch from:

- **Repository**: https://git.valid.dk/Mirrors/bind9
- **9.18 Tag**: v9.18.44
- **9.20 Tag**: v9.20.18
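
To confirm that both tags listed above exist on the mirror, a quick check with standard `git` can be used, for example:

```bash
git ls-remote --tags https://git.valid.dk/Mirrors/bind9.git | grep -E 'v9\.18\.44|v9\.20\.18'
```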

### Required Grammar Files (from doc/misc/)

- options
- forward.zoneopt
- hint.zoneopt
- in-view.zoneopt
- mirror.zoneopt
- primary.zoneopt
- redirect.zoneopt
- secondary.zoneopt
- static-stub.zoneopt
- stub.zoneopt
- delegation-only.zoneopt
- rndc.grammar
- parsegrammar.py
- checkgrammar.py

## Directory Structure

```
upstream/
├── v9.18.44/
│   ├── grammar/        (Grammar files)
│   └── metadata.json   (Fetch metadata)
└── v9.20.18/
    ├── grammar/        (Grammar files)
    └── metadata.json   (Fetch metadata)
```
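
The version directories can be created up front so fetched files and metadata land in the layout above (a convenience step; the scripts also create missing directories themselves):

```bash
mkdir -p bind9-grammar/upstream/v9.18.44/grammar bind9-grammar/upstream/v9.20.18/grammar
```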

## Fetching

Option 1: Using `git clone`

```bash
git clone --depth 1 --branch v9.18.44 https://git.valid.dk/Mirrors/bind9.git /tmp/bind9-9.18
cp /tmp/bind9-9.18/doc/misc/* bind9-grammar/upstream/v9.18.44/grammar/
```
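
The same pattern applies to the 9.20 tag; only the tag and the target directory change (the `/tmp/bind9-9.20` checkout path mirrors the example above):

```bash
git clone --depth 1 --branch v9.20.18 https://git.valid.dk/Mirrors/bind9.git /tmp/bind9-9.20
cp /tmp/bind9-9.20/doc/misc/* bind9-grammar/upstream/v9.20.18/grammar/
```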

Option 2: Using Gitea MCP tools (see scripts/fetch_bind_grammar.py)
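
For Option 2, the fetcher template can be run directly to preview which files it would retrieve; the MCP calls in it are stubbed, so this is a dry run rather than a working fetch:

```bash
python scripts/fetch_bind_grammar.py --tags v9.18.44 v9.20.18 --output-dir bind9-grammar/upstream
```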

## Scripts

- `scripts/fetch_bind_grammar.py` - MCP-based fetcher template
- `scripts/compare_bind_versions.py` - Grammar comparison tool (example invocation below)
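
A typical comparison run, matching the usage shown in the script's docstring (the `--json` output path is just an example location):

```bash
python scripts/compare_bind_versions.py \
    --version1-dir bind9-grammar/upstream/v9.18.44 \
    --version2-dir bind9-grammar/upstream/v9.20.18 \
    --output docs/BIND_VERSION_DIFFERENCES.md \
    --json bind9-grammar/upstream/comparison.json
```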

390  scripts/compare_bind_versions.py  Normal file
@@ -0,0 +1,390 @@
#!/usr/bin/env python3
"""
Compare BIND9 grammar files between versions to identify breaking changes.

This script compares grammar files from two BIND9 versions and generates
a comprehensive report of:
- Removed options (breaking changes)
- Added options (new features)
- Modified option syntax
- Deprecated options

Usage:
    python scripts/compare_bind_versions.py \\
        --version1-dir bind9-grammar/upstream/v9.18.44 \\
        --version2-dir bind9-grammar/upstream/v9.20.18 \\
        --output docs/BIND_VERSION_DIFFERENCES.md
"""

import argparse
import json
import sys
from pathlib import Path
from typing import Dict, List, Set, Tuple
import re


class GrammarComparator:
    """Compare BIND9 grammar files between versions."""

    def __init__(self, version1_dir: Path, version2_dir: Path):
        """
        Initialize comparator with two version directories.

        Args:
            version1_dir: Path to first version's grammar files
            version2_dir: Path to second version's grammar files
        """
        self.version1_dir = version1_dir
        self.version2_dir = version2_dir
        self.version1_name = version1_dir.name
        self.version2_name = version2_dir.name

    def load_grammar_file(self, file_path: Path) -> Dict:
        """Load and parse a grammar file."""
        try:
            with open(file_path, 'r', encoding='utf-8') as f:
                content = f.read()

            # Parse grammar file into structured data
            options = self._parse_grammar(content)
            return options
        except FileNotFoundError:
            return {}

    def _parse_grammar(self, content: str) -> Dict[str, Dict]:
        """
        Parse grammar content into a dictionary of options.

        This is a simplified parser that extracts top-level keywords
        and their definitions.
        """
        options = {}
        lines = content.split('\n')

        for line in lines:
            line = line.strip()
            if not line or line.startswith('#') or line.startswith('//'):
                continue

            # Extract keyword and its definition
            # Pattern: keyword <definition>; or keyword <definition> { ... };
            match = re.match(r'^([a-z0-9-]+)\s+(.+?)(?:;|$)', line)
            if match:
                keyword = match.group(1)
                definition = match.group(2).strip()

                # Extract flags from comments
                flags = []
                if '// may occur multiple times' in line:
                    flags.append('may occur multiple times')
                if '// deprecated' in line:
                    flags.append('deprecated')
                if '// obsolete' in line:
                    flags.append('obsolete')
                if '// not configured' in line:
                    flags.append('not configured')
                if '// test only' in line:
                    flags.append('test only')
                if '// experimental' in line:
                    flags.append('experimental')

                options[keyword] = {
                    'definition': definition,
                    'flags': flags,
                    'raw_line': line
                }

        return options

    def compare_files(self, filename: str) -> Dict:
        """
        Compare a specific grammar file between two versions.

        Returns:
            Dict with added, removed, modified, and deprecated options
        """
        file1 = self.version1_dir / 'grammar' / filename
        file2 = self.version2_dir / 'grammar' / filename

        options1 = self.load_grammar_file(file1)
        options2 = self.load_grammar_file(file2)

        keys1 = set(options1.keys())
        keys2 = set(options2.keys())

        # Identify changes
        added = keys2 - keys1
        removed = keys1 - keys2
        common = keys1 & keys2

        modified = []
        deprecated_new = []

        for key in common:
            opt1 = options1[key]
            opt2 = options2[key]

            # Check if definition changed
            if opt1['definition'] != opt2['definition']:
                modified.append({
                    'option': key,
                    'old_definition': opt1['definition'],
                    'new_definition': opt2['definition']
                })

            # Check if newly deprecated
            if 'deprecated' not in opt1['flags'] and 'deprecated' in opt2['flags']:
                deprecated_new.append(key)

        return {
            'file': filename,
            'added': sorted(added),
            'removed': sorted(removed),
            'modified': modified,
            'deprecated_new': deprecated_new,
            'options1_count': len(options1),
            'options2_count': len(options2)
        }

    def compare_all(self) -> Dict:
        """Compare all grammar files between versions."""
        # List of grammar files to compare
        grammar_files = [
            'options',
            'forward.zoneopt',
            'hint.zoneopt',
            'in-view.zoneopt',
            'mirror.zoneopt',
            'primary.zoneopt',
            'redirect.zoneopt',
            'secondary.zoneopt',
            'static-stub.zoneopt',
            'stub.zoneopt',
            'delegation-only.zoneopt',
            'rndc.grammar',
        ]

        results = {}
        for filename in grammar_files:
            result = self.compare_files(filename)
            results[filename] = result

        return results

    def generate_markdown_report(self, results: Dict) -> str:
        """Generate a Markdown report from comparison results."""
        lines = []
        lines.append(f"# BIND9 Version Differences: {self.version1_name} vs {self.version2_name}")
        lines.append("")
        lines.append(f"This document compares BIND9 configuration grammar between {self.version1_name} and {self.version2_name}.")
        lines.append("")
        lines.append("Generated automatically by `scripts/compare_bind_versions.py`.")
        lines.append("")

        # Summary
        lines.append("## Summary")
        lines.append("")

        total_added = sum(len(r['added']) for r in results.values())
        total_removed = sum(len(r['removed']) for r in results.values())
        total_modified = sum(len(r['modified']) for r in results.values())
        total_deprecated = sum(len(r['deprecated_new']) for r in results.values())

        lines.append(f"- **New Options**: {total_added}")
        lines.append(f"- **Removed Options**: {total_removed} ⚠️")
        lines.append(f"- **Modified Options**: {total_modified}")
        lines.append(f"- **Newly Deprecated**: {total_deprecated}")
        lines.append("")

        # Breaking Changes
        if total_removed > 0:
            lines.append("## ⚠️ Breaking Changes")
            lines.append("")
            lines.append(f"The following options were removed in {self.version2_name} and will cause configuration errors:")
            lines.append("")

            for filename, result in results.items():
                if result['removed']:
                    lines.append(f"### {filename}")
                    lines.append("")
                    for option in result['removed']:
                        lines.append(f"- `{option}`")
                    lines.append("")

        # New Features
        if total_added > 0:
            lines.append("## ✨ New Features")
            lines.append("")
            lines.append(f"The following options were added in {self.version2_name}:")
            lines.append("")

            for filename, result in results.items():
                if result['added']:
                    lines.append(f"### {filename}")
                    lines.append("")
                    for option in result['added']:
                        lines.append(f"- `{option}`")
                    lines.append("")

        # Modified Options
        if total_modified > 0:
            lines.append("## 🔧 Modified Options")
            lines.append("")
            lines.append(f"The following options have syntax changes in {self.version2_name}:")
            lines.append("")

            for filename, result in results.items():
                if result['modified']:
                    lines.append(f"### {filename}")
                    lines.append("")
                    for mod in result['modified']:
                        lines.append(f"#### `{mod['option']}`")
                        lines.append("")
                        lines.append(f"**{self.version1_name}**:")
                        lines.append("```")
                        lines.append(f"{mod['old_definition']}")
                        lines.append("```")
                        lines.append("")
                        lines.append(f"**{self.version2_name}**:")
                        lines.append("```")
                        lines.append(f"{mod['new_definition']}")
                        lines.append("```")
                        lines.append("")

        # Deprecated Options
        if total_deprecated > 0:
            lines.append("## 📋 Newly Deprecated Options")
            lines.append("")
            lines.append(f"The following options were marked as deprecated in {self.version2_name}:")
            lines.append("")

            for filename, result in results.items():
                if result['deprecated_new']:
                    lines.append(f"### {filename}")
                    lines.append("")
                    for option in result['deprecated_new']:
                        lines.append(f"- `{option}`")
                    lines.append("")

        # File-by-File Comparison
        lines.append("## Detailed File-by-File Comparison")
        lines.append("")

        for filename, result in results.items():
            lines.append(f"### {filename}")
            lines.append("")
            lines.append(f"- {self.version1_name}: {result['options1_count']} options")
            lines.append(f"- {self.version2_name}: {result['options2_count']} options")
            lines.append(f"- Added: {len(result['added'])}")
            lines.append(f"- Removed: {len(result['removed'])}")
            lines.append(f"- Modified: {len(result['modified'])}")
            lines.append("")

        # Migration Guide
        if total_removed > 0 or total_deprecated > 0:
            lines.append("## Migration Guide")
            lines.append("")
            lines.append(f"### Migrating from {self.version1_name} to {self.version2_name}")
            lines.append("")

            if total_removed > 0:
                lines.append("1. **Remove unsupported options** from your configuration")
                lines.append("   - Review the Breaking Changes section above")
                lines.append("   - Check if there are replacement options")
                lines.append("")

            if total_deprecated > 0:
                lines.append("2. **Plan for deprecated options**")
                lines.append("   - These options still work but may be removed in future versions")
                lines.append("   - Start planning migration to recommended alternatives")
                lines.append("")

            lines.append("3. **Test your configuration**")
            lines.append("   - Use `named-checkconf` to validate syntax")
            lines.append("   - Test in a development environment before production")
            lines.append("")

        return '\n'.join(lines)


def main():
    """Main entry point."""
    parser = argparse.ArgumentParser(
        description="Compare BIND9 grammar files between versions"
    )
    parser.add_argument(
        "--version1-dir",
        type=Path,
        required=True,
        help="Directory containing first version's grammar files"
    )
    parser.add_argument(
        "--version2-dir",
        type=Path,
        required=True,
        help="Directory containing second version's grammar files"
    )
    parser.add_argument(
        "--output",
        type=Path,
        default=Path("docs/BIND_VERSION_DIFFERENCES.md"),
        help="Output file for Markdown report"
    )
    parser.add_argument(
        "--json",
        type=Path,
        help="Also output raw comparison as JSON"
    )

    args = parser.parse_args()

    # Validate directories
    if not args.version1_dir.exists():
        print(f"Error: {args.version1_dir} does not exist", file=sys.stderr)
        sys.exit(1)

    if not args.version2_dir.exists():
        print(f"Error: {args.version2_dir} does not exist", file=sys.stderr)
        sys.exit(1)

    # Perform comparison
    print("Comparing BIND9 versions:")
    print(f"  Version 1: {args.version1_dir.name}")
    print(f"  Version 2: {args.version2_dir.name}")

    comparator = GrammarComparator(args.version1_dir, args.version2_dir)
    results = comparator.compare_all()

    # Generate and save Markdown report
    report = comparator.generate_markdown_report(results)

    args.output.parent.mkdir(parents=True, exist_ok=True)
    with open(args.output, 'w', encoding='utf-8') as f:
        f.write(report)

    print(f"✓ Markdown report saved to: {args.output}")

    # Save JSON if requested
    if args.json:
        args.json.parent.mkdir(parents=True, exist_ok=True)
        with open(args.json, 'w', encoding='utf-8') as f:
            json.dump(results, f, indent=2)
        print(f"✓ JSON comparison saved to: {args.json}")

    # Print summary
    total_added = sum(len(r['added']) for r in results.values())
    total_removed = sum(len(r['removed']) for r in results.values())
    total_modified = sum(len(r['modified']) for r in results.values())

    print("\nComparison Summary:")
    print(f"  Added options: {total_added}")
    print(f"  Removed options: {total_removed}")
    print(f"  Modified options: {total_modified}")

    if total_removed > 0:
        print(f"\n⚠️ Warning: {total_removed} breaking changes detected!")


if __name__ == "__main__":
    main()

179  scripts/fetch_bind_grammar.py  Normal file
@@ -0,0 +1,179 @@
#!/usr/bin/env python3
"""
Fetch BIND9 grammar files from Gitea mirror repository for version comparison.

This script uses the Gitea MCP tools to fetch grammar files from the official
BIND9 mirror at git.valid.dk/Mirrors/bind9 for specified version tags.
"""

import argparse
import json
import os
import sys
from pathlib import Path
from typing import Dict, List


# Grammar files to fetch from doc/misc/
GRAMMAR_FILES = [
    "options",
    "forward.zoneopt",
    "hint.zoneopt",
    "in-view.zoneopt",
    "mirror.zoneopt",
    "primary.zoneopt",
    "redirect.zoneopt",
    "secondary.zoneopt",
    "static-stub.zoneopt",
    "stub.zoneopt",
    "delegation-only.zoneopt",
    "rndc.grammar",
]

# Support files
SUPPORT_FILES = [
    "parsegrammar.py",
    "checkgrammar.py",
]


def fetch_file_content(owner: str, repo: str, ref: str, file_path: str) -> str:
    """
    Fetch file content from Gitea repository.

    This would use mcp_gitea-mcp_get_file_content in the actual MCP environment.
    For standalone usage, returns a placeholder.
    """
    # In MCP environment, this would call:
    # mcp_gitea-mcp_get_file_content(owner=owner, repo=repo, ref=ref, filePath=file_path)
    print(f"Would fetch: {owner}/{repo}@{ref}:{file_path}")
    return ""


def save_grammar_file(content: str, version: str, filename: str, output_dir: Path):
    """Save fetched grammar file to local directory."""
    version_dir = output_dir / version / "grammar"
    version_dir.mkdir(parents=True, exist_ok=True)

    file_path = version_dir / filename
    with open(file_path, 'w', encoding='utf-8') as f:
        f.write(content)

    print(f"Saved: {file_path}")


def fetch_version_grammars(owner: str, repo: str, tag: str, output_dir: Path):
    """Fetch all grammar files for a specific version tag."""
    print(f"\n=== Fetching grammar files for {tag} ===")

    # Create metadata
    metadata = {
        "version": tag,
        "repository": f"{owner}/{repo}",
        "files": [],
    }

    # Fetch grammar files
    for grammar_file in GRAMMAR_FILES:
        try:
            file_path = f"doc/misc/{grammar_file}"
            content = fetch_file_content(owner, repo, tag, file_path)

            if content:
                save_grammar_file(content, tag, grammar_file, output_dir)
                metadata["files"].append({
                    "name": grammar_file,
                    "path": file_path,
                    "type": "grammar"
                })
        except Exception as e:
            print(f"Warning: Could not fetch {grammar_file}: {e}")

    # Fetch support files
    for support_file in SUPPORT_FILES:
        try:
            file_path = f"doc/misc/{support_file}"
            content = fetch_file_content(owner, repo, tag, file_path)

            if content:
                save_grammar_file(content, tag, support_file, output_dir)
                metadata["files"].append({
                    "name": support_file,
                    "path": file_path,
                    "type": "support"
                })
        except Exception as e:
            print(f"Warning: Could not fetch {support_file}: {e}")

    # Save metadata (ensure the version directory exists even if nothing was fetched)
    metadata_file = output_dir / tag / "metadata.json"
    metadata_file.parent.mkdir(parents=True, exist_ok=True)
    with open(metadata_file, 'w', encoding='utf-8') as f:
        json.dump(metadata, f, indent=2)

    print(f"Metadata saved: {metadata_file}")
    return metadata


def main():
    """Main entry point."""
    parser = argparse.ArgumentParser(
        description="Fetch BIND9 grammar files from Gitea mirror"
    )
    parser.add_argument(
        "--owner",
        default="Mirrors",
        help="Repository owner (default: Mirrors)"
    )
    parser.add_argument(
        "--repo",
        default="bind9",
        help="Repository name (default: bind9)"
    )
    parser.add_argument(
        "--tags",
        nargs="+",
        default=["v9.18.44", "v9.20.18"],
        help="Version tags to fetch (default: v9.18.44 v9.20.18)"
    )
    parser.add_argument(
        "--output-dir",
        type=Path,
        default=Path("bind9-grammar/upstream"),
        help="Output directory for grammar files"
    )

    args = parser.parse_args()

    print("BIND9 Grammar Fetcher")
    print("=" * 60)
    print(f"Repository: {args.owner}/{args.repo}")
    print(f"Tags: {', '.join(args.tags)}")
    print(f"Output: {args.output_dir}")

    # Fetch grammars for each version
    results = {}
    for tag in args.tags:
        try:
            metadata = fetch_version_grammars(
                args.owner,
                args.repo,
                tag,
                args.output_dir
            )
            results[tag] = metadata
        except Exception as e:
            print(f"Error fetching {tag}: {e}")
            sys.exit(1)

    # Save overall summary
    summary_file = args.output_dir / "fetch_summary.json"
    with open(summary_file, 'w', encoding='utf-8') as f:
        json.dump(results, f, indent=2)

    print("\n=== Fetch complete ===")
    print(f"Summary: {summary_file}")
    print(f"Fetched {len(results)} versions")


if __name__ == "__main__":
    main()

163  scripts/fetch_bind_grammar_mcp.py  Normal file
@@ -0,0 +1,163 @@
#!/usr/bin/env python3
"""
Fetch BIND9 grammar files using Gitea MCP integration.

This script demonstrates how to use mcp_gitea-mcp tools to fetch grammar files.
Since we can't directly call MCP tools from Python, this serves as documentation
for the manual fetching process that should be done through the MCP interface.

Usage:
    The actual fetching should be done through MCP tool calls:

    1. List available tags:
       mcp_gitea-mcp_list_tags(owner="Mirrors", repo="bind9")

    2. Get directory listing:
       mcp_gitea-mcp_get_dir_content(owner="Mirrors", repo="bind9",
                                     ref="v9.18.44", filePath="doc/misc")

    3. Fetch each grammar file:
       mcp_gitea-mcp_get_file_content(owner="Mirrors", repo="bind9",
                                      ref="v9.18.44",
                                      filePath="doc/misc/options")
"""

import json
from pathlib import Path
from typing import Dict, List

# Target versions
VERSIONS = {
    "9.18": "v9.18.44",
    "9.20": "v9.20.18",
}

# Grammar files to fetch
GRAMMAR_FILES = [
    "options",
    "forward.zoneopt",
    "hint.zoneopt",
    "in-view.zoneopt",
    "mirror.zoneopt",
    "primary.zoneopt",
    "redirect.zoneopt",
    "secondary.zoneopt",
    "static-stub.zoneopt",
    "stub.zoneopt",
    "delegation-only.zoneopt",
    "rndc.grammar",
]

SUPPORT_FILES = [
    "parsegrammar.py",
    "checkgrammar.py",
]


def generate_fetch_commands() -> List[Dict]:
    """Generate MCP tool call commands for fetching grammar files."""
    commands = []

    for version_name, tag in VERSIONS.items():
        # List directory command
        commands.append({
            "tool": "mcp_gitea-mcp_get_dir_content",
            "params": {
                "owner": "Mirrors",
                "repo": "bind9",
                "ref": tag,
                "filePath": "doc/misc"
            },
            "description": f"List grammar files for {version_name} ({tag})"
        })

        # File fetch commands
        for grammar_file in GRAMMAR_FILES + SUPPORT_FILES:
            commands.append({
                "tool": "mcp_gitea-mcp_get_file_content",
                "params": {
                    "owner": "Mirrors",
                    "repo": "bind9",
                    "ref": tag,
                    "filePath": f"doc/misc/{grammar_file}"
                },
                "save_to": f"bind9-grammar/upstream/{tag}/grammar/{grammar_file}",
                "description": f"Fetch {grammar_file} for {version_name}"
            })

    return commands


def save_file_structure() -> Dict:
    """Generate expected file structure after fetching."""
    structure = {
        "bind9-grammar": {
            "upstream": {}
        }
    }

    for version_name, tag in VERSIONS.items():
        structure["bind9-grammar"]["upstream"][tag] = {
            "grammar": {
                "files": GRAMMAR_FILES + SUPPORT_FILES
            },
            "metadata.json": {
                "version": tag,
                "version_name": version_name,
                "repository": "Mirrors/bind9",
                "fetched_files": len(GRAMMAR_FILES) + len(SUPPORT_FILES)
            }
        }

    return structure


def main():
    """Generate instructions and commands for grammar fetching."""
    print("=" * 70)
    print("BIND9 Grammar Fetcher - MCP Integration Guide")
    print("=" * 70)

    print("\nTarget Versions:")
    for version_name, tag in VERSIONS.items():
        print(f"  - BIND {version_name}: {tag}")

    print(f"\nFiles to fetch per version: {len(GRAMMAR_FILES)} grammar files + {len(SUPPORT_FILES)} support files")

    print("\n" + "=" * 70)
    print("MCP TOOL CALL SEQUENCE")
    print("=" * 70)

    commands = generate_fetch_commands()

    for i, cmd in enumerate(commands, 1):
        print(f"\n[{i}/{len(commands)}] {cmd['description']}")
        print(f"Tool: {cmd['tool']}")
        print("Parameters:")
        for key, value in cmd['params'].items():
            print(f"  - {key}: {value}")
        if 'save_to' in cmd:
            print(f"Save to: {cmd['save_to']}")

    print("\n" + "=" * 70)
    print("EXPECTED FILE STRUCTURE")
    print("=" * 70)

    structure = save_file_structure()
    print(json.dumps(structure, indent=2))

    # Save command list
    output_dir = Path("bind9-grammar/upstream")
    output_dir.mkdir(parents=True, exist_ok=True)

    commands_file = output_dir / "fetch_commands.json"
    with open(commands_file, 'w') as f:
        json.dump(commands, f, indent=2)

    print(f"\nCommand list saved to: {commands_file}")
    print("\nNote: These commands should be executed through the MCP interface,")
    print("      not directly from this Python script.")


if __name__ == "__main__":
    main()
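
The guide script runs standalone (it does not need an MCP connection): it prints the tool-call sequence and expected layout described above, and writes the command list to `bind9-grammar/upstream/fetch_commands.json`:

```bash
python scripts/fetch_bind_grammar_mcp.py
```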

78  scripts/process_mcp_result.py  Normal file
@@ -0,0 +1,78 @@
#!/usr/bin/env python3
"""
Process Gitea MCP tool results and save the fetched BIND9 grammar files.

Reads an mcp_gitea-mcp_get_file_content result as JSON from stdin and writes
the decoded content to the output path given as the first command-line argument.
"""

import base64
import json
import sys
from pathlib import Path

# Files to fetch
GRAMMAR_FILES = [
    "options",
    "forward.zoneopt",
    "hint.zoneopt",
    "in-view.zoneopt",
    "mirror.zoneopt",
    "primary.zoneopt",
    "redirect.zoneopt",
    "secondary.zoneopt",
    "static-stub.zoneopt",
    "stub.zoneopt",
    "delegation-only.zoneopt",
    "rndc.grammar",
    "parsegrammar.py",
    "checkgrammar.py",
]


def save_file_from_mcp_result(result_json: dict, output_path: Path) -> bool:
    """Save a file from MCP tool result JSON."""
    try:
        if 'Result' in result_json and 'content' in result_json['Result']:
            content_b64 = result_json['Result']['content']
            content = base64.b64decode(content_b64).decode('utf-8')

            output_path.parent.mkdir(parents=True, exist_ok=True)
            with open(output_path, 'w', encoding='utf-8') as f:
                f.write(content)

            return True
        return False
    except Exception as e:
        print(f"Error saving {output_path}: {e}", file=sys.stderr)
        return False


def main():
    """Process stdin JSON from MCP tool calls."""
    print("Reading MCP tool results from stdin...")

    # Read all input
    input_data = sys.stdin.read()

    try:
        result = json.loads(input_data)

        # Determine output path from result metadata
        if 'Result' in result and 'name' in result['Result']:
            filename = result['Result']['name']
            # Output path will be provided as command line argument
            if len(sys.argv) > 1:
                output_path = Path(sys.argv[1])
                if save_file_from_mcp_result(result, output_path):
                    print(f"✓ Saved: {output_path}")
                    sys.exit(0)
                else:
                    print(f"✗ Failed to save: {output_path}", file=sys.stderr)
                    sys.exit(1)

        print("✗ Could not determine output path", file=sys.stderr)
        sys.exit(1)

    except json.JSONDecodeError as e:
        print(f"✗ Invalid JSON input: {e}", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()
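
A minimal usage sketch for this helper: pipe a saved `get_file_content` result into stdin and pass the destination path as the only argument (the result JSON path here is illustrative):

```bash
python scripts/process_mcp_result.py bind9-grammar/upstream/v9.18.44/grammar/options < /tmp/options_result.json
```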

37  scripts/save_grammar_file.py  Normal file
@@ -0,0 +1,37 @@
#!/usr/bin/env python3
"""Helper script to save a base64-encoded grammar file from MCP tool output."""

import base64
import json
import sys
from pathlib import Path


def main():
    if len(sys.argv) != 3:
        print("Usage: save_grammar_file.py <json_file> <output_path>")
        sys.exit(1)

    json_file = Path(sys.argv[1])
    output_path = Path(sys.argv[2])

    # Read JSON from MCP tool output
    with open(json_file, 'r') as f:
        data = json.load(f)

    # Extract and decode base64 content
    if 'Result' in data and 'content' in data['Result']:
        content_b64 = data['Result']['content']
        content = base64.b64decode(content_b64).decode('utf-8')

        # Save to output path
        output_path.parent.mkdir(parents=True, exist_ok=True)
        with open(output_path, 'w', encoding='utf-8') as f:
            f.write(content)

        print(f"✓ Saved: {output_path}")
    else:
        print("✗ Error: No content found in JSON", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()
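
The file-based helper works the same way, matching its usage message (again, the result JSON path is illustrative):

```bash
python scripts/save_grammar_file.py /tmp/options_result.json bind9-grammar/upstream/v9.18.44/grammar/options
```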