diff --git a/README.md b/README.md index d39e324..b7b45f3 100644 --- a/README.md +++ b/README.md @@ -19,6 +19,7 @@ - **🛡️ Smart File Handling** - Multiple strategies for managing existing files (overwrite, skip, backup, etc.) - **🪝 Automation Hooks** - Pre and post-generation shell commands - **🎯 Dry Run Mode** - Preview changes before applying them +- **🔎 Structure Explanations** - Inspect nested structures, variables, hooks, remote references, and conflict behavior before generation - **✅ Validation & Schema** - Built-in YAML validation and IDE support - **🤖 MCP Integration** - Model Context Protocol support for AI-assisted development workflows @@ -73,6 +74,9 @@ docker run -v $(pwd):/workdir ghcr.io/httpdss/structkit:main generate my-config. ### Basic Usage ```bash +# Explain a Terraform module structure without writing files +structkit explain terraform/modules/generic + # Generate a Terraform module structure structkit generate terraform-module ./my-terraform-module diff --git a/docs/cli-reference.md b/docs/cli-reference.md index d8d0089..1555daf 100644 --- a/docs/cli-reference.md +++ b/docs/cli-reference.md @@ -9,7 +9,7 @@ The `struct` CLI allows you to generate project structures from YAML configurati **Basic Usage:** ```sh -structkit {info,validate,generate,list,generate-schema,mcp,completion,init} ... +structkit {info,validate,generate,explain,vars,list,generate-schema,mcp,completion,init} ... ``` ## Global Options @@ -115,6 +115,61 @@ structkit generate - `--mappings-file MAPPINGS_FILE`: Path to a YAML file containing mappings to be used in templates (can be specified multiple times). - `-o {console,file}, --output {console,file}`: Output mode. +### `explain` + +Explain how a structure definition resolves before generation. 
This is structure-focused: it lists the files and folders that would be generated, nested structures referenced by `folders[].struct`, remote file references, declared variables and resolved values, hooks that would be present, and the configured conflict behavior. It does not create files or folders, fetch remote content, generate prompt-based content, or execute hooks. + +Use `generate --dry-run` when you want a generation-style preview of file operations and diffs. Use `explain` when you want to inspect the structure graph and metadata before any generation work happens. + +**Usage:** + +```sh +structkit explain [-h] [-l LOG] [-c CONFIG_FILE] [-i LOG_FILE] [-s STRUCTURES_PATH] [-v VARS] [--json] [-o {text,json}] [-f {overwrite,skip,append,rename,backup}] [--mappings-file MAPPINGS_FILE] [structure_definition] [base_path] +``` + +**Arguments:** + +- `structure_definition` (optional): Built-in structure name or path to a YAML structure file (default: `.struct.yaml`). +- `base_path` (optional): Base path used to resolve generated paths (default: `.`). +- `-s STRUCTURES_PATH, --structures-path STRUCTURES_PATH`: Path to structure definitions. +- `-v VARS, --vars VARS`: Template variables in the format KEY1=value1,KEY2=value2; shown in the explanation with resolved defaults. +- `--json`: Output the explanation as JSON. +- `-o {text,json}, --output {text,json}`: Output format (default: `text`). +- `-f {overwrite,skip,append,rename,backup}, --file-strategy {overwrite,skip,append,rename,backup}`: File conflict strategy to explain. +- `--mappings-file MAPPINGS_FILE`: Path to a YAML mappings file for resolving `with` values and templates (can be specified multiple times). + +**Examples:** + +```sh +structkit explain terraform/modules/generic +structkit explain ./my-struct.yaml --vars project_name=demo +structkit explain project/python --json +``` + +### `vars` + +Inspect variables declared by a structure definition without generating files. 
+ +**Usage:** + +```sh +structkit vars [-h] [-l LOG] [-c CONFIG_FILE] [-i LOG_FILE] [-s STRUCTURES_PATH] [--json] structure_definition +``` + +**Arguments:** + +- `structure_definition`: Built-in structure name, custom structure name, or local YAML file path. Local `.yaml` and `.yml` files can be passed directly, or with `file://`. +- `-s STRUCTURES_PATH, --structures-path STRUCTURES_PATH`: Path to custom structure definitions. Can be set via the `STRUCTKIT_STRUCTURES_PATH` environment variable. +- `--json`: Print machine-readable JSON with each variable's name, type, default value, description/help text, and required status. + +Examples: + +```sh +structkit vars project/python +structkit vars ./my-struct.yaml --json +structkit vars python-basic --structures-path ~/custom-structures +``` + ### `list` List available structures. @@ -214,6 +269,16 @@ Pass template variables to the structure: structkit generate -v "project_name=MyApp,author=John Doe" file://structure.yaml ./output ``` +### Explaining Structure Resolution + +Preview the structure graph, variables, hooks, remote file references, and conflict behavior without writing files or executing hooks: + +```sh +structkit explain project/python --json +``` + +`explain` is different from `generate --dry-run`: `explain` focuses on resolving and describing the structure definition, while `generate --dry-run` follows the generation path to preview file operations. + ### Dry Run Test structure generation without creating files: diff --git a/docs/mcp-integration.md b/docs/mcp-integration.md index a47178c..c9fe987 100644 --- a/docs/mcp-integration.md +++ b/docs/mcp-integration.md @@ -69,7 +69,57 @@ Generate a project structure using specified definition and options. - `mappings` (optional): Variable mappings for template substitution - `structures_path` (optional): Custom path to structure definitions -### 4. validate_structure +### 4. 
get_structure_vars +Inspect variables declared by a specific structure without generating files. + +```json +{ + "name": "get_structure_vars", + "arguments": { + "structure_name": "project/python", + "structures_path": "/path/to/custom/structures", // optional + "output": "json" // "text" or "json", optional + } +} +``` + +**Parameters:** +- `structure_name` (required): Name or local YAML path of the structure to inspect +- `structures_path` (optional): Custom path to structure definitions +- `output` (optional): Output format - "text" for aligned human-readable output or "json" for machine-readable output (default: "text") + +### 5. explain_structure +Explain how a structure resolves without creating files, fetching remote content, generating prompt-based content, or executing hooks. This is useful when an AI assistant needs to inspect the structure graph before deciding whether to generate it. + +```json +{ + "name": "explain_structure", + "arguments": { + "structure_definition": "project/python", + "base_path": "/tmp/myproject", + "output": "json", + "variables": { + "project_name": "MyProject" + }, + "mappings": { + "team": "platform" + }, + "file_strategy": "overwrite", + "structures_path": "/path/to/custom/structures" + } +} +``` + +**Parameters:** +- `structure_definition` (required): Name or path to the structure definition +- `base_path` (optional): Base path used to resolve generated paths (default: `.`) +- `output` (optional): Output mode - `"text"` or `"json"` (default: `"text"`) +- `variables` (optional): Template variables to resolve structure names, paths, hooks, and nested `with` values +- `mappings` (optional): Additional mappings exposed to templates as `mappings` +- `file_strategy` (optional): Conflict strategy to explain: `overwrite`, `skip`, `append`, `rename`, or `backup` (default: `overwrite`) +- `structures_path` (optional): Custom path to structure definitions + +### 6. validate_structure Validate a structure configuration YAML file. 
```json @@ -204,6 +254,12 @@ async def main(): # FastMCP tools return plain text content print(result.content[0].text) + explanation = await session.call_tool("explain_structure", { + "structure_definition": "project/python", + "output": "json" + }) + print(explanation.content[0].text) + if __name__ == "__main__": asyncio.run(main()) ``` @@ -225,8 +281,9 @@ The MCP tools can be chained together for complex workflows: 1. List available structures 2. Get detailed info about a specific structure -3. Generate the structure with custom mappings -4. Validate any custom configurations +3. Explain the structure to preview files, folders, variables, hooks, and remote references +4. Generate the structure with custom mappings +5. Validate any custom configurations ### Integration Examples @@ -265,6 +322,32 @@ The MCP tools can be chained together for complex workflows: } ``` + +**Example 3: Explain Before Generate** +```json +// 1. Explain structure resolution without side effects +{ + "name": "explain_structure", + "arguments": { + "structure_definition": "project/python", + "base_path": "/tmp/review", + "output": "json", + "variables": { + "project_name": "ReviewProject" + } + } +} + +// 2. 
If the explanation looks correct, generate the structure +{ + "name": "generate_structure", + "arguments": { + "structure_definition": "project/python", + "base_path": "/tmp/review" + } +} +``` + ## Configuration ### Environment Variables @@ -353,6 +436,8 @@ Once connected, you can use these tools: - `list_structures` - Get all available structures - `get_structure_info` - Get details about a specific structure - `generate_structure` - Generate project structures +- `get_structure_vars` - Inspect declared structure variables +- `explain_structure` - Explain structure resolution without side effects - `validate_structure` - Validate YAML configuration files ## Troubleshooting diff --git a/docs/usage.md b/docs/usage.md index 81eb32e..635e2cf 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -6,6 +6,7 @@ Run the script with the following command using one of the following subcommands - `generate-schema`: Generate JSON schema for available structure templates. - `validate`: Validate the YAML configuration file. - `info`: Display information about the script and its dependencies. +- `vars`: Inspect variables declared by a structure definition without generating files. - `list`: List the available structs For more information, run the script with the `-h` or `--help` option (this is also available for each subcommand): @@ -145,6 +146,19 @@ The file includes: - A README.md placeholder in files - A folders entry pointing to the github/workflows/run-structkit workflow at ./ + +### Inspect Variables + +Use `structkit vars` to see the inputs a structure declares before running `generate`. The command supports built-in structures, custom structures via `--structures-path`, and local YAML files without creating any files. 
+ +```sh +structkit vars project/python +structkit vars ./my-struct.yaml --json +structkit vars python-basic --structures-path ~/custom-structures +``` + +Text output lists each variable's name, type, default value, description/help text, and whether it is required or optional. Use `--json` for CI and other machine-readable workflows. + ### Validate Configuration ```sh diff --git a/structkit/commands/explain.py b/structkit/commands/explain.py new file mode 100644 index 0000000..b03ee33 --- /dev/null +++ b/structkit/commands/explain.py @@ -0,0 +1,399 @@ +import argparse +import json +import os + +import yaml +from jinja2 import Environment + +from structkit.commands import Command +from structkit.completers import file_strategy_completer, structures_completer +from structkit.filters import ( + env as env_get, + from_json, + from_yaml, + gen_uuid, + get_default_branch, + get_latest_release, + now_iso, + read_file, + slugify, + to_json, + to_yaml, +) +from structkit.utils import get_current_repo + + +class ExplainCommand(Command): + """Preview how a structure resolves without creating files or running hooks.""" + + REMOTE_PREFIXES = ( + "https://", + "github://", + "githubhttps://", + "githubssh://", + "s3://", + "gs://", + ) + + def __init__(self, parser): + super().__init__(parser) + parser.description = "Explain structure resolution without writing files or running hooks" + structure_arg = parser.add_argument( + 'structure_definition', + nargs='?', + default='.struct.yaml', + type=str, + help='Built-in structure name or path to a YAML structure file (default: .struct.yaml)' + ) + structure_arg.completer = structures_completer + parser.add_argument('base_path', nargs='?', default='.', type=str, help='Base path used to resolve generated paths (default: current directory)') + parser.add_argument( + '-s', + '--structures-path', + type=str, + help='Path to structure definitions (env: STRUCTKIT_STRUCTURES_PATH)', + default=os.getenv('STRUCTKIT_STRUCTURES_PATH', None) + ) 
+ parser.add_argument('-v', '--vars', type=str, help='Template variables in the format KEY1=value1,KEY2=value2') + parser.add_argument('--json', action='store_true', dest='json_output', help='Output the explanation as JSON') + parser.add_argument('-o', '--output', choices=['text', 'json'], default='text', help='Output format (default: text)') + parser.add_argument( + '-f', + '--file-strategy', + type=str, + choices=['overwrite', 'skip', 'append', 'rename', 'backup'], + default=os.getenv('STRUCTKIT_FILE_STRATEGY', 'overwrite'), + help='File conflict strategy to explain (env: STRUCTKIT_FILE_STRATEGY)' + ).completer = file_strategy_completer + parser.add_argument('--mappings-file', type=str, action='append', help='Path to a YAML mappings file (can be specified multiple times)') + parser.set_defaults(func=self.execute) + + def execute(self, args): + mappings = self._load_mappings(getattr(args, 'mappings_file', None)) + explanation = self.explain(args, mappings=mappings) + if explanation is None: + return + + if args.json_output or args.output == 'json': + print(json.dumps(explanation, indent=2, sort_keys=True)) + else: + print(self._format_text(explanation)) + + def explain(self, args, mappings=None): + if isinstance(args, dict): + args = argparse.Namespace(**args) + + vars_provided = self._parse_template_vars(getattr(args, 'vars', None)) + explanation = { + "structure_definition": args.structure_definition, + "base_path": args.base_path, + "file_strategy": getattr(args, 'file_strategy', 'overwrite'), + "variables": [], + "files": [], + "folders": [], + "remote_files": [], + "hooks": {"pre": [], "post": []}, + "notes": ["No files or folders are created and hooks are not executed by explain."], + } + self._collect_structure( + args.structure_definition, + args.base_path, + getattr(args, 'structures_path', None), + vars_provided, + mappings or {}, + explanation, + depth=0, + parent_chain=[], + file_strategy=getattr(args, 'file_strategy', 'overwrite'), + ) + return 
explanation + + def _collect_structure(self, structure_definition, base_path, structures_path, template_vars, mappings, explanation, depth, parent_chain, file_strategy): + resolved = self._resolve_structure_path(structure_definition, structures_path) + if resolved in parent_chain: + explanation["notes"].append(f"Skipped recursive structure reference: {structure_definition}") + return + + config = self._load_yaml_config(structure_definition, structures_path) + if config is None: + return + + config_variables = config.get('variables', []) or [] + resolved_vars = self._resolve_variables(config_variables, template_vars) + for variable in resolved_vars: + variable["structure"] = structure_definition + variable["depth"] = depth + explanation["variables"].append(variable) + + render_context = {**{v["name"]: v.get("value") for v in resolved_vars}, **template_vars} + if mappings: + render_context['mappings'] = mappings + + for hook in config.get('pre_hooks', []) or []: + explanation["hooks"]["pre"].append({"command": self._render(str(hook), render_context), "structure": structure_definition, "depth": depth}) + for hook in config.get('post_hooks', []) or []: + explanation["hooks"]["post"].append({"command": self._render(str(hook), render_context), "structure": structure_definition, "depth": depth}) + + for item in config.get('files', config.get('structure', [])) or []: + if not isinstance(item, dict): + continue + for name, content in item.items(): + rendered_name = self._render(str(name), render_context) + file_path = os.path.join(base_path, rendered_name) + exists = os.path.exists(file_path) + entry = { + "path": file_path, + "name": rendered_name, + "structure": structure_definition, + "depth": depth, + "exists": exists, + "conflict_behavior": self._conflict_behavior(exists, content, file_strategy), + "source": "inline", + } + if isinstance(content, dict): + if content.get('file'): + remote = self._render(str(content.get('file')), render_context) + entry["source"] = 
"remote" if self._is_remote(remote) else "local_file" + entry["content_location"] = remote + if self._is_remote(remote): + explanation["remote_files"].append({"path": file_path, "content_location": remote, "structure": structure_definition, "depth": depth}) + if content.get('skip'): + entry["skip"] = True + if content.get('skip_if_exists'): + entry["skip_if_exists"] = True + if content.get('user_prompt'): + entry["source"] = "prompt" + entry["prompt"] = "Content would be generated from user_prompt during generate." + explanation["files"].append(entry) + + for item in config.get('folders', []) or []: + if not isinstance(item, dict): + continue + for folder, content in item.items(): + rendered_folder = self._render(str(folder), render_context) + folder_path = os.path.join(base_path, rendered_folder) + content = content or {} + structs = content.get('struct') if isinstance(content, dict) else None + structs_list = structs if isinstance(structs, list) else ([structs] if isinstance(structs, str) else []) + with_vars = self._render_with_vars(content.get('with', {}) if isinstance(content, dict) else {}, render_context, mappings) + explanation["folders"].append({ + "path": folder_path, + "name": rendered_folder, + "structure": structure_definition, + "depth": depth, + "exists": os.path.exists(folder_path), + "structs": structs_list, + "with": with_vars, + }) + for nested in structs_list: + nested_vars = {**template_vars, **with_vars} + self._collect_structure(nested, folder_path, structures_path, nested_vars, mappings, explanation, depth + 1, parent_chain + [resolved], file_strategy) + + def _resolve_structure_path(self, structure_definition, structures_path): + normalized = structure_definition[7:] if structure_definition.startswith("file://") else structure_definition + if normalized.endswith(('.yaml', '.yml')): + return os.path.abspath(normalized) + + search_roots = [] + if structures_path: + search_roots.append(structures_path) + this_file = 
os.path.dirname(os.path.realpath(__file__)) + search_roots.append(os.path.join(this_file, "..", "contribs")) + for root in search_roots: + for suffix in ("", ".yaml", ".yml"): + candidate = os.path.join(root, f"{normalized}{suffix}") + if os.path.exists(candidate): + return os.path.abspath(candidate) + return os.path.abspath(normalized) + + def _load_yaml_config(self, structure_definition, structures_path): + file_path = self._resolve_structure_path(structure_definition, structures_path) + if not os.path.exists(file_path): + self.logger.error(f"❗ File not found: {file_path}") + return None + with open(file_path, 'r') as f: + return yaml.safe_load(f) or {} + + def _parse_template_vars(self, vars_str): + result = {} + if not vars_str: + return result + tokens = [t.strip() for t in vars_str.strip(', ').split(',')] + for token in tokens: + if not token or '=' not in token: + continue + key, value = token.split('=', 1) + key = key.strip() + if key: + result[key] = value + return result + + def _resolve_variables(self, config_variables, template_vars): + resolved = [] + seen = set() + for item in config_variables: + if not isinstance(item, dict): + continue + for name, definition in item.items(): + definition = definition or {} + value = template_vars.get(name, definition.get('default')) + env_key = definition.get('env') or definition.get('default_from_env') + if name not in template_vars and env_key and os.environ.get(env_key) is not None: + value = os.environ.get(env_key) + resolved.append({ + "name": name, + "value": value, + "provided": name in template_vars, + "default": definition.get('default'), + "type": definition.get('type', 'string'), + "required": definition.get('required', False), + "description": definition.get('description') or definition.get('help'), + }) + seen.add(name) + for name, value in template_vars.items(): + if name not in seen: + resolved.append({"name": name, "value": value, "provided": True, "declared": False}) + return resolved + + def 
_render_with_vars(self, with_vars, context, mappings): + if not isinstance(with_vars, dict): + return {} + rendered = {} + render_context = context.copy() + if mappings: + render_context['mappings'] = mappings + for key, value in with_vars.items(): + rendered[key] = self._render(str(value), render_context) + return rendered + + def _render(self, value, context): + env = Environment( + trim_blocks=True, + block_start_string='{%@', + block_end_string='@%}', + variable_start_string='{{@', + variable_end_string='@}}', + comment_start_string='{#@', + comment_end_string='@#}' + ) + env.globals.update({ + 'current_repo': get_current_repo, + 'uuid': gen_uuid, + 'now': now_iso, + 'env': env_get, + 'read_file': read_file, + }) + env.filters.update({ + 'latest_release': get_latest_release, + 'slugify': slugify, + 'default_branch': get_default_branch, + 'to_yaml': to_yaml, + 'from_yaml': from_yaml, + 'to_json': to_json, + 'from_json': from_json, + }) + return env.from_string(value).render(context or {}) + + def _is_remote(self, content_location): + return str(content_location).startswith(self.REMOTE_PREFIXES) + + def _conflict_behavior(self, exists, content, file_strategy): + if isinstance(content, dict): + if content.get('skip'): + return "skip (skip=true)" + if content.get('skip_if_exists') and exists: + return "skip (skip_if_exists=true)" + if not exists: + return "create" + behaviors = { + 'overwrite': 'overwrite existing file', + 'skip': 'skip existing file', + 'append': 'append to existing file', + 'rename': 'write renamed file', + 'backup': 'backup then overwrite existing file', + } + return behaviors.get(file_strategy, f"apply {file_strategy} strategy") + + def _load_mappings(self, mapping_files): + mappings = {} + for mappings_file_path in mapping_files or []: + if not os.path.exists(mappings_file_path): + self.logger.error(f"Mappings file not found: {mappings_file_path}") + continue + with open(mappings_file_path, 'r') as mf: + file_mappings = yaml.safe_load(mf) or 
{} + mappings = self._deep_merge_dicts(mappings, file_mappings) + return mappings + + def _deep_merge_dicts(self, dict1, dict2): + result = dict1.copy() + for key, value in dict2.items(): + if key in result and isinstance(result[key], dict) and isinstance(value, dict): + result[key] = self._deep_merge_dicts(result[key], value) + else: + result[key] = value + return result + + def _format_text(self, explanation): + lines = [ + "Structure explanation", + f" Structure definition: {explanation['structure_definition']}", + f" Base path: {explanation['base_path']}", + f" File strategy: {explanation['file_strategy']}", + "", + "Variables:", + ] + if explanation['variables']: + for variable in explanation['variables']: + indent = " " + (" " * variable.get('depth', 0)) + marker = "provided" if variable.get('provided') else "default" + value = variable.get('value') + value_text = "" if value is None else value + lines.append(f"{indent}- {variable['name']}: {value_text} ({marker})") + else: + lines.append(" - none") + + lines.extend(["", "Files:"]) + if explanation['files']: + for file_entry in explanation['files']: + indent = " " + (" " * file_entry.get('depth', 0)) + source = file_entry.get('source', 'inline') + extra = f" from {file_entry['content_location']}" if file_entry.get('content_location') else "" + lines.append(f"{indent}- {file_entry['path']} [{source}{extra}; {file_entry['conflict_behavior']}]") + else: + lines.append(" - none") + + lines.extend(["", "Folders and nested structures:"]) + if explanation['folders']: + for folder_entry in explanation['folders']: + indent = " " + (" " * folder_entry.get('depth', 0)) + structs = ", ".join(folder_entry.get('structs') or []) or "none" + with_vars = folder_entry.get('with') or {} + with_text = f" with {with_vars}" if with_vars else "" + lines.append(f"{indent}- {folder_entry['path']} (nested structs: {structs}){with_text}") + else: + lines.append(" - none") + + lines.extend(["", "Remote files:"]) + if 
explanation['remote_files']: + for remote in explanation['remote_files']: + indent = " " + (" " * remote.get('depth', 0)) + lines.append(f"{indent}- {remote['path']} <= {remote['content_location']}") + else: + lines.append(" - none") + + lines.extend(["", "Hooks (not executed):"]) + for hook_type in ('pre', 'post'): + hooks = explanation['hooks'][hook_type] + if hooks: + lines.append(f" {hook_type}:") + for hook in hooks: + indent = " " + (" " * hook.get('depth', 0)) + lines.append(f"{indent}- {hook['command']}") + else: + lines.append(f" {hook_type}: none") + + lines.extend(["", "Notes:"]) + for note in explanation['notes']: + lines.append(f" - {note}") + return "\n".join(lines) diff --git a/structkit/commands/mcp.py b/structkit/commands/mcp.py index dc603c1..72305bc 100644 --- a/structkit/commands/mcp.py +++ b/structkit/commands/mcp.py @@ -48,7 +48,9 @@ def execute(self, args): print("\nMCP tools available:") print(" - list_structures: List all available structure definitions") print(" - get_structure_info: Get detailed information about a structure") + print(" - get_structure_vars: Inspect variables declared by a structure") print(" - generate_structure: Generate structures with various options") + print(" - explain_structure: Explain structure resolution without side effects") print(" - validate_structure: Validate structure configuration files") print("\nExamples:") print(" structkit mcp --server --transport stdio --debug") diff --git a/structkit/commands/vars.py b/structkit/commands/vars.py new file mode 100644 index 0000000..7d7d8ef --- /dev/null +++ b/structkit/commands/vars.py @@ -0,0 +1,133 @@ +import json +import os +import yaml + +from structkit.commands import Command +from structkit.completers import structures_completer + + +class VarsCommand(Command): + """Inspect variables declared by a structure definition.""" + + def __init__(self, parser): + super().__init__(parser) + parser.description = "Inspect variables declared by a structure definition" + 
structure_arg = parser.add_argument('structure_definition', type=str, help='Structure definition name or path to a YAML file') + structure_arg.completer = structures_completer + parser.add_argument( + '-s', + '--structures-path', + type=str, + help='Path to structure definitions (env: STRUCTKIT_STRUCTURES_PATH)', + default=os.getenv('STRUCTKIT_STRUCTURES_PATH', None) + ) + parser.add_argument('--json', action='store_true', help='Output variables as JSON') + parser.set_defaults(func=self.execute) + + def execute(self, args): + config = self._load_yaml_config(args.structure_definition, args.structures_path) + if config is None: + raise SystemExit(1) + if not isinstance(config, dict): + self.logger.error("❗ Invalid structure config: top-level YAML content must be a mapping") + raise SystemExit(1) + + try: + variables = self._normalize_variables(config.get('variables', [])) + except ValueError as exc: + self.logger.error(f"❗ Invalid variables config: {exc}") + raise SystemExit(1) from exc + + if args.json: + print(json.dumps(variables, indent=2)) + else: + self._print_text(args.structure_definition, variables) + + def _load_yaml_config(self, structure_definition, structures_path): + if structure_definition.endswith(('.yaml', '.yml')) and not structure_definition.startswith("file://"): + structure_definition = f"file://{structure_definition}" + + if structure_definition.startswith("file://") and structure_definition.endswith((".yaml", ".yml")): + file_path = structure_definition[7:] + else: + this_file = os.path.dirname(os.path.realpath(__file__)) + contribs_path = os.path.join(this_file, "..", "contribs") + file_path = os.path.join(contribs_path, f"{structure_definition}.yaml") + if structures_path: + file_path = os.path.join(structures_path, f"{structure_definition}.yaml") + if not os.path.exists(file_path): + file_path = os.path.join(contribs_path, f"{structure_definition}.yaml") + + if not os.path.exists(file_path): + self.logger.error(f"❗ File not found: 
{file_path}") + return None + + try: + with open(file_path, 'r') as f: + return yaml.safe_load(f) or {} + except yaml.YAMLError as exc: + self.logger.error(f"❗ Invalid YAML in {file_path}: {exc}") + return None + except OSError as exc: + self.logger.error(f"❗ Failed to read {file_path}: {exc}") + return None + + def _normalize_variables(self, variables): + if variables is None: + return [] + if not isinstance(variables, list): + raise ValueError("the 'variables' key must be a list") + + normalized = [] + for item in variables: + if not isinstance(item, dict): + raise ValueError("each variable entry must be a mapping") + for name, content in item.items(): + if not isinstance(name, str): + raise ValueError("each variable name must be a string") + if content is None: + content = {} + if not isinstance(content, dict): + raise ValueError(f"the content of '{name}' must be a mapping") + + has_default = 'default' in content + description = content.get('description', content.get('help', '')) + normalized.append({ + 'name': name, + 'type': content.get('type', ''), + 'default': content.get('default') if has_default else None, + 'description': description if description is not None else '', + 'required': bool(content.get('required', False)), + }) + return normalized + + def _print_text(self, structure_definition, variables): + print(f"Variables for {structure_definition}") + if not variables: + print("No variables defined.") + return + + rows = [[ + variable['name'], + variable['type'] or '-', + self._format_default(variable['default']), + 'required' if variable['required'] else 'optional', + variable['description'] or '-', + ] for variable in variables] + headers = ['Name', 'Type', 'Default', 'Required', 'Description'] + widths = [len(header) for header in headers] + for row in rows: + for index, value in enumerate(row): + widths[index] = max(widths[index], len(value)) + + print(" " + " ".join(header.ljust(widths[index]) for index, header in enumerate(headers))) + print(" " + 
" ".join("-" * width for width in widths)) + for row in rows: + print(" " + " ".join(value.ljust(widths[index]) for index, value in enumerate(row))) + + def _format_default(self, value): + if value is None: + return '-' + if isinstance(value, bool): + return str(value).lower() + return str(value) diff --git a/structkit/main.py b/structkit/main.py index 73e499c..266d280 100644 --- a/structkit/main.py +++ b/structkit/main.py @@ -4,7 +4,9 @@ from dotenv import load_dotenv from structkit.utils import read_config_file, merge_configs from structkit.commands.generate import GenerateCommand +from structkit.commands.explain import ExplainCommand from structkit.commands.info import InfoCommand +from structkit.commands.vars import VarsCommand from structkit.commands.validate import ValidateCommand from structkit.commands.list import ListCommand from structkit.commands.search import SearchCommand @@ -34,6 +36,8 @@ def get_parser(): InfoCommand(subparsers.add_parser('info', help='Show information about the package')) ValidateCommand(subparsers.add_parser('validate', help='Validate the YAML configuration file')) GenerateCommand(subparsers.add_parser('generate', help='Generate the project structure')) + ExplainCommand(subparsers.add_parser('explain', help='Explain structure resolution without writing files')) + VarsCommand(subparsers.add_parser('vars', help='Inspect structure variables')) ListCommand(subparsers.add_parser('list', help='List available structures')) SearchCommand(subparsers.add_parser('search', help='Search available structures by keyword')) GenerateSchemaCommand(subparsers.add_parser('generate-schema', help='Generate JSON schema for available structures')) diff --git a/structkit/mcp_server.py b/structkit/mcp_server.py index e18eba4..19dfa95 100644 --- a/structkit/mcp_server.py +++ b/structkit/mcp_server.py @@ -6,6 +6,8 @@ 2. Getting detailed information about structures 3. Generating structures with various options 4. Validating structure configurations +5. 
Inspecting structure variables +6. Explaining structure resolution without side effects """ import asyncio import logging @@ -17,7 +19,9 @@ from fastmcp import FastMCP from structkit.commands.generate import GenerateCommand +from structkit.commands.explain import ExplainCommand from structkit.commands.validate import ValidateCommand +from structkit.commands.vars import VarsCommand from structkit import __version__ @@ -193,6 +197,95 @@ class Args: finally: sys.stdout = old + def _get_structure_vars_logic( + self, + structure_name: Optional[str], + structures_path: Optional[str] = None, + output: str = "text", + ) -> str: + if not structure_name: + return "Error: structure_name is required" + + import argparse + from io import StringIO + dummy_parser = argparse.ArgumentParser() + vars_command = VarsCommand(dummy_parser) + + config = vars_command._load_yaml_config(structure_name, structures_path) + if config is None: + return f"❗ Structure not found or could not be loaded: {structure_name}" + if not isinstance(config, dict): + return "❗ Invalid structure config: top-level YAML content must be a mapping" + + try: + variables = vars_command._normalize_variables(config.get('variables', [])) + except ValueError as exc: + return f"❗ Invalid variables config: {exc}" + + if output == "json": + import json + return json.dumps(variables, indent=2) + + buf = StringIO() + old = sys.stdout + sys.stdout = buf + try: + vars_command._print_text(structure_name, variables) + return buf.getvalue().strip() + finally: + sys.stdout = old + + + def _explain_structure_logic( + self, + structure_definition: str, + base_path: str = ".", + output: str = "text", + variables: Optional[Dict[str, str]] = None, + mappings: Optional[Dict[str, Any]] = None, + structures_path: Optional[str] = None, + file_strategy: str = "overwrite", + ) -> str: + if not structure_definition: + return "Error: structure_definition is required" + + valid_outputs = {"text", "json"} + if output not in valid_outputs: + 
return f"Error: output must be one of {sorted(valid_outputs)}, got: {output}" + + valid_file_strategies = {"overwrite", "skip", "append", "rename", "backup"} + if file_strategy not in valid_file_strategies: + return f"Error: file_strategy must be one of {sorted(valid_file_strategies)}, got: {file_strategy}" + + class Args: + pass + args = Args() + args.structure_definition = structure_definition + args.base_path = base_path or "." + args.output = output + args.json_output = output == "json" + args.structures_path = structures_path + args.vars = None + args.mappings_file = None + args.file_strategy = file_strategy + args.log = "INFO" + args.config_file = None + args.log_file = None + + if variables: + args.vars = ",".join([f"{k}={v}" for k, v in variables.items()]) + + import argparse + dummy_parser = argparse.ArgumentParser() + command = ExplainCommand(dummy_parser) + explanation = command.explain(args, mappings=mappings or {}) + if explanation is None: + return f"Unable to explain structure '{structure_definition}'" + if output == "json": + import json + return json.dumps(explanation, indent=2, sort_keys=True) + return command._format_text(explanation) + # ===================== # FastMCP tool registration (maps to logic above) # ===================== @@ -215,6 +308,25 @@ async def get_structure_info(structure_name: str, structures_path: Optional[str] self.logger.debug(f"MCP response: get_structure_info len={len(result)} preview=\n{preview}") return result + @self.app.tool(name="get_structure_vars", description="Inspect variables declared by a specific structure") + async def get_structure_vars( + structure_name: str, + structures_path: Optional[str] = None, + output: str = "text", + ) -> str: + self.logger.debug( + "MCP request: get_structure_vars args=%s", + { + "structure_name": structure_name, + "structures_path": structures_path, + "output": output, + }, + ) + result = self._get_structure_vars_logic(structure_name, structures_path, output) + preview = result 
if len(result) <= 1000 else result[:1000] + f"... [truncated {len(result)-1000} chars]" + self.logger.debug(f"MCP response: get_structure_vars len={len(result)} preview=\n{preview}") + return result + @self.app.tool(name="generate_structure", description="Generate a project structure using specified definition and options") async def generate_structure( structure_definition: str, @@ -255,6 +367,42 @@ async def validate_structure(yaml_file: str) -> str: self.logger.debug(f"MCP response: validate_structure len={len(result)} preview=\n{preview}") return result + + @self.app.tool(name="explain_structure", description="Explain structure resolution without creating files, fetching remote content, or running hooks") + async def explain_structure( + structure_definition: str, + base_path: str = ".", + output: str = "text", + variables: Optional[Dict[str, str]] = None, + mappings: Optional[Dict[str, Any]] = None, + structures_path: Optional[str] = None, + file_strategy: str = "overwrite", + ) -> str: + self.logger.debug( + "MCP request: explain_structure args=%s", + { + "structure_definition": structure_definition, + "base_path": base_path, + "output": output, + "variables": variables, + "mappings": mappings, + "structures_path": structures_path, + "file_strategy": file_strategy, + }, + ) + result = self._explain_structure_logic( + structure_definition, + base_path, + output, + variables, + mappings, + structures_path, + file_strategy, + ) + preview = result if len(result) <= 1000 else result[:1000] + f"... 
[truncated {len(result)-1000} chars]" + self.logger.debug(f"MCP response: explain_structure len={len(result)} preview=\n{preview}") + return result + async def run( self, transport: str = "stdio", @@ -337,6 +485,25 @@ def __init__(self, content): return MockResult([MockContent(result_text)]) + async def _handle_get_structure_vars(self, params: Dict[str, Any]): + """Compatibility method for tests that expect MCP-style responses.""" + structure_name = params.get('structure_name') + structures_path = params.get('structures_path') + output = params.get('output', 'text') + + result_text = self._get_structure_vars_logic(structure_name, structures_path, output) + + # Mock MCP response structure + class MockContent: + def __init__(self, text): + self.text = text + + class MockResult: + def __init__(self, content): + self.content = content + + return MockResult([MockContent(result_text)]) + async def main(): logging.basicConfig(level=logging.INFO) diff --git a/tests/test_commands.py b/tests/test_commands.py index 0f9598f..b6ea1c0 100644 --- a/tests/test_commands.py +++ b/tests/test_commands.py @@ -5,6 +5,7 @@ from structkit.commands.validate import ValidateCommand from structkit.commands.list import ListCommand from structkit.commands.generate_schema import GenerateSchemaCommand +from structkit.commands.vars import VarsCommand import argparse import json import os @@ -520,3 +521,100 @@ def test_multiple_mappings_files(): } assert merged_mappings == expected_mappings + +# Tests for VarsCommand +def test_vars_command_text_output(tmp_path, capsys): + yaml_file = tmp_path / "structure.yaml" + yaml_file.write_text(""" +variables: + - project_name: + description: Project name + type: string + default: MyProject + - api_token: + help: API token + type: string + required: true +""") + command = VarsCommand(parser := argparse.ArgumentParser()) + args = parser.parse_args([str(yaml_file)]) + + command.execute(args) + + output = capsys.readouterr().out + assert "Variables for" in 
def test_vars_command_text_output(tmp_path, capsys):
    """`vars` renders a text table with names, types, defaults and flags."""
    config_path = tmp_path / "structure.yaml"
    config_path.write_text("""
variables:
  - project_name:
      description: Project name
      type: string
      default: MyProject
  - api_token:
      help: API token
      type: string
      required: true
""")
    parser = argparse.ArgumentParser()
    command = VarsCommand(parser)
    args = parser.parse_args([str(config_path)])

    command.execute(args)

    captured = capsys.readouterr().out
    for expected in (
        "Variables for",
        "project_name",
        "string",
        "MyProject",
        "optional",
        "api_token",
        "required",
        "API token",
    ):
        assert expected in captured


def test_vars_command_json_output(tmp_path, capsys):
    """`vars --json` emits the normalized variable list as JSON."""
    config_path = tmp_path / "structure.yaml"
    config_path.write_text("""
variables:
  - enabled:
      description: Enable feature
      type: boolean
      default: true
      required: true
""")
    parser = argparse.ArgumentParser()
    command = VarsCommand(parser)
    args = parser.parse_args([str(config_path), '--json'])

    command.execute(args)

    expected = [{
        "name": "enabled",
        "type": "boolean",
        "default": True,
        "description": "Enable feature",
        "required": True,
    }]
    assert json.loads(capsys.readouterr().out) == expected


def test_vars_command_no_variables(tmp_path, capsys):
    """A structure with no `variables` section prints a friendly notice."""
    config_path = tmp_path / "structure.yaml"
    config_path.write_text("files: []\n")
    parser = argparse.ArgumentParser()
    command = VarsCommand(parser)
    args = parser.parse_args([str(config_path)])

    command.execute(args)

    assert "No variables defined." in capsys.readouterr().out


def test_vars_command_custom_structures_path(tmp_path, capsys):
    """Structures are resolvable by name via --structures-path."""
    structures_dir = tmp_path / "structures"
    structures_dir.mkdir()
    (structures_dir / "custom.yaml").write_text("""
variables:
  - custom_name:
      type: string
      description: Custom variable
""")
    parser = argparse.ArgumentParser()
    command = VarsCommand(parser)
    args = parser.parse_args(['custom', '--structures-path', str(structures_dir)])

    command.execute(args)

    captured = capsys.readouterr().out
    assert "custom_name" in captured
    assert "Custom variable" in captured


def test_vars_command_invalid_config_exits_nonzero(tmp_path):
    """A malformed `variables` section exits with status 1."""
    config_path = tmp_path / "structure.yaml"
    config_path.write_text("variables: invalid\n")
    parser = argparse.ArgumentParser()
    command = VarsCommand(parser)
    args = parser.parse_args([str(config_path)])

    with pytest.raises(SystemExit) as exc_info:
        command.execute(args)
    assert exc_info.value.code == 1


def write_yaml(path, content):
    """Write *content* to *path* and return the path for chaining."""
    path.write_text(content)
    return path


def test_explain_command_registered_in_cli():
    """The `explain` subcommand is wired into the top-level parser."""
    parser = get_parser()
    args = parser.parse_args(['explain', 'project/python', '--json'])
    assert args.structure_definition == 'project/python'
    assert args.json_output is True
    assert callable(args.func)
def test_explain_nested_remote_hooks_variables_json_without_side_effects(tmp_path, capsys):
    """Explain resolves nested structs, vars and hooks with zero side effects."""
    structures = tmp_path / 'structures'
    structures.mkdir()
    base_path = tmp_path / 'out'

    write_yaml(
        structures / 'child.yaml',
        """
variables:
  - module_name:
      type: string
      default: child_default
files:
  - "{{@ module_name @}}.txt":
      file: https://example.com/template.txt
post_hooks:
  - "echo child {{@ module_name @}}"
""".lstrip(),
    )
    write_yaml(
        structures / 'root.yaml',
        """
variables:
  - project_name:
      type: string
      default: demo
pre_hooks:
  - "echo preparing {{@ project_name @}}"
files:
  - README.md:
      content: "# {{@ project_name @}}"
folders:
  - modules:
      struct: child
      with:
        module_name: "{{@ project_name | slugify @}}-module"
""".lstrip(),
    )

    parser = argparse.ArgumentParser()
    command = ExplainCommand(parser)
    args = parser.parse_args([
        '--structures-path', str(structures),
        '--vars', 'project_name=My Demo',
        '--json',
        'root',
        str(base_path),
    ])

    # Neither remote fetching nor hook execution may happen during explain.
    with patch('structkit.content_fetcher.ContentFetcher.fetch_content') as fetch_content, \
            patch('subprocess.run') as subprocess_run:
        command.execute(args)

    explanation = json.loads(capsys.readouterr().out)
    assert not base_path.exists()
    fetch_content.assert_not_called()
    subprocess_run.assert_not_called()
    assert explanation['hooks']['pre'][0]['command'] == 'echo preparing My Demo'
    assert explanation['hooks']['post'][0]['command'] == 'echo child my-demo-module'
    assert explanation['variables'][0]['name'] == 'project_name'
    assert explanation['variables'][0]['value'] == 'My Demo'
    assert any(folder['structs'] == ['child'] for folder in explanation['folders'])
    expected_remote = [{
        'path': os.path.join(str(base_path), 'modules', 'my-demo-module.txt'),
        'content_location': 'https://example.com/template.txt',
        'structure': 'child',
        'depth': 1,
    }]
    assert explanation['remote_files'] == expected_remote


def test_explain_reports_conflict_behavior_for_existing_files(tmp_path):
    """Existing files are flagged together with the chosen strategy's behavior."""
    definition = write_yaml(
        tmp_path / 'struct.yaml',
        """
files:
  - existing.txt:
      content: replacement
""".lstrip(),
    )
    base_path = tmp_path / 'out'
    base_path.mkdir()
    (base_path / 'existing.txt').write_text('old')

    parser = argparse.ArgumentParser()
    command = ExplainCommand(parser)
    args = parser.parse_args(['--file-strategy', 'skip', str(definition), str(base_path)])

    explanation = command.explain(args)

    first_file = explanation['files'][0]
    assert first_file['exists'] is True
    assert first_file['conflict_behavior'] == 'skip existing file'


def test_explain_text_output_includes_nested_and_hooks(tmp_path, capsys):
    """The text renderer mentions files, folders and (unexecuted) hooks."""
    definition = write_yaml(
        tmp_path / 'struct.yaml',
        """
pre_hooks:
  - echo hello
files:
  - README.md: hello
folders:
  - app:
      struct: []
""".lstrip(),
    )
    parser = argparse.ArgumentParser()
    command = ExplainCommand(parser)
    args = parser.parse_args([str(definition), str(tmp_path / 'out')])

    command.execute(args)

    rendered = capsys.readouterr().out
    for expected in ('Structure explanation', 'README.md', 'Hooks (not executed)', 'echo hello'):
        assert expected in rendered
""" +import asyncio +import json import os import tempfile import unittest @@ -19,6 +21,12 @@ def test_server_initialization(self): self.assertIsNotNone(self.server) self.assertTrue(hasattr(self.server, 'app')) + def test_get_structure_vars_tool_is_registered(self): + tools = asyncio.run(self.server.app.list_tools()) + tool_names = [tool.name for tool in tools] + self.assertIn('get_structure_vars', tool_names) + self.assertIn('explain_structure', tool_names) + def test_list_structures_logic(self): text = self.server._list_structures_logic() self.assertIsInstance(text, str) @@ -43,6 +51,118 @@ def test_generate_structure_logic(self): ) self.assertIsInstance(text, str) + def test_get_structure_vars_logic(self): + text = self.server._get_structure_vars_logic(None) + self.assertIn("structure_name is required", text) + + with tempfile.NamedTemporaryFile(mode='w', suffix='.yaml', delete=False) as f: + yaml.dump({ + 'variables': [ + { + 'project_name': { + 'type': 'string', + 'default': 'MyProject', + 'description': 'Project name' + } + }, + { + 'api_token': { + 'type': 'string', + 'help': 'API token', + 'required': True + } + }, + ] + }, f) + f.flush() + try: + text = self.server._get_structure_vars_logic(f.name) + self.assertIn("Variables for", text) + self.assertIn("project_name", text) + self.assertIn("MyProject", text) + self.assertIn("api_token", text) + self.assertIn("required", text) + + json_text = self.server._get_structure_vars_logic(f.name, output="json") + data = json.loads(json_text) + self.assertEqual(data[0]['name'], 'project_name') + self.assertEqual(data[0]['default'], 'MyProject') + self.assertEqual(data[1]['name'], 'api_token') + self.assertTrue(data[1]['required']) + finally: + os.unlink(f.name) + + def test_get_structure_vars_logic_custom_path(self): + with tempfile.TemporaryDirectory() as temp_dir: + structure_path = os.path.join(temp_dir, 'custom.yaml') + with open(structure_path, 'w') as f: + yaml.dump({ + 'variables': [ + {'custom_name': {'type': 
'string', 'description': 'Custom variable'}} + ] + }, f) + + text = self.server._get_structure_vars_logic('custom', structures_path=temp_dir) + self.assertIn('custom_name', text) + self.assertIn('Custom variable', text) + + def test_get_structure_vars_compat_handler(self): + with tempfile.NamedTemporaryFile(mode='w', suffix='.yaml', delete=False) as f: + yaml.dump({'variables': [{'enabled': {'type': 'boolean', 'default': True}}]}, f) + f.flush() + try: + result = asyncio.run(self.server._handle_get_structure_vars({ + 'structure_name': f.name, + 'output': 'json', + })) + data = json.loads(result.content[0].text) + self.assertEqual(data[0]['name'], 'enabled') + self.assertTrue(data[0]['default']) + finally: + os.unlink(f.name) + + def test_explain_structure_logic(self): + with tempfile.TemporaryDirectory() as temp_dir: + structure_path = os.path.join(temp_dir, 'example.yaml') + with open(structure_path, 'w') as f: + yaml.dump({ + 'variables': [ + {'project_name': {'type': 'string', 'default': 'Demo'}} + ], + 'pre_hooks': ['echo {{@ project_name @}}'], + 'files': [ + {'README.md': {'content': '# {{@ project_name @}}'}}, + {'remote.txt': {'file': 'https://example.com/template.txt'}} + ], + }, f) + + text = self.server._explain_structure_logic( + structure_definition=structure_path, + base_path=temp_dir, + variables={'project_name': 'MCP Demo'}, + ) + + self.assertIn('Structure explanation', text) + self.assertIn('MCP Demo', text) + self.assertIn('remote.txt', text) + self.assertIn('https://example.com/template.txt', text) + self.assertIn('Hooks (not executed)', text) + + json_text = self.server._explain_structure_logic( + structure_definition=structure_path, + base_path=temp_dir, + output='json', + variables={'project_name': 'MCP Demo'}, + file_strategy='skip', + ) + self.assertIn('"file_strategy": "skip"', json_text) + self.assertIn('"remote_files"', json_text) + + def test_explain_structure_logic_validates_inputs(self): + self.assertIn('structure_definition is 
required', self.server._explain_structure_logic('')) + self.assertIn('output must be one of', self.server._explain_structure_logic('project/python', output='xml')) + self.assertIn('file_strategy must be one of', self.server._explain_structure_logic('project/python', file_strategy='replace')) + def test_validate_structure_logic(self): # Missing yaml_file text = self.server._validate_structure_logic(None)