init: AFS baseline (config + plugins)

commit 3574cb8538
scawful, 2025-12-30 07:43:27 -05:00
12 changed files with 430 additions and 0 deletions

AGENTS.md (new file, +22)

@@ -0,0 +1,22 @@
# Agent Instructions (AFS)
## Do not invent or market
- No marketing language or product claims.
- If something is unknown, state "Unknown / needs verification" and propose a test.
## Truth policy
- Only claim what is evidenced in this repo or cited notes.
- Do not guess roadmap, compatibility, or performance.
## Scope control
- Research-only; keep scope to core AFS primitives and APIs.
## Provenance / separation
- Do not use employer or internal material.
- If provenance is unclear, leave it out.
## Output style
- Concise, engineering notebook tone.
## How to verify (tests/commands)
- Unknown / needs verification (no test harness yet).

LICENSE (new file, +6)

@@ -0,0 +1,6 @@
Copyright (c) 2025 scawful
All rights reserved.
This repository is research-only and is not licensed for redistribution,
modification, or commercial use without explicit permission.

README.md (new file, +13)

@@ -0,0 +1,13 @@
# AFS (Agentic File System)
Research-only. Not a product.
Scope: core AFS primitives + internal workflow tooling.
Provenance: avoid employer/internal sources; skip unclear origins.
Docs:
- `docs/STATUS.md`
- `docs/ROADMAP.md`
- `docs/REPO_FACTS.json`
- `docs/NARRATIVE.md`

docs/NARRATIVE.md (new file, +13)

@@ -0,0 +1,13 @@
# Narrative
AFS is a research-only internal system focused on structuring context and workflow
for agentic tools. This repo tracks experiments and implementations; claims should
map to code or docs.
## Source papers (in `~/Documents/Research`)
- `2512.05470v1.pdf` — "Everything is Context: Agentic File System Abstraction for Context Engineering"
- `2512.08296.pdf` — "Towards a Science of Scaling Agent Systems"
## Cognitive protocol references
- `2510.04950v1.pdf` — title not present in metadata (file labeled "ArXiv"); user-noted as theory-of-mind / cognitive protocol related.
- `7799_Quantifying_Human_AI_Syne.pdf` — title not present in metadata; user-noted as theory-of-mind / cognitive protocol related.

docs/REPO_FACTS.json (new file, +18)

@@ -0,0 +1,18 @@
{
"name": "afs",
"stage": "prototype",
"is_product": false,
"commercial_intent": "none",
"verified_features": [],
"hard_no": [
"enterprise",
"production-ready",
"platform",
"seamless",
"scalable",
"best-in-class",
"state-of-the-art",
"robust",
"official"
]
}

docs/ROADMAP.md (new file, +13)

@@ -0,0 +1,13 @@
# ROADMAP
## Committed
- Minimal module layout + package stubs
- One small utility
## Planned
- Configuration schema
- Basic CLI entry point
## Ideas
- Status TUI
- Plugin discovery via `~/.config/afs/plugins`

docs/STATUS.md (new file, +7)

@@ -0,0 +1,7 @@
# STATUS
Stage: Prototype
Now: package stub; guardrails; minimal config + plugin discovery.
Not yet: CLI/services; full configuration schema validation.
Next: one small utility; smoke-test stub.
Issues: no runtime yet.
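
A minimal sketch of what the "smoke-test stub" listed under Next could look like, using only the API this commit exports from `src/afs/__init__.py`; no such test exists in the repo yet:

```python
# Smoke check: default config model loads and plugin discovery returns a list.
# Assumes the package is importable (e.g. installed with `pip install -e .`);
# no config files are required.
from afs import discover_plugins, load_config_model

def smoke() -> None:
    cfg = load_config_model(merge_user=False)  # skip ~/.config/afs/config.toml
    assert cfg.general.context_root.is_absolute()
    plugins = discover_plugins(cfg)            # may be empty; only exercises the call path
    assert isinstance(plugins, list)

if __name__ == "__main__":
    smoke()
    print("ok")
```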

pyproject.toml (new file, +13)

@@ -0,0 +1,13 @@
[project]
name = "afs"
version = "0.0.0"
description = "AFS research package"
requires-python = ">=3.11"
license = {text = "All rights reserved"}
authors = [
{name = "scawful"}
]
[build-system]
requires = ["setuptools>=68"]
build-backend = "setuptools.build_meta"

src/afs/__init__.py (new file, +8)

@@ -0,0 +1,8 @@
"""AFS package stub."""
__version__ = "0.0.0"
from .config import load_config, load_config_model
from .plugins import discover_plugins, load_plugins
__all__ = ["load_config", "load_config_model", "discover_plugins", "load_plugins"]

src/afs/config.py (new file, +113)

@@ -0,0 +1,113 @@
"""Minimal config loader for AFS."""
from __future__ import annotations
import os
import tomllib
from pathlib import Path
from typing import Any
from .schema import AFSConfig
def _deep_merge(base: dict[str, Any], override: dict[str, Any]) -> dict[str, Any]:
result = base.copy()
for key, value in override.items():
if key in result and isinstance(result[key], dict) and isinstance(value, dict):
result[key] = _deep_merge(result[key], value)
else:
result[key] = value
return result
def _expand_path(path: str | Path) -> Path:
return Path(path).expanduser().resolve()
def _parse_bool(value: str | None, default: bool = False) -> bool:
if value is None:
return default
return value.strip().lower() in {"1", "true", "yes", "on"}
def _expand_config_paths(config_data: dict[str, Any]) -> None:
if "general" in config_data:
general = config_data["general"]
if "context_root" in general:
general["context_root"] = _expand_path(general["context_root"])
if "agent_workspaces_dir" in general:
general["agent_workspaces_dir"] = _expand_path(general["agent_workspaces_dir"])
if "python_executable" in general:
python_exec = general["python_executable"]
if isinstance(python_exec, str) and python_exec.startswith("~"):
general["python_executable"] = _expand_path(python_exec)
if "workspace_directories" in general:
for ws_dir in general["workspace_directories"]:
if "path" in ws_dir:
ws_dir["path"] = _expand_path(ws_dir["path"])
if "plugins" in config_data and "plugin_dirs" in config_data["plugins"]:
config_data["plugins"]["plugin_dirs"] = [
_expand_path(p) for p in config_data["plugins"]["plugin_dirs"]
]
if "projects" in config_data:
for project in config_data["projects"]:
if "path" in project:
project["path"] = _expand_path(project["path"])
if "knowledge_roots" in project:
project["knowledge_roots"] = [
_expand_path(p) for p in project["knowledge_roots"]
]
def load_config(config_path: Path | None = None, merge_user: bool = True) -> dict[str, Any]:
"""Load configuration with basic precedence and path expansion."""
env_config = os.environ.get("AFS_CONFIG_PATH")
if config_path is None and env_config:
config_path = Path(env_config).expanduser()
prefer_user = _parse_bool(os.environ.get("AFS_PREFER_USER_CONFIG"), default=True)
prefer_repo = _parse_bool(os.environ.get("AFS_PREFER_REPO_CONFIG"))
if prefer_repo:
prefer_user = False
config_data: dict[str, Any] = {}
user_raw: dict[str, Any] = {}
local_raw: dict[str, Any] = {}
explicit_raw: dict[str, Any] = {}
if merge_user:
user_path = Path.home() / ".config" / "afs" / "config.toml"
if user_path.exists():
with open(user_path, "rb") as f:
user_raw = tomllib.load(f)
local_path = Path("afs.toml")
if local_path.exists():
with open(local_path, "rb") as f:
local_raw = tomllib.load(f)
if prefer_user:
config_data = _deep_merge(config_data, local_raw)
config_data = _deep_merge(config_data, user_raw)
else:
config_data = _deep_merge(config_data, user_raw)
config_data = _deep_merge(config_data, local_raw)
if config_path and config_path.exists():
with open(config_path, "rb") as f:
explicit_raw = tomllib.load(f)
config_data = _deep_merge(config_data, explicit_raw)
_expand_config_paths(config_data)
return config_data
def load_config_model(
config_path: Path | None = None,
merge_user: bool = True,
) -> AFSConfig:
"""Load configuration and return a typed model."""
data = load_config(config_path=config_path, merge_user=merge_user)
return AFSConfig.from_dict(data)
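
A usage sketch of the precedence implemented above. The environment variable names and config keys are the ones read by `load_config` and `_expand_config_paths`; the presence of a repo-local `afs.toml` or a user config at `~/.config/afs/config.toml` is assumed, not required:

```python
# Merge order (later wins): with AFS_PREFER_REPO_CONFIG set, user config then ./afs.toml;
# by default (AFS_PREFER_USER_CONFIG), ./afs.toml then user config.
# An explicit path (argument or AFS_CONFIG_PATH) is always merged last.
import os
from pathlib import Path
from afs import load_config, load_config_model

os.environ["AFS_PREFER_REPO_CONFIG"] = "1"   # let ./afs.toml override the user config
data = load_config()                         # plain dict; known path values expanded/resolved
print(data.get("general", {}).get("context_root"))   # None if no config file was found

cfg = load_config_model(Path("afs.toml"))    # same merge, returned as a typed AFSConfig
print(cfg.plugins.auto_discover_prefixes)    # ["afs_plugin"] unless a config overrides it
```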

src/afs/plugins.py (new file, +102)

@@ -0,0 +1,102 @@
"""Plugin discovery and loading helpers."""
from __future__ import annotations
import importlib
import logging
import pkgutil
import sys
from contextlib import contextmanager
from pathlib import Path
from types import ModuleType
from typing import Iterable, Iterator
from .schema import AFSConfig, PluginsConfig
logger = logging.getLogger(__name__)
def _iter_module_names(paths: list[Path] | None) -> set[str]:
module_names: set[str] = set()
if paths:
search_paths = [str(path) for path in paths if path.exists()]
for module in pkgutil.iter_modules(search_paths):
module_names.add(module.name)
return module_names
for module in pkgutil.iter_modules():
module_names.add(module.name)
return module_names
def _filter_prefixes(names: Iterable[str], prefixes: list[str]) -> list[str]:
return sorted(
{
name
for name in names
if any(name.startswith(prefix) for prefix in prefixes)
}
)
@contextmanager
def _prepend_sys_path(paths: list[Path]) -> Iterator[None]:
if not paths:
yield
return
path_strings = [str(path) for path in paths if path.exists()]
if not path_strings:
yield
return
original = list(sys.path)
sys.path = path_strings + sys.path
try:
yield
finally:
sys.path = original
def _normalize_plugins_config(config: AFSConfig | PluginsConfig | dict | None) -> PluginsConfig:
if config is None:
return PluginsConfig()
if isinstance(config, PluginsConfig):
return config
if isinstance(config, AFSConfig):
return config.plugins
if isinstance(config, dict):
return PluginsConfig.from_dict(config.get("plugins", config))
return PluginsConfig()
def discover_plugins(
config: AFSConfig | PluginsConfig | dict | None = None,
extra_paths: Iterable[Path] | None = None,
) -> list[str]:
plugins_config = _normalize_plugins_config(config)
names = set(plugins_config.enabled_plugins)
if not plugins_config.auto_discover:
return sorted(names)
prefixes = plugins_config.auto_discover_prefixes or ["afs_plugin"]
search_paths = list(plugins_config.plugin_dirs)
if extra_paths:
search_paths.extend(extra_paths)
names.update(_filter_prefixes(_iter_module_names(search_paths), prefixes))
names.update(_filter_prefixes(_iter_module_names(None), prefixes))
return sorted(names)
def load_plugins(
plugin_names: Iterable[str],
plugin_dirs: Iterable[Path] | None = None,
) -> dict[str, ModuleType]:
loaded: dict[str, ModuleType] = {}
dirs = list(plugin_dirs or [])
with _prepend_sys_path(dirs):
for name in plugin_names:
try:
loaded[name] = importlib.import_module(name)
except Exception as exc:
logger.warning("Failed to load plugin %s: %s", name, exc)
return loaded
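
A sketch of the discover-then-load flow above. The `~/.config/afs/plugins` directory and the `afs_plugin_hello` module name are hypothetical placeholders, not things this commit provides:

```python
# Discover module names matching the "afs_plugin" prefix, then import them.
# afs_plugin_hello and ~/.config/afs/plugins are illustrative only.
from pathlib import Path
from afs.plugins import discover_plugins, load_plugins
from afs.schema import PluginsConfig

plugins_cfg = PluginsConfig(
    enabled_plugins=["afs_plugin_hello"],            # hypothetical; always included in results
    plugin_dirs=[Path("~/.config/afs/plugins").expanduser()],
    auto_discover=True,
)
names = discover_plugins(plugins_cfg)                # enabled names + prefix matches, sorted
modules = load_plugins(names, plugin_dirs=plugins_cfg.plugin_dirs)
for name, module in modules.items():                 # import failures are logged, not raised
    print(name, getattr(module, "__file__", None))
```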

src/afs/schema.py (new file, +102)

@@ -0,0 +1,102 @@
"""Minimal configuration schema for AFS."""
from __future__ import annotations
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any
def _as_path(value: str | Path) -> Path:
return value if isinstance(value, Path) else Path(value).expanduser().resolve()
@dataclass
class WorkspaceDirectory:
path: Path
description: str | None = None
@classmethod
def from_dict(cls, data: dict[str, Any]) -> "WorkspaceDirectory":
path = _as_path(data.get("path", ""))
description = data.get("description")
return cls(path=path, description=description)
@dataclass
class GeneralConfig:
context_root: Path = field(default_factory=lambda: Path.home() / ".context")
agent_workspaces_dir: Path = field(
default_factory=lambda: Path.home() / ".context" / "workspaces"
)
python_executable: Path | None = None
workspace_directories: list[WorkspaceDirectory] = field(default_factory=list)
@classmethod
def from_dict(cls, data: dict[str, Any]) -> "GeneralConfig":
context_root = data.get("context_root")
agent_workspaces_dir = data.get("agent_workspaces_dir")
python_executable = data.get("python_executable")
workspace_directories = [
WorkspaceDirectory.from_dict(item)
for item in data.get("workspace_directories", [])
if isinstance(item, dict)
]
return cls(
context_root=_as_path(context_root)
if context_root
else cls().context_root,
agent_workspaces_dir=_as_path(agent_workspaces_dir)
if agent_workspaces_dir
else cls().agent_workspaces_dir,
python_executable=_as_path(python_executable)
if python_executable
else None,
workspace_directories=workspace_directories,
)
@dataclass
class PluginsConfig:
enabled_plugins: list[str] = field(default_factory=list)
plugin_dirs: list[Path] = field(default_factory=list)
auto_discover: bool = True
auto_discover_prefixes: list[str] = field(
default_factory=lambda: ["afs_plugin"]
)
@classmethod
def from_dict(cls, data: dict[str, Any]) -> "PluginsConfig":
enabled_plugins = [
item for item in data.get("enabled_plugins", []) if isinstance(item, str)
]
plugin_dirs = [
_as_path(item)
for item in data.get("plugin_dirs", [])
if isinstance(item, (str, Path))
]
auto_discover = data.get("auto_discover", True)
prefixes = data.get("auto_discover_prefixes")
if prefixes and isinstance(prefixes, list):
auto_discover_prefixes = [p for p in prefixes if isinstance(p, str)]
else:
auto_discover_prefixes = cls().auto_discover_prefixes
return cls(
enabled_plugins=enabled_plugins,
plugin_dirs=plugin_dirs,
auto_discover=bool(auto_discover),
auto_discover_prefixes=auto_discover_prefixes,
)
@dataclass
class AFSConfig:
general: GeneralConfig = field(default_factory=GeneralConfig)
plugins: PluginsConfig = field(default_factory=PluginsConfig)
@classmethod
def from_dict(cls, data: dict[str, Any] | None) -> "AFSConfig":
data = data or {}
general = GeneralConfig.from_dict(data.get("general", {}))
plugins = PluginsConfig.from_dict(data.get("plugins", {}))
return cls(general=general, plugins=plugins)
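
A small sketch of `AFSConfig.from_dict` on an inline dict; the values noted in the comments follow from the defaults defined above, and `afs_plugin_hello` is a hypothetical name:

```python
# Build a typed config from a plain dict; missing sections fall back to defaults.
from afs.schema import AFSConfig

cfg = AFSConfig.from_dict({
    "general": {"context_root": "~/.context"},
    "plugins": {"enabled_plugins": ["afs_plugin_hello"]},  # hypothetical plugin name
})
print(cfg.general.context_root)            # expanded and resolved to an absolute path
print(cfg.general.agent_workspaces_dir)    # default: ~/.context/workspaces
print(cfg.plugins.auto_discover)           # True (default)
print(cfg.plugins.auto_discover_prefixes)  # ["afs_plugin"] (default)
```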