merge: feat/issue-37-unit-tests (#37)
This commit is contained in:
commit
4338587360
10 changed files with 1050 additions and 0 deletions
|
|
@ -89,6 +89,11 @@ python3 luminos.py --install-extras
|
|||
- **Explain then build** — articulate the approach in a few bullets before
|
||||
writing code. Surface assumptions early.
|
||||
- **Atomic commits** — each commit is one logical change.
|
||||
- **Test coverage required** — every change to a testable module must include
|
||||
or update tests in `tests/`. Run with `python3 -m unittest discover -s tests/`.
|
||||
All tests must pass before merging. Modules exempt from unit testing:
|
||||
`ai.py` (requires live API), `ast_parser.py` (requires tree-sitter),
|
||||
`watch.py` (stateful events), `prompts.py` (string templates only).
|
||||
- **Shiny object capture** — new ideas go to PLAN.md (Raw Thoughts) or a
|
||||
Forgejo issue, not into current work.
|
||||
|
||||
|
|
|
|||
0
tests/__init__.py
Normal file
0
tests/__init__.py
Normal file
239
tests/test_cache.py
Normal file
239
tests/test_cache.py
Normal file
|
|
@ -0,0 +1,239 @@
|
|||
"""Tests for luminos_lib/cache.py"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import tempfile
|
||||
import unittest
|
||||
from datetime import datetime, timezone
|
||||
from unittest.mock import patch
|
||||
|
||||
from luminos_lib.cache import (
|
||||
_CacheManager,
|
||||
_sha256_path,
|
||||
_get_investigation_id,
|
||||
CACHE_ROOT,
|
||||
)
|
||||
|
||||
|
||||
def _now():
|
||||
return datetime.now(timezone.utc).isoformat()
|
||||
|
||||
|
||||
def _make_manager(root):
    """Build a _CacheManager wired to a throwaway cache tree under *root*.

    Bypasses __init__ via __new__, so no real investigation bookkeeping
    runs; every attribute the manager reads is assigned by hand here.
    """
    manager = _CacheManager.__new__(_CacheManager)
    manager.investigation_id = "test-id"
    manager.target = root
    cache_root = os.path.join(root, "cache")
    manager.root = cache_root
    manager.files_dir = os.path.join(cache_root, "files")
    manager.dirs_dir = os.path.join(cache_root, "dirs")
    manager.log_path = os.path.join(cache_root, "investigation.log")
    manager.meta_path = os.path.join(cache_root, "meta.json")
    for subdir in (manager.files_dir, manager.dirs_dir):
        os.makedirs(subdir, exist_ok=True)
    return manager
|
||||
|
||||
|
||||
def _file_entry(**overrides):
    """Return a minimal valid file cache entry; keyword args override fields."""
    entry = dict(
        path="/tmp/foo.py",
        relative_path="foo.py",
        summary="A Python file.",
        cached_at=_now(),
        size_bytes=128,
        category="source",
    )
    entry.update(overrides)
    return entry
|
||||
|
||||
|
||||
def _dir_entry(**overrides):
    """Return a minimal valid directory cache entry; keyword args override fields."""
    entry = dict(
        path="/tmp/mydir",
        relative_path="mydir",
        summary="A directory.",
        cached_at=_now(),
        child_count=3,
        dominant_category="source",
    )
    entry.update(overrides)
    return entry
|
||||
|
||||
|
||||
class TestSha256Path(unittest.TestCase):
    """Behaviour of the path-hashing helper."""

    def test_deterministic(self):
        """Hashing the same path twice yields the same digest."""
        first = _sha256_path("/foo/bar")
        second = _sha256_path("/foo/bar")
        self.assertEqual(first, second)

    def test_different_paths_differ(self):
        """Distinct paths must not produce the same digest."""
        self.assertNotEqual(_sha256_path("/foo/bar"), _sha256_path("/foo/baz"))

    def test_returns_hex_string(self):
        """The digest is a 64-character string (SHA-256 hex length)."""
        digest = _sha256_path("/foo")
        self.assertIsInstance(digest, str)
        self.assertEqual(64, len(digest))
|
||||
|
||||
|
||||
class TestWriteEntry(unittest.TestCase):
    """Validation behaviour of _CacheManager.write_entry.

    Each test gets an isolated manager rooted in a temporary directory.
    Fixed: the mkdtemp() directory was never removed; the fixture now
    registers cleanup so test runs do not leak temp trees.
    """

    def setUp(self):
        # TemporaryDirectory + addCleanup removes the tree after each test.
        tmp = tempfile.TemporaryDirectory()
        self.addCleanup(tmp.cleanup)
        self.tmpdir = tmp.name
        self.cm = _make_manager(self.tmpdir)

    def test_valid_file_entry(self):
        """A complete file entry is accepted with "ok"."""
        result = self.cm.write_entry("file", "/tmp/foo.py", _file_entry())
        self.assertEqual(result, "ok")

    def test_valid_dir_entry(self):
        """A complete directory entry is accepted with "ok"."""
        result = self.cm.write_entry("dir", "/tmp/mydir", _dir_entry())
        self.assertEqual(result, "ok")

    def test_missing_required_field_file(self):
        """Dropping a required file field yields an error naming the field."""
        entry = _file_entry()
        del entry["summary"]
        result = self.cm.write_entry("file", "/tmp/foo.py", entry)
        self.assertIn("Error", result)
        self.assertIn("summary", result)

    def test_missing_required_field_dir(self):
        """Dropping a required dir field yields an error naming the field."""
        entry = _dir_entry()
        del entry["child_count"]
        result = self.cm.write_entry("dir", "/tmp/mydir", entry)
        self.assertIn("Error", result)
        self.assertIn("child_count", result)

    def test_raw_content_rejected(self):
        """Entries may not smuggle raw file content into the cache."""
        entry = _file_entry(content="raw file data")
        result = self.cm.write_entry("file", "/tmp/foo.py", entry)
        self.assertIn("Error", result)

    def test_valid_confidence(self):
        """A confidence inside (0, 1) with a reason string is accepted."""
        entry = _file_entry(confidence=0.85, confidence_reason="")
        result = self.cm.write_entry("file", "/tmp/foo.py", entry)
        self.assertEqual(result, "ok")

    def test_confidence_zero(self):
        """confidence=0.0 is a legal boundary value."""
        entry = _file_entry(confidence=0.0, confidence_reason="completely unknown")
        result = self.cm.write_entry("file", "/tmp/foo.py", entry)
        self.assertEqual(result, "ok")

    def test_confidence_one(self):
        """confidence=1.0 is a legal boundary value."""
        entry = _file_entry(confidence=1.0)
        result = self.cm.write_entry("file", "/tmp/foo.py", entry)
        self.assertEqual(result, "ok")

    def test_confidence_out_of_range_high(self):
        """confidence above 1.0 is rejected with an error naming the field."""
        entry = _file_entry(confidence=1.5)
        result = self.cm.write_entry("file", "/tmp/foo.py", entry)
        self.assertIn("Error", result)
        self.assertIn("confidence", result)

    def test_confidence_out_of_range_low(self):
        """Negative confidence is rejected."""
        entry = _file_entry(confidence=-0.1)
        result = self.cm.write_entry("file", "/tmp/foo.py", entry)
        self.assertIn("Error", result)

    def test_confidence_wrong_type(self):
        """Non-numeric confidence is rejected."""
        entry = _file_entry(confidence="high")
        result = self.cm.write_entry("file", "/tmp/foo.py", entry)
        self.assertIn("Error", result)

    def test_confidence_reason_wrong_type(self):
        """confidence_reason must be a string when provided."""
        entry = _file_entry(confidence=0.5, confidence_reason=42)
        result = self.cm.write_entry("file", "/tmp/foo.py", entry)
        self.assertIn("Error", result)

    def test_confidence_without_reason_is_ok(self):
        """A confidence value without an accompanying reason is accepted."""
        entry = _file_entry(confidence=0.9)
        result = self.cm.write_entry("file", "/tmp/foo.py", entry)
        self.assertEqual(result, "ok")

    def test_written_file_is_valid_json(self):
        """A written entry can be read back with its fields intact."""
        entry = _file_entry()
        self.cm.write_entry("file", "/tmp/foo.py", entry)
        stored = self.cm.read_entry("file", "/tmp/foo.py")
        self.assertIsNotNone(stored)
        self.assertEqual(stored["summary"], "A Python file.")
|
||||
|
||||
|
||||
class TestReadEntry(unittest.TestCase):
    """Round-trip and lookup behaviour of read_entry / has_entry.

    Fixed: the mkdtemp() temp directory was never removed; the fixture now
    registers cleanup so test runs do not leak temp trees.
    """

    def setUp(self):
        tmp = tempfile.TemporaryDirectory()
        self.addCleanup(tmp.cleanup)
        self.tmpdir = tmp.name
        self.cm = _make_manager(self.tmpdir)

    def test_read_after_write(self):
        """write_entry followed by read_entry returns the stored summary."""
        entry = _file_entry(summary="Hello world")
        self.cm.write_entry("file", "/tmp/foo.py", entry)
        result = self.cm.read_entry("file", "/tmp/foo.py")
        self.assertEqual(result["summary"], "Hello world")

    def test_read_missing_returns_none(self):
        """Reading a never-written path yields None, not an error."""
        result = self.cm.read_entry("file", "/tmp/nonexistent.py")
        self.assertIsNone(result)

    def test_has_entry_true(self):
        """has_entry reports True after a successful write."""
        self.cm.write_entry("file", "/tmp/foo.py", _file_entry())
        self.assertTrue(self.cm.has_entry("file", "/tmp/foo.py"))

    def test_has_entry_false(self):
        """has_entry reports False for unknown paths."""
        self.assertFalse(self.cm.has_entry("file", "/tmp/missing.py"))
|
||||
|
||||
|
||||
class TestListEntries(unittest.TestCase):
    """Enumeration of cached entries.

    Fixed: the mkdtemp() temp directory was never removed; the fixture now
    registers cleanup so test runs do not leak temp trees.
    """

    def setUp(self):
        tmp = tempfile.TemporaryDirectory()
        self.addCleanup(tmp.cleanup)
        self.tmpdir = tmp.name
        self.cm = _make_manager(self.tmpdir)

    def test_empty(self):
        """A fresh cache lists no entries."""
        self.assertEqual(self.cm.list_entries("file"), [])

    def test_lists_relative_paths(self):
        """list_entries reports entries by their relative paths."""
        self.cm.write_entry("file", "/tmp/a.py", _file_entry(path="/tmp/a.py", relative_path="a.py"))
        self.cm.write_entry("file", "/tmp/b.py", _file_entry(path="/tmp/b.py", relative_path="b.py"))
        entries = self.cm.list_entries("file")
        self.assertIn("a.py", entries)
        self.assertIn("b.py", entries)

    def test_read_all_entries_returns_dicts(self):
        """read_all_entries yields the stored entry dicts."""
        self.cm.write_entry("file", "/tmp/a.py", _file_entry(path="/tmp/a.py", relative_path="a.py"))
        result = self.cm.read_all_entries("file")
        self.assertEqual(len(result), 1)
        self.assertIsInstance(result[0], dict)
|
||||
|
||||
|
||||
class TestGetInvestigationId(unittest.TestCase):
    """Identity assignment for investigation cache directories.

    Fixed: both tests manually saved, mutated and restored the cache
    module's CACHE_ROOT / INVESTIGATIONS_PATH globals with try/finally,
    which is duplicated and error-prone. They now use patch.object
    (the module-level `patch` import), which restores on exit even if
    an assertion fails mid-block.
    """

    def test_same_target_same_id(self):
        """A target whose cache dir already exists keeps its id."""
        with tempfile.TemporaryDirectory() as d:
            from luminos_lib import cache as c
            with patch.object(c, "CACHE_ROOT", d), \
                 patch.object(c, "INVESTIGATIONS_PATH", os.path.join(d, "investigations.json")):
                id1, _ = _get_investigation_id(d)
                # _get_investigation_id checks the cache dir exists before reusing
                os.makedirs(os.path.join(d, id1), exist_ok=True)
                id2, new = _get_investigation_id(d)
                self.assertEqual(id1, id2)
                self.assertFalse(new)

    def test_fresh_flag_creates_new_id(self):
        """fresh=True must mint a new id even when one already exists."""
        with tempfile.TemporaryDirectory() as d:
            from luminos_lib import cache as c
            with patch.object(c, "CACHE_ROOT", d), \
                 patch.object(c, "INVESTIGATIONS_PATH", os.path.join(d, "investigations.json")):
                os.makedirs(os.path.join(d, "someid"), exist_ok=True)
                id1, _ = _get_investigation_id(d)
                os.makedirs(os.path.join(d, id1), exist_ok=True)
                id2, new = _get_investigation_id(d, fresh=True)
                self.assertNotEqual(id1, id2)
                self.assertTrue(new)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this test module directly: python tests/test_cache.py
    unittest.main()
|
||||
37
tests/test_capabilities.py
Normal file
37
tests/test_capabilities.py
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
"""Tests for luminos_lib/capabilities.py"""
|
||||
|
||||
import unittest
|
||||
from unittest.mock import patch
|
||||
|
||||
from luminos_lib.capabilities import _check_package
|
||||
|
||||
|
||||
class TestCheckPackage(unittest.TestCase):
    """Behaviour of the _check_package import probe."""

    def test_importable_package(self):
        """A stdlib module that always exists reports as available."""
        # json is always available in stdlib
        self.assertTrue(_check_package("json"))

    def test_missing_package(self):
        """An obviously nonexistent package reports as unavailable."""
        self.assertFalse(_check_package("_luminos_nonexistent_package_xyz"))

    def test_importable_returns_true(self):
        """Another real stdlib module also reports as available."""
        # Fixed: removed a dead `with patch("builtins.__import__", ...): pass`
        # block that patched __import__ and immediately discarded the patch
        # without exercising anything.
        self.assertTrue(_check_package("os"))

    def test_import_error_returns_false(self):
        """An ImportError raised during import maps to False."""
        import builtins
        original_import = builtins.__import__

        def fake_import(name, *args, **kwargs):
            # Only the probed module fails; everything else imports normally.
            if name == "_fake_missing_module":
                raise ImportError("No module named '_fake_missing_module'")
            return original_import(name, *args, **kwargs)

        with patch("builtins.__import__", side_effect=fake_import):
            self.assertFalse(_check_package("_fake_missing_module"))
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this test module directly: python tests/test_capabilities.py
    unittest.main()
|
||||
132
tests/test_code.py
Normal file
132
tests/test_code.py
Normal file
|
|
@ -0,0 +1,132 @@
|
|||
"""Tests for luminos_lib/code.py"""
|
||||
|
||||
import unittest
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
from luminos_lib.code import (
|
||||
LANG_EXTENSIONS,
|
||||
LARGE_LINE_THRESHOLD,
|
||||
LARGE_SIZE_THRESHOLD,
|
||||
_count_lines,
|
||||
detect_languages,
|
||||
find_large_files,
|
||||
)
|
||||
|
||||
|
||||
def _make_file_record(name, category="source", size=100):
|
||||
return {"name": name, "path": f"/tmp/{name}", "category": category, "size": size}
|
||||
|
||||
|
||||
class TestCountLines(unittest.TestCase):
    """_count_lines parses `wc -l` output; subprocess is mocked throughout."""

    def test_returns_line_count(self):
        """A successful wc run yields its reported line count."""
        fake = MagicMock(returncode=0, stdout="42 /tmp/foo.py\n")
        with patch("subprocess.run", return_value=fake):
            self.assertEqual(42, _count_lines("/tmp/foo.py"))

    def test_returns_zero_on_failure(self):
        """A non-zero exit status degrades to a count of 0."""
        fake = MagicMock(returncode=1, stdout="")
        with patch("subprocess.run", return_value=fake):
            self.assertEqual(0, _count_lines("/tmp/foo.py"))

    def test_returns_zero_on_timeout(self):
        """A timed-out wc invocation degrades to a count of 0."""
        import subprocess
        expired = subprocess.TimeoutExpired("wc", 10)
        with patch("subprocess.run", side_effect=expired):
            self.assertEqual(0, _count_lines("/tmp/foo.py"))

    def test_returns_zero_on_file_not_found(self):
        """A missing wc binary degrades to a count of 0."""
        with patch("subprocess.run", side_effect=FileNotFoundError):
            self.assertEqual(0, _count_lines("/tmp/foo.py"))
|
||||
|
||||
|
||||
class TestDetectLanguages(unittest.TestCase):
    """Language detection over classified file records (subprocess mocked)."""

    def _mock_lines(self, n):
        """Fake a successful `wc -l` run reporting *n* lines."""
        return MagicMock(returncode=0, stdout=f"{n} /tmp/file\n")

    def test_detects_python(self):
        """A .py record is detected as Python with its line count."""
        records = [_make_file_record("main.py")]
        with patch("subprocess.run", return_value=self._mock_lines(50)):
            langs, loc = detect_languages(records)
        self.assertIn("Python", langs)
        self.assertEqual(50, loc["Python"])

    def test_ignores_non_source_files(self):
        """Records outside the source category do not become languages."""
        records = [
            _make_file_record("main.py", category="source"),
            _make_file_record("config.json", category="config"),
        ]
        with patch("subprocess.run", return_value=self._mock_lines(10)):
            langs, loc = detect_languages(records)
        self.assertNotIn("config.json", str(langs))
        self.assertEqual(1, len(langs))

    def test_multiple_languages(self):
        """Mixed extensions produce one language entry each."""
        records = [_make_file_record("main.py"), _make_file_record("app.js")]
        with patch("subprocess.run", return_value=self._mock_lines(20)):
            langs, loc = detect_languages(records)
        for expected in ("Python", "JavaScript"):
            self.assertIn(expected, langs)

    def test_unknown_extension_maps_to_other(self):
        """An unmapped extension lands in the Other bucket."""
        records = [_make_file_record("script.xyz")]
        with patch("subprocess.run", return_value=self._mock_lines(5)):
            langs, loc = detect_languages(records)
        self.assertIn("Other", langs)

    def test_empty_input(self):
        """No records means no languages and no line counts."""
        langs, loc = detect_languages([])
        self.assertEqual([], langs)
        self.assertEqual({}, loc)

    def test_on_file_callback(self):
        """The on_file callback fires once per record."""
        records = [_make_file_record("a.py"), _make_file_record("b.py")]
        visited = []
        with patch("subprocess.run", return_value=self._mock_lines(10)):
            detect_languages(records, on_file=visited.append)
        self.assertEqual(2, len(visited))

    def test_loc_accumulates_across_files(self):
        """Line counts sum across files of the same language."""
        records = [_make_file_record("a.py"), _make_file_record("b.py")]
        with patch("subprocess.run", return_value=self._mock_lines(100)):
            langs, loc = detect_languages(records)
        self.assertEqual(200, loc["Python"])
|
||||
|
||||
|
||||
class TestFindLargeFiles(unittest.TestCase):
    """Flagging of oversized source files by line count or byte size."""

    def test_large_by_lines(self):
        """A file past the line threshold is flagged with a 'lines' reason."""
        records = [_make_file_record("big.py", size=100)]
        wc = MagicMock(returncode=0, stdout=f"{LARGE_LINE_THRESHOLD + 1} /tmp/big.py\n")
        with patch("subprocess.run", return_value=wc):
            flagged = find_large_files(records)
        self.assertEqual(1, len(flagged))
        self.assertEqual("big.py", flagged[0]["name"])
        self.assertTrue(any("lines" in reason for reason in flagged[0]["reasons"]))

    def test_large_by_size(self):
        """A file past the byte threshold is flagged with a 'size' reason."""
        records = [_make_file_record("huge.py", size=LARGE_SIZE_THRESHOLD + 1)]
        wc = MagicMock(returncode=0, stdout="10 /tmp/huge.py\n")
        with patch("subprocess.run", return_value=wc):
            flagged = find_large_files(records)
        self.assertEqual(1, len(flagged))
        self.assertTrue(any("size" in reason for reason in flagged[0]["reasons"]))

    def test_normal_file_not_flagged(self):
        """A file under both thresholds is not flagged."""
        records = [_make_file_record("small.py", size=500)]
        wc = MagicMock(returncode=0, stdout="50 /tmp/small.py\n")
        with patch("subprocess.run", return_value=wc):
            self.assertEqual([], find_large_files(records))

    def test_ignores_non_source(self):
        """Non-source records are skipped even when huge."""
        records = [_make_file_record("data.csv", category="data", size=LARGE_SIZE_THRESHOLD + 1)]
        self.assertEqual([], find_large_files(records))

    def test_empty_input(self):
        """No records yields no flags."""
        self.assertEqual([], find_large_files([]))
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this test module directly: python tests/test_code.py
    unittest.main()
|
||||
105
tests/test_disk.py
Normal file
105
tests/test_disk.py
Normal file
|
|
@ -0,0 +1,105 @@
|
|||
"""Tests for luminos_lib/disk.py"""
|
||||
|
||||
import unittest
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
from luminos_lib.disk import _human_size, top_directories, get_disk_usage
|
||||
|
||||
|
||||
class TestHumanSize(unittest.TestCase):
    """Rendering byte counts as human-readable size strings."""

    def test_bytes(self):
        """Sub-kilobyte values stay in bytes."""
        self.assertEqual("512 B", _human_size(512))

    def test_kilobytes(self):
        """Exactly one kibibyte renders as 1.0 KB."""
        self.assertEqual("1.0 KB", _human_size(1024))

    def test_megabytes(self):
        """Exactly one mebibyte renders as 1.0 MB."""
        one_mb = 1024 * 1024
        self.assertEqual("1.0 MB", _human_size(one_mb))

    def test_gigabytes(self):
        """Exactly one gibibyte renders as 1.0 GB."""
        self.assertEqual("1.0 GB", _human_size(1024 ** 3))

    def test_terabytes(self):
        """Exactly one tebibyte renders as 1.0 TB."""
        self.assertEqual("1.0 TB", _human_size(1024 ** 4))

    def test_zero_bytes(self):
        """Zero is rendered in bytes."""
        self.assertEqual("0 B", _human_size(0))

    def test_fractional_kb(self):
        """1536 bytes is exactly 1.5 KB."""
        rendered = _human_size(1536)
        self.assertEqual("1.5 KB", rendered)
|
||||
|
||||
|
||||
class TestTopDirectories(unittest.TestCase):
    """Selection of the N largest directory entries."""

    def _entries(self, sizes):
        """Build synthetic du-style records for the given byte sizes."""
        return [
            {"path": f"/dir{idx}", "size_bytes": nbytes, "size_human": _human_size(nbytes)}
            for idx, nbytes in enumerate(sizes)
        ]

    def test_returns_top_n(self):
        """The n largest entries come back in descending size order."""
        ranked = top_directories(self._entries([100, 500, 200, 800, 300, 50]), n=3)
        self.assertEqual([800, 500, 300], [row["size_bytes"] for row in ranked])

    def test_fewer_than_n_entries(self):
        """Asking for more entries than exist returns them all."""
        ranked = top_directories(self._entries([100, 200]), n=5)
        self.assertEqual(2, len(ranked))

    def test_empty(self):
        """No entries yields an empty result."""
        self.assertEqual([], top_directories([], n=5))

    def test_default_n_is_five(self):
        """With n unspecified, five entries are returned."""
        ranked = top_directories(self._entries([i * 100 for i in range(10)]))
        self.assertEqual(5, len(ranked))
|
||||
|
||||
|
||||
class TestGetDiskUsage(unittest.TestCase):
    """Parsing and filtering of `du` output (subprocess mocked)."""

    def _mock_du(self, output, returncode=0):
        """Fake a `du` invocation producing *output*."""
        return MagicMock(returncode=returncode, stdout=output)

    def test_parses_du_output(self):
        """Tab-separated du lines become size/path records."""
        listing = "4096\t/tmp/mydir\n1024\t/tmp/mydir/sub\n"
        with patch("subprocess.run", return_value=self._mock_du(listing)):
            usage = get_disk_usage("/tmp/mydir")
        self.assertEqual(2, len(usage))
        self.assertEqual(4096, usage[0]["size_bytes"])
        self.assertEqual("/tmp/mydir", usage[0]["path"])

    def test_skips_hidden_dirs_by_default(self):
        """Dot-directories are filtered out unless requested."""
        listing = "1024\t/tmp/mydir/.git\n2048\t/tmp/mydir\n"
        with patch("subprocess.run", return_value=self._mock_du(listing)):
            usage = get_disk_usage("/tmp/mydir")
        self.assertNotIn("/tmp/mydir/.git", [row["path"] for row in usage])

    def test_includes_hidden_dirs_when_requested(self):
        """show_hidden=True keeps dot-directories in the result."""
        listing = "1024\t/tmp/mydir/.git\n2048\t/tmp/mydir\n"
        with patch("subprocess.run", return_value=self._mock_du(listing)):
            usage = get_disk_usage("/tmp/mydir", show_hidden=True)
        self.assertIn("/tmp/mydir/.git", [row["path"] for row in usage])

    def test_timeout_returns_empty(self):
        """A timed-out du invocation degrades to an empty result."""
        import subprocess
        with patch("subprocess.run", side_effect=subprocess.TimeoutExpired("du", 30)):
            self.assertEqual([], get_disk_usage("/tmp/mydir"))

    def test_file_not_found_returns_empty(self):
        """A missing du binary degrades to an empty result."""
        with patch("subprocess.run", side_effect=FileNotFoundError):
            self.assertEqual([], get_disk_usage("/tmp/mydir"))

    def test_size_human_is_populated(self):
        """Each record carries a human-readable size string."""
        listing = "1048576\t/tmp/mydir\n"
        with patch("subprocess.run", return_value=self._mock_du(listing)):
            usage = get_disk_usage("/tmp/mydir")
        self.assertEqual("1.0 MB", usage[0]["size_human"])
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this test module directly: python tests/test_disk.py
    unittest.main()
|
||||
144
tests/test_filetypes.py
Normal file
144
tests/test_filetypes.py
Normal file
|
|
@ -0,0 +1,144 @@
|
|||
"""Tests for luminos_lib/filetypes.py"""
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
import unittest
|
||||
from unittest.mock import patch
|
||||
|
||||
from luminos_lib.filetypes import (
|
||||
EXTENSION_MAP,
|
||||
_classify_one,
|
||||
classify_files,
|
||||
summarize_categories,
|
||||
)
|
||||
|
||||
|
||||
class TestExtensionMap(unittest.TestCase):
    """Spot-checks of the extension-to-category lookup table."""

    def test_python_is_source(self):
        """.py files are categorised as source."""
        self.assertEqual("source", EXTENSION_MAP[".py"])

    def test_json_is_config(self):
        """.json files are categorised as config."""
        self.assertEqual("config", EXTENSION_MAP[".json"])

    def test_csv_is_data(self):
        """.csv files are categorised as data."""
        self.assertEqual("data", EXTENSION_MAP[".csv"])

    def test_png_is_media(self):
        """.png files are categorised as media."""
        self.assertEqual("media", EXTENSION_MAP[".png"])

    def test_md_is_document(self):
        """.md files are categorised as document."""
        self.assertEqual("document", EXTENSION_MAP[".md"])

    def test_zip_is_archive(self):
        """.zip files are categorised as archive."""
        self.assertEqual("archive", EXTENSION_MAP[".zip"])
|
||||
|
||||
|
||||
class TestClassifyOne(unittest.TestCase):
    """Single-file classification, including the `file` command fallback."""

    def test_known_extension(self):
        """A mapped extension classifies directly with no description."""
        category, desc = _classify_one("script.py")
        self.assertEqual("source", category)
        self.assertIsNone(desc)

    def test_known_extension_case_insensitive(self):
        """Extension lookup ignores case."""
        category, desc = _classify_one("image.PNG")
        self.assertEqual("media", category)
        self.assertIsNone(desc)

    def test_unknown_extension_falls_back_to_file_command(self):
        """Unmapped names are classified from the `file` command output."""
        with patch("luminos_lib.filetypes._file_command", return_value="ASCII text"):
            category, desc = _classify_one("README")
        self.assertEqual("source", category)
        self.assertEqual("ASCII text", desc)

    def test_unknown_extension_unrecognized_file_output(self):
        """Unhelpful `file` output lands in the unknown category."""
        with patch("luminos_lib.filetypes._file_command", return_value="data"):
            category, _ = _classify_one("somefile.xyz")
        self.assertEqual("unknown", category)

    def test_file_command_timeout_returns_unknown(self):
        """An empty `file` result (e.g. timeout) lands in unknown."""
        with patch("luminos_lib.filetypes._file_command", return_value=""):
            category, _ = _classify_one("oddfile")
        self.assertEqual("unknown", category)
|
||||
|
||||
|
||||
class TestSummarizeCategories(unittest.TestCase):
    """Counting classified files per category."""

    def test_empty(self):
        """No files means an empty summary."""
        self.assertEqual({}, summarize_categories([]))

    def test_single_category(self):
        """Two files of one category count as 2."""
        records = [{"category": "source"}, {"category": "source"}]
        self.assertEqual({"source": 2}, summarize_categories(records))

    def test_multiple_categories(self):
        """Counts are kept independently per category."""
        records = [
            {"category": "source"},
            {"category": "config"},
            {"category": "source"},
            {"category": "media"},
        ]
        counts = summarize_categories(records)
        self.assertEqual(2, counts["source"])
        self.assertEqual(1, counts["config"])
        self.assertEqual(1, counts["media"])
|
||||
|
||||
|
||||
class TestClassifyFiles(unittest.TestCase):
    """Directory scanning and classification on a real temp tree.

    Fixed: the mkdtemp() temp directory was never removed after the
    test run; the fixture now registers cleanup so no temp trees leak.
    """

    def setUp(self):
        tmp = tempfile.TemporaryDirectory()
        self.addCleanup(tmp.cleanup)
        self.tmpdir = tmp.name

    def _make_file(self, name, content=""):
        """Create *name* under the temp tree with *content*; return its path."""
        path = os.path.join(self.tmpdir, name)
        with open(path, "w") as f:
            f.write(content)
        return path

    def test_classifies_python_file(self):
        """A .py file is found and categorised as source."""
        self._make_file("script.py", "print('hello')")
        results = classify_files(self.tmpdir)
        names = [r["name"] for r in results]
        self.assertIn("script.py", names)
        py = next(r for r in results if r["name"] == "script.py")
        self.assertEqual(py["category"], "source")

    def test_excludes_hidden_files_by_default(self):
        """Dotfiles are skipped unless show_hidden is set."""
        self._make_file(".hidden.py")
        self._make_file("visible.py")
        results = classify_files(self.tmpdir)
        names = [r["name"] for r in results]
        self.assertNotIn(".hidden.py", names)
        self.assertIn("visible.py", names)

    def test_includes_hidden_files_when_requested(self):
        """show_hidden=True includes dotfiles."""
        self._make_file(".hidden.py")
        results = classify_files(self.tmpdir, show_hidden=True)
        names = [r["name"] for r in results]
        self.assertIn(".hidden.py", names)

    def test_excludes_directories(self):
        """Files inside excluded directories are not scanned."""
        excluded_dir = os.path.join(self.tmpdir, "node_modules")
        os.makedirs(excluded_dir)
        with open(os.path.join(excluded_dir, "pkg.js"), "w") as f:
            f.write("")
        self._make_file("main.py")
        results = classify_files(self.tmpdir, exclude=["node_modules"])
        names = [r["name"] for r in results]
        self.assertNotIn("pkg.js", names)
        self.assertIn("main.py", names)

    def test_on_file_callback(self):
        """The on_file callback fires once per scanned file."""
        self._make_file("a.py")
        self._make_file("b.py")
        seen = []
        classify_files(self.tmpdir, on_file=seen.append)
        self.assertEqual(len(seen), 2)

    def test_size_is_populated(self):
        """Each record carries the on-disk byte size."""
        self._make_file("data.json", '{"key": "value"}')
        results = classify_files(self.tmpdir)
        item = next(r for r in results if r["name"] == "data.json")
        self.assertGreater(item["size"], 0)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this test module directly: python tests/test_filetypes.py
    unittest.main()
|
||||
75
tests/test_recency.py
Normal file
75
tests/test_recency.py
Normal file
|
|
@ -0,0 +1,75 @@
|
|||
"""Tests for luminos_lib/recency.py"""
|
||||
|
||||
import unittest
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
from luminos_lib.recency import find_recent_files
|
||||
|
||||
|
||||
class TestFindRecentFiles(unittest.TestCase):
    """Recency scanning via a mocked `find` invocation."""

    def _mock_find(self, lines):
        """Fake a successful find run printing the given mtime<TAB>path lines."""
        return MagicMock(returncode=0, stdout="\n".join(lines))

    def test_returns_sorted_by_recency(self):
        """Results come back newest-first regardless of input order."""
        listing = [
            "1000.0\t/tmp/old.py",
            "2000.0\t/tmp/new.py",
            "1500.0\t/tmp/mid.py",
        ]
        with patch("subprocess.run", return_value=self._mock_find(listing)):
            recent = find_recent_files("/tmp")
        self.assertEqual(["new.py", "mid.py", "old.py"], [row["name"] for row in recent])

    def test_limits_to_n(self):
        """At most n entries are returned."""
        listing = [f"{i}.0\t/tmp/file{i}.py" for i in range(20)]
        with patch("subprocess.run", return_value=self._mock_find(listing)):
            self.assertEqual(5, len(find_recent_files("/tmp", n=5)))

    def test_entry_fields(self):
        """Each entry carries name, path and both mtime representations."""
        listing = ["1700000000.0\t/tmp/subdir/script.py"]
        with patch("subprocess.run", return_value=self._mock_find(listing)):
            recent = find_recent_files("/tmp")
        self.assertEqual(1, len(recent))
        row = recent[0]
        self.assertEqual("script.py", row["name"])
        self.assertEqual("/tmp/subdir/script.py", row["path"])
        self.assertIsInstance(row["modified"], float)
        self.assertIsInstance(row["modified_human"], str)

    def test_timeout_returns_empty(self):
        """A timed-out find invocation degrades to an empty result."""
        import subprocess
        with patch("subprocess.run", side_effect=subprocess.TimeoutExpired("find", 30)):
            self.assertEqual([], find_recent_files("/tmp"))

    def test_file_not_found_returns_empty(self):
        """A missing find binary degrades to an empty result."""
        with patch("subprocess.run", side_effect=FileNotFoundError):
            self.assertEqual([], find_recent_files("/tmp"))

    def test_nonzero_returncode_returns_empty(self):
        """A failing find run yields no entries."""
        failed = MagicMock(returncode=1, stdout="")
        with patch("subprocess.run", return_value=failed):
            self.assertEqual([], find_recent_files("/tmp"))

    def test_empty_output_returns_empty(self):
        """A successful run with no output yields no entries."""
        blank = MagicMock(returncode=0, stdout="")
        with patch("subprocess.run", return_value=blank):
            self.assertEqual([], find_recent_files("/tmp"))

    def test_malformed_lines_skipped(self):
        """Lines that do not parse as mtime<TAB>path are ignored."""
        listing = ["notvalid", "1000.0\t/tmp/good.py", "alsoinvalid"]
        with patch("subprocess.run", return_value=self._mock_find(listing)):
            recent = find_recent_files("/tmp")
        self.assertEqual(1, len(recent))
        self.assertEqual("good.py", recent[0]["name"])
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this test module directly: python tests/test_recency.py
    unittest.main()
|
||||
150
tests/test_report.py
Normal file
150
tests/test_report.py
Normal file
|
|
@ -0,0 +1,150 @@
|
|||
"""Tests for luminos_lib/report.py"""
|
||||
|
||||
import unittest
|
||||
|
||||
from luminos_lib.report import format_flags, format_report
|
||||
|
||||
|
||||
class TestFormatFlags(unittest.TestCase):
    """Rendering of review flags into report text."""

    def test_empty_returns_empty_string(self):
        """Both an empty list and None render as the empty string."""
        self.assertEqual("", format_flags([]))
        self.assertEqual("", format_flags(None))

    def test_single_flag(self):
        """A flag's severity, path and finding all appear in the output."""
        flags = [{"severity": "concern", "path": "main.py", "finding": "Hardcoded secret"}]
        rendered = format_flags(flags)
        for fragment in ("CONCERN", "main.py", "Hardcoded secret"):
            self.assertIn(fragment, rendered)

    def test_severity_ordering(self):
        """Critical flags print before concerns, which print before info."""
        flags = [
            {"severity": "info", "path": "a.py", "finding": "note"},
            {"severity": "critical", "path": "b.py", "finding": "bad"},
            {"severity": "concern", "path": "c.py", "finding": "watch"},
        ]
        rendered = format_flags(flags)
        pos_critical = rendered.index("CRITICAL")
        pos_concern = rendered.index("CONCERN")
        pos_info = rendered.index("INFO")
        self.assertLess(pos_critical, pos_concern)
        self.assertLess(pos_concern, pos_info)

    def test_unknown_severity_defaults_to_info_order(self):
        """An unrecognised severity still renders (upper-cased)."""
        flags = [{"severity": "weird", "path": "x.py", "finding": "something"}]
        self.assertIn("WEIRD", format_flags(flags))

    def test_missing_path_defaults_to_general(self):
        """A flag without a path falls back to a general label."""
        # NOTE(review): "general" also occurs in the finding text, so this
        # assertion cannot distinguish the fallback label from the finding
        # itself — consider asserting the exact placeholder. TODO confirm.
        flags = [{"severity": "info", "finding": "general note"}]
        self.assertIn("general", format_flags(flags))

    def test_flags_header_present(self):
        """The FLAGS section header is emitted when any flag exists."""
        flags = [{"severity": "info", "path": "x.py", "finding": "ok"}]
        self.assertIn("FLAGS", format_flags(flags))
|
||||
|
||||
|
||||
class TestFormatReport(unittest.TestCase):
    """Section-presence and fallback-message tests for format_report."""

    def _minimal_report(self):
        """Build the smallest report dict that format_report accepts."""
        recent = [
            {"modified_human": "2026-04-06 10:00:00", "name": "main.py", "path": "/tmp/main.py"}
        ]
        top_dirs = [{"size_human": "10.0 KB", "path": "/tmp/mydir"}]
        return {
            "tree_rendered": "mydir/\n file.py",
            "file_categories": {"source": 2, "config": 1},
            "languages": ["Python"],
            "lines_of_code": {"Python": 150},
            "large_files": [],
            "recent_files": recent,
            "top_directories": top_dirs,
        }

    def test_header_contains_target(self):
        """The target path appears in the report header."""
        rendered = format_report(self._minimal_report(), "/tmp/mydir")
        self.assertIn("/tmp/mydir", rendered)

    def test_file_type_section(self):
        """Every file category from the report shows up in the output."""
        rendered = format_report(self._minimal_report(), "/tmp")
        for category in ("source", "config"):
            self.assertIn(category, rendered)

    def test_languages_section(self):
        """Language names and their line counts are rendered."""
        rendered = format_report(self._minimal_report(), "/tmp")
        self.assertIn("Python", rendered)
        self.assertIn("150", rendered)

    def test_recent_files_section(self):
        """Recent files are listed with name and modification date."""
        rendered = format_report(self._minimal_report(), "/tmp")
        self.assertIn("main.py", rendered)
        self.assertIn("2026-04-06", rendered)

    def test_disk_usage_section(self):
        """Top-directory sizes are shown in human-readable form."""
        self.assertIn("10.0 KB", format_report(self._minimal_report(), "/tmp"))

    def test_tree_rendered_included(self):
        """The pre-rendered tree string is embedded verbatim."""
        self.assertIn("mydir/", format_report(self._minimal_report(), "/tmp"))

    def test_no_source_files_message(self):
        """An empty language map yields the no-source fallback message."""
        rpt = self._minimal_report()
        rpt["languages"] = []
        rpt["lines_of_code"] = {}
        self.assertIn("No source code files detected", format_report(rpt, "/tmp"))

    def test_no_recent_files_message(self):
        """An empty recent-files list yields the no-recent fallback message."""
        rpt = self._minimal_report()
        rpt["recent_files"] = []
        self.assertIn("No recent files found", format_report(rpt, "/tmp"))

    def test_ai_brief_included_when_present(self):
        """An ai_brief entry produces the AI summary section."""
        rpt = self._minimal_report()
        rpt["ai_brief"] = "This is a Python project."
        rendered = format_report(rpt, "/tmp")
        self.assertIn("This is a Python project.", rendered)
        self.assertIn("SUMMARY (AI)", rendered)

    def test_ai_detailed_included_when_present(self):
        """An ai_detailed entry produces the detailed-analysis section."""
        rpt = self._minimal_report()
        rpt["ai_detailed"] = "Detailed breakdown here."
        rendered = format_report(rpt, "/tmp")
        self.assertIn("Detailed breakdown here.", rendered)
        self.assertIn("DETAILED AI ANALYSIS", rendered)

    def test_flags_included_when_provided(self):
        """Flags passed via the keyword argument are rendered into the report."""
        flag_list = [{"severity": "critical", "path": "secret.py", "finding": "API key exposed"}]
        rendered = format_report(self._minimal_report(), "/tmp", flags=flag_list)
        self.assertIn("API key exposed", rendered)

    def test_large_files_section(self):
        """Large files are listed with their names and size reasons."""
        rpt = self._minimal_report()
        rpt["large_files"] = [{"name": "big.py", "reasons": ["lines: 5000"]}]
        rendered = format_report(rpt, "/tmp")
        self.assertIn("big.py", rendered)
        self.assertIn("lines: 5000", rendered)

    def test_no_categories_message(self):
        """An empty category map yields the no-files fallback message."""
        rpt = self._minimal_report()
        rpt["file_categories"] = {}
        self.assertIn("No files found", format_report(rpt, "/tmp"))

    def test_total_loc_shown(self):
        """The per-language line counts are summed into a total."""
        rpt = self._minimal_report()
        rpt["lines_of_code"] = {"Python": 100, "JavaScript": 50}
        # 100 + 50 = 150 should appear as the aggregate.
        self.assertIn("150", format_report(rpt, "/tmp"))

    def test_report_ends_with_footer(self):
        """Every report closes with the standard footer line."""
        self.assertIn("End of report.", format_report(self._minimal_report(), "/tmp"))
|
||||
|
||||
|
||||
# Allow running this test module directly, without the unittest discover CLI.
if __name__ == "__main__":
    unittest.main()
|
||||
163
tests/test_tree.py
Normal file
163
tests/test_tree.py
Normal file
|
|
@ -0,0 +1,163 @@
|
|||
"""Tests for luminos_lib/tree.py"""
|
||||
|
||||
import os
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
from luminos_lib.tree import build_tree, render_tree, _human_size
|
||||
|
||||
|
||||
class TestHumanSize(unittest.TestCase):
    """Tests for the _human_size byte-count formatter."""

    def test_bytes(self):
        # Values below one KiB are reported as plain bytes.
        for raw, expected in ((0, "0 B"), (512, "512 B")):
            self.assertEqual(_human_size(raw), expected)

    def test_kilobytes(self):
        one_kib = 1024
        self.assertEqual(_human_size(one_kib), "1.0 KB")

    def test_megabytes(self):
        # 1 << 20 is exactly 1024 * 1024 bytes.
        self.assertEqual(_human_size(1 << 20), "1.0 MB")

    def test_fractional(self):
        # 1536 bytes is one and a half KiB.
        self.assertEqual(_human_size(1536), "1.5 KB")
|
||||
|
||||
|
||||
class TestBuildTree(unittest.TestCase):
    """Tests for build_tree against real directories on disk.

    Fix: the original setUp used tempfile.mkdtemp() with no tearDown, leaking
    one temporary directory per test run. TemporaryDirectory registered via
    addCleanup removes it even when a test fails or errors.
    """

    def setUp(self):
        tmp = tempfile.TemporaryDirectory()
        # addCleanup runs even if setUp of a subclass or the test body raises.
        self.addCleanup(tmp.cleanup)
        self.tmpdir = tmp.name

    def _create(self, structure):
        """Create files/dirs from a dict: {name: None=file, name: dict=dir}"""
        def _recurse(base, items):
            for name, content in items.items():
                path = os.path.join(base, name)
                if content is None:
                    # File node: write a single byte so size > 0.
                    with open(path, "w") as f:
                        f.write("x")
                else:
                    os.makedirs(path, exist_ok=True)
                    _recurse(path, content)
        _recurse(self.tmpdir, structure)

    def test_root_node_type(self):
        """The root node is a directory pointing at the scanned path."""
        tree = build_tree(self.tmpdir)
        self.assertEqual(tree["type"], "directory")
        self.assertEqual(tree["path"], self.tmpdir)

    def test_lists_files(self):
        """Plain files appear as children of the root."""
        self._create({"a.py": None, "b.py": None})
        tree = build_tree(self.tmpdir)
        names = {c["name"] for c in tree["children"]}
        self.assertIn("a.py", names)
        self.assertIn("b.py", names)

    def test_file_node_has_size(self):
        """File nodes carry a positive size field."""
        self._create({"hello.txt": None})
        tree = build_tree(self.tmpdir)
        f = next(c for c in tree["children"] if c["name"] == "hello.txt")
        self.assertIn("size", f)
        self.assertGreater(f["size"], 0)

    def test_hidden_files_excluded_by_default(self):
        """Dotfiles are skipped unless show_hidden is set."""
        self._create({".hidden": None, "visible.py": None})
        tree = build_tree(self.tmpdir)
        names = {c["name"] for c in tree["children"]}
        self.assertNotIn(".hidden", names)
        self.assertIn("visible.py", names)

    def test_hidden_files_included_when_requested(self):
        """show_hidden=True makes dotfiles visible."""
        self._create({".hidden": None})
        tree = build_tree(self.tmpdir, show_hidden=True)
        names = {c["name"] for c in tree["children"]}
        self.assertIn(".hidden", names)

    def test_exclude_directory(self):
        """Names in the exclude list are pruned from the tree."""
        self._create({"node_modules": {"pkg.js": None}, "main.py": None})
        tree = build_tree(self.tmpdir, exclude=["node_modules"])
        names = {c["name"] for c in tree["children"]}
        self.assertNotIn("node_modules", names)
        self.assertIn("main.py", names)

    def test_max_depth_truncates(self):
        """Directories beyond max_depth are marked truncated, not descended."""
        self._create({"a": {"b": {"c": {"deep.py": None}}}})
        tree = build_tree(self.tmpdir, max_depth=1)
        # depth 0 = root, depth 1 = "a", depth 2 would be "b" but truncated
        a = next(c for c in tree["children"] if c["name"] == "a")
        b = next(c for c in a["children"] if c["name"] == "b")
        self.assertTrue(b.get("truncated"))

    def test_nested_directory(self):
        """Subdirectories are directory nodes containing their own files."""
        self._create({"src": {"main.py": None}})
        tree = build_tree(self.tmpdir)
        src = next(c for c in tree["children"] if c["name"] == "src")
        self.assertEqual(src["type"], "directory")
        children = src["children"]
        self.assertTrue(any(c["name"] == "main.py" for c in children))
|
||||
|
||||
|
||||
class TestRenderTree(unittest.TestCase):
    """Tests for render_tree output on hand-built tree dicts."""

    def _simple_tree(self):
        """Return a root directory holding one file and one empty subdir."""
        file_node = {
            "name": "file.py",
            "type": "file",
            "path": "/tmp/mydir/file.py",
            "size": 1024,
        }
        dir_node = {
            "name": "subdir",
            "type": "directory",
            "path": "/tmp/mydir/subdir",
            "children": [],
        }
        return {
            "name": "mydir",
            "type": "directory",
            "path": "/tmp/mydir",
            "children": [file_node, dir_node],
        }

    def test_root_name_in_output(self):
        """The root directory name is rendered with a trailing slash."""
        self.assertIn("mydir/", render_tree(self._simple_tree()))

    def test_file_with_size_in_output(self):
        """File entries show both the name and a human-readable size."""
        output = render_tree(self._simple_tree())
        self.assertIn("file.py", output)
        self.assertIn("1.0 KB", output)

    def test_subdir_has_slash(self):
        """Nested directories are also suffixed with a slash."""
        self.assertIn("subdir/", render_tree(self._simple_tree()))

    def test_truncated_dir_shows_ellipsis(self):
        """A truncated directory node renders an ellipsis marker."""
        truncated = {"name": "deep", "type": "directory", "path": "/root/deep", "truncated": True}
        root = {"name": "root", "type": "directory", "path": "/root", "children": [truncated]}
        self.assertIn("...", render_tree(root))

    def test_permission_error_shown(self):
        """A node carrying an error field surfaces that error in the output."""
        locked = {
            "name": "locked",
            "type": "directory",
            "path": "/root/locked",
            "error": "permission denied",
            "children": [],
        }
        root = {"name": "root", "type": "directory", "path": "/root", "children": [locked]}
        self.assertIn("permission denied", render_tree(root))
|
||||
|
||||
|
||||
# Allow running this test module directly, without the unittest discover CLI.
if __name__ == "__main__":
    unittest.main()
|
||||
Loading…
Reference in a new issue