Dane Sabo cebf8c167a Initial umbrella repo: thesis + FRET pipeline + plant model with first controllers
Folds three previously-separate pieces into one preliminary-example repo
for the HAHACS thesis:

- thesis/ (submodule) → gitea Thesis.git — the PhD proposal
- fret-pipeline/ — FRET requirements to AIGER controller (was
  ~/Documents/fret_processing/; prior single-commit history abandoned
  per user decision)
- plant-model/ — 10-state PKE + lumped T/H PWR model (was
  ~/Documents/PKE_Playground/; never version-controlled before)
- presentations/2026DICE/ (submodule) → gitea 2026DICE.git
- reachability/, hardware/ — empty placeholders for Thrust 3 and HIL
- docs/architecture.md — how the discrete and continuous layers compose
- claude_memory/ — session notes and scratch knowledge patterns

Plant model refactored to thesis naming (x, plant, u, ref); pke_th_rhs
now takes u as an explicit arg instead of reading rho_ext from the
params struct. First two controllers built to the contract
u = ctrl_<mode>(t, x, plant, ref): ctrl_null (baseline) and
ctrl_operation (stabilizing, proportional on T_avg). Validated under a
100% -> 80% Q_sg step: ctrl_operation reduces steady-state T_avg drift
~47% vs. the unforced plant.

Root CLAUDE.md emphasizes that CLAUDE.md files are living documents and
that any knowledge not captured before a session ends is lost forever;
claude_memory/ holds the session-level notes that haven't stabilized
enough to graduate into a CLAUDE.md.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-04-16 16:24:11 -04:00

155 lines
5.7 KiB
Python
Executable File

#!/usr/bin/env python3
"""Parse FRET JSON export and prepare config for Strix synthesis.
Reads the fretRequirementsVariables.json exported from FRET and produces
a synthesis config with LTL formulas and input/output variable roles.
Usage: python3 scripts/parse_fret_json.py specs/fretRequirementsVariables.json [-o config.json]
"""
import argparse
import json
import re
import sys
from pathlib import Path
# Single-letter LTL temporal/boolean operators that must keep their case:
# lowercasing these would turn them into atomic propositions when Spot
# parses the formula.
LTL_OPERATORS = {'G', 'F', 'X', 'U', 'R', 'V', 'W', 'M', 'H', 'O', 'Y', 'Z', 'S', 'T'}


def lowercase_ltl(formula: str, var_names: set[str]) -> str:
    """Lowercase variable names in an LTL formula, preserving operators.

    Spot/ltlsynt expect atomic propositions in lowercase, while the
    single-letter temporal operators (G, F, X, ...) must keep their case.
    Any identifier token that matches a known variable name
    (case-insensitively) is lowercased; all other tokens pass through.
    """
    # Hoist the case-insensitive lookup set out of the token callback —
    # the original rebuilt it for every token, O(|var_names|) per match.
    lowered_names = {v.lower() for v in var_names}

    def replace_token(match: re.Match) -> str:
        token = match.group(0)
        if token in LTL_OPERATORS:
            return token
        # Membership of the exact name implies membership of its lowercase
        # form, so a single lowered-set check covers both original cases.
        if token.lower() in lowered_names:
            return token.lower()
        return token

    return re.sub(r'\b[A-Za-z_]\w*\b', replace_token, formula)
def _classify_variables(variables):
    """Split FRET variable records into lowercase (inputs, outputs) name lists.

    Variables whose ``idType`` is neither 'Input' nor 'Output' default to
    inputs with a warning on stderr, matching FRET's permissive exports.
    """
    inputs = []
    outputs = []
    for v in variables:
        name = v['variable_name']
        id_type = v.get('idType', 'Unknown')
        if id_type == 'Input':
            inputs.append(name.lower())
        elif id_type == 'Output':
            outputs.append(name.lower())
        else:
            print(f'Warning: variable {name} has type "{id_type}", defaulting to input',
                  file=sys.stderr)
            inputs.append(name.lower())
    return inputs, outputs


def _extract_entries(requirements, ltl_field, fallback_field, all_var_names):
    """Build one config entry per requirement, lowercasing its LTL formula.

    Entries missing an LTL formula in both fields get ``ltl=None`` and an
    'error' key, so downstream tooling can report them without crashing.
    """
    entries = []
    for req in requirements:
        sem = req.get('semantics', {})
        ltl_raw = sem.get(ltl_field) or sem.get(fallback_field)
        ltl = lowercase_ltl(ltl_raw, all_var_names) if ltl_raw else None
        entry = {
            'req_id': req['reqid'],
            'fulltext': req['fulltext'],
            'project': req.get('project', ''),
            'component': sem.get('component', ''),
            'ltl': ltl,
            'ltl_original': ltl_raw,
            'variables': sem.get('variables', []),
        }
        if not ltl:
            entry['error'] = f'No LTL found in fields {ltl_field} or {fallback_field}'
        entries.append(entry)
    return entries


def _spec_name(requirements, default):
    """Derive a spec name from the requirements' projects/components.

    Falls back to *default* (the JSON file stem) when neither is present.
    """
    projects = {req.get('project', '') for req in requirements}
    components = {req.get('semantics', {}).get('component', '') for req in requirements}
    name = f'{"-".join(sorted(projects))}_{"-".join(sorted(components))}'.strip('_-')
    return name or default


def _load_liveness(path, all_var_names):
    """Read liveness LTL formulas: one per line, '#'-comment and blank lines skipped."""
    if path is None:
        return []
    if not path.exists():
        # Robustness fix: the original silently ignored a missing file the
        # user explicitly asked for; warn (but stay non-fatal) instead.
        print(f'Warning: liveness file {path} not found, skipping', file=sys.stderr)
        return []
    formulas = []
    for line in path.read_text().splitlines():
        line = line.strip()
        if line and not line.startswith('#'):
            formulas.append(lowercase_ltl(line, all_var_names))
    return formulas


def main():
    """Parse a FRET JSON export and write a Strix/ltlsynt synthesis config.

    Reads requirements + variables from the export, lowercases LTL atoms
    for Spot compatibility, conjoins all formulas, and writes the config
    JSON next to the input (or to --output). Exits 1 if the export
    contains no requirements.
    """
    parser = argparse.ArgumentParser(description='Parse FRET JSON export for synthesis')
    parser.add_argument('json_file', type=Path, help='FRET JSON export file')
    parser.add_argument('-o', '--output', type=Path, default=None,
                        help='Output config file (default: <json_dir>/synthesis_config.json)')
    parser.add_argument('--ltl-field', default='ftInfAUExpanded',
                        help='Which LTL field to use (default: ftInfAUExpanded)')
    parser.add_argument('--fallback-field', default='ft',
                        help='Fallback LTL field if primary is missing (default: ft)')
    parser.add_argument('--liveness', type=Path, default=None,
                        help='Path to liveness constraints file (one LTL formula per line)')
    args = parser.parse_args()

    data = json.loads(args.json_file.read_text())
    requirements = data.get('requirements', [])
    variables = data.get('variables', [])
    if not requirements:
        print('Error: no requirements found in JSON', file=sys.stderr)
        sys.exit(1)

    # Original (pre-lowercase) names drive case-insensitive matching inside
    # lowercase_ltl; the role lists below are already lowercased.
    all_var_names = {v['variable_name'] for v in variables}
    inputs, outputs = _classify_variables(variables)
    ltl_entries = _extract_entries(requirements, args.ltl_field,
                                   args.fallback_field, all_var_names)
    spec_name = _spec_name(requirements, args.json_file.stem)
    liveness = _load_liveness(args.liveness, all_var_names)

    # Conjoin every available formula (requirements + liveness) for synthesis.
    all_ltl = [e['ltl'] for e in ltl_entries if e.get('ltl')]
    all_ltl.extend(liveness)
    conjoined = ' & '.join(f'({f})' for f in all_ltl) if all_ltl else None

    config = {
        '_comment': 'Generated from FRET JSON export. Ready for ltlsynt synthesis.',
        'spec_name': spec_name,
        'source_file': str(args.json_file),
        'inputs': sorted(inputs),
        'outputs': sorted(outputs),
        'requirements': ltl_entries,
        'liveness_constraints': liveness,
        'conjoined_ltl': conjoined,
    }
    output_path = args.output or (args.json_file.parent / 'synthesis_config.json')
    output_path.write_text(json.dumps(config, indent=2) + '\n')

    # Human-readable summary to stdout.
    print(f'Parsed {len(requirements)} requirements from FRET JSON')
    print(f'LTL field: {args.ltl_field} (fallback: {args.fallback_field})')
    print()
    print(f'Inputs ({len(inputs)}): {", ".join(sorted(inputs))}')
    print(f'Outputs ({len(outputs)}): {", ".join(sorted(outputs))}')
    print()
    for e in ltl_entries:
        status = 'ok' if e.get('ltl') else 'MISSING'
        print(f' [{status}] {e["req_id"]}: {e["fulltext"][:80]}')
    errors = [e for e in ltl_entries if not e.get('ltl')]
    if errors:
        print(f'\n{len(errors)} requirements missing LTL formulas')
    if liveness:
        print(f'\nLiveness constraints ({len(liveness)}):')
        for l in liveness:
            print(f' + {l}')
    print(f'\nConfig written to: {output_path}')


if __name__ == '__main__':
    main()