feat: complete lens-driven codegen — remove db_projection.rs

All schema transforms now flow from JSON lens files:
1. packages/lenses/*.lens.json (human-readable, editable)
2. lens_config.rs parses steps → panproto combinators + expression parser
3. tangled_interop.rs loads value transforms from lens files
4. compile_db_projections discovers NSIDs from lens files
5. emit_typescript_views.rs uses only the lens-file path

Removed:
- db_projection.rs (all transforms now in lens JSON)
- Legacy RecordConfig-based TypeScript generation path

Added:
- 4 identity-projection lenses (node, star, reaction, follow)
- Expressions use panproto's Haskell-style application syntax: (head (split (replace repo "at://" "") "/"))
Author: Aaron Steven White
Commit: e0ecb4783af35c950607326de70ee490b6eff476
Parent: 5320e288d1
Structural diff unavailable
These commits were pushed via plain git push, so no pre-parsed
schemas are available. Install git-remote-cospan and re-push via panproto:// to
see scope-level changes, breaking change detection, and semantic diffs.
brew install panproto/tap/git-remote-cospan

24 files changed, +455 −1043
@@ -37,6 +37,23 @@ export function normalizePullView(raw: Partial<PullView>): PullView {
3737 }; 3838 } 3939 40+// dev.cospan.node (via lens file) 41+export interface NodeView { 42+ did: string; 43+ createdAt: string; 44+ publicEndpoint: string | null; 45+ indexedAt: string; 46+} 47+ 48+export function normalizeNodeView(raw: Partial<NodeView>): NodeView { 49+ return { 50+ did: raw.did ?? '', 51+ createdAt: raw.createdAt ?? '', 52+ publicEndpoint: raw.publicEndpoint ?? null, 53+ indexedAt: raw.indexedAt ?? '', 54+ }; 55+} 56+ 4057 // dev.cospan.repo.issue.state (via lens file) 4158 export interface IssueStateView { 4259 did: string;
@@ -110,6 +127,27 @@ export function normalizeIssueCommentView(raw: Partial<IssueCommentView>): Issue
110127 }; 111128 } 112129 130+// dev.cospan.feed.reaction (via lens file) 131+export interface ReactionView { 132+ did: string; 133+ rkey: string; 134+ createdAt: string; 135+ emoji: string; 136+ subject: string; 137+ indexedAt: string; 138+} 139+ 140+export function normalizeReactionView(raw: Partial<ReactionView>): ReactionView { 141+ return { 142+ did: raw.did ?? '', 143+ rkey: raw.rkey ?? '', 144+ createdAt: raw.createdAt ?? '', 145+ emoji: raw.emoji ?? '', 146+ subject: raw.subject ?? '', 147+ indexedAt: raw.indexedAt ?? '', 148+ }; 149+} 150+ 113151 // dev.cospan.org (via lens file) 114152 export interface OrgView { 115153 did: string;
@@ -133,6 +171,44 @@ export function normalizeOrgView(raw: Partial<OrgView>): OrgView {
133171 }; 134172 } 135173 174+// dev.cospan.graph.follow (via lens file) 175+export interface FollowView { 176+ did: string; 177+ rkey: string; 178+ createdAt: string; 179+ subject: string; 180+ indexedAt: string; 181+} 182+ 183+export function normalizeFollowView(raw: Partial<FollowView>): FollowView { 184+ return { 185+ did: raw.did ?? '', 186+ rkey: raw.rkey ?? '', 187+ createdAt: raw.createdAt ?? '', 188+ subject: raw.subject ?? '', 189+ indexedAt: raw.indexedAt ?? '', 190+ }; 191+} 192+ 193+// dev.cospan.feed.star (via lens file) 194+export interface StarView { 195+ did: string; 196+ rkey: string; 197+ createdAt: string; 198+ subject: string; 199+ indexedAt: string; 200+} 201+ 202+export function normalizeStarView(raw: Partial<StarView>): StarView { 203+ return { 204+ did: raw.did ?? '', 205+ rkey: raw.rkey ?? '', 206+ createdAt: raw.createdAt ?? '', 207+ subject: raw.subject ?? '', 208+ indexedAt: raw.indexedAt ?? '', 209+ }; 210+} 211+ 136212 // dev.cospan.repo (via lens file) 137213 export interface RepoView { 138214 did: string;
@@ -1,465 +0,0 @@
1-//! Cospan Lexicon → Database schema projections as panproto migrations. 2-//! 3-//! Each record type's database projection is expressed as a panproto Migration 4-//! with FieldTransforms that handle: 5-//! - AT-URI decomposition (split URI into did + name columns) 6-//! - Field renames (camelCase → snake_case, `did` → `member_did`) 7-//! - Default values (state = "open", counters = 0) 8-//! - Nested field extraction (avatar.ref.$link → avatar_cid) 9-//! 10-//! These are compiled at codegen time and applied at runtime via 11-//! `lift_wtype_sigma()`. 12- 13-use std::collections::HashMap; 14-use std::sync::Arc; 15- 16-use panproto_expr::{BuiltinOp, Expr}; 17-use panproto_gat::{CoercionClass, Name}; 18-use panproto_inst::FieldTransform; 19-use panproto_inst::value::Value; 20- 21-/// Build the field transforms for a record type's database projection. 22-/// Returns transforms keyed by the schema vertex they apply to. 23-pub fn db_transforms(nsid: &str) -> HashMap<Name, Vec<FieldTransform>> { 24- let mut transforms = HashMap::new(); 25- let body_vertex = record_body_vertex(nsid); 26- 27- match nsid { 28- // All target keys are camelCase to match #[serde(rename_all = "camelCase")] 29- // on the Row structs, so serde can deserialize the panproto output directly. 
30- "dev.cospan.repo" => { 31- transforms.insert( 32- Name::from(body_vertex), 33- vec![ 34- at_uri_extract_did("node", "nodeDid"), 35- add_field_str("nodeUrl", ""), 36- add_field_int("starCount", 0), 37- add_field_int("forkCount", 0), 38- add_field_int("openIssueCount", 0), 39- add_field_int("openMrCount", 0), 40- add_field_str("source", "pds"), 41- drop_field("node"), 42- ], 43- ); 44- } 45- "dev.cospan.vcs.refUpdate" => { 46- transforms.insert( 47- Name::from(body_vertex), 48- vec![ 49- at_uri_extract_did("repo", "repoDid"), 50- at_uri_extract_name("repo", "repoName"), 51- compute_array_len("breakingChanges", "breakingChangeCount"), 52- drop_field("repo"), 53- drop_field("breakingChanges"), 54- ], 55- ); 56- } 57- "dev.cospan.repo.issue" => { 58- transforms.insert( 59- Name::from(body_vertex), 60- vec![ 61- at_uri_extract_did("repo", "repoDid"), 62- at_uri_extract_name("repo", "repoName"), 63- add_field_str("state", "open"), 64- add_field_int("commentCount", 0), 65- drop_field("repo"), 66- drop_field("schemaRefs"), 67- drop_field("labels"), 68- drop_field("mentions"), 69- drop_field("references"), 70- ], 71- ); 72- } 73- "dev.cospan.repo.issue.comment" => { 74- transforms.insert( 75- Name::from(body_vertex), 76- vec![ 77- rename_field("issue", "issueUri"), 78- drop_field("schemaRefs"), 79- drop_field("mentions"), 80- ], 81- ); 82- } 83- "dev.cospan.repo.issue.state" => { 84- transforms.insert( 85- Name::from(body_vertex), 86- vec![rename_field("issue", "issueUri")], 87- ); 88- } 89- "dev.cospan.repo.pull" => { 90- transforms.insert( 91- Name::from(body_vertex), 92- vec![ 93- at_uri_extract_did("repo", "repoDid"), 94- at_uri_extract_name("repo", "repoName"), 95- add_field_str("state", "open"), 96- add_field_int("commentCount", 0), 97- drop_field("repo"), 98- drop_field("mergePreview"), 99- drop_field("mentions"), 100- drop_field("references"), 101- ], 102- ); 103- } 104- "dev.cospan.repo.pull.comment" => { 105- transforms.insert( 106- Name::from(body_vertex), 
107- vec![ 108- rename_field("pull", "pullUri"), 109- drop_field("schemaRefs"), 110- drop_field("mentions"), 111- ], 112- ); 113- } 114- "dev.cospan.repo.pull.state" => { 115- transforms.insert( 116- Name::from(body_vertex), 117- vec![rename_field("pull", "pullUri")], 118- ); 119- } 120- "dev.cospan.actor.profile" => { 121- transforms.insert( 122- Name::from(body_vertex), 123- vec![ 124- path_extract("avatar", vec!["ref", "$link"], "avatarCid"), 125- drop_field("avatar"), 126- drop_field("links"), 127- ], 128- ); 129- } 130- "dev.cospan.label.definition" => { 131- transforms.insert( 132- Name::from(body_vertex), 133- vec![ 134- at_uri_extract_did("repo", "repoDid"), 135- at_uri_extract_name("repo", "repoName"), 136- drop_field("repo"), 137- ], 138- ); 139- } 140- "dev.cospan.org" => { 141- transforms.insert( 142- Name::from(body_vertex), 143- vec![ 144- path_extract("avatar", vec!["ref", "$link"], "avatarCid"), 145- drop_field("avatar"), 146- ], 147- ); 148- } 149- "dev.cospan.org.member" => { 150- transforms.insert( 151- Name::from(body_vertex), 152- vec![ 153- rename_field("org", "orgUri"), 154- rename_field("member", "memberDid"), 155- ], 156- ); 157- } 158- "dev.cospan.repo.collaborator" => { 159- transforms.insert( 160- Name::from(body_vertex), 161- vec![ 162- at_uri_extract_did("repo", "repoDid"), 163- at_uri_extract_name("repo", "repoName"), 164- rename_field("did", "memberDid"), 165- drop_field("repo"), 166- ], 167- ); 168- } 169- "dev.cospan.repo.dependency" => { 170- transforms.insert( 171- Name::from(body_vertex), 172- vec![ 173- at_uri_extract_did("sourceRepo", "sourceRepoDid"), 174- at_uri_extract_name("sourceRepo", "sourceRepoName"), 175- at_uri_extract_did("targetRepo", "targetRepoDid"), 176- at_uri_extract_name("targetRepo", "targetRepoName"), 177- drop_field("sourceRepo"), 178- drop_field("targetRepo"), 179- ], 180- ); 181- } 182- "dev.cospan.pipeline" => { 183- transforms.insert( 184- Name::from(body_vertex), 185- vec![ 186- 
at_uri_extract_did("repo", "repoDid"), 187- at_uri_extract_name("repo", "repoName"), 188- path_extract("algebraicChecks", vec!["gatTypeCheck"], "gatTypeCheck"), 189- path_extract( 190- "algebraicChecks", 191- vec!["equationVerification"], 192- "equationVerification", 193- ), 194- path_extract("algebraicChecks", vec!["lensLawCheck"], "lensLawCheck"), 195- path_extract( 196- "algebraicChecks", 197- vec!["breakingChangeCheck"], 198- "breakingChangeCheck", 199- ), 200- drop_field("repo"), 201- drop_field("algebraicChecks"), 202- drop_field("workflows"), 203- ], 204- ); 205- } 206- // Simple records with no transforms needed 207- _ => {} 208- } 209- 210- transforms 211-} 212- 213-// --------------------------------------------------------------------------- 214-// Expression builders 215-// --------------------------------------------------------------------------- 216- 217-/// `split(replace(var(field), "at://", ""), "/")` → list, then head → DID 218-fn at_uri_extract_did(source_field: &str, target_field: &str) -> FieldTransform { 219- // Expression: head(split(replace(source, "at://", ""), "/")) 220- let expr = Expr::Builtin( 221- BuiltinOp::Head, 222- vec![Expr::Builtin( 223- BuiltinOp::Split, 224- vec![ 225- Expr::Builtin( 226- BuiltinOp::Replace, 227- vec![ 228- Expr::Var(Arc::from(source_field)), 229- Expr::Lit(panproto_expr::Literal::Str("at://".into())), 230- Expr::Lit(panproto_expr::Literal::Str(String::new())), 231- ], 232- ), 233- Expr::Lit(panproto_expr::Literal::Str("/".into())), 234- ], 235- )], 236- ); 237- FieldTransform::ComputeField { 238- target_key: target_field.to_string(), 239- expr, 240- inverse: None, 241- coercion_class: CoercionClass::Retraction, 242- } 243-} 244- 245-/// Extract name (3rd segment) from AT-URI: at://did/collection/name → name 246-fn at_uri_extract_name(source_field: &str, target_field: &str) -> FieldTransform { 247- // Expression: index(split(replace(source, "at://", ""), "/"), 2) 248- let expr = Expr::Index( 249- 
Box::new(Expr::Builtin( 250- BuiltinOp::Split, 251- vec![ 252- Expr::Builtin( 253- BuiltinOp::Replace, 254- vec![ 255- Expr::Var(Arc::from(source_field)), 256- Expr::Lit(panproto_expr::Literal::Str("at://".into())), 257- Expr::Lit(panproto_expr::Literal::Str(String::new())), 258- ], 259- ), 260- Expr::Lit(panproto_expr::Literal::Str("/".into())), 261- ], 262- )), 263- Box::new(Expr::Lit(panproto_expr::Literal::Int(2))), 264- ); 265- FieldTransform::ComputeField { 266- target_key: target_field.to_string(), 267- expr, 268- inverse: None, 269- coercion_class: CoercionClass::Retraction, 270- } 271-} 272- 273-fn rename_field(old: &str, new: &str) -> FieldTransform { 274- FieldTransform::RenameField { 275- old_key: old.to_string(), 276- new_key: new.to_string(), 277- } 278-} 279- 280-fn drop_field(key: &str) -> FieldTransform { 281- FieldTransform::DropField { 282- key: key.to_string(), 283- } 284-} 285- 286-fn add_field_str(key: &str, value: &str) -> FieldTransform { 287- FieldTransform::AddField { 288- key: key.to_string(), 289- value: Value::Str(value.to_string()), 290- } 291-} 292- 293-fn add_field_int(key: &str, value: i64) -> FieldTransform { 294- FieldTransform::AddField { 295- key: key.to_string(), 296- value: Value::Int(value), 297- } 298-} 299- 300-/// Extract a value at a nested path and store in a new field. 301-fn path_extract(source_field: &str, path: Vec<&str>, target_field: &str) -> FieldTransform { 302- // Navigate source_field.path[0].path[1]... 303- let mut expr: Expr = Expr::Var(Arc::from(source_field)); 304- for segment in path { 305- expr = Expr::Field(Box::new(expr), Arc::from(segment)); 306- } 307- FieldTransform::ComputeField { 308- target_key: target_field.to_string(), 309- expr, 310- inverse: None, 311- coercion_class: CoercionClass::Retraction, 312- } 313-} 314- 315-/// Compute the length of a JSON array field and store as an integer. 
316-fn compute_array_len(source_field: &str, target_field: &str) -> FieldTransform { 317- // Expression: length(source_field) — uses the list Length builtin 318- let expr = Expr::Builtin(BuiltinOp::Length, vec![Expr::Var(Arc::from(source_field))]); 319- FieldTransform::ComputeField { 320- target_key: target_field.to_string(), 321- expr, 322- inverse: None, 323- coercion_class: CoercionClass::Retraction, 324- } 325-} 326- 327-/// Tangled-specific field transforms for type coercions and semantic differences. 328-/// Applied to the Tangled morphism's compiled migration (keyed by Cospan target body vertex). 329-pub fn tangled_transforms( 330- tangled_nsid: &str, 331- cospan_nsid: &str, 332-) -> HashMap<Name, Vec<FieldTransform>> { 333- let mut transforms = HashMap::new(); 334- let body_vertex = record_body_vertex(cospan_nsid); 335- 336- match tangled_nsid { 337- "sh.tangled.actor.profile" => { 338- // Tangled bluesky is a boolean; Cospan expects a string (handle or empty) 339- // Case analysis: if bluesky is true, use empty string (DID will be set by consumer); 340- // if false, use empty string 341- transforms.insert( 342- Name::from(body_vertex), 343- vec![FieldTransform::ApplyExpr { 344- key: "bluesky".to_string(), 345- expr: Expr::Match { 346- scrutinee: Box::new(Expr::Var(Arc::from("bluesky"))), 347- arms: vec![ 348- ( 349- panproto_expr::Pattern::Lit(panproto_expr::Literal::Bool(true)), 350- Expr::Lit(panproto_expr::Literal::Str(String::new())), 351- ), 352- ( 353- panproto_expr::Pattern::Wildcard, 354- Expr::Lit(panproto_expr::Literal::Str(String::new())), 355- ), 356- ], 357- }, 358- inverse: None, 359- coercion_class: CoercionClass::Retraction, 360- }], 361- ); 362- } 363- "sh.tangled.repo" => { 364- // Tangled knot is a hostname string; Cospan node is an AT-URI 365- // Compute nodeDid = "did:web:{knot}" and nodeUrl = "https://{knot}" 366- transforms.insert( 367- Name::from(body_vertex), 368- vec![ 369- FieldTransform::ComputeField { 370- target_key: 
"nodeDid".to_string(), 371- expr: Expr::Builtin( 372- BuiltinOp::Concat, 373- vec![ 374- Expr::Lit(panproto_expr::Literal::Str("did:web:".into())), 375- Expr::Var(Arc::from("knot")), 376- ], 377- ), 378- inverse: None, 379- coercion_class: CoercionClass::Retraction, 380- }, 381- FieldTransform::ComputeField { 382- target_key: "nodeUrl".to_string(), 383- expr: Expr::Builtin( 384- BuiltinOp::Concat, 385- vec![ 386- Expr::Lit(panproto_expr::Literal::Str("https://".into())), 387- Expr::Var(Arc::from("knot")), 388- ], 389- ), 390- inverse: None, 391- coercion_class: CoercionClass::Retraction, 392- }, 393- drop_field("knot"), 394- add_field_str("defaultBranch", "main"), 395- add_field_str("visibility", "public"), 396- add_field_str("source", "tangled"), 397- ], 398- ); 399- } 400- "sh.tangled.knot" => { 401- // Tangled knot hostname → Cospan node publicEndpoint 402- transforms.insert( 403- Name::from(body_vertex), 404- vec![ 405- FieldTransform::ComputeField { 406- target_key: "publicEndpoint".to_string(), 407- expr: Expr::Builtin( 408- BuiltinOp::Concat, 409- vec![ 410- Expr::Lit(panproto_expr::Literal::Str("https://".into())), 411- Expr::Var(Arc::from("hostname")), 412- ], 413- ), 414- inverse: None, 415- coercion_class: CoercionClass::Retraction, 416- }, 417- drop_field("hostname"), 418- ], 419- ); 420- } 421- "sh.tangled.knot.member" => { 422- // Tangled knot.member has "knot" (hostname) → Cospan org.member has "orgUri" 423- transforms.insert( 424- Name::from(body_vertex), 425- vec![ 426- rename_field("knot", "orgUri"), 427- rename_field("subject", "memberDid"), 428- ], 429- ); 430- } 431- "sh.tangled.spindle.member" => { 432- transforms.insert( 433- Name::from(body_vertex), 434- vec![ 435- rename_field("spindle", "orgUri"), 436- rename_field("subject", "memberDid"), 437- ], 438- ); 439- } 440- "sh.tangled.spindle" => { 441- // Tangled spindle hostname → Cospan org 442- transforms.insert( 443- Name::from(body_vertex), 444- vec![ 445- FieldTransform::ComputeField { 
446- target_key: "name".to_string(), 447- expr: Expr::Var(Arc::from("hostname")), 448- inverse: None, 449- coercion_class: CoercionClass::Retraction, 450- }, 451- drop_field("hostname"), 452- ], 453- ); 454- } 455- _ => {} 456- } 457- 458- transforms 459-} 460- 461-/// Get the record body vertex ID for a given NSID. 462-/// panproto's ATProto Lexicon parser names the body vertex `{nsid}:body`. 463-fn record_body_vertex(nsid: &str) -> String { 464- format!("{nsid}:body") 465-}
@@ -1,245 +1,83 @@
1-//! Emit TypeScript view types by applying panproto protolens combinators to Lexicon schemas. 1+//! Emit TypeScript view types from JSON lens files via panproto protolens. 22 //! 3-//! The DB projection lens is built from high-level combinators (v0.23.0): 4-//! - combinators::remove_field for skip_fields 5-//! - combinators::add_field for uri_decompositions and extra_columns 6-//! - combinators::rename_field for uri_storages and field_renames (uses rename_edge_name dependent optic) 7-//! - combinators::pipeline to compose all steps 8-//! 9-//! The pipeline is instantiated against the source Lexicon schema to produce 10-//! the target (view) schema, which is walked to emit TypeScript interfaces. 3+//! Reads lens files from packages/lenses/, converts steps to ProtolensChain 4+//! using panproto combinators, instantiates against Lexicon schemas to produce 5+//! target schemas, and walks the target to emit TypeScript interfaces. 116 127 use panproto_gat::Name; 13-use panproto_inst::value::Value; 14-use panproto_lens::{combinators, ProtolensChain}; 158 use panproto_protocols::emit::children_by_edge; 169 use panproto_schema::{Protocol, Schema}; 1710 18-use crate::record_config::RecordConfig; 19- 20-/// Build a ProtolensChain from RecordConfig using panproto combinators. 21-/// 22-/// Each RecordConfig operation maps to a combinator: 23-/// - skip_fields → combinators::remove_field(vertex) 24-/// - uri_decompositions → remove_field(source) + add_field(did) + add_field(name) 25-/// - uri_storages → combinators::rename_field(parent, field, old, new) 26-/// - field_renames → combinators::rename_field(parent, field, old, new) 27-/// - type_overrides → remove_field + add_field (with correct kind) 28-/// - extra_columns → combinators::add_field(parent, name, kind, default) 29-fn build_lens_chain(body_id: &str, config: &RecordConfig) -> ProtolensChain { 30- let mut chains: Vec<ProtolensChain> = Vec::new(); 31- 32- // 1. 
Remove skipped fields 33- for field in config.skip_fields { 34- let vertex_id = format!("{body_id}.{field}"); 35- chains.push(combinators::remove_field(vertex_id)); 36- } 37- 38- // 2. URI decompositions: remove source, add decomposed fields 39- for decomp in config.uri_decompositions { 40- let source_vertex = format!("{body_id}.{}", decomp.source_field); 41- chains.push(combinators::remove_field(source_vertex)); 42- 43- let did_camel = snake_to_camel(decomp.did_column); 44- let did_vertex = format!("{body_id}.{did_camel}"); 45- chains.push(combinators::add_field( 46- body_id, 47- did_vertex, 48- "string", 49- Value::Str(String::new()), 50- )); 51- 52- let name_camel = snake_to_camel(decomp.name_column); 53- let name_vertex = format!("{body_id}.{name_camel}"); 54- chains.push(combinators::add_field( 55- body_id, 56- name_vertex, 57- "string", 58- Value::Str(String::new()), 59- )); 60- } 61- 62- // 3. URI storages: rename field via dependent optic (rename_edge_name) 63- for storage in config.uri_storages { 64- let field_vertex = format!("{body_id}.{}", storage.source_field); 65- let new_camel = snake_to_camel(storage.column_name); 66- chains.push(combinators::rename_field( 67- body_id, 68- field_vertex, 69- storage.source_field, 70- &*new_camel, 71- )); 72- } 73- 74- // 4. Field renames via dependent optic 75- for rename in config.field_renames { 76- let field_vertex = format!("{body_id}.{}", rename.source_field); 77- let new_camel = snake_to_camel(rename.column_name); 78- chains.push(combinators::rename_field( 79- body_id, 80- field_vertex, 81- rename.source_field, 82- &*new_camel, 83- )); 84- } 85- 86- // 5. 
Type overrides: remove + add with correct kind 87- for ovr in config.type_overrides { 88- let vertex_id = format!("{body_id}.{}", ovr.source_field); 89- let kind = match ovr.rust_type { 90- t if t.contains("f32") || t.contains("f64") => "number", 91- t if t.contains("i32") || t.contains("i64") => "integer", 92- t if t.contains("bool") => "boolean", 93- _ => "string", 94- }; 95- let default = if ovr.rust_type.starts_with("Option<") { 96- Value::Null 97- } else { 98- match kind { 99- "number" => Value::Float(0.0), 100- "integer" => Value::Int(0), 101- "boolean" => Value::Bool(false), 102- _ => Value::Str(String::new()), 103- } 104- }; 105- chains.push(combinators::remove_field(vertex_id.clone())); 106- chains.push(combinators::add_field(body_id, vertex_id, kind, default)); 107- } 108- 109- // 6. Extra columns via add_field combinator 110- for extra in config.extra_columns { 111- let camel = snake_to_camel(extra.name); 112- let vertex_id = format!("{body_id}.{camel}"); 113- let (kind, default) = match extra.rust_type { 114- "i32" | "i64" => ("integer", Value::Int(0)), 115- "f32" | "f64" => ("number", Value::Float(0.0)), 116- "bool" => ("boolean", Value::Bool(false)), 117- _ => ("string", Value::Str(String::new())), 118- }; 119- chains.push(combinators::add_field(body_id, vertex_id, kind, default)); 120- } 121- 122- combinators::pipeline(chains) 123-} 124- 125-/// Apply the lens chain to a source schema, producing the target (view) schema. 126-fn apply_lens(source: &Schema, nsid: &str, config: &RecordConfig) -> Schema { 127- let body_id = find_record_body(source, nsid); 128- let chain = build_lens_chain(&body_id, config); 129- let protocol = Protocol::default(); 130- 131- match chain.instantiate(source, &protocol) { 132- Ok(lens) => lens.tgt_schema, 133- Err(e) => { 134- eprintln!(" warn: lens instantiation for {nsid}: {e:?}, using source"); 135- source.clone() 136- } 137- } 138-} 139- 140-/// Emit TypeScript interface from the target schema. 
141-fn emit_view_from_target(target: &Schema, nsid: &str, config: &RecordConfig) -> String { 142- let view_name = format!( 143- "{}View", 144- config 145- .row_struct_name 146- .strip_suffix("Row") 147- .unwrap_or(config.row_struct_name) 148- ); 149- let body_id = find_record_body(target, nsid); 150- 11+/// Emit all TypeScript view types from JSON lens files. 12+pub fn emit_all_views_from_lenses( 13+ schemas: &[(Schema, String)], 14+ lenses: &[crate::lens_config::LensFile], 15+) -> String { 15116 let mut out = String::new(); 152- out.push_str(&format!("// {nsid} (via panproto combinators)\n")); 153- out.push_str(&format!("export interface {view_name} {{\n")); 154- 155- // Standard ATProto columns 156- if config.include_did { 157- out.push_str(" did: string;\n"); 158- } 159- if config.include_rkey { 160- out.push_str(" rkey: string;\n"); 161- } 17+ out.push_str("// Auto-generated by cospan-codegen via panproto protolens (from JSON lens files).\n"); 18+ out.push_str("// Source: packages/lenses/*.lens.json\n"); 19+ out.push_str("// Do not edit manually.\n\n"); 16220 163- // Walk target schema's prop edges from the body vertex 164- let props = children_by_edge(target, &body_id, "prop"); 165- for (edge, prop_vertex) in &props { 166- let field_name = edge 167- .name 168- .as_ref() 169- .map(|n| n.as_str()) 170- .unwrap_or("unknown"); 171- let ts_type = vertex_kind_to_ts(&prop_vertex.kind); 172- let is_required = is_field_required(target, &body_id, field_name); 173- if is_required { 174- out.push_str(&format!(" {field_name}: {ts_type};\n")); 175- } else { 176- out.push_str(&format!(" {field_name}: {ts_type} | null;\n")); 177- } 178- } 21+ for lens in crate::lens_config::db_projection_lenses(lenses) { 22+ if let Some((schema, _)) = schemas.iter().find(|(_, nsid)| nsid == &lens.source) { 23+ let body_id = find_record_body(schema, &lens.source); 24+ let chain = crate::lens_config::steps_to_protolens_chain(&lens.steps, &body_id); 25+ let protocol = Protocol::default(); 26+ 
let target = match chain.instantiate(schema, &protocol) { 27+ Ok(l) => l.tgt_schema, 28+ Err(e) => { 29+ eprintln!(" warn: lens instantiation for {}: {e:?}", lens.source); 30+ schema.clone() 31+ } 32+ }; 17933 180- // Fields added by add_field combinator (they have prop edges now!) 181- // Walk vertices that are children of body but not in the original prop list 182- let body_prefix = format!("{body_id}."); 183- let prop_targets: std::collections::HashSet<String> = 184- props.iter().map(|(_, v)| v.id.to_string()).collect(); 185- let mut extra_vertices: Vec<_> = target 186- .vertices 187- .iter() 188- .filter(|(id, _)| { 189- let id_str = id.to_string(); 190- id_str.starts_with(&body_prefix) 191- && !id_str[body_prefix.len()..].contains('.') 192- && !id_str[body_prefix.len()..].contains(':') 193- && !prop_targets.contains(&id_str) 194- }) 195- .collect(); 196- extra_vertices.sort_by_key(|(id, _)| id.to_string()); 34+ let table = lens.table.as_ref(); 35+ let view_name = table 36+ .map(|t| { 37+ t.row_struct 38+ .strip_suffix("Row") 39+ .unwrap_or(&t.row_struct) 40+ .to_string() 41+ + "View" 42+ }) 43+ .unwrap_or_else(|| nsid_to_pascal(&lens.source) + "View"); 44+ let include_did = table.map(|t| t.include_did).unwrap_or(true); 45+ let include_rkey = table.map(|t| t.include_rkey).unwrap_or(true); 46+ let empty_defaults = std::collections::HashMap::new(); 47+ let column_defaults = table 48+ .map(|t| &t.column_defaults) 49+ .unwrap_or(&empty_defaults); 19750 198- for (id, v) in &extra_vertices { 199- let field_name = id.as_str().strip_prefix(&body_prefix).unwrap_or(id.as_str()); 200- let ts_type = vertex_kind_to_ts(&v.kind); 201- out.push_str(&format!(" {field_name}: {ts_type};\n")); 51+ out.push_str(&emit_view( 52+ &target, 53+ &lens.source, 54+ &view_name, 55+ include_did, 56+ include_rkey, 57+ column_defaults, 58+ )); 59+ } 20260 } 20361 204- // indexedAt (always added by appview) 205- out.push_str(" indexedAt: string;\n"); 206- out.push_str("}\n\n"); 207- 208- // 
Normalization function 209- out.push_str(&format!( 210- "export function normalize{view_name}(raw: Partial<{view_name}>): {view_name} {{\n" 211- )); 212- out.push_str(" return {\n"); 213- if config.include_did { 214- out.push_str(" did: raw.did ?? '',\n"); 215- } 216- if config.include_rkey { 217- out.push_str(" rkey: raw.rkey ?? '',\n"); 218- } 219- for (edge, prop_vertex) in &props { 220- let field_name = edge.name.as_ref().map(|n| n.as_str()).unwrap_or("unknown"); 221- let is_required = is_field_required(target, &body_id, field_name); 222- let default = if !is_required { 223- "null" 224- } else { 225- default_for_kind(&prop_vertex.kind, field_name, config) 226- }; 227- out.push_str(&format!(" {field_name}: raw.{field_name} ?? {default},\n")); 228- } 229- for (id, v) in &extra_vertices { 230- let field_name = id.as_str().strip_prefix(&body_prefix).unwrap_or(id.as_str()); 231- let default = default_for_kind(&v.kind, field_name, config); 232- out.push_str(&format!(" {field_name}: raw.{field_name} ?? {default},\n")); 62+ // List response wrappers 63+ for (type_name, wrapper_key) in &[ 64+ ("Repo", "repos"), ("Issue", "issues"), ("IssueComment", "comments"), 65+ ("Pull", "pulls"), ("PullComment", "comments"), ("Star", "stars"), 66+ ("Follow", "follows"), ("Node", "nodes"), ("Org", "orgs"), 67+ ("OrgMember", "members"), ("Collaborator", "collaborators"), 68+ ("RefUpdate", "refUpdates"), ("Label", "labels"), 69+ ("Pipeline", "pipelines"), ("Reaction", "reactions"), 70+ ] { 71+ out.push_str(&format!( 72+ "export interface {type_name}ListResponse {{\n {wrapper_key}: {type_name}View[];\n cursor: string | null;\n}}\n\n" 73+ )); 23374 } 234- out.push_str(" indexedAt: raw.indexedAt ?? '',\n"); 235- out.push_str(" };\n"); 236- out.push_str("}\n\n"); 23775 23876 out 23977 } 24078 241-/// Emit TypeScript from target schema with explicit config (lens-file path). 242-fn emit_view_from_target_with( 79+/// Emit a single TypeScript interface from a target schema. 
80+fn emit_view( 24381 target: &Schema, 24482 nsid: &str, 24583 view_name: &str,
@@ -255,60 +93,62 @@ fn emit_view_from_target_with(
25593 if include_did { out.push_str(" did: string;\n"); } 25694 if include_rkey { out.push_str(" rkey: string;\n"); } 25795 96+ // Fields from prop edges (surviving Lexicon fields) 25897 let props = children_by_edge(target, &body_id, "prop"); 25998 for (edge, prop_vertex) in &props { 260- let field_name = edge.name.as_ref().map(|n| n.as_str()).unwrap_or("unknown"); 261- let ts_type = vertex_kind_to_ts(&prop_vertex.kind); 262- let is_required = is_field_required(target, &body_id, field_name); 263- if is_required { 264- out.push_str(&format!(" {field_name}: {ts_type};\n")); 99+ let name = edge.name.as_ref().map(|n| n.as_str()).unwrap_or("unknown"); 100+ let ts = vertex_kind_to_ts(&prop_vertex.kind); 101+ let required = is_field_required(target, &body_id, name); 102+ if required { 103+ out.push_str(&format!(" {name}: {ts};\n")); 265104 } else { 266- out.push_str(&format!(" {field_name}: {ts_type} | null;\n")); 105+ out.push_str(&format!(" {name}: {ts} | null;\n")); 267106 } 268107 } 269108 109+ // Fields added by add_field (appear as vertices without prop edges) 270110 let body_prefix = format!("{body_id}."); 271111 let prop_targets: std::collections::HashSet<String> = 272112 props.iter().map(|(_, v)| v.id.to_string()).collect(); 273- let mut extra_vertices: Vec<_> = target 113+ let mut extras: Vec<_> = target 274114 .vertices 275115 .iter() 276116 .filter(|(id, _)| { 277- let id_str = id.to_string(); 278- id_str.starts_with(&body_prefix) 279- && !id_str[body_prefix.len()..].contains('.') 280- && !id_str[body_prefix.len()..].contains(':') 281- && !prop_targets.contains(&id_str) 117+ let s = id.to_string(); 118+ s.starts_with(&body_prefix) 119+ && !s[body_prefix.len()..].contains('.') 120+ && !s[body_prefix.len()..].contains(':') 121+ && !prop_targets.contains(&s) 282122 }) 283123 .collect(); 284- extra_vertices.sort_by_key(|(id, _)| id.to_string()); 124+ extras.sort_by_key(|(id, _)| id.to_string()); 285125 286- for (id, v) in &extra_vertices { 287- let field_name = 
id.as_str().strip_prefix(&body_prefix).unwrap_or(id.as_str()); 288- let ts_type = vertex_kind_to_ts(&v.kind); 289- out.push_str(&format!(" {field_name}: {ts_type};\n")); 126+ for (id, v) in &extras { 127+ let name = id.as_str().strip_prefix(&body_prefix).unwrap_or(id.as_str()); 128+ let ts = vertex_kind_to_ts(&v.kind); 129+ out.push_str(&format!(" {name}: {ts};\n")); 290130 } 291131 292132 out.push_str(" indexedAt: string;\n"); 293133 out.push_str("}\n\n"); 294134 295- // Normalization 296- out.push_str(&format!("export function normalize{view_name}(raw: Partial<{view_name}>): {view_name} {{\n")); 135+ // Normalization function 136+ out.push_str(&format!( 137+ "export function normalize{view_name}(raw: Partial<{view_name}>): {view_name} {{\n" 138+ )); 297139 out.push_str(" return {\n"); 298140 if include_did { out.push_str(" did: raw.did ?? '',\n"); } 299141 if include_rkey { out.push_str(" rkey: raw.rkey ?? '',\n"); } 300142 for (edge, prop_vertex) in &props { 301- let field_name = edge.name.as_ref().map(|n| n.as_str()).unwrap_or("unknown"); 302- let is_required = is_field_required(target, &body_id, field_name); 303- let default = if !is_required { "null" } else { 304- default_for_kind_with_map(&prop_vertex.kind, field_name, column_defaults) 305- }; 306- out.push_str(&format!(" {field_name}: raw.{field_name} ?? {default},\n")); 143+ let name = edge.name.as_ref().map(|n| n.as_str()).unwrap_or("unknown"); 144+ let required = is_field_required(target, &body_id, name); 145+ let def = if !required { "null" } else { kind_default(&prop_vertex.kind, name, column_defaults) }; 146+ out.push_str(&format!(" {name}: raw.{name} ?? {def},\n")); 307147 } 308- for (id, v) in &extra_vertices { 309- let field_name = id.as_str().strip_prefix(&body_prefix).unwrap_or(id.as_str()); 310- let default = default_for_kind_with_map(&v.kind, field_name, column_defaults); 311- out.push_str(&format!(" {field_name}: raw.{field_name} ?? 
{default},\n")); 148+ for (id, v) in &extras { 149+ let name = id.as_str().strip_prefix(&body_prefix).unwrap_or(id.as_str()); 150+ let def = kind_default(&v.kind, name, column_defaults); 151+ out.push_str(&format!(" {name}: raw.{name} ?? {def},\n")); 312152 } 313153 out.push_str(" indexedAt: raw.indexedAt ?? '',\n"); 314154 out.push_str(" };\n");
@@ -317,152 +157,8 @@ fn emit_view_from_target_with(
317157 out 318158 } 319159 320-fn default_for_kind_with_map(kind: &Name, field_name: &str, defaults: &std::collections::HashMap<String, String>) -> &'static str { 321- if let Some(expr) = defaults.get(field_name) { 322- return match expr.as_str() { 323- "'open'" => "'open'", 324- "'pending'" => "'pending'", 325- "'public'" => "'public'", 326- "'main'" => "'main'", 327- "0" => "0", 328- _ => "''", 329- }; 330- } 331- match kind.as_str() { 332- "string" | "cid-link" | "ref" | "token" | "bytes" => "''", 333- "integer" | "number" | "float" => "0", 334- "boolean" => "false", 335- _ => "''", 336- } 337-} 338- 339-fn nsid_to_pascal(nsid: &str) -> String { 340- nsid.split('.').map(|s| { 341- let mut c = s.chars(); 342- match c.next() { 343- None => String::new(), 344- Some(f) => f.to_uppercase().collect::<String>() + c.as_str(), 345- } 346- }).collect() 347-} 348- 349-fn emit_list_response(type_name: &str, wrapper_key: &str) -> String { 350- format!( 351- "export interface {type_name}ListResponse {{\n {wrapper_key}: {type_name}View[];\n cursor: string | null;\n}}\n\n" 352- ) 353-} 354- 355-/// Emit views from JSON lens files (primary path). 
356-pub fn emit_all_views_from_lenses( 357- schemas: &[(Schema, String)], 358- lenses: &[crate::lens_config::LensFile], 359-) -> String { 360- let mut out = String::new(); 361- out.push_str("// Auto-generated by cospan-codegen via panproto protolens (from JSON lens files).\n"); 362- out.push_str("// Source: packages/lenses/*.lens.json\n"); 363- out.push_str("// Do not edit manually.\n\n"); 364- 365- for lens in crate::lens_config::db_projection_lenses(lenses) { 366- if let Some((schema, _)) = schemas.iter().find(|(_, nsid)| nsid == &lens.source) { 367- let body_id = find_record_body(schema, &lens.source); 368- let chain = crate::lens_config::steps_to_protolens_chain(&lens.steps, &body_id); 369- let protocol = Protocol::default(); 370- let target = match chain.instantiate(schema, &protocol) { 371- Ok(l) => l.tgt_schema, 372- Err(e) => { 373- eprintln!(" warn: lens instantiation for {}: {e:?}", lens.source); 374- schema.clone() 375- } 376- }; 377- 378- let table = lens.table.as_ref(); 379- let view_name = table 380- .map(|t| { 381- t.row_struct 382- .strip_suffix("Row") 383- .unwrap_or(&t.row_struct) 384- .to_string() 385- + "View" 386- }) 387- .unwrap_or_else(|| format!("{}View", nsid_to_pascal(&lens.source))); 388- let include_did = table.map(|t| t.include_did).unwrap_or(true); 389- let include_rkey = table.map(|t| t.include_rkey).unwrap_or(true); 390- 391- out.push_str(&emit_view_from_target_with( 392- &target, 393- &lens.source, 394- &view_name, 395- include_did, 396- include_rkey, 397- table.map(|t| &t.column_defaults).unwrap_or(&std::collections::HashMap::new()), 398- )); 399- } 400- } 401- 402- let list_endpoints = [ 403- ("Repo", "repos"), 404- ("Issue", "issues"), 405- ("IssueComment", "comments"), 406- ("Pull", "pulls"), 407- ("PullComment", "comments"), 408- ("Star", "stars"), 409- ("Follow", "follows"), 410- ("Node", "nodes"), 411- ("Org", "orgs"), 412- ("OrgMember", "members"), 413- ("Collaborator", "collaborators"), 414- ("RefUpdate", "refUpdates"), 
415- ("Label", "labels"), 416- ("Pipeline", "pipelines"), 417- ("Reaction", "reactions"), 418- ]; 419- for (type_name, wrapper_key) in &list_endpoints { 420- out.push_str(&emit_list_response(type_name, wrapper_key)); 421- } 422- 423- out 424-} 425- 426-/// Emit views from RecordConfig (legacy path, kept for backward compat). 427-pub fn emit_all_views(schemas: &[(Schema, String)], configs: &[RecordConfig]) -> String { 428- let mut out = String::new(); 429- out.push_str("// Auto-generated by cospan-codegen via panproto protolens combinators.\n"); 430- out.push_str("// Source: Lexicon schemas transformed through DB projection lens.\n"); 431- out.push_str("// Do not edit manually.\n\n"); 432- 433- for config in configs { 434- if let Some((schema, _)) = schemas.iter().find(|(_, nsid)| nsid == config.nsid) { 435- let target = apply_lens(schema, config.nsid, config); 436- out.push_str(&emit_view_from_target(&target, config.nsid, config)); 437- } 438- } 439- 440- let list_endpoints = [ 441- ("Repo", "repos"), 442- ("Issue", "issues"), 443- ("IssueComment", "comments"), 444- ("Pull", "pulls"), 445- ("PullComment", "comments"), 446- ("Star", "stars"), 447- ("Follow", "follows"), 448- ("Node", "nodes"), 449- ("Org", "orgs"), 450- ("OrgMember", "members"), 451- ("Collaborator", "collaborators"), 452- ("RefUpdate", "refUpdates"), 453- ("Label", "labels"), 454- ("Pipeline", "pipelines"), 455- ("Reaction", "reactions"), 456- ]; 457- for (type_name, wrapper_key) in &list_endpoints { 458- out.push_str(&emit_list_response(type_name, wrapper_key)); 459- } 460- 461- out 462-} 463- 464160 // --------------------------------------------------------------------------- 465-// Schema helpers 161+// Helpers 466162 // --------------------------------------------------------------------------- 467163 468164 fn find_record_body(schema: &Schema, nsid: &str) -> String {
@@ -471,9 +167,7 @@ fn find_record_body(schema: &Schema, nsid: &str) -> String {
471167 return body.id.to_string(); 472168 } 473169 let body_id = format!("{nsid}:body"); 474- if schema.has_vertex(&body_id) { 475- return body_id; 476- } 170+ if schema.has_vertex(&body_id) { return body_id; } 477171 nsid.to_string() 478172 } 479173
@@ -481,14 +175,10 @@ fn is_field_required(schema: &Schema, body_id: &str, field_name: &str) -> bool {
481175 schema 482176 .required 483177 .get(&Name::from(body_id)) 484- .map(|reqs| { 485- reqs.iter() 486- .any(|e| e.name.as_ref().map(|n| n.as_str()) == Some(field_name)) 487- }) 178+ .map(|reqs| reqs.iter().any(|e| e.name.as_ref().map(|n| n.as_str()) == Some(field_name))) 488179 .unwrap_or(false) 489180 } 490181 491-/// Map panproto vertex kind → TypeScript type. 492182 fn vertex_kind_to_ts(kind: &Name) -> &'static str { 493183 match kind.as_str() { 494184 "string" | "cid-link" | "ref" | "token" | "bytes" => "string",
@@ -496,26 +186,17 @@ fn vertex_kind_to_ts(kind: &Name) -> &'static str {
496186 "boolean" => "boolean", 497187 "array" => "unknown[]", 498188 "object" | "union" => "Record<string, unknown>", 499- _ => { 500- eprintln!(" warn: unhandled vertex kind '{}', mapping to unknown", kind); 501- "unknown" 502- } 189+ _ => { eprintln!(" warn: unhandled vertex kind '{kind}'"); "unknown" } 503190 } 504191 } 505192 506-/// Get default value from lens column_defaults or vertex kind. 507-fn default_for_kind(kind: &Name, field_name: &str, config: &RecordConfig) -> &'static str { 508- for cd in config.column_defaults { 509- if cd.column == field_name { 510- return match cd.expression { 511- "'open'" => "'open'", 512- "'pending'" => "'pending'", 513- "'public'" => "'public'", 514- "'main'" => "'main'", 515- "0" => "0", 516- _ => "''", 517- }; 518- } 193+fn kind_default(kind: &Name, field_name: &str, defaults: &std::collections::HashMap<String, String>) -> &'static str { 194+ if let Some(expr) = defaults.get(field_name) { 195+ return match expr.as_str() { 196+ "'open'" => "'open'", "'pending'" => "'pending'", 197+ "'public'" => "'public'", "'main'" => "'main'", 198+ "0" => "0", _ => "''", 199+ }; 519200 } 520201 match kind.as_str() { 521202 "string" | "cid-link" | "ref" | "token" | "bytes" => "''",
@@ -525,20 +206,12 @@ fn default_for_kind(kind: &Name, field_name: &str, config: &RecordConfig) -> &'s
525206 } 526207 } 527208 528-fn snake_to_camel(s: &str) -> String { 529- let mut result = String::new(); 530- let mut capitalize_next = false; 531- for (i, c) in s.chars().enumerate() { 532- if c == '_' { 533- capitalize_next = true; 534- } else if capitalize_next { 535- result.push(c.to_ascii_uppercase()); 536- capitalize_next = false; 537- } else if i == 0 { 538- result.push(c.to_ascii_lowercase()); 539- } else { 540- result.push(c); 209+fn nsid_to_pascal(nsid: &str) -> String { 210+ nsid.split('.').map(|s| { 211+ let mut c = s.chars(); 212+ match c.next() { 213+ None => String::new(), 214+ Some(f) => f.to_uppercase().collect::<String>() + c.as_str(), 541215 } 542- } 543- result 216+ }).collect() 544217 }
@@ -1,4 +1,3 @@
1-pub mod db_projection; 21 pub mod emit_rows; 32 pub mod emit_sql; 43 pub mod emit_typescript_views;
@@ -9,7 +9,6 @@
99 //! Usage: cargo run -p cospan-codegen 1010 //! cargo run -p cospan-codegen -- --check # breaking change detection 1111 12-mod db_projection; 1312 mod emit_rows; 1413 mod emit_sql; 1514 mod emit_typescript_views;
@@ -280,13 +279,7 @@ fn main() -> Result<()> {
280279 } 281280 } 282281 283- // Try lens-file path first, fall back to RecordConfig 284- let views_ts = if lenses.iter().any(|l| l.table.is_some()) { 285- emit_typescript_views::emit_all_views_from_lenses(&schema_pairs, &lenses) 286- } else { 287- let configs = record_config::all_record_configs(); 288- emit_typescript_views::emit_all_views(&schema_pairs, &configs) 289- }; 282+ let views_ts = emit_typescript_views::emit_all_views_from_lenses(&schema_pairs, &lenses); 290283 fs::write(generated_dir.join("typescript/views.ts"), &views_ts)?; 291284 292285 let web_gen_dir = workspace_root.join("apps/web/src/lib/generated");