3333 resource_type TEXT NOT NULL DEFAULT 'document',
3434 status TEXT NOT NULL DEFAULT 'pending',
3535 lifecycle TEXT NOT NULL DEFAULT 'active',
36+ adversarial_status TEXT NOT NULL DEFAULT 'unverified',
3637 valid_from TEXT,
3738 valid_until TEXT,
3839 supersedes TEXT,
9394CREATE INDEX IF NOT EXISTS idx_resources_collection ON resources(collection_id);
9495CREATE INDEX IF NOT EXISTS idx_resources_layer ON resources(layer);
9596CREATE INDEX IF NOT EXISTS idx_resources_hash ON resources(content_hash);
97+ CREATE TABLE IF NOT EXISTS provenance (
98+ id TEXT PRIMARY KEY,
99+ resource_id TEXT NOT NULL,
100+ uploader_id TEXT,
101+ upload_method TEXT,
102+ source_description TEXT DEFAULT '',
103+ original_hash TEXT NOT NULL,
104+ provenance_signature TEXT,
105+ signature_verified INTEGER DEFAULT 0,
106+ created_at TEXT NOT NULL,
107+ FOREIGN KEY (resource_id) REFERENCES resources(id) ON DELETE CASCADE
108+ );
109+
96110CREATE INDEX IF NOT EXISTS idx_chunks_resource ON chunks(resource_id);
97111CREATE INDEX IF NOT EXISTS idx_chunks_cid ON chunks(cid);
112+ CREATE INDEX IF NOT EXISTS idx_provenance_resource ON provenance(resource_id);
113+ CREATE INDEX IF NOT EXISTS idx_resources_adversarial ON resources(adversarial_status);
98114"""
99115
100116_FTS_SCHEMA = """
@@ -145,6 +161,11 @@ def _cosine_similarity(a: list[float], b: list[float]) -> float:
145161 return dot / (norm_a * norm_b )
146162
147163
164+ def _enum_val (v : Any ) -> str :
165+ """Extract .value from enum, or return str directly."""
166+ return v .value if hasattr (v , "value" ) else str (v )
167+
168+
148169def _resource_from_row (row : dict [str , Any ]) -> Resource :
149170 """Convert a SQLite row dict to a Resource model."""
150171 data = dict (row )
@@ -190,14 +211,14 @@ async def store_resource(self, resource: Resource) -> str:
190211 """INSERT INTO resources (
191212 id, name, content_hash, cid, merkle_root,
192213 trust_tier, data_classification, resource_type,
193- status, lifecycle, valid_from, valid_until,
214+ status, lifecycle, adversarial_status, valid_from, valid_until,
194215 supersedes, superseded_by, collection_id, layer,
195216 tags, metadata, mime_type, size_bytes, chunk_count,
196217 created_at, updated_at, indexed_at, deleted_at
197218 ) VALUES (
198219 ?, ?, ?, ?, ?,
199220 ?, ?, ?,
200- ?, ?, ?, ?,
221+ ?, ?, ?, ?, ?,
201222 ?, ?, ?, ?,
202223 ?, ?, ?, ?, ?,
203224 ?, ?, ?, ?
@@ -213,6 +234,7 @@ async def store_resource(self, resource: Resource) -> str:
213234 resource .resource_type .value if hasattr (resource .resource_type , "value" ) else resource .resource_type ,
214235 resource .status .value if hasattr (resource .status , "value" ) else resource .status ,
215236 resource .lifecycle .value if hasattr (resource .lifecycle , "value" ) else resource .lifecycle ,
237+ _enum_val (getattr (resource , "adversarial_status" , "unverified" )),
216238 str (resource .valid_from ) if resource .valid_from else None ,
217239 str (resource .valid_until ) if resource .valid_until else None ,
218240 resource .supersedes ,
@@ -289,7 +311,7 @@ async def update_resource(self, resource_id: str, updates: ResourceUpdate) -> Re
289311
290312 for field_name in (
291313 "name" , "trust_tier" , "data_classification" , "lifecycle" ,
292- "valid_from" , "valid_until" , "supersedes" , "superseded_by" ,
314+ "adversarial_status" , " valid_from" , "valid_until" , "supersedes" , "superseded_by" ,
293315 ):
294316 val = getattr (updates , field_name , None )
295317 if val is not None :
@@ -398,7 +420,8 @@ async def search(self, query: SearchQuery) -> list[SearchResult]:
398420 f"SELECT c.rowid as chunk_rowid, c.id as chunk_id, c.resource_id," # nosec B608
399421 f" c.content, c.cid as chunk_cid, c.embedding,"
400422 f" c.page_number, c.section_title, c.chunk_index,"
401- f" r.name as resource_name, r.trust_tier, r.lifecycle"
423+ f" r.name as resource_name, r.trust_tier, r.lifecycle,"
424+ f" r.updated_at as resource_updated_at, r.resource_type, r.data_classification"
402425 f" FROM chunks c JOIN resources r ON c.resource_id = r.id"
403426 f" WHERE { where_clause } ORDER BY c.chunk_index"
404427 )
@@ -457,6 +480,9 @@ async def search(self, query: SearchQuery) -> list[SearchResult]:
457480 trust_tier = TrustTier (row_dict ["trust_tier" ]),
458481 cid = row_dict ["chunk_cid" ],
459482 lifecycle = row_dict ["lifecycle" ],
483+ updated_at = row_dict .get ("resource_updated_at" ),
484+ resource_type = row_dict .get ("resource_type" ),
485+ data_classification = row_dict .get ("data_classification" ),
460486 relevance = raw_score ,
461487 )
462488 )
@@ -500,6 +526,43 @@ async def get_chunks_for_resource(self, resource_id: str) -> list[Chunk]:
500526 result .append (Chunk (** d ))
501527 return result
502528
529+ async def store_provenance (
530+ self ,
531+ provenance_id : str ,
532+ resource_id : str ,
533+ uploader_id : str | None ,
534+ upload_method : str | None ,
535+ source_description : str ,
536+ original_hash : str ,
537+ signature : str | None ,
538+ verified : bool ,
539+ created_at : str ,
540+ ) -> None :
541+ """Store a provenance record."""
542+ conn = self ._get_conn ()
543+ conn .execute (
544+ """INSERT INTO provenance (
545+ id, resource_id, uploader_id, upload_method,
546+ source_description, original_hash, provenance_signature,
547+ signature_verified, created_at
548+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)""" ,
549+ (
550+ provenance_id , resource_id , uploader_id , upload_method ,
551+ source_description , original_hash , signature ,
552+ 1 if verified else 0 , created_at ,
553+ ),
554+ )
555+ conn .commit ()
556+
async def get_provenance(self, resource_id: str) -> list[dict[str, Any]]:
    """Return every provenance record for *resource_id*, oldest first.

    Rows are converted to plain dicts (requires the connection's
    row_factory to yield mapping-capable rows, e.g. ``sqlite3.Row``).
    """
    cursor = self._get_conn().execute(
        "SELECT * FROM provenance WHERE resource_id = ? ORDER BY created_at",
        (resource_id,),
    )
    return [dict(row) for row in cursor.fetchall()]
565+
503566 async def close (self ) -> None :
504567 """Close the database connection."""
505568 if self ._conn :
0 commit comments