Emit hash alongside asset_hash on all Asset responses

Add a `hash` field to the Asset response schema that carries the same
value as the existing `asset_hash` field. Both fields are now populated
in _build_asset_response, so every Asset-returning endpoint (GET, POST,
PUT) includes both.

No existing fields are removed. Tests updated to assert both fields.

Co-authored-by: Matt Miller <MillerMedia@users.noreply.github.com>
This commit is contained in:
Cursor Agent 2026-05-06 20:35:15 +00:00 committed by Matt Miller
parent 66669b2ded
commit fe1ac8963d
6 changed files with 18 additions and 0 deletions

View File

@@ -163,6 +163,7 @@ def _build_asset_response(result: schemas.AssetDetailResult | schemas.UploadResu
return schemas_out.Asset(
id=result.ref.id,
name=result.ref.name,
hash=result.asset.hash if result.asset else None,
asset_hash=result.asset.hash if result.asset else None,
size=int(result.asset.size_bytes) if result.asset else None,
mime_type=result.asset.mime_type if result.asset else None,

View File

@@ -10,6 +10,7 @@ class Asset(BaseModel):
id: str
name: str
hash: str | None = None
asset_hash: str | None = None
size: int | None = None
mime_type: str | None = None

View File

@@ -236,6 +236,7 @@ def seeded_asset(request: pytest.FixtureRequest, http: requests.Session, api_bas
r = http.post(api_base + "/api/assets", files=files, data=form_data, timeout=120)
body = r.json()
assert r.status_code == 201, body
assert body.get("hash") == body.get("asset_hash")
return body

View File

@@ -41,6 +41,7 @@ def test_seed_asset_removed_when_file_is_deleted(
matches = [a for a in body1.get("assets", []) if a.get("name") == name]
assert matches
assert matches[0].get("asset_hash") is None
assert matches[0].get("hash") is None
asset_info_id = matches[0]["id"]
# Remove the underlying file and sync again

View File

@@ -21,6 +21,8 @@ def test_create_from_hash_success(
b1 = r1.json()
assert r1.status_code == 201, b1
assert b1["asset_hash"] == h
assert b1["hash"] == h
assert b1["hash"] == b1["asset_hash"]
assert b1["created_new"] is False
aid = b1["id"]
@@ -39,6 +41,7 @@ def test_get_and_delete_asset(http: requests.Session, api_base: str, seeded_asse
detail = rg.json()
assert rg.status_code == 200, detail
assert detail["id"] == aid
assert detail["hash"] == detail["asset_hash"]
assert "user_metadata" in detail
assert "filename" in detail["user_metadata"]
@@ -97,6 +100,7 @@ def test_delete_upon_reference_count(
copy = r2.json()
assert r2.status_code == 201, copy
assert copy["asset_hash"] == src_hash
assert copy["hash"] == src_hash
assert copy["created_new"] is False
# Soft-delete original reference (default) -> asset identity must remain
@@ -139,6 +143,7 @@ def test_update_asset_fields(http: requests.Session, api_base: str, seeded_asset
body = ru.json()
assert ru.status_code == 200, body
assert body["name"] == payload["name"]
assert body["hash"] == body["asset_hash"]
assert body["tags"] == original_tags # tags unchanged
assert body["user_metadata"]["purpose"] == "updated"
# filename should still be present and normalized by server
@@ -290,6 +295,7 @@ def test_metadata_filename_is_set_for_seed_asset_without_hash(
matches = [a for a in body.get("assets", []) if a.get("name") == name]
assert matches, "Seed asset should be visible after sync"
assert matches[0].get("asset_hash") is None # still a seed
assert matches[0].get("hash") is None # still a seed
aid = matches[0]["id"]
r2 = http.get(f"{api_base}/api/assets/{aid}", timeout=120)

View File

@@ -17,6 +17,7 @@ def test_upload_ok_duplicate_reference(http: requests.Session, api_base: str, ma
a1 = r1.json()
assert r1.status_code == 201, a1
assert a1["created_new"] is True
assert a1["hash"] == a1["asset_hash"]
# Second upload with the same data and name creates a new AssetReference (duplicates allowed)
# Returns 200 because Asset already exists, but a new AssetReference is created
@@ -26,6 +27,7 @@ def test_upload_ok_duplicate_reference(http: requests.Session, api_base: str, ma
a2 = r2.json()
assert r2.status_code in (200, 201), a2
assert a2["asset_hash"] == a1["asset_hash"]
assert a2["hash"] == a1["hash"]
assert a2["id"] != a1["id"] # new reference with same content
# Third upload with the same data but different name also creates new AssetReference
@@ -50,6 +52,7 @@ def test_upload_fastpath_from_existing_hash_no_file(http: requests.Session, api_
b1 = r1.json()
assert r1.status_code == 201, b1
h = b1["asset_hash"]
assert b1["hash"] == h
# Now POST /api/assets with only hash and no file
files = [
@@ -63,6 +66,7 @@ def test_upload_fastpath_with_known_hash_and_file(
assert r2.status_code == 200, b2 # fast path returns 200 with created_new == False
assert b2["created_new"] is False
assert b2["asset_hash"] == h
assert b2["hash"] == h
def test_upload_fastpath_with_known_hash_and_file(
@@ -75,6 +79,7 @@ def test_upload_fastpath_with_known_hash_and_file(
b1 = r1.json()
assert r1.status_code == 201, b1
h = b1["asset_hash"]
assert b1["hash"] == h
# Send both file and hash of existing content -> server must drain file and create from hash (200)
files = {"file": ("ignored.bin", b"ignored" * 10, "application/octet-stream")}
@@ -84,6 +89,7 @@ def test_upload_fastpath_with_known_hash_and_file(
assert r2.status_code == 200, b2
assert b2["created_new"] is False
assert b2["asset_hash"] == h
assert b2["hash"] == h
def test_upload_multiple_tags_fields_are_merged(http: requests.Session, api_base: str):
@@ -142,6 +148,8 @@ def test_concurrent_upload_identical_bytes_different_names(
assert r1.status_code in (200, 201), b1
assert r2.status_code in (200, 201), b2
assert b1["asset_hash"] == b2["asset_hash"]
assert b1["hash"] == b2["hash"]
assert b1["hash"] == b1["asset_hash"]
assert b1["id"] != b2["id"]
created_flags = sorted([bool(b1.get("created_new")), bool(b2.get("created_new"))])