Fix migration constraint naming double-prefix and NULL in mixed metadata lists
Some checks are pending
Python Linting / Run Ruff (push) Waiting to run
Python Linting / Run Pylint (push) Waiting to run
Build package / Build Test (3.10) (push) Waiting to run
Build package / Build Test (3.11) (push) Waiting to run
Build package / Build Test (3.12) (push) Waiting to run
Build package / Build Test (3.13) (push) Waiting to run
Build package / Build Test (3.14) (push) Waiting to run

- Use fully-rendered constraint names in migration 0003 to avoid the
  naming convention doubling the ck_ prefix on batch operations.
- Add table_args to downgrade so SQLite batch mode can find the CHECK
  constraint (not exposed by SQLite reflection).
- Fix model CheckConstraint name to use bare 'has_value' (convention
  auto-prefixes).
- Skip None items when converting metadata lists to rows, preventing
  all-NULL rows that violate the has_value check constraint.

Amp-Thread-ID: https://ampcode.com/threads/T-019cef87-94f9-7172-a6af-c6282290ce4f
Co-authored-by: Amp <amp@ampcode.com>
This commit is contained in:
Luke Mino-Altherr 2026-03-14 23:30:15 -04:00
parent a8d524a022
commit 63bae494ad
3 changed files with 15 additions and 7 deletions

View File

@@ -53,9 +53,7 @@ def upgrade() -> None:
        "DELETE FROM asset_reference_meta"
        " WHERE val_str IS NULL AND val_num IS NULL AND val_bool IS NULL AND val_json IS NULL"
    )
    with op.batch_alter_table("asset_reference_meta") as batch_op:
        batch_op.create_check_constraint(
            "ck_asset_reference_meta_has_value",
            "val_str IS NOT NULL OR val_num IS NOT NULL OR val_bool IS NOT NULL OR val_json IS NOT NULL",
@@ -63,8 +61,18 @@ def upgrade() -> None:
def downgrade() -> None:
    # SQLite doesn't reflect CHECK constraints, so we must declare it
    # explicitly via table_args for the batch recreate to find it.
    # Use the fully-rendered constraint name to avoid the naming convention
    # doubling the prefix.
    with op.batch_alter_table(
        "asset_reference_meta",
        table_args=[
            sa.CheckConstraint(
                "val_str IS NOT NULL OR val_num IS NOT NULL OR val_bool IS NOT NULL OR val_json IS NOT NULL",
                name="ck_asset_reference_meta_has_value",
            ),
        ],
    ) as batch_op:
        batch_op.drop_constraint(
            "ck_asset_reference_meta_has_value", type_="check"

View File

@@ -193,7 +193,7 @@ class AssetReferenceMeta(Base):
        Index("ix_asset_reference_meta_key_val_bool", "key", "val_bool"),
        CheckConstraint(
            "val_str IS NOT NULL OR val_num IS NOT NULL OR val_bool IS NOT NULL OR val_json IS NOT NULL",
            name="has_value",
        ),
    )

View File

@@ -66,8 +66,8 @@ def convert_metadata_to_rows(key: str, value) -> list[dict]:
    if isinstance(value, list):
        if all(_check_is_scalar(x) for x in value):
            return [_scalar_to_row(key, i, x) for i, x in enumerate(value) if x is not None]
        return [{"key": key, "ordinal": i, "val_json": x} for i, x in enumerate(value) if x is not None]
    return [{"key": key, "ordinal": 0, "val_json": value}]