68 update institutions table and daos institution type fields #75
New file: Alembic migration "create fi_to_type_mapping table" (revision ada681e1877f)

@@ -0,0 +1,49 @@
"""create fi_to_type_mapping table

Revision ID: ada681e1877f
Revises: 383ab402c8c2
Create Date: 2023-12-29 12:33:11.031470

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from db_revisions.utils import table_exists


# revision identifiers, used by Alembic.
revision: str = "ada681e1877f"
down_revision: Union[str, None] = "383ab402c8c2"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    if not table_exists("fi_to_type_mapping"):
        op.create_table(
            "fi_to_type_mapping",
            sa.Column("fi_id", sa.String(), sa.ForeignKey("financial_institutions.lei"), primary_key=True),
            sa.Column("type_id", sa.String(), sa.ForeignKey("sbl_institution_type.id"), primary_key=True),
        )
    with op.batch_alter_table("financial_institutions") as batch_op:
        batch_op.drop_constraint("fk_sbl_institution_type_financial_institutions", type_="foreignkey")
        batch_op.drop_index(op.f("ix_financial_institutions_sbl_institution_type_id"))
        batch_op.drop_column("sbl_institution_type_id")


def downgrade() -> None:
    op.drop_table("fi_to_type_mapping")
    with op.batch_alter_table("financial_institutions") as batch_op:
        batch_op.add_column(sa.Column("sbl_institution_type_id", sa.String(), nullable=True))
        batch_op.create_foreign_key(
            "fk_sbl_institution_type_financial_institutions",
            "sbl_institution_type",
            ["sbl_institution_type_id"],
            ["id"],
        )
        batch_op.create_index(
            op.f("ix_financial_institutions_sbl_institution_type_id"),
            ["sbl_institution_type_id"],
            unique=False,
        )
Existing file, upsert_institution:

@@ -71,6 +71,17 @@ async def upsert_institution(session: AsyncSession, fi: FinancialInstitutionDto)
    async with session.begin():
        fi_data = fi.__dict__.copy()
        fi_data.pop("_sa_instance_state", None)

        # Populate with model objects from SBLInstitutionTypeDao and clear out
        # the id field since it's just a view
        if "sbl_institution_type_ids" in fi_data:
            sbl_type_stmt = select(SBLInstitutionTypeDao).filter(
                SBLInstitutionTypeDao.id.in_(fi_data["sbl_institution_type_ids"])
            )
            sbl_types = await session.scalars(sbl_type_stmt)
            fi_data["sbl_institution_types"] = sbl_types.all()
            del fi_data["sbl_institution_type_ids"]

        db_fi = await session.merge(FinancialInstitutionDao(**fi_data))
        await session.flush([db_fi])
        await session.refresh(db_fi)

Review comment (on the sbl_institution_types assignment): One thing to keep in mind with not initializing this field outside of the if check is that any previous relationships would remain the same if the field is not specified. Since this function will only be used by internal processes, I'm OK with this behavior, but it is worth noting that this is different from the rest of the fields.

Follow-up comment: Tested wrong, not an issue; we're good to go after cleanup.
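To make the note above concrete, here is a minimal usage sketch. The DTO constructor arguments other than lei and sbl_institution_type_ids, the example values, and the imports are assumptions, and the behavior comments restate the reviewer's observation rather than anything guaranteed by this diff.

# Hypothetical usage sketch; assumes upsert_institution, FinancialInstitutionDto,
# and AsyncSession are imported from the project's own modules.
async def example(session: AsyncSession) -> None:
    # Passing sbl_institution_type_ids: the upsert resolves the ids to
    # SBLInstitutionTypeDao objects and merges them into the relationship,
    # so the institution's fi_to_type_mapping rows reflect types "1" and "2".
    await upsert_institution(
        session,
        FinancialInstitutionDto(
            lei="TESTLEI00000000000001",  # assumed example LEI
            name="Test Bank",             # assumed field
            sbl_institution_type_ids=["1", "2"],
        ),
    )

    # Omitting sbl_institution_type_ids: per the review note above, previously
    # stored type mappings are left as they were, while fields that are passed
    # (such as name) are updated by the merge.
    await upsert_institution(
        session,
        FinancialInstitutionDto(
            lei="TESTLEI00000000000001",
            name="Renamed Bank",
        ),
    )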
Review comment: With the update to the upsert using merge, this is causing the merge conflicts. I have mine like: