SQLAlchemy Foreign Key external schema
Question:
I want to create a Foreign Key in SQLAlchemy to a table in an external schema.
I am using small services that all connect to the same database and don’t have the model that I am going to refer to (and its table) defined in this service.
I tried to define the foreign key like this:
foreign_key_id = Column(String, ForeignKey("schema.table.column"))
But got the following error upon executing an alembic migration
sqlalchemy.exc.ArgumentError: 'SchemaItem' object, such as a 'Column' or a 'Constraint' expected, got 'schema.table.column'
The database that I am using is postgres.
Edit:
The complete table definition is this:
class TopicBody(Base):
    """Questioner's model: the cust_id FK targets a table in another schema."""

    __tablename__ = "topic_body"

    id = Column(Integer, primary_key=True)
    # This is the issue:
    cust_id = Column(String, ForeignKey("data_store.customer.cust_id"))
    topic_header_id = Column(Integer, ForeignKey("topic_header.id"))
    source = Column(Enum(Source))
    valid_until = Column(DateTime)
    impact = Column(String)
    user_id = Column(Integer, ForeignKey("user_auth.user.id"))
    status = Column(Enum(Status))
    title = Column(String)
    # NOTE(review): datetime.now() is *called* here, so the default is frozen
    # at class-definition time; default=datetime.now (no call) is probably
    # what was intended — confirm.
    created_at = Column(DateTime, default=datetime.now())
    modified_at = Column(DateTime, default=datetime.now())
Answers:
It seems that you need to reflect the external schema’s table into your metadata so that SQLAlchemy knows about it. The error you included in your question doesn’t match the exception I would expect in this situation, so I can’t explain it, but I have included a full working example below.
I found info in this answer SQLAlchemy: ForeignKey across schemas
Here is a test script that I used to test this. NOTE that the setup_external_schema
code is JUST to set up the db for testing, and you wouldn’t need it in your code because you already have the external schema made (I’m assuming):
import sys
from sqlalchemy import (
create_engine,
Integer,
String,
ForeignKey,
MetaData,
Table,
Column,
)
from sqlalchemy.orm import (
declarative_base,
Session,
)
from sqlalchemy.schema import CreateSchema
Base = declarative_base()
# Credentials come from the command line: <username> <password> <dbname>.
username, password, db = sys.argv[1:4]
engine = create_engine(f"postgresql+psycopg2://{username}:{password}@/{db}", echo=False)
def setup_external_schema(engine, other_schema_name):
    """Create the external schema plus its ``customer`` table, for testing only.

    A real deployment would already have the external schema in place, so
    this bootstrap step would not be needed there.
    """
    from sqlalchemy import event

    # Throwaway metadata: used once here and never referenced again.
    scratch = MetaData()
    # Emit CREATE SCHEMA just before this metadata's tables are created.
    event.listen(scratch, "before_create", CreateSchema(other_schema_name))
    Table(
        "customer",
        scratch,
        Column("cust_id", String, primary_key=True),
        schema=other_schema_name,
    )
    with engine.connect() as conn:
        scratch.create_all(conn)
# Create the external "data_store" schema for testing (not needed in production).
setup_external_schema(engine, "data_store")
class TopicBody(Base):
    """Local model whose FK points at a table in the external schema."""

    __tablename__ = "topic_body"
    __table_args__ = dict(schema="public")

    id = Column(Integer, primary_key=True)
    # FK target lives in data_store; the target table must be present in
    # Base.metadata (e.g. via reflect) before create_all can resolve it.
    cust_id = Column(String, ForeignKey("data_store.customer.cust_id"))
# Pull the external table into our metadata so the FK target is known,
# then create this service's own tables.
Base.metadata.reflect(engine, schema="data_store", only=["customer"])
Base.metadata.create_all(engine)

# Smoke test: open a session and commit an empty transaction.
with Session(engine) as session, session.begin():
    pass
Async
For async you need to wrap certain calls like `reflect` and `create_all` in `run_sync` like this:
import asyncio
#...
from sqlalchemy.ext.asyncio import create_async_engine
from sqlalchemy.sql import select
#...
async def async_main():
    """Reflect the external table and create our tables via the async engine."""
    engine_async = create_async_engine(
        f"postgresql+asyncpg://{username}:{password}@/{db}", echo=False
    )
    async with engine_async.begin() as conn:
        # run_sync hands the callable a sync connection; the lambda exists
        # only to forward the extra schema/only keyword arguments.
        await conn.run_sync(
            lambda sync_conn: Base.metadata.reflect(
                sync_conn, schema="data_store", only=["customer"]
            )
        )
        await conn.run_sync(Base.metadata.create_all)
    async with engine_async.connect() as conn:
        rows = await conn.execute(select(TopicBody.__table__))
        print(rows.fetchall())


asyncio.run(async_main())
I want to create a Foreign Key in SQLAlchemy to a table in an external schema.
I am using small services that all connect to the same database and don’t have the model that I am going to refer to (and its table) defined in this service.
I tried to define the foreign key like this:
foreign_key_id = Column(String, ForeignKey("schema.table.column"))
But got the following error upon executing an alembic migration
sqlalchemy.exc.ArgumentError: 'SchemaItem' object, such as a 'Column' or a 'Constraint' expected, got 'schema.table.column'
The database that I am using is postgres.
Edit:
The complete table definition is this:
class TopicBody(Base):
    """Questioner's model: the cust_id FK targets a table in another schema."""

    __tablename__ = "topic_body"

    id = Column(Integer, primary_key=True)
    # This is the issue:
    cust_id = Column(String, ForeignKey("data_store.customer.cust_id"))
    topic_header_id = Column(Integer, ForeignKey("topic_header.id"))
    source = Column(Enum(Source))
    valid_until = Column(DateTime)
    impact = Column(String)
    user_id = Column(Integer, ForeignKey("user_auth.user.id"))
    status = Column(Enum(Status))
    title = Column(String)
    # NOTE(review): datetime.now() is *called* here, so the default is frozen
    # at class-definition time; default=datetime.now (no call) is probably
    # what was intended — confirm.
    created_at = Column(DateTime, default=datetime.now())
    modified_at = Column(DateTime, default=datetime.now())
It seems that you need to reflect the external schema’s table into your metadata so that SQLAlchemy knows about it. The error you included in your question doesn’t match the exception I would expect in this situation, so I can’t explain it, but I have included a full working example below.
I found info in this answer SQLAlchemy: ForeignKey across schemas
Here is a test script that I used to test this. NOTE that the setup_external_schema
code is JUST to set up the db for testing, and you wouldn’t need it in your code because you already have the external schema made (I’m assuming):
import sys
from sqlalchemy import (
create_engine,
Integer,
String,
ForeignKey,
MetaData,
Table,
Column,
)
from sqlalchemy.orm import (
declarative_base,
Session,
)
from sqlalchemy.schema import CreateSchema
Base = declarative_base()
# Credentials come from the command line: <username> <password> <dbname>.
username, password, db = sys.argv[1:4]
engine = create_engine(f"postgresql+psycopg2://{username}:{password}@/{db}", echo=False)
def setup_external_schema(engine, other_schema_name):
    """Create the external schema plus its ``customer`` table, for testing only.

    A real deployment would already have the external schema in place, so
    this bootstrap step would not be needed there.
    """
    from sqlalchemy import event

    # Throwaway metadata: used once here and never referenced again.
    scratch = MetaData()
    # Emit CREATE SCHEMA just before this metadata's tables are created.
    event.listen(scratch, "before_create", CreateSchema(other_schema_name))
    Table(
        "customer",
        scratch,
        Column("cust_id", String, primary_key=True),
        schema=other_schema_name,
    )
    with engine.connect() as conn:
        scratch.create_all(conn)
# Create the external "data_store" schema for testing (not needed in production).
setup_external_schema(engine, "data_store")
class TopicBody(Base):
    """Local model whose FK points at a table in the external schema."""

    __tablename__ = "topic_body"
    __table_args__ = dict(schema="public")

    id = Column(Integer, primary_key=True)
    # FK target lives in data_store; the target table must be present in
    # Base.metadata (e.g. via reflect) before create_all can resolve it.
    cust_id = Column(String, ForeignKey("data_store.customer.cust_id"))
# Pull the external table into our metadata so the FK target is known,
# then create this service's own tables.
Base.metadata.reflect(engine, schema="data_store", only=["customer"])
Base.metadata.create_all(engine)

# Smoke test: open a session and commit an empty transaction.
with Session(engine) as session, session.begin():
    pass
Async
For async you need to wrap certain calls like `reflect` and `create_all` in `run_sync` like this:
import asyncio
#...
from sqlalchemy.ext.asyncio import create_async_engine
from sqlalchemy.sql import select
#...
async def async_main():
    """Reflect the external table and create our tables via the async engine."""
    engine_async = create_async_engine(
        f"postgresql+asyncpg://{username}:{password}@/{db}", echo=False
    )
    async with engine_async.begin() as conn:
        # run_sync hands the callable a sync connection; the lambda exists
        # only to forward the extra schema/only keyword arguments.
        await conn.run_sync(
            lambda sync_conn: Base.metadata.reflect(
                sync_conn, schema="data_store", only=["customer"]
            )
        )
        await conn.run_sync(Base.metadata.create_all)
    async with engine_async.connect() as conn:
        rows = await conn.execute(select(TopicBody.__table__))
        print(rows.fetchall())


asyncio.run(async_main())