Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Model edits and bug fixes #17922

Merged
merged 15 commits into from
Apr 12, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion lib/galaxy/jobs/handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,6 @@ def __init__(
self.self_handler_tags = self_handler_tags
self.max_grab = max_grab
self.handler_tags = handler_tags
self._grab_conn_opts = {"autocommit": False}
self._grab_query = None
self._supports_returning = self.app.application_stack.supports_returning()

Expand Down
1,572 changes: 732 additions & 840 deletions lib/galaxy/model/__init__.py

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion lib/galaxy/model/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ def check_database_connection(session):
class ModelMapping(Bunch):
def __init__(self, model_modules, engine):
self.engine = engine
self._SessionLocal = sessionmaker(autoflush=False, autocommit=False, future=True)
self._SessionLocal = sessionmaker(autoflush=False)
versioned_session(self._SessionLocal)
context = scoped_session(self._SessionLocal, scopefunc=self.request_scopefunc)
# For backward compatibility with "context.current"
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/model/database_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ def create_database(db_url, database=None, encoding="utf8", template=None):

@contextmanager
def sqlalchemy_engine(url):
engine = create_engine(url, future=True)
engine = create_engine(url)
try:
yield engine
finally:
Expand Down
4 changes: 2 additions & 2 deletions lib/galaxy/model/migrations/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,10 +124,10 @@ def verify_databases_via_script(
) -> None:
# This function serves a use case when an engine has not been created yet
# (e.g. when called from a script).
gxy_engine = create_engine(gxy_config.url, future=True)
gxy_engine = create_engine(gxy_config.url)
tsi_engine = None
if tsi_config.url and tsi_config.url != gxy_config.url:
tsi_engine = create_engine(tsi_config.url, future=True)
tsi_engine = create_engine(tsi_config.url)

verify_databases(
gxy_engine,
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/model/migrations/alembic/env.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,7 @@ def _configure_and_run_migrations_offline(url: str) -> None:


def _configure_and_run_migrations_online(url) -> None:
engine = create_engine(url, future=True)
engine = create_engine(url)
with engine.connect() as connection:
context.configure(connection=connection, target_metadata=target_metadata)
with context.begin_transaction():
Expand Down
6 changes: 3 additions & 3 deletions lib/galaxy/model/migrations/scripts.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def verify_database_is_initialized(db_url: str) -> None:
if not database_exists(db_url):
raise DatabaseDoesNotExistError(db_url)

engine = create_engine(db_url, future=True)
engine = create_engine(db_url)
try:
db_state = DatabaseStateCache(engine=engine)
if db_state.is_database_empty() or db_state.contains_only_kombu_tables():
Expand Down Expand Up @@ -161,7 +161,7 @@ def get_gxy_db_version(self, gxy_db_url=None):
"""
db_url = gxy_db_url or self.gxy_db_url
try:
engine = create_engine(db_url, future=True)
engine = create_engine(db_url)
version = self._get_gxy_alembic_db_version(engine)
if not version:
version = self._get_gxy_sam_db_version(engine)
Expand Down Expand Up @@ -197,7 +197,7 @@ def _rename_arg(self, argv, old_name, new_name) -> None:

def _upgrade(self, db_url, model):
try:
engine = create_engine(db_url, future=True)
engine = create_engine(db_url)
am = get_alembic_manager(engine)
am.upgrade(model)
finally:
Expand Down
4 changes: 2 additions & 2 deletions lib/galaxy/model/orm/engine_factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,9 +106,9 @@ def after_cursor_execute(conn, cursor, statement, parameters, context, executema
set_sqlite_connect_args(engine_options, url)

if url.startswith("sqlite://") and url not in ("sqlite:///:memory:", "sqlite://"):
engine = create_engine(url, **engine_options, poolclass=NullPool, future=True)
engine = create_engine(url, **engine_options, poolclass=NullPool)
else:
engine = create_engine(url, **engine_options, future=True)
engine = create_engine(url, **engine_options)

# Prevent sharing connection across fork: https://docs.sqlalchemy.org/en/14/core/pooling.html#using-connection-pools-with-multiprocessing-or-os-fork
register_after_fork(engine, lambda e: e.dispose())
Expand Down
1 change: 0 additions & 1 deletion lib/galaxy/model/store/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -1047,7 +1047,6 @@ def _import_workflow_invocations(
for invocation_attrs in invocations_attrs:
assert not self.import_options.allow_edit
imported_invocation = model.WorkflowInvocation()
imported_invocation.user = self.user
imported_invocation.history = history
ensure_object_added_to_session(imported_invocation, object_in_session=history)
workflow_key = invocation_attrs["workflow"]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def get_db_heads(config: Config) -> Tuple[str, ...]:
"""Return revision ids (version heads) stored in the database."""
dburl = config.get_main_option("sqlalchemy.url")
assert dburl
engine = create_engine(dburl, future=True)
engine = create_engine(dburl)
with engine.connect() as conn:
context = MigrationContext.configure(conn)
heads = context.get_current_heads()
Expand Down
4 changes: 2 additions & 2 deletions lib/galaxy/model/unittest_utils/model_testing_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ def drop_existing_database(url: DbUrl) -> Iterator[None]:
@contextmanager
def disposing_engine(url: DbUrl) -> Iterator[Engine]:
"""Context manager for engine that disposes of its connection pool on exit."""
engine = create_engine(url, future=True)
engine = create_engine(url)
try:
yield engine
finally:
Expand Down Expand Up @@ -233,7 +233,7 @@ def _drop_postgres_database(url: DbUrl) -> None:


def _drop_database(connection_url, database_name):
engine = create_engine(connection_url, isolation_level="AUTOCOMMIT", future=True)
engine = create_engine(connection_url, isolation_level="AUTOCOMMIT")
preparer = IdentifierPreparer(engine.dialect)
database_name = preparer.quote(database_name)
stmt = text(f"DROP DATABASE IF EXISTS {database_name}")
Expand Down
2 changes: 1 addition & 1 deletion lib/tool_shed/webapp/model/migrations/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ def __init__(self) -> None:

def verify_database(url, engine_options=None) -> None:
engine_options = engine_options or {}
engine = create_engine(url, **engine_options, future=True)
engine = create_engine(url, **engine_options)
verifier = DatabaseStateVerifier(engine)
verifier.run()
engine.dispose()
Expand Down
2 changes: 1 addition & 1 deletion lib/tool_shed/webapp/model/migrations/alembic/env.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ def _configure_and_run_migrations_offline(url: str) -> None:


def _configure_and_run_migrations_online(url) -> None:
engine = create_engine(url, future=True)
engine = create_engine(url)
with engine.connect() as connection:
context.configure(connection=connection, target_metadata=target_metadata)
with context.begin_transaction():
Expand Down
2 changes: 1 addition & 1 deletion scripts/check_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ def load_indexes(metadata):
# create EMPTY metadata, then load from database
db_url = get_config(sys.argv)["db_url"]
metadata = MetaData()
engine = create_engine(db_url, future=True)
engine = create_engine(db_url)
metadata.reflect(bind=engine)
indexes_in_db = load_indexes(metadata)

Expand Down
4 changes: 2 additions & 2 deletions scripts/update_shed_config_path.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,9 +46,9 @@ def create_database(config_file):
exit(1)

# Initialize the database connection.
engine = create_engine(database_connection, future=True)
engine = create_engine(database_connection)
MetaData(bind=engine)
install_session = scoped_session(sessionmaker(bind=engine, autoflush=False, autocommit=True))
install_session = scoped_session(sessionmaker(bind=engine, autoflush=False))
model = mapping.init(database_connection)
return install_session, model

Expand Down
2 changes: 1 addition & 1 deletion test/unit/data/model/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ def sqlite_memory_url():
@pytest.fixture(scope="module")
def engine():
db_uri = "sqlite:///:memory:"
return create_engine(db_uri, future=True)
return create_engine(db_uri)


@pytest.fixture
Expand Down
2 changes: 1 addition & 1 deletion test/unit/workflows/test_workflow_progress.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ def _set_previous_progress(self, outputs):

workflow_invocation_step_state = model.WorkflowRequestStepState()
workflow_invocation_step_state.workflow_step_id = step_id
workflow_invocation_step_state.value = True
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We have to fix these column definitions. This is just a wild line of code.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please merge #17902 and follow up with a proper type definition for that column. The truth is it can be any JSON-serializable type - it is sort of up to the workflow module to process it - so maybe Any, or maybe start with bool and create a comment somewhere that it will need to be unioned with other types if we add more workflow module types. I'm not sure if conditionals use this or not - probably worth looking into, but maybe the typing alone would tell us.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Will do.

workflow_invocation_step_state.value = cast(bytes, True)
self.invocation.step_states.append(workflow_invocation_step_state)

def _step(self, index):
Expand Down
Loading