Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
45 commits
Select commit Hold shift + click to select a range
769f553
Another pass at fixing redundant work during tool loading.
jmchilton Jan 28, 2026
10197f9
Test serialization roundtrip (discovered bugs in YAML parsing).
jmchilton Nov 6, 2025
cc89081
Modeling and validation of YAML tool test cases.
jmchilton Nov 5, 2025
e71741e
fix: don't let yaml tool source parsing change yaml tool source seria…
jmchilton Nov 6, 2025
8b662c4
Refactor path for loading user defined tools for consistency.
jmchilton Sep 29, 2025
8e208fc
Small tweaks to tool loading in celery task for downstream.
jmchilton Sep 29, 2025
dddcf09
[WIP] track abstract inputs type in tool source interface.
jmchilton Oct 3, 2025
12fb8c9
Migration for job.tool_state.
jmchilton Sep 30, 2025
c55864e
Persist validated job tool state.
jmchilton Sep 30, 2025
1bb986c
Add functional tests for multiple selects from user tools.
jmchilton Jan 28, 2026
fbeb096
Does this change make sense in light of select handling changes that …
jmchilton Jan 29, 2026
8276031
Failing gx_boolean_user.yml tool to develop against.
jmchilton Oct 22, 2025
85d5d17
Claude infrastructure for managing test tools.
jmchilton Oct 22, 2025
abbc3e1
Do not allow the legacy tool submission with user tools from tests.
jmchilton Nov 10, 2025
8d84e3d
YAML test for collection WIP.
jmchilton Dec 1, 2025
3c5e3c9
Allow YAML-based tools to produce output collections.
jmchilton Dec 10, 2025
5b5bb42
YAML tools specify a minimum profile of 24.2 so test requests load.
jmchilton Jan 31, 2026
f8decd4
WIP: allow collection inputs to YAML tools.
jmchilton Jan 30, 2026
fd4183d
Improved error handling for tool tests when requests are unavailable.
jmchilton Jan 31, 2026
2b463a2
Linting fixes...
jmchilton Jan 30, 2026
3a08805
Implement collection input runtimeify for YAML tools.
jmchilton Feb 5, 2026
97ad0a6
Remove dead collection runtime models.
jmchilton Feb 5, 2026
1ac44a2
Wire job_runtime validation into parameter specification tests.
jmchilton Feb 5, 2026
1cc0e26
Typed Models for collection_to_runtime
jmchilton Feb 6, 2026
563dbca
Add explicit job_runtime specs for scalar parameter types.
jmchilton Feb 7, 2026
5aae707
Harmonize py_type_internal_json with runtime validation.
jmchilton Feb 7, 2026
0edbe4d
Add cross-type discrimination tests for collection runtime.
jmchilton Feb 8, 2026
3f3a656
Harden collection runtime models with discriminated unions.
jmchilton Feb 9, 2026
372d939
Dynamic model factory for recursive collection type validation.
jmchilton Feb 9, 2026
3815c70
Polish dynamic model factory: fix return types, consolidate routing.
jmchilton Feb 9, 2026
7493fed
Add 8 collection type functional tool definitions.
jmchilton Feb 9, 2026
30b167c
Comma-separated collection type specs, deeper E2E tests.
jmchilton Feb 10, 2026
529689a
Linting fixes: unused imports, deprecated typing, loop variable.
jmchilton Feb 10, 2026
db1c778
Fix linter test for YAML tool minimum profile 24.2.
jmchilton Feb 10, 2026
9e56526
Fix linting.
jmchilton Feb 10, 2026
e90af29
Fix mypy errors in evaluation, runtime, and test_runtime.
jmchilton Feb 10, 2026
eaa70ec
Fix mypy errors in parameters, yaml parser, interactor, runtime.
jmchilton Feb 10, 2026
7285d3b
Normalize YAML tool naming scheme in test tools.
jmchilton Feb 11, 2026
4f30b3e
Add any-collection-type YAML tool with comprehensive job_runtime specs.
jmchilton Feb 11, 2026
2400930
Move galaxy.model-dependent runtime tests to test/unit/app/tools/.
jmchilton Feb 11, 2026
93c5afc
Fix Python 3.8 compat: use List[] instead of list[] in tool_util.
jmchilton Feb 11, 2026
5395ad5
Make adapt_collection a required param of runtimeify().
jmchilton Feb 12, 2026
aa67790
Fail fast on unknown collection_type instead of silent fallbacks.
jmchilton Feb 12, 2026
0222879
Use optimized JSONB on postgres
mvdbeek Feb 13, 2026
205ec2a
Lint fixes -- make format ran with wrong black version i guess
mvdbeek Feb 13, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 8 additions & 3 deletions lib/galaxy/app_unittest_utils/tools_support.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,12 +27,17 @@
galaxy.model.set_datatypes_registry(datatypes_registry)


def mock_app_for_tool_support() -> UniverseApplication:
    """Build a minimal mocked Galaxy application for tool-related unit tests.

    The mock gets a throwaway directory for new files and a fixed admin
    user so tool-loading code paths that consult config can run.
    """
    mock_app = MockApp()
    mock_app.config.new_file_path = tempfile.mkdtemp()
    mock_app.config.admin_users = "mary@example.com"
    # cast is a runtime no-op; tests treat the mock as a real app.
    return cast(UniverseApplication, mock_app)


class UsesApp:
def setup_app(self):
self.test_directory = tempfile.mkdtemp()
self.app = cast(UniverseApplication, MockApp())
self.app.config.new_file_path = os.path.join(self.test_directory, "new_files")
self.app.config.admin_users = "mary@example.com"
self.app = mock_app_for_tool_support()

def tear_down_app(self):
shutil.rmtree(self.test_directory)
Expand Down
6 changes: 4 additions & 2 deletions lib/galaxy/celery/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -350,11 +350,13 @@ def fetch_data(

@galaxy_task(action="queuing up submitted jobs")
def queue_jobs(request: QueueJobs, app: MinimalManagerApp, job_submitter: JobSubmitter):
raw_tool_source = request.tool_source.raw_tool_source
tool_source_class = request.tool_source.tool_source_class
tool = cached_create_tool_from_representation(
app=app,
raw_tool_source=request.tool_source.raw_tool_source,
raw_tool_source=raw_tool_source,
tool_dir=request.tool_source.tool_dir,
tool_source_class=request.tool_source.tool_source_class,
tool_source_class=tool_source_class,
)

job_submitter.queue_jobs(
Expand Down
6 changes: 5 additions & 1 deletion lib/galaxy/model/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -1394,6 +1394,7 @@ class ToolSource(Base, Dictifiable, RepresentById):
id: Mapped[int] = mapped_column(primary_key=True)
hash: Mapped[Optional[str]] = mapped_column(Unicode(255))
source: Mapped[dict] = mapped_column(JSONType)
source_class: Mapped[str] = mapped_column(TrimmedString(255))


class ToolRequest(Base, Dictifiable, RepresentById):
Expand All @@ -1403,7 +1404,7 @@ class ToolRequest(Base, Dictifiable, RepresentById):

id: Mapped[int] = mapped_column(primary_key=True)
tool_source_id: Mapped[int] = mapped_column(ForeignKey("tool_source.id"), index=True)
history_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history.id"), index=True)
history_id: Mapped[int] = mapped_column(ForeignKey("history.id"), index=True, nullable=False)
request: Mapped[dict] = mapped_column(JSONType)
state: Mapped[Optional[str]] = mapped_column(TrimmedString(32), index=True)
state_message: Mapped[Optional[str]] = mapped_column(JSONType, index=True)
Expand Down Expand Up @@ -1619,6 +1620,7 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable):
preferred_object_store_id: Mapped[Optional[str]] = mapped_column(String(255))
object_store_id_overrides: Mapped[Optional[dict[str, Optional[str]]]] = mapped_column(JSONType)
tool_request_id: Mapped[Optional[int]] = mapped_column(ForeignKey("tool_request.id"), index=True)
tool_state: Mapped[Optional[dict[str, Any]]] = mapped_column(JSON().with_variant(JSONB, "postgresql"))

dynamic_tool: Mapped[Optional["DynamicTool"]] = relationship()
tool_request: Mapped[Optional["ToolRequest"]] = relationship(back_populates="jobs")
Expand Down Expand Up @@ -1764,6 +1766,7 @@ def copy_from_job(self, job: "Job", copy_outputs: bool = False):
self.exit_code = job.exit_code
self.job_runner_name = job.job_runner_name
self.job_runner_external_id = job.job_runner_external_id
self.tool_state = job.tool_state
if copy_outputs:
assert self.history
requires_addition_to_history = False
Expand Down Expand Up @@ -2205,6 +2208,7 @@ def _serialize(self, id_encoder, serialization_options):
job_attrs["create_time"] = self.create_time.isoformat()
job_attrs["update_time"] = self.update_time.isoformat()
job_attrs["job_messages"] = self.job_messages
job_attrs["tool_state"] = self.tool_state

# Get the job's parameters
param_dict = self.raw_param_dict()
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
"""Persist validated job internal tool state and enforce tool request constraints.

Revision ID: 566b691307a5
Revises: 9930b68c85af
Create Date: 2025-09-30 04:48:19.727414

"""

from sqlalchemy import (
Column,
Integer,
JSON,
String,
)
from sqlalchemy.dialects.postgresql import JSONB

from galaxy.model.migrations.util import (
add_column,
alter_column,
drop_column,
transaction,
)

# revision identifiers, used by Alembic.
revision = "566b691307a5"
down_revision = "9930b68c85af"
branch_labels = None
depends_on = None

# Primary target of this revision: the job table gains a tool_state column.
table_name = "job"
column_name = "tool_state"

# additional database object names used in this revision
# tool_request.history_id is tightened to NOT NULL.
tool_request_table_name = "tool_request"
tool_request_history_id_column = "history_id"

# tool_source gains a required source_class string column.
tool_source_table_name = "tool_source"
tool_source_source_class_column = "source_class"


def upgrade():
    """Apply the schema changes for persisted job tool state.

    Adds ``job.tool_state`` (JSON, JSONB on PostgreSQL), makes
    ``tool_request.history_id`` NOT NULL, and adds the required
    ``tool_source.source_class`` string column.
    """
    # Wrap statements for sqlite safety and to ensure atomicity where supported
    with transaction():
        # Persist validated job internal tool state
        add_column(table_name, Column(column_name, JSON().with_variant(JSONB, "postgresql")))

        # Ensure tool_request.history_id is NOT NULL
        # NOTE(review): this will fail if existing tool_request rows contain
        # NULL history_id values -- confirm no backfill is needed first.
        alter_column(
            tool_request_table_name,
            tool_request_history_id_column,
            existing_type=Integer(),
            nullable=False,
        )

        # Add tool_source.source_class as a new NOT NULL string column
        # NOTE(review): adding a NOT NULL column with no server default
        # typically fails on a populated table -- presumably tool_source is
        # empty at upgrade time; verify or add a default/backfill step.
        add_column(
            tool_source_table_name,
            Column(tool_source_source_class_column, String(255), nullable=False),
        )


def downgrade():
    """Undo :func:`upgrade`: drop the new columns and relax the constraint.

    Operations run in the reverse order of the upgrade, inside a single
    transaction where the backend supports transactional DDL.
    """
    with transaction():
        # tool_source.source_class goes away first (last thing added).
        drop_column(tool_source_table_name, tool_source_source_class_column)
        # tool_request.history_id becomes optional again.
        alter_column(
            tool_request_table_name, tool_request_history_id_column, existing_type=Integer(), nullable=True
        )
        # Finally remove the persisted job tool state column.
        drop_column(table_name, column_name)
1 change: 1 addition & 0 deletions lib/galaxy/model/store/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -1684,6 +1684,7 @@ def _set_job_attributes(
"job_stdout",
"job_stderr",
"galaxy_version",
"tool_state",
)
for attribute in ATTRIBUTES:
value = job_attrs.get(attribute)
Expand Down
6 changes: 6 additions & 0 deletions lib/galaxy/tool_util/parameters/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,10 +65,12 @@
validate_internal_landing_request,
validate_internal_request,
validate_internal_request_dereferenced,
validate_job_runtime,
validate_landing_request,
validate_relaxed_request,
validate_request,
validate_test_case,
validate_test_case_json,
validate_workflow_step,
validate_workflow_step_linked,
ValidationFunctionT,
Expand All @@ -82,6 +84,7 @@
RequestInternalDereferencedToolState,
RequestInternalToolState,
RequestToolState,
TestCaseJsonToolState,
TestCaseToolState,
ToolState,
WorkflowStepLinkedToolState,
Expand Down Expand Up @@ -142,18 +145,21 @@
"validate_against_model",
"validate_internal_job",
"validate_internal_landing_request",
"validate_job_runtime",
"validate_internal_request",
"validate_internal_request_dereferenced",
"validate_landing_request",
"validate_relaxed_request",
"validate_request",
"validate_test_case",
"validate_test_case_json",
"validate_workflow_step",
"validate_workflow_step_linked",
"validate_explicit_conditional_test_value",
"is_optional",
"ToolState",
"TestCaseToolState",
"TestCaseJsonToolState",
"ToolParameterT",
"HasToolParameters",
"to_json_schema_string",
Expand Down
55 changes: 41 additions & 14 deletions lib/galaxy/tool_util/parameters/case.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
)

from packaging.version import Version
from typing_extensions import Literal

from galaxy.tool_util.parser.interface import (
TestCollectionDef,
Expand Down Expand Up @@ -160,7 +161,10 @@ def test_case_state(
unhandled_inputs = []
state: Dict[str, Any] = {}

handled_inputs = _merge_level_into_state(tool_parameter_bundle, inputs, state, profile, warnings, None)
state_representation = test_dict.get("value_state_representation", "test_case_xml")
handled_inputs = _merge_level_into_state(
tool_parameter_bundle, inputs, state, profile, state_representation, warnings, None
)

for test_input in inputs:
input_name = test_input["name"]
Expand Down Expand Up @@ -200,12 +204,15 @@ def _merge_level_into_state(
inputs: ToolSourceTestInputs,
state_at_level: dict,
profile: str,
state_representation: Literal["test_case_xml", "test_case_json"],
warnings: List[str],
prefix: Optional[str],
) -> Set[str]:
handled_inputs: Set[str] = set()
for tool_input in tool_inputs:
handled_inputs.update(_merge_into_state(tool_input, inputs, state_at_level, profile, warnings, prefix))
handled_inputs.update(
_merge_into_state(tool_input, inputs, state_at_level, profile, state_representation, warnings, prefix)
)

return handled_inputs

Expand All @@ -223,6 +230,7 @@ def _merge_into_state(
inputs: ToolSourceTestInputs,
state_at_level: dict,
profile: str,
state_representation: Literal["test_case_xml", "test_case_json"],
warnings: List[str],
prefix: Optional[str],
) -> Set[str]:
Expand All @@ -240,10 +248,14 @@ def _merge_into_state(
when: ConditionalWhen = _select_which_when(tool_input, conditional_state, inputs, state_path)
test_parameter = tool_input.test_parameter
handled_inputs.update(
_merge_into_state(test_parameter, inputs, conditional_state, profile, warnings, state_path)
_merge_into_state(
test_parameter, inputs, conditional_state, profile, state_representation, warnings, state_path
)
)
handled_inputs.update(
_merge_level_into_state(when.parameters, inputs, conditional_state, profile, warnings, state_path)
_merge_level_into_state(
when.parameters, inputs, conditional_state, profile, state_representation, warnings, state_path
)
)
elif isinstance(tool_input, (RepeatParameterModel,)):
repeat_state_array = state_at_level.get(input_name, [])
Expand All @@ -261,7 +273,13 @@ def _merge_into_state(
repeat_instance_prefix = f"{state_path}_{i}"
handled_inputs.update(
_merge_level_into_state(
tool_input.parameters, inputs, repeat_state_array[i], profile, warnings, repeat_instance_prefix
tool_input.parameters,
inputs,
repeat_state_array[i],
profile,
state_representation,
warnings,
repeat_instance_prefix,
)
)
elif isinstance(tool_input, (SectionParameterModel,)):
Expand All @@ -270,7 +288,9 @@ def _merge_into_state(
state_at_level[input_name] = section_state

handled_inputs.update(
_merge_level_into_state(tool_input.parameters, inputs, section_state, profile, warnings, state_path)
_merge_level_into_state(
tool_input.parameters, inputs, section_state, profile, state_representation, warnings, state_path
)
)
else:
test_input = _input_for(state_path, inputs)
Expand All @@ -283,17 +303,24 @@ def _merge_into_state(
elif isinstance(tool_input, (DataParameterModel,)):
if tool_input.multiple:
value = test_input["value"]
input_value_list = []
input_value_list: List[Any] = []
if value:
test_input_values = cast(str, value).split(",")
for test_input_value in test_input_values:
instance_test_input = test_input.copy()
instance_test_input["value"] = test_input_value
input_value_json = xml_data_input_to_json(instance_test_input)
input_value_list.append(input_value_json)
if state_representation == "test_case_json":
input_value_list = test_input["value"] if test_input["value"] is not None else []
else:
test_input_values = cast(str, value).split(",")
for test_input_value in test_input_values:
instance_test_input = test_input.copy()
instance_test_input["value"] = test_input_value
input_value_json = xml_data_input_to_json(instance_test_input)
input_value_list.append(input_value_json)

input_value = input_value_list
else:
input_value = xml_data_input_to_json(test_input)
if state_representation == "test_case_json":
input_value = test_input["value"]
else:
input_value = xml_data_input_to_json(test_input)
else:
input_value = test_input["value"]
input_value = legacy_from_string(tool_input, input_value, warnings, profile)
Expand Down
Loading
Loading