Skip to content

Commit

Permalink
Refactor enum dependencies between schema and models.
Browse files Browse the repository at this point in the history
This is a piece that is needed to create a schema package - which I would really like to have around for galaxyproject#15639 and, more generally, to write higher-level API tests.
  • Loading branch information
jmchilton committed May 12, 2023
1 parent f98794d commit 7edc84f
Show file tree
Hide file tree
Showing 2 changed files with 81 additions and 69 deletions.
58 changes: 10 additions & 48 deletions lib/galaxy/model/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,6 +129,12 @@
from galaxy.model.orm.now import now
from galaxy.model.orm.util import add_object_to_object_session
from galaxy.objectstore import ObjectStore
from galaxy.schema.schema import (
DatasetCollectionPopulatedState,
DatasetState,
DatasetValidatedState,
JobState,
)
from galaxy.security import get_permitted_actions
from galaxy.security.idencoding import IdEncodingHelper
from galaxy.security.validate_user_input import validate_password_str
Expand Down Expand Up @@ -1299,22 +1305,7 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable):
_numeric_metric = JobMetricNumeric
_text_metric = JobMetricText

class states(str, Enum):
NEW = "new"
RESUBMITTED = "resubmitted"
UPLOAD = "upload"
WAITING = "waiting"
QUEUED = "queued"
RUNNING = "running"
OK = "ok"
ERROR = "error"
FAILED = "failed"
PAUSED = "paused"
DELETING = "deleting"
DELETED = "deleted"
STOPPING = "stop"
STOPPED = "stopped"
SKIPPED = "skipped"
states = JobState

terminal_states = [states.OK, states.ERROR, states.DELETED]
#: job states where the job hasn't finished and the model may still change
Expand Down Expand Up @@ -3739,31 +3730,8 @@ class Dataset(Base, StorableObject, Serializable):
back_populates="dataset",
)

class states(str, Enum):
NEW = "new"
UPLOAD = "upload"
QUEUED = "queued"
RUNNING = "running"
OK = "ok"
EMPTY = "empty"
ERROR = "error"
PAUSED = "paused"
SETTING_METADATA = "setting_metadata"
FAILED_METADATA = "failed_metadata"
# Non-deleted, non-purged datasets that don't have physical files.
# These shouldn't have objectstores attached -
# 'deferred' can be materialized for jobs using
# attached DatasetSource objects but 'discarded'
# cannot (e.g. imported histories). These should still
# be able to have history contents associated (normal HDAs?)
DEFERRED = "deferred"
DISCARDED = "discarded"

@classmethod
def values(self):
return self.__members__.values()

# failed_metadata is only valid as DatasetInstance state currently
states = DatasetState

non_ready_states = (states.NEW, states.UPLOAD, states.QUEUED, states.RUNNING, states.SETTING_METADATA)
ready_states = tuple(set(states.__members__.values()) - set(non_ready_states))
Expand Down Expand Up @@ -4190,10 +4158,7 @@ class DatasetInstance(UsesCreateAndUpdateTime, _HasTable):
purged: bool
creating_job_associations: List[Union[JobToOutputDatasetCollectionAssociation, JobToOutputDatasetAssociation]]

class validated_states(str, Enum):
UNKNOWN = "unknown"
INVALID = "invalid"
OK = "ok"
validated_states = DatasetValidatedState

def __init__(
self,
Expand Down Expand Up @@ -5963,10 +5928,7 @@ class DatasetCollection(Base, Dictifiable, UsesAnnotations, Serializable):
dict_collection_visible_keys = ["id", "collection_type"]
dict_element_visible_keys = ["id", "collection_type"]

class populated_states(str, Enum):
NEW = "new" # New dataset collection, unpopulated elements
OK = "ok" # Collection elements populated (HDAs may or may not have errors)
FAILED = "failed" # some problem populating state, won't be populated
populated_states = DatasetCollectionPopulatedState

def __init__(self, id=None, collection_type=None, populated=True, element_count=None):
self.id = id
Expand Down
92 changes: 71 additions & 21 deletions lib/galaxy/schema/schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,12 +32,6 @@
Literal,
)

from galaxy.model import (
Dataset,
DatasetCollection,
DatasetInstance,
Job,
)
from galaxy.schema.bco import XrefItem
from galaxy.schema.fields import (
DecodedDatabaseIdField,
Expand All @@ -64,6 +58,56 @@

OptionalNumberT = Optional[Union[int, float]]


class DatasetState(str, Enum):
    """Valid lifecycle states for a ``Dataset``.

    Inherits from ``str`` so members compare equal to their raw string
    values (e.g. ``DatasetState.OK == "ok"``), which keeps them
    interchangeable with legacy string-typed state columns.
    """

    NEW = "new"
    UPLOAD = "upload"
    QUEUED = "queued"
    RUNNING = "running"
    OK = "ok"
    EMPTY = "empty"
    ERROR = "error"
    PAUSED = "paused"
    SETTING_METADATA = "setting_metadata"
    # failed_metadata is only valid as a DatasetInstance state currently
    FAILED_METADATA = "failed_metadata"
    # Non-deleted, non-purged datasets that don't have physical files.
    # These shouldn't have objectstores attached -
    # 'deferred' can be materialized for jobs using
    # attached DatasetSource objects but 'discarded'
    # cannot (e.g. imported histories). These should still
    # be able to have history contents associated (normal HDAs?)
    DEFERRED = "deferred"
    DISCARDED = "discarded"

    @classmethod
    def values(cls):
        """Return a view of all member objects, in declaration order."""
        # NOTE: `cls` (not `self`) — this is a classmethod receiving the class.
        return cls.__members__.values()


class JobState(str, Enum):
    """Valid lifecycle states for a ``Job``.

    Inherits from ``str`` so members compare equal to their raw string
    values (e.g. ``JobState.OK == "ok"``), keeping them interchangeable
    with legacy string-typed job state columns.
    """

    NEW = "new"
    RESUBMITTED = "resubmitted"
    UPLOAD = "upload"
    WAITING = "waiting"
    QUEUED = "queued"
    RUNNING = "running"
    OK = "ok"
    ERROR = "error"
    FAILED = "failed"
    PAUSED = "paused"
    DELETING = "deleting"
    DELETED = "deleted"
    # NOTE(review): value is "stop", not "stopping" — looks inconsistent with
    # the other members, but this is the value persisted by the original
    # model enum, so it must not be "fixed" without a data migration.
    STOPPING = "stop"
    STOPPED = "stopped"
    SKIPPED = "skipped"


class DatasetCollectionPopulatedState(str, Enum):
    """Population status of a ``DatasetCollection``'s elements."""

    NEW = "new"  # New dataset collection, unpopulated elements
    OK = "ok"  # Collection elements populated (HDAs may or may not have errors)
    FAILED = "failed"  # some problem populating state, won't be populated


# Generic and common Field annotations that can be reused across models

RelativeUrlField: RelativeUrl = Field(
Expand Down Expand Up @@ -97,7 +141,7 @@
description="The encoded ID of this entity.",
)

DatasetStateField: Dataset.states = Field(
DatasetStateField: DatasetState = Field(
...,
title="State",
description="The current state of this dataset.",
Expand All @@ -123,7 +167,7 @@
),
)

PopulatedStateField: DatasetCollection.populated_states = Field(
PopulatedStateField: DatasetCollectionPopulatedState = Field(
...,
title="Populated State",
description=(
Expand Down Expand Up @@ -442,7 +486,7 @@ class HDASummary(HistoryItemCommon):
title="Dataset ID",
description="The encoded ID of the dataset associated with this item.",
)
state: Dataset.states = DatasetStateField
state: DatasetState = DatasetStateField
extension: str = Field(
...,
title="Extension",
Expand All @@ -460,7 +504,7 @@ class HDAInaccessible(HistoryItemBase):
"""History Dataset Association information when the user can not access it."""

accessible: bool = AccessibleField
state: Dataset.states = DatasetStateField
state: DatasetState = DatasetStateField


HdaLddaField = Field(
Expand All @@ -472,6 +516,12 @@ class HDAInaccessible(HistoryItemBase):
)


class DatasetValidatedState(str, Enum):
    """Result of datatype validation for a dataset instance."""

    UNKNOWN = "unknown"  # validation has not been run (or result not recorded)
    INVALID = "invalid"  # validation ran and the dataset failed
    OK = "ok"  # validation ran and the dataset passed


class HDADetailed(HDASummary):
"""History Dataset Association detailed information."""

Expand Down Expand Up @@ -572,7 +622,7 @@ class HDADetailed(HDASummary):
title="Visualizations",
description="The collection of visualizations that can be applied to this dataset.",
)
validated_state: DatasetInstance.validated_states = Field(
validated_state: DatasetValidatedState = Field(
...,
title="Validated State",
description="The state of the datatype validation for this dataset.",
Expand Down Expand Up @@ -634,7 +684,7 @@ class DCSummary(Model):
create_time: datetime = CreateTimeField
update_time: datetime = UpdateTimeField
collection_type: CollectionType = CollectionTypeField
populated_state: DatasetCollection.populated_states = PopulatedStateField
populated_state: DatasetCollectionPopulatedState = PopulatedStateField
populated_state_message: Optional[str] = PopulatedStateMessageField
element_count: Optional[int] = ElementCountField

Expand All @@ -644,7 +694,7 @@ class HDAObject(Model):

id: DecodedDatabaseIdField = EntityIdField
model_class: HDA_MODEL_CLASS = ModelClassField(HDA_MODEL_CLASS)
state: Dataset.states = DatasetStateField
state: DatasetState = DatasetStateField
hda_ldda: DatasetSourceType = HdaLddaField
history_id: DecodedDatabaseIdField = HistoryIdField
tags: List[str]
Expand Down Expand Up @@ -789,7 +839,7 @@ class HDCASummary(HistoryItemCommon):
),
] = "collection"
collection_type: CollectionType = CollectionTypeField
populated_state: DatasetCollection.populated_states = PopulatedStateField
populated_state: DatasetCollectionPopulatedState = PopulatedStateField
populated_state_message: Optional[str] = PopulatedStateMessageField
element_count: Optional[int] = ElementCountField
job_source_id: Optional[DecodedDatabaseIdField] = Field(
Expand Down Expand Up @@ -1003,8 +1053,8 @@ class HistoryActiveContentCounts(Model):
)


HistoryStateCounts = Dict[Dataset.states, int]
HistoryStateIds = Dict[Dataset.states, List[DecodedDatabaseIdField]]
HistoryStateCounts = Dict[DatasetState, int]
HistoryStateIds = Dict[DatasetState, List[DecodedDatabaseIdField]]


class HistoryDetailed(HistorySummary): # Equivalent to 'dev-detailed' view, which seems the default
Expand Down Expand Up @@ -1038,7 +1088,7 @@ class HistoryDetailed(HistorySummary): # Equivalent to 'dev-detailed' view, whi
description="The relative URL in the form of /u/{username}/h/{slug}",
)
genome_build: Optional[str] = GenomeBuildField
state: Dataset.states = Field(
state: DatasetState = Field(
...,
title="State",
description="The current state of the History based on the states of the datasets it contains.",
Expand Down Expand Up @@ -1536,7 +1586,7 @@ class JobBaseModel(Model):
title="History ID",
description="The encoded ID of the history associated with this item.",
)
state: Job.states = Field(
state: JobState = Field(
...,
title="State",
description="Current state of the job.",
Expand Down Expand Up @@ -1566,8 +1616,8 @@ class JobImportHistoryResponse(JobBaseModel):

class ItemStateSummary(Model):
id: DecodedDatabaseIdField = EntityIdField
populated_state: DatasetCollection.populated_states = PopulatedStateField
states: Dict[Job.states, int] = Field(
populated_state: DatasetCollectionPopulatedState = PopulatedStateField
states: Dict[JobState, int] = Field(
{}, title="States", description=("A dictionary of job states and the number of jobs in that state.")
)

Expand Down Expand Up @@ -2770,7 +2820,7 @@ class FileLibraryFolderItem(LibraryFolderItemBase):
date_uploaded: datetime
is_unrestricted: bool
is_private: bool
state: Dataset.states = DatasetStateField
state: DatasetState = DatasetStateField
file_size: str
raw_size: int
ldda_id: EncodedDatabaseIdField
Expand Down

0 comments on commit 7edc84f

Please sign in to comment.