2 changes: 2 additions & 0 deletions src/mavedb/scripts/load_pp_style_calibration.py
@@ -234,6 +234,8 @@ def main(db: Session, archive_path: str, dataset_map: str, overwrite: bool) -> N
         score_set_urn=score_set.urn,
         calibration_metadata={"prior_probability_pathogenicity": calibration_data.get("prior", None)},
         method_sources=[ZEIBERG_CALIBRATION_CITATION],
+        threshold_sources=[],
+        classification_sources=[],
     )

     new_calibration_object = asyncio.run(
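
Note: `threshold_sources` and `classification_sources` are now required rather than `Optional` on the create model (see `src/mavedb/view_models/score_calibration.py` below), so the loader passes explicit empty lists instead of omitting them. A minimal standalone sketch of the behavior this enforces, assuming Pydantic v2; `Calibration` is a hypothetical stand-in for `ScoreCalibrationCreate`:

```python
# Minimal sketch: required Sequence fields (no default) fail validation when
# omitted, which is why the loader now passes empty lists explicitly.
# `Calibration` is a hypothetical stand-in for ScoreCalibrationCreate.
from typing import Sequence

from pydantic import BaseModel, ValidationError


class Calibration(BaseModel):
    method_sources: Sequence[str]
    threshold_sources: Sequence[str]
    classification_sources: Sequence[str]


try:
    Calibration(method_sources=["zeiberg-calibration"])  # other sources omitted
except ValidationError as exc:
    print(exc.error_count())  # 2 -- both omitted fields are reported as missing

print(Calibration(method_sources=["zeiberg-calibration"], threshold_sources=[], classification_sources=[]))
```
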
32 changes: 16 additions & 16 deletions src/mavedb/view_models/collection.py
@@ -84,36 +84,36 @@ class Config:
         from_attributes = True

     # These 'synthetic' fields are generated from other model properties. Transform data from other properties as needed, setting
-    # the appropriate field on the model itself. Then, proceed with Pydantic ingestion once fields are created.
+    # the appropriate field on the model itself. Then, proceed with Pydantic ingestion once fields are created. Only perform these
+    # transformations if the relevant attributes are present on the input data (i.e., when creating from an ORM object).
     @model_validator(mode="before")
     def generate_contribution_role_user_relationships(cls, data: Any):
-        try:
-            user_associations = transform_contribution_role_associations_to_roles(data.user_associations)
-            for k, v in user_associations.items():
-                data.__setattr__(k, v)
-
-        except AttributeError as exc:
-            raise ValidationError(
-                f"Unable to create {cls.__name__} without attribute: {exc}."  # type: ignore
-            )
+        if hasattr(data, "user_associations"):
+            try:
+                user_associations = transform_contribution_role_associations_to_roles(data.user_associations)
+                for k, v in user_associations.items():
+                    data.__setattr__(k, v)
+
+            except (AttributeError, KeyError) as exc:
+                raise ValidationError(f"Unable to coerce user associations for {cls.__name__}: {exc}.")
         return data

     @model_validator(mode="before")
     def generate_score_set_urn_list(cls, data: Any):
-        if not hasattr(data, "score_set_urns"):
+        if hasattr(data, "score_sets"):
             try:
                 data.__setattr__("score_set_urns", transform_score_set_list_to_urn_list(data.score_sets))
-            except AttributeError as exc:
-                raise ValidationError(f"Unable to create {cls.__name__} without attribute: {exc}.")  # type: ignore
+            except (AttributeError, KeyError) as exc:
+                raise ValidationError(f"Unable to coerce score set urns for {cls.__name__}: {exc}.")
         return data

     @model_validator(mode="before")
     def generate_experiment_urn_list(cls, data: Any):
-        if not hasattr(data, "experiment_urns"):
+        if hasattr(data, "experiments"):
             try:
                 data.__setattr__("experiment_urns", transform_experiment_list_to_urn_list(data.experiments))
-            except AttributeError as exc:
-                raise ValidationError(f"Unable to create {cls.__name__} without attribute: {exc}.")  # type: ignore
+            except (AttributeError, KeyError) as exc:
+                raise ValidationError(f"Unable to coerce experiment urns for {cls.__name__}: {exc}.")
         return data


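
Note: all three validators above follow the same guarded-transform pattern the new comment describes. A minimal self-contained sketch of that pattern, assuming Pydantic v2; `WidgetView` and the `SimpleNamespace` stand-in for an ORM row are hypothetical:

```python
# Minimal sketch of the guarded before-validator pattern, assuming Pydantic v2.
# WidgetView and the SimpleNamespace ORM stand-in are hypothetical.
from types import SimpleNamespace
from typing import Any, Optional

from pydantic import BaseModel, ConfigDict, model_validator


class WidgetView(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    name: str
    child_urns: Optional[list[str]] = None

    @model_validator(mode="before")
    @classmethod
    def generate_child_urns(cls, data: Any):
        # Only synthesize the field when the ORM relationship is present;
        # plain dict payloads fall through to normal Pydantic ingestion.
        if hasattr(data, "children"):
            try:
                data.__setattr__("child_urns", [child.urn for child in data.children])
            except (AttributeError, KeyError) as exc:
                raise ValueError(f"Unable to coerce child urns: {exc}")
        return data


orm_row = SimpleNamespace(name="w1", children=[SimpleNamespace(urn="urn:demo:1")])
print(WidgetView.model_validate(orm_row).child_urns)  # ['urn:demo:1']
print(WidgetView.model_validate({"name": "w2"}).child_urns)  # None
```

With a plain dict payload the `hasattr` guard is false, so API input that already carries the synthetic field passes through untouched.
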
25 changes: 13 additions & 12 deletions src/mavedb/view_models/experiment.py
@@ -125,12 +125,11 @@ def publication_identifiers_validator(cls, v: Any, info: ValidationInfo) -> list
         return list(v)  # Re-cast into proper list-like type

     # These 'synthetic' fields are generated from other model properties. Transform data from other properties as needed, setting
-    # the appropriate field on the model itself. Then, proceed with Pydantic ingestion once fields are created.
+    # the appropriate field on the model itself. Then, proceed with Pydantic ingestion once fields are created. Only perform these
+    # transformations if the relevant attributes are present on the input data (i.e., when creating from an ORM object).
     @model_validator(mode="before")
     def generate_primary_and_secondary_publications(cls, data: Any):
-        if not hasattr(data, "primary_publication_identifiers") or not hasattr(
-            data, "secondary_publication_identifiers"
-        ):
+        if hasattr(data, "publication_identifier_associations"):
             try:
                 publication_identifiers = transform_record_publication_identifiers(
                     data.publication_identifier_associations
@@ -141,28 +140,30 @@ def generate_primary_and_secondary_publications(cls, data: Any):
                 data.__setattr__(
                     "secondary_publication_identifiers", publication_identifiers["secondary_publication_identifiers"]
                 )
-            except AttributeError as exc:
+            except (KeyError, AttributeError) as exc:
                 raise ValidationError(
-                    f"Unable to create {cls.__name__} without attribute: {exc}."  # type: ignore
+                    f"Unable to coerce publication identifier attributes from ORM for {cls.__name__}: {exc}."  # type: ignore
                 )
         return data

     @model_validator(mode="before")
     def generate_score_set_urn_list(cls, data: Any):
-        if not hasattr(data, "score_set_urns"):
+        if hasattr(data, "score_sets"):
             try:
                 data.__setattr__("score_set_urns", transform_score_set_list_to_urn_list(data.score_sets))
-            except AttributeError as exc:
-                raise ValidationError(f"Unable to create {cls.__name__} without attribute: {exc}.")  # type: ignore
+            except (KeyError, AttributeError) as exc:
+                raise ValidationError(f"Unable to coerce associated score set URNs from ORM for {cls.__name__}: {exc}.")  # type: ignore
        return data

     @model_validator(mode="before")
     def generate_experiment_set_urn(cls, data: Any):
-        if not hasattr(data, "experiment_set_urn"):
+        if hasattr(data, "experiment_set"):
             try:
                 data.__setattr__("experiment_set_urn", transform_experiment_set_to_urn(data.experiment_set))
-            except AttributeError as exc:
-                raise ValidationError(f"Unable to create {cls.__name__} without attribute: {exc}.")  # type: ignore
+            except (KeyError, AttributeError) as exc:
+                raise ValidationError(
+                    f"Unable to coerce associated experiment set URN from ORM for {cls.__name__}: {exc}."
+                )  # type: ignore
         return data
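
Note: these validators assume the transform returns a dict keyed by `primary_publication_identifiers` / `secondary_publication_identifiers`, which is why `KeyError` is now caught alongside `AttributeError`. A simplified, hypothetical sketch of such a transform (the real `transform_record_publication_identifiers` lives in mavedb's helpers and may differ):

```python
# Hypothetical, simplified version of the transform these validators call:
# split publication association rows into primary/secondary identifier lists.
# A malformed mapping would surface as the KeyError the validators now catch.
from types import SimpleNamespace
from typing import Any


def transform_record_publication_identifiers(associations: list[Any]) -> dict[str, list[Any]]:
    return {
        "primary_publication_identifiers": [a.publication for a in associations if a.primary],
        "secondary_publication_identifiers": [a.publication for a in associations if not a.primary],
    }


assocs = [
    SimpleNamespace(primary=True, publication="pubmed:1"),
    SimpleNamespace(primary=False, publication="pubmed:2"),
]
print(transform_record_publication_identifiers(assocs))
# {'primary_publication_identifiers': ['pubmed:1'], 'secondary_publication_identifiers': ['pubmed:2']}
```
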
18 changes: 15 additions & 3 deletions src/mavedb/view_models/mapped_variant.py
@@ -59,13 +59,16 @@ class SavedMappedVariant(MappedVariantBase):
     class Config:
         from_attributes = True

+    # These 'synthetic' fields are generated from other model properties. Transform data from other properties as needed, setting
+    # the appropriate field on the model itself. Then, proceed with Pydantic ingestion once fields are created. Only perform these
+    # transformations if the relevant attributes are present on the input data (i.e., when creating from an ORM object).
     @model_validator(mode="before")
     def generate_score_set_urn_list(cls, data: Any):
-        if not hasattr(data, "variant_urn") and hasattr(data, "variant"):
+        if hasattr(data, "variant"):
             try:
                 data.__setattr__("variant_urn", None if not data.variant else data.variant.urn)
-            except AttributeError as exc:
-                raise ValidationError(f"Unable to create {cls.__name__} without attribute: {exc}.")  # type: ignore
+            except (AttributeError, KeyError) as exc:
+                raise ValidationError(f"Unable to coerce variant urn for {cls.__name__}: {exc}.")  # type: ignore
         return data

@@ -98,3 +101,12 @@ def generate_score_set_urn_list(cls, data: Any):
             except AttributeError as exc:
                 raise ValidationError(f"Unable to create {cls.__name__} without attribute: {exc}.")  # type: ignore
         return data
+
+
+# ruff: noqa: E402
+from mavedb.view_models.clinical_control import ClinicalControl, ClinicalControlBase, SavedClinicalControl
+from mavedb.view_models.gnomad_variant import GnomADVariant, GnomADVariantBase, SavedGnomADVariant
+
+MappedVariantUpdate.model_rebuild()
+SavedMappedVariantWithControls.model_rebuild()
+MappedVariantWithControls.model_rebuild()
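
Note: the late imports plus `model_rebuild()` calls resolve string forward references once the referenced modules are importable, avoiding a circular import. A minimal sketch of the mechanism, assuming Pydantic v2 and hypothetical `Node`/`Leaf` models:

```python
# Minimal sketch of forward-reference resolution via model_rebuild(),
# assuming Pydantic v2; Node and Leaf are hypothetical.
from typing import Optional

from pydantic import BaseModel


class Node(BaseModel):
    name: str
    leaf: Optional["Leaf"] = None  # string forward reference, not yet defined


# In mavedb the referenced classes live in other modules that themselves
# import this one, hence the deferred bottom-of-module import plus rebuild.
class Leaf(BaseModel):
    label: str


Node.model_rebuild()  # resolve "Leaf" now that the class exists
print(Node(name="n", leaf={"label": "l"}))
```

Pydantic can sometimes resolve forward references lazily on first use; the explicit `model_rebuild()` makes the dependency unambiguous.
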
57 changes: 24 additions & 33 deletions src/mavedb/view_models/score_calibration.py
@@ -279,9 +279,9 @@ class ScoreCalibrationBase(BaseModel):
     notes: Optional[str] = None

     functional_classifications: Optional[Sequence[FunctionalClassificationBase]] = None
-    threshold_sources: Optional[Sequence[PublicationIdentifierBase]] = None
-    classification_sources: Optional[Sequence[PublicationIdentifierBase]] = None
-    method_sources: Optional[Sequence[PublicationIdentifierBase]] = None
+    threshold_sources: Sequence[PublicationIdentifierBase]
+    classification_sources: Sequence[PublicationIdentifierBase]
+    method_sources: Sequence[PublicationIdentifierBase]
     calibration_metadata: Optional[dict] = None

     @field_validator("functional_classifications")
@@ -428,18 +428,18 @@ class ScoreCalibrationModify(ScoreCalibrationBase):
     score_set_urn: Optional[str] = None

     functional_classifications: Optional[Sequence[FunctionalClassificationModify]] = None
-    threshold_sources: Optional[Sequence[PublicationIdentifierCreate]] = None
-    classification_sources: Optional[Sequence[PublicationIdentifierCreate]] = None
-    method_sources: Optional[Sequence[PublicationIdentifierCreate]] = None
+    threshold_sources: Sequence[PublicationIdentifierCreate]
+    classification_sources: Sequence[PublicationIdentifierCreate]
+    method_sources: Sequence[PublicationIdentifierCreate]


 class ScoreCalibrationCreate(ScoreCalibrationModify):
     """Model used to create a new score calibration."""

     functional_classifications: Optional[Sequence[FunctionalClassificationCreate]] = None
-    threshold_sources: Optional[Sequence[PublicationIdentifierCreate]] = None
-    classification_sources: Optional[Sequence[PublicationIdentifierCreate]] = None
-    method_sources: Optional[Sequence[PublicationIdentifierCreate]] = None
+    threshold_sources: Sequence[PublicationIdentifierCreate]
+    classification_sources: Sequence[PublicationIdentifierCreate]
+    method_sources: Sequence[PublicationIdentifierCreate]


 class SavedScoreCalibration(ScoreCalibrationBase):
@@ -457,9 +457,9 @@ class SavedScoreCalibration(ScoreCalibrationBase):
     private: bool = True

     functional_classifications: Optional[Sequence[SavedFunctionalClassification]] = None
-    threshold_sources: Optional[Sequence[SavedPublicationIdentifier]] = None
-    classification_sources: Optional[Sequence[SavedPublicationIdentifier]] = None
-    method_sources: Optional[Sequence[SavedPublicationIdentifier]] = None
+    threshold_sources: Sequence[SavedPublicationIdentifier]
+    classification_sources: Sequence[SavedPublicationIdentifier]
+    method_sources: Sequence[SavedPublicationIdentifier]

     created_by: Optional[SavedUser] = None
     modified_by: Optional[SavedUser] = None
@@ -477,9 +477,6 @@ class Config:
     @field_validator("threshold_sources", "classification_sources", "method_sources", mode="before")
     def publication_identifiers_validator(cls, value: Any) -> Optional[list[PublicationIdentifier]]:
         """Coerce association proxy collections to plain lists."""
-        if value is None:
-            return None
-
         assert isinstance(value, Collection), "Publication identifier lists must be a collection"
         return list(value)

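
Note: with the source fields now required, the `None` short-circuit above was dead code; the validator's only job is coercing collection-like inputs (e.g. SQLAlchemy association proxies) into plain lists. A minimal sketch of that coercion, assuming Pydantic v2; `Calibration` is hypothetical:

```python
# Minimal sketch of a mode="before" field validator that coerces any
# collection (tuple, set, SQLAlchemy association proxy) to a plain list,
# assuming Pydantic v2; Calibration is a hypothetical model.
from collections.abc import Collection
from typing import Any, Sequence

from pydantic import BaseModel, field_validator


class Calibration(BaseModel):
    method_sources: Sequence[str]

    @field_validator("method_sources", mode="before")
    @classmethod
    def coerce_to_list(cls, value: Any) -> list:
        # The field is required, so the old None short-circuit is unnecessary.
        assert isinstance(value, Collection), "must be a collection"
        return list(value)


print(Calibration(method_sources=("pub1", "pub2")).method_sources)  # ['pub1', 'pub2']
```
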
@@ -504,29 +501,23 @@ def primary_calibrations_may_not_be_private(self: "SavedScoreCalibration") -> "S

         return self

+    # These 'synthetic' fields are generated from other model properties. Transform data from other properties as needed, setting
+    # the appropriate field on the model itself. Then, proceed with Pydantic ingestion once fields are created. Only perform these
+    # transformations if the relevant attributes are present on the input data (i.e., when creating from an ORM object).
     @model_validator(mode="before")
     def generate_threshold_classification_and_method_sources(cls, data: Any):  # type: ignore[override]
         """Populate threshold/classification/method source fields from association objects if missing."""
-        association_keys = {
-            "threshold_sources",
-            "thresholdSources",
-            "classification_sources",
-            "classificationSources",
-            "method_sources",
-            "methodSources",
-        }
-
-        if not any(hasattr(data, key) for key in association_keys):
+        if hasattr(data, "publication_identifier_associations"):
             try:
                 publication_identifiers = transform_score_calibration_publication_identifiers(
                     data.publication_identifier_associations
                 )
                 data.__setattr__("threshold_sources", publication_identifiers["threshold_sources"])
                 data.__setattr__("classification_sources", publication_identifiers["classification_sources"])
                 data.__setattr__("method_sources", publication_identifiers["method_sources"])
-            except AttributeError as exc:
+            except (AttributeError, KeyError) as exc:
                 raise ValidationError(
-                    f"Unable to create {cls.__name__} without attribute: {exc}."  # type: ignore
+                    f"Unable to coerce publication associations for {cls.__name__}: {exc}."  # type: ignore
                 )
         return data

@@ -535,9 +526,9 @@ class ScoreCalibration(SavedScoreCalibration):
     """Complete score calibration model returned by the API."""

     functional_classifications: Optional[Sequence[FunctionalClassification]] = None
-    threshold_sources: Optional[Sequence[PublicationIdentifier]] = None
-    classification_sources: Optional[Sequence[PublicationIdentifier]] = None
-    method_sources: Optional[Sequence[PublicationIdentifier]] = None
+    threshold_sources: Sequence[PublicationIdentifier]
+    classification_sources: Sequence[PublicationIdentifier]
+    method_sources: Sequence[PublicationIdentifier]
     created_by: Optional[User] = None
     modified_by: Optional[User] = None

@@ -549,11 +540,11 @@ class ScoreCalibrationWithScoreSetUrn(SavedScoreCalibration):

     @model_validator(mode="before")
     def generate_score_set_urn(cls, data: Any):
-        if not hasattr(data, "score_set_urn"):
+        if hasattr(data, "score_set"):
             try:
                 data.__setattr__("score_set_urn", transform_score_set_to_urn(data.score_set))
-            except AttributeError as exc:
+            except (AttributeError, KeyError) as exc:
                 raise ValidationError(
-                    f"Unable to create {cls.__name__} without attribute: {exc}."  # type: ignore
+                    f"Unable to coerce score set urn for {cls.__name__}: {exc}."  # type: ignore
                 )
         return data
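
Note: the `Optional` → required change is repeated at every layer (`Base`, `Modify`, `Create`, `Saved`, API model) because each subclass re-declares the field with a narrower element type. A minimal sketch of that layering, assuming Pydantic v2 with hypothetical models:

```python
# Minimal sketch of the layered view-model pattern, assuming Pydantic v2.
# Each layer re-declares the field with a narrower element type; omitting a
# default keeps it required everywhere. All names here are hypothetical.
from typing import Sequence

from pydantic import BaseModel


class PublicationBase(BaseModel):
    identifier: str


class SavedPublication(PublicationBase):
    record_id: int


class CalibrationBase(BaseModel):
    method_sources: Sequence[PublicationBase]  # required, may be empty


class SavedCalibration(CalibrationBase):
    method_sources: Sequence[SavedPublication]  # narrowed element type


print(SavedCalibration(method_sources=[{"identifier": "doi:10/x", "record_id": 1}]))
```
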
40 changes: 20 additions & 20 deletions src/mavedb/view_models/score_set.py
@@ -303,12 +303,11 @@ class Config:
         arbitrary_types_allowed = True

     # These 'synthetic' fields are generated from other model properties. Transform data from other properties as needed, setting
-    # the appropriate field on the model itself. Then, proceed with Pydantic ingestion once fields are created.
+    # the appropriate field on the model itself. Then, proceed with Pydantic ingestion once fields are created. Only perform these
+    # transformations if the relevant attributes are present on the input data (i.e., when creating from an ORM object).
     @model_validator(mode="before")
     def generate_primary_and_secondary_publications(cls, data: Any):
-        if not hasattr(data, "primary_publication_identifiers") or not hasattr(
-            data, "secondary_publication_identifiers"
-        ):
+        if hasattr(data, "publication_identifier_associations"):
             try:
                 publication_identifiers = transform_record_publication_identifiers(
                     data.publication_identifier_associations
@@ -319,9 +318,9 @@ def generate_primary_and_secondary_publications(cls, data: Any):
                 data.__setattr__(
                     "secondary_publication_identifiers", publication_identifiers["secondary_publication_identifiers"]
                 )
-            except AttributeError as exc:
+            except (AttributeError, KeyError) as exc:
                 raise ValidationError(
-                    f"Unable to create {cls.__name__} without attribute: {exc}."  # type: ignore
+                    f"Unable to coerce publication identifier attributes for {cls.__name__}: {exc}."  # type: ignore
                 )
         return data

@@ -376,12 +375,11 @@ def publication_identifiers_validator(cls, value: Any) -> list[PublicationIdenti
         return list(value)  # Re-cast into proper list-like type

     # These 'synthetic' fields are generated from other model properties. Transform data from other properties as needed, setting
-    # the appropriate field on the model itself. Then, proceed with Pydantic ingestion once fields are created.
+    # the appropriate field on the model itself. Then, proceed with Pydantic ingestion once fields are created. Only perform these
+    # transformations if the relevant attributes are present on the input data (i.e., when creating from an ORM object).
     @model_validator(mode="before")
     def generate_primary_and_secondary_publications(cls, data: Any):
-        if not hasattr(data, "primary_publication_identifiers") or not hasattr(
-            data, "secondary_publication_identifiers"
-        ):
+        if hasattr(data, "publication_identifier_associations"):
             try:
                 publication_identifiers = transform_record_publication_identifiers(
                     data.publication_identifier_associations
@@ -392,33 +390,35 @@ def generate_primary_and_secondary_publications(cls, data: Any):
                 data.__setattr__(
                     "secondary_publication_identifiers", publication_identifiers["secondary_publication_identifiers"]
                 )
-            except AttributeError as exc:
-                raise ValidationError(
-                    f"Unable to create {cls.__name__} without attribute: {exc}."  # type: ignore
-                )
+            except (AttributeError, KeyError) as exc:
+                raise ValidationError(f"Unable to coerce publication identifier attributes for {cls.__name__}: {exc}.")
         return data

     @model_validator(mode="before")
     def transform_meta_analysis_objects_to_urns(cls, data: Any):
-        if not hasattr(data, "meta_analyzes_score_set_urns"):
+        if hasattr(data, "meta_analyzes_score_sets"):
             try:
                 data.__setattr__(
                     "meta_analyzes_score_set_urns", transform_score_set_list_to_urn_list(data.meta_analyzes_score_sets)
                 )
-            except AttributeError as exc:
-                raise ValidationError(f"Unable to create {cls.__name__} without attribute: {exc}.")  # type: ignore
+            except (AttributeError, KeyError) as exc:
+                raise ValidationError(
+                    f"Unable to coerce meta analyzes score set urn attribute for {cls.__name__}: {exc}."
+                )
         return data

     @model_validator(mode="before")
     def transform_meta_analyzed_objects_to_urns(cls, data: Any):
-        if not hasattr(data, "meta_analyzed_by_score_set_urns"):
+        if hasattr(data, "meta_analyzed_by_score_sets"):
             try:
                 data.__setattr__(
                     "meta_analyzed_by_score_set_urns",
                     transform_score_set_list_to_urn_list(data.meta_analyzed_by_score_sets),
                 )
-            except AttributeError as exc:
-                raise ValidationError(f"Unable to create {cls.__name__} without attribute: {exc}.")  # type: ignore
+            except (AttributeError, KeyError) as exc:
+                raise ValidationError(
+                    f"Unable to coerce meta analyzed by score set urn attribute for {cls.__name__}: {exc}."
+                )
         return data


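
Note: taken together, the guarded validators let one view model ingest either a plain dict (synthetic fields supplied directly) or an ORM object (synthetic fields derived from relationships). A minimal sketch of the dual ingestion path, assuming Pydantic v2; `ScoreSetView` and the `SimpleNamespace` ORM stand-in are hypothetical:

```python
# Minimal sketch of one model accepting both payload shapes, assuming
# Pydantic v2; ScoreSetView and the SimpleNamespace ORM stand-in are hypothetical.
from types import SimpleNamespace
from typing import Any, Optional

from pydantic import BaseModel, ConfigDict, model_validator


class ScoreSetView(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    urn: str
    meta_analyzes_score_set_urns: Optional[list[str]] = None

    @model_validator(mode="before")
    @classmethod
    def transform_meta_analysis_objects_to_urns(cls, data: Any):
        if hasattr(data, "meta_analyzes_score_sets"):  # ORM relationship present
            data.__setattr__(
                "meta_analyzes_score_set_urns",
                [s.urn for s in data.meta_analyzes_score_sets],
            )
        return data


# Dict payload: the guard is false, so the synthetic field is taken as given.
print(ScoreSetView.model_validate({"urn": "urn:a", "meta_analyzes_score_set_urns": ["urn:b"]}))

# ORM-like object: the synthetic field is derived from the relationship.
orm = SimpleNamespace(urn="urn:a", meta_analyzes_score_sets=[SimpleNamespace(urn="urn:b")])
print(ScoreSetView.model_validate(orm))
```

One behavioral consequence worth noting: the old `if not hasattr(...)` form skipped the transform when the synthetic field was already present, while the new `if hasattr(...)` form runs it whenever the relationship attribute exists, so ORM-derived values now take precedence over any pre-set synthetic field.
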