
Commit eeb18ac

v2: Remove parameter scale (#408)
`parameterScale` no longer exists in PEtab v2.
1 parent bf4055c commit eeb18ac

2 files changed (+3 / -126 lines)


petab/v2/C.py

Lines changed: 0 additions & 3 deletions
@@ -69,8 +69,6 @@
 PARAMETER_ID = "parameterId"
 #: Parameter name column in the parameter table
 PARAMETER_NAME = "parameterName"
-#: Parameter scale column in the parameter table
-PARAMETER_SCALE = "parameterScale"
 #: Lower bound column in the parameter table
 LOWER_BOUND = "lowerBound"
 #: Upper bound column in the parameter table
@@ -87,7 +85,6 @@
 #: Mandatory columns of parameter table
 PARAMETER_DF_REQUIRED_COLS = [
     PARAMETER_ID,
-    PARAMETER_SCALE,
     LOWER_BOUND,
     UPPER_BOUND,
     ESTIMATE,
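
For downstream code that still imports the removed constant, a minimal sketch of the effect, assuming a petab install that includes this commit (the checks below are illustrative, not part of the PEtab API):

from petab.v2 import C

# PARAMETER_SCALE is gone from the v2 constants module, and "parameterScale"
# is no longer listed among the mandatory parameter-table columns.
assert not hasattr(C, "PARAMETER_SCALE")
assert "parameterScale" not in C.PARAMETER_DF_REQUIRED_COLS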

petab/v2/problem.py

Lines changed: 3 additions & 123 deletions
@@ -20,7 +20,6 @@

 from ..v1 import (
     parameter_mapping,
-    parameters,
     validate_yaml_syntax,
     yaml,
 )
@@ -522,15 +521,6 @@ def get_optimization_parameters(self) -> list[str]:
         """
         return [p.id for p in self.parameters if p.estimate]

-    def get_optimization_parameter_scales(self) -> dict[str, str]:
-        """
-        Return list of optimization parameter scaling strings.
-
-        See :py:func:`petab.parameters.get_optimization_parameters`.
-        """
-        # TODO: to be removed in v2?
-        return parameters.get_optimization_parameter_scaling(self.parameter_df)
-
     def get_observable_ids(self) -> list[str]:
         """
         Returns dictionary of observable ids.
@@ -595,9 +585,7 @@ def x_fixed_ids(self) -> list[str]:
         """Parameter table parameter IDs, for fixed parameters."""
         return self.get_x_ids(free=False)

-    def get_x_nominal(
-        self, free: bool = True, fixed: bool = True, scaled: bool = False
-    ) -> list:
+    def get_x_nominal(self, free: bool = True, fixed: bool = True) -> list:
         """Generic function to get parameter nominal values.

         Parameters
@@ -607,9 +595,6 @@ def get_x_nominal(
         fixed:
             Whether to return fixed parameters, i.e. parameters not to
             estimate.
-        scaled:
-            Whether to scale the values according to the parameter scale,
-            or return them on linear scale.

         Returns
         -------
@@ -620,10 +605,6 @@ def get_x_nominal(
             for p in self.parameters
         ]

-        if scaled:
-            v = list(
-                parameters.map_scale(v, self.parameter_df[PARAMETER_SCALE])
-            )
         return self._apply_mask(v, free=free, fixed=fixed)

     @property
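
With the `scaled` keyword gone, nominal values are selected via the remaining `free`/`fixed` switches only. A brief usage sketch (assuming `prob` is a loaded `petab.v2.Problem`):

x_all = prob.get_x_nominal()              # free and fixed parameters
x_free = prob.get_x_nominal(fixed=False)  # estimated parameters only
x_fixed = prob.get_x_nominal(free=False)  # fixed parameters only
# Values come back as stored in the parameter table; the former scaled=True
# path (parameters.map_scale over parameterScale) no longer exists in v2.
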
@@ -641,28 +622,7 @@ def x_nominal_fixed(self) -> list:
         """Parameter table nominal values, for fixed parameters."""
         return self.get_x_nominal(free=False)

-    @property
-    def x_nominal_scaled(self) -> list:
-        """Parameter table nominal values with applied parameter scaling"""
-        return self.get_x_nominal(scaled=True)
-
-    @property
-    def x_nominal_free_scaled(self) -> list:
-        """Parameter table nominal values with applied parameter scaling,
-        for free parameters.
-        """
-        return self.get_x_nominal(fixed=False, scaled=True)
-
-    @property
-    def x_nominal_fixed_scaled(self) -> list:
-        """Parameter table nominal values with applied parameter scaling,
-        for fixed parameters.
-        """
-        return self.get_x_nominal(free=False, scaled=True)
-
-    def get_lb(
-        self, free: bool = True, fixed: bool = True, scaled: bool = False
-    ):
+    def get_lb(self, free: bool = True, fixed: bool = True):
         """Generic function to get lower parameter bounds.

         Parameters
@@ -672,34 +632,20 @@ def get_lb(
         fixed:
             Whether to return fixed parameters, i.e. parameters not to
             estimate.
-        scaled:
-            Whether to scale the values according to the parameter scale,
-            or return them on linear scale.

         Returns
         -------
         The lower parameter bounds.
         """
         v = [p.lb if p.lb is not None else nan for p in self.parameters]
-        if scaled:
-            v = list(
-                parameters.map_scale(v, self.parameter_df[PARAMETER_SCALE])
-            )
         return self._apply_mask(v, free=free, fixed=fixed)

     @property
     def lb(self) -> list:
         """Parameter table lower bounds."""
         return self.get_lb()

-    @property
-    def lb_scaled(self) -> list:
-        """Parameter table lower bounds with applied parameter scaling"""
-        return self.get_lb(scaled=True)
-
-    def get_ub(
-        self, free: bool = True, fixed: bool = True, scaled: bool = False
-    ):
+    def get_ub(self, free: bool = True, fixed: bool = True):
         """Generic function to get upper parameter bounds.

         Parameters
@@ -709,31 +655,19 @@ def get_ub(
         fixed:
             Whether to return fixed parameters, i.e. parameters not to
             estimate.
-        scaled:
-            Whether to scale the values according to the parameter scale,
-            or return them on linear scale.

         Returns
         -------
         The upper parameter bounds.
         """
         v = [p.ub if p.ub is not None else nan for p in self.parameters]
-        if scaled:
-            v = list(
-                parameters.map_scale(v, self.parameter_df[PARAMETER_SCALE])
-            )
         return self._apply_mask(v, free=free, fixed=fixed)

     @property
     def ub(self) -> list:
         """Parameter table upper bounds"""
         return self.get_ub()

-    @property
-    def ub_scaled(self) -> list:
-        """Parameter table upper bounds with applied parameter scaling"""
-        return self.get_ub(scaled=True)
-
     @property
     def x_free_indices(self) -> list[int]:
         """Parameter table estimated parameter indices."""
@@ -790,56 +724,6 @@ def sample_parameter_startpoints_dict(
             )
         ]

-    # TODO: remove in v2?
-    def unscale_parameters(
-        self,
-        x_dict: dict[str, float],
-    ) -> dict[str, float]:
-        """Unscale parameter values.
-
-        Parameters
-        ----------
-        x_dict:
-            Keys are parameter IDs in the PEtab problem, values are scaled
-            parameter values.
-
-        Returns
-        -------
-        The unscaled parameter values.
-        """
-        return {
-            parameter_id: parameters.unscale(
-                parameter_value,
-                self.parameter_df[PARAMETER_SCALE][parameter_id],
-            )
-            for parameter_id, parameter_value in x_dict.items()
-        }
-
-    # TODO: remove in v2?
-    def scale_parameters(
-        self,
-        x_dict: dict[str, float],
-    ) -> dict[str, float]:
-        """Scale parameter values.
-
-        Parameters
-        ----------
-        x_dict:
-            Keys are parameter IDs in the PEtab problem, values are unscaled
-            parameter values.
-
-        Returns
-        -------
-        The scaled parameter values.
-        """
-        return {
-            parameter_id: parameters.scale(
-                parameter_value,
-                self.parameter_df[PARAMETER_SCALE][parameter_id],
-            )
-            for parameter_id, parameter_value in x_dict.items()
-        }
-
     @property
     def n_estimated(self) -> int:
         """The number of estimated parameters."""
@@ -986,7 +870,6 @@ def add_parameter(
         id_: str,
         estimate: bool | str = True,
         nominal_value: Number | None = None,
-        scale: str = None,
         lb: Number = None,
         ub: Number = None,
         prior_dist: str = None,
@@ -1002,7 +885,6 @@ def add_parameter(
             id_: The parameter id
             estimate: Whether the parameter is estimated
             nominal_value: The nominal value of the parameter
-            scale: The parameter scale
             lb: The lower bound of the parameter
             ub: The upper bound of the parameter
             prior_dist: The type of the prior distribution
@@ -1016,8 +898,6 @@ def add_parameter(
         record[ESTIMATE] = estimate
         if nominal_value is not None:
             record[NOMINAL_VALUE] = nominal_value
-        if scale is not None:
-            record[PARAMETER_SCALE] = scale
         if lb is not None:
             record[LOWER_BOUND] = lb
         if ub is not None: