
SciPy generators

NelderMeadGenerator

Bases: Generator

Nelder-Mead algorithm from SciPy, wrapped in Xopt's Generator form. The algorithm is converted to a state machine so that optimization can resume from exactly the last state.
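A minimal usage sketch is shown below. The variable names, objective name, and evaluate function are illustrative assumptions, not part of this page; the top-level Xopt, VOCS, and Evaluator classes are assumed from the Xopt package.

from xopt import Xopt, VOCS, Evaluator
from xopt.generators.scipy.neldermead import NelderMeadGenerator

# Hypothetical 2D problem: minimize a simple quadratic
vocs = VOCS(
    variables={"x1": [-5.0, 5.0], "x2": [-5.0, 5.0]},
    objectives={"f": "MINIMIZE"},
)

def evaluate(inputs: dict) -> dict:
    return {"f": (inputs["x1"] - 1.0) ** 2 + (inputs["x2"] + 2.0) ** 2}

generator = NelderMeadGenerator(vocs=vocs, xatol=1e-3, fatol=1e-3)
X = Xopt(vocs=vocs, generator=generator, evaluator=Evaluator(function=evaluate))

for _ in range(200):
    if generator.is_done:
        break
    X.step()  # generate one candidate, evaluate it, feed the result back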

Source code in xopt/generators/scipy/neldermead.py
class NelderMeadGenerator(Generator):
    """
    Nelder-Mead algorithm from SciPy in Xopt's Generator form.
    Converted to use a state machine to resume in exactly the last state.
    """

    name = "neldermead"

    initial_point: Optional[Dict[str, float]] = None  # replaces x0 argument
    initial_simplex: Optional[
        Dict[str, Union[List[float], np.ndarray]]
    ] = None  # This overrides the use of initial_point
    # Same as scipy.optimize._optimize._minimize_neldermead
    adaptive: bool = Field(
        True, description="Change hyperparameters based on dimensionality"
    )
    xatol: float = Field(1e-4, description="Tolerance in x value")
    fatol: float = Field(1e-4, description="Tolerance in function value")
    current_state: SimplexState = SimplexState()
    future_state: Optional[SimplexState] = None

    # Internal data structures
    x: Optional[np.ndarray] = None
    y: Optional[float] = None
    is_done_bool: bool = False

    _initial_simplex = None
    _saved_options: Dict = None

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        # Initialize the first candidate if not given
        if self.initial_point is None:
            self.initial_point = self.vocs.random_inputs()[0]

        self._saved_options = self.model_dump(
            exclude={"current_state", "future_state"}
        ).copy()  # Used to keep track of changed options

        if self.initial_simplex:
            self._initial_simplex = np.array(
                [self.initial_simplex[k] for k in self.vocs.variable_names]
            ).T
        else:
            self._initial_simplex = None

    @property
    def x0(self):
        """Raw internal initial point for convenience"""
        return np.array([self.initial_point[k] for k in self.vocs.variable_names])

    @property
    def is_done(self):
        return self.is_done_bool

    def add_data(self, new_data: pd.DataFrame):
        if len(new_data) == 0:
            # empty data, i.e. no steps yet
            assert self.future_state is None
            return

        self.data = pd.concat([self.data, new_data], axis=0)

        # Complicated part - need to determine if data corresponds to result of last gen
        ndata = len(self.data)
        ngen = self.current_state.ngen
        if ndata == ngen:
            # just resuming
            # print(f'Resuming with {ngen=}')
            return
        else:
            # Must have made at least 1 step, require future_state
            assert self.future_state is not None

            # new data -> advance state machine 1 step
            assert ndata == self.future_state.ngen == ngen + 1
            self.current_state = self.future_state
            self.future_state = None

            # Can have multiple points if resuming from file, grab last one
            new_data_df = self.vocs.objective_data(new_data)
            res = new_data_df.iloc[-1:, :].to_numpy()
            assert np.shape(res) == (1, 1), f"Bad last point {res}"

            yt = res[0, 0].item()
            if np.isinf(yt) or np.isnan(yt):
                self.is_done_bool = True
                return

            self.y = yt
            # print(f'Added data {self.y=}')

    def generate(self, n_candidates: int) -> Optional[list[dict]]:
        if self.is_done:
            return None

        if n_candidates != 1:
            raise NotImplementedError(
                "simplex can only produce one candidate at a time"
            )

        if self.current_state.N is None:
            # fresh start
            pass
        else:
            n_inputs = len(self.data)
            if self.current_state.ngen == n_inputs:
                # We are in a state where result of last point is known
                pass
            else:
                pass

        results = self._call_algorithm()
        if results is None:
            self.is_done_bool = True
            return None

        x, state_extra = results
        assert len(state_extra) == len(STATE_KEYS)
        stateobj = SimplexState(**{k: v for k, v in zip(STATE_KEYS, state_extra)})
        # print("State:", stateobj)
        self.future_state = stateobj

        inputs = dict(zip(self.vocs.variable_names, x))
        if self.vocs.constants is not None:
            inputs.update(self.vocs.constants)

        return [inputs]

    def _call_algorithm(self):
        results = _neldermead_generator(
            self.x0,
            state=self.current_state,
            lastval=self.y,
            adaptive=self.adaptive,
            xatol=self.xatol,
            fatol=self.fatol,
            initial_simplex=self._initial_simplex,
            bounds=self.vocs.bounds,
        )

        self.y = None
        return results

    @property
    def simplex(self):
        """
        Returns the simplex in the current state.
        """
        sim = self.current_state.sim
        return dict(zip(self.vocs.variable_names, sim.T))
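Because the generator is a resumable state machine, it can also be driven by hand through the generate/add_data contract shown in the listing above. A sketch under the same illustrative assumptions as the earlier example (the evaluate function and the objective name "f" are hypothetical):

import pandas as pd

gen = NelderMeadGenerator(vocs=vocs)

for _ in range(200):
    candidates = gen.generate(1)   # simplex yields exactly one point per call
    if candidates is None:         # converged, or the last result was inf/nan
        break
    inputs = candidates[0]
    outputs = evaluate(inputs)     # user-supplied objective evaluation
    # Feeding the result back advances the internal state machine by one step
    gen.add_data(pd.DataFrame([{**inputs, **outputs}]))

When a generator is reconstructed with its saved current_state and the accumulated data is passed to add_data, the "just resuming" branch (ndata == ngen) detects that the data already matches the state and continues without repeating any steps.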

simplex property

Returns the simplex in the current state.

x0 property

Raw internal initial point for convenience
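Both convenience properties follow the vocs.variable_names ordering. An illustrative sketch, reusing the vocs defined in the earlier example:

gen = NelderMeadGenerator(vocs=vocs, initial_point={"x1": 0.0, "x2": 0.0})
print(gen.x0)  # array([0., 0.]), ordered as vocs.variable_names
# After some steps, gen.simplex maps each variable name to that variable's
# coordinates across the simplex vertices,
# e.g. {"x1": array([...]), "x2": array([...])}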