diff --git a/fastlane_bot/tools/optimizer/margpoptimizer.py b/fastlane_bot/tools/optimizer/margpoptimizer.py
index a409ca8f0..cc4955152 100644
--- a/fastlane_bot/tools/optimizer/margpoptimizer.py
+++ b/fastlane_bot/tools/optimizer/margpoptimizer.py
@@ -116,7 +116,8 @@ def optimize(self, sfc=None, *, pstart=None, mode=None, result=None, params=None
         :sfc:              the self financing constraint to use (2)
         :pstart:           dict or tuple of starting price for optimization (3)
-        :mode:             mode of operation (MO_MODE_REL or MO_MODE_ABS)
+        :mode:             mode of operation (MO_MODE_REL for relative convergence, or
+                           MO_MODE_ABS for absolute)
         :result:           the result type (see MO_XXX constants below) (4)
         :params:           dict of optional parameters (see table below) (4)
@@ -144,7 +145,6 @@ def optimize(self, sfc=None, *, pstart=None, mode=None, result=None, params=None
         ==================  =========================================================================
         parameter           meaning
         ==================  =========================================================================
-        crit                criterion: MO_MODE_REL (relative; default) or MO_MODE_ABS (absolute)
         norm                norm for convergence crit (MO_NORML1, MO_NORML2, MO_NORMLINF)
         epsr                relative convergence threshold (default: MO_EPSR)
         epsa                absolute convergence threshold (default: MO_EPSA)
@@ -188,8 +188,8 @@ def optimize(self, sfc=None, *, pstart=None, mode=None, result=None, params=None
         # lambdas
         P = lambda item: params.get(item, None) if params is not None else None
         get = lambda p, ix: p[ix] if ix is not None else 1  # safe get from tuple
-        dxdy_f = lambda r: (np.array(r[0:2]))  # extract dx, dy from result
-        tn = lambda t: t.split("-")[0]  # token name, eg WETH-xxxx -> WETH
+        # dxdy_f = lambda r: (np.array(r[0:2]))  # extract dx, dy from result
+        # tn = lambda t: t.split("-")[0]  # token name, eg WETH-xxxx -> WETH

         # epsilons and maxiter
@@ -215,7 +215,12 @@ def optimize(self, sfc=None, *, pstart=None, mode=None, result=None, params=None
         pairs = self.curve_container.pairs(standardize=False)
         curves_by_pair = {pair: tuple(c for c in curves_t if c.pair == pair) for pair in pairs }
         pairs_t = tuple(tuple(p.split("/")) for p in pairs)
-        # pstart legacy handling
+
+
+        if P("verbose") or P("debug"):
+            print(f"[margp_optimizer] targettkn = {targettkn}")
+
+        # legacy parameters handling (pstart, crit)
         if not pstart is None:
             assert P("pstart") is None, "pstart must not be in params dict if pstart is provided as argument"
         else:
@@ -223,12 +228,17 @@ def optimize(self, sfc=None, *, pstart=None, mode=None, result=None, params=None
             if not P("pstart") is None:
                 print(f"[margp_optimizer] WARNING - providing `pstart` as parameter is deprecated; use `pstart` variable instead")
-
-
-        # pstart
-        if P("verbose") or P("debug"):
-            print(f"[margp_optimizer] targettkn = {targettkn}")
+        if not mode is None:
+            assert P("crit") is None, "crit must not be in params dict if mode is provided as argument"
+            crit = mode
+        else:
+            crit = P("crit")
+            if not P("crit") is None:
+                print(f"[margp_optimizer] WARNING - providing `crit` as parameter is deprecated; use `mode` instead")
+            if crit is None:
+                crit = self.MO_MODE_REL

+        # pstart
         pstart = P("pstart")
         if not pstart is None:
             if P("debug"):