Skip to content

Commit

Permalink
Update margpoptimizer.py
Browse files Browse the repository at this point in the history
  • Loading branch information
sklbancor committed May 4, 2024
1 parent 2ae4c09 commit d53a5e4
Showing 1 changed file with 20 additions and 10 deletions.
30 changes: 20 additions & 10 deletions fastlane_bot/tools/optimizer/margpoptimizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,8 @@ def optimize(self, sfc=None, *, pstart=None, mode=None, result=None, params=None
:sfc: the self financing constraint to use (2)
:pstart: dict or tuple of starting price for optimization (3)
:mode: mode of operation (MO_MODE_REL or MO_MODE_ABS)
:mode: mode of operation (MO_MODE_REL for relative convergence, or
MO_MODE_ABS for absolute)
:result: the result type (see MO_XXX constants below) (4)
:params: dict of optional parameters (see table below) (4)
Expand Down Expand Up @@ -144,7 +145,6 @@ def optimize(self, sfc=None, *, pstart=None, mode=None, result=None, params=None
================== =========================================================================
parameter meaning
================== =========================================================================
crit criterion: MO_MODE_REL (relative; default) or MO_MODE_ABS (absolute)
norm norm for convergence crit (MO_NORML1, MO_NORML2, MO_NORMLINF)
epsr relative convergence threshold (default: MO_EPSR)
epsa absolute convergence threshold (default: MO_EPSA)
Expand Down Expand Up @@ -188,8 +188,8 @@ def optimize(self, sfc=None, *, pstart=None, mode=None, result=None, params=None
# lambdas
P = lambda item: params.get(item, None) if params is not None else None
get = lambda p, ix: p[ix] if ix is not None else 1 # safe get from tuple
dxdy_f = lambda r: (np.array(r[0:2])) # extract dx, dy from result
tn = lambda t: t.split("-")[0] # token name, eg WETH-xxxx -> WETH
# dxdy_f = lambda r: (np.array(r[0:2])) # extract dx, dy from result
# tn = lambda t: t.split("-")[0] # token name, eg WETH-xxxx -> WETH


# epsilons and maxiter
Expand All @@ -215,20 +215,30 @@ def optimize(self, sfc=None, *, pstart=None, mode=None, result=None, params=None
pairs = self.curve_container.pairs(standardize=False)
curves_by_pair = {pair: tuple(c for c in curves_t if c.pair == pair) for pair in pairs }
pairs_t = tuple(tuple(p.split("/")) for p in pairs)
# pstart legacy handling


if P("verbose") or P("debug"):
print(f"[margp_optimizer] targettkn = {targettkn}")

# legacy parameters handling (pstart, crit)
if not pstart is None:
assert P("pstart") is None, "pstart must not be in params dict if pstart is provided as argument"
else:
pstart = P("pstart")
if not P("pstart") is None:
print(f"[margp_optimizer] WARNING - providing `pstart` as parameter is deprecated; use `pstart` variable instead")



# pstart
if P("verbose") or P("debug"):
print(f"[margp_optimizer] targettkn = {targettkn}")
if not mode is None:
assert P("crit") is None, "crit must not be in params dict if mode is provided as argument"
crit = mode
else:
crit = P("crit")
if not P("crit") is None:
print(f"[margp_optimizer] WARNING - providing `crit` as parameter is deprecated; use `mode` instead")
if crit is None:
crit = self.MO_MODE_REL

# pstart
pstart = P("pstart")
if not pstart is None:
if P("debug"):
Expand Down

0 comments on commit d53a5e4

Please sign in to comment.