; config.ini
[Setup]
; the target constant
const=pi
; some constants take an index selecting which one (e.g. zeta(i)); constants with no index, like e and pi,
; ignore this
i=2
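; for example (assuming 'zeta' is an accepted value for const), const=zeta with i=2 would target zeta(2)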
; normal is a_n = A0 + A1*n + A2*n^2 + A3*n^3 + ... + Ak*n^k
; indexed is a_n = A0*n^k + A1*(n-1)^k + A2*(n-2)^k + A3*(n-3)^k + ... + Ak*(n-k)^k
; sparse is sparse polynomials; see ab_polys_special_params below.
ab_polys_type=normal
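; for illustration (hypothetical coefficients): with coefficients [1, 2, 3], 'normal' gives a_n = 1 + 2*n + 3*n^2,
; while 'indexed' (k=2) gives a_n = 1*n^2 + 2*(n-1)^2 + 3*(n-2)^2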
; special parameters, required by some of the a,b polynomial types:
; sparse: [n_a, n_b] - the maximal degrees of a and b. For either a or b, all interlaces must share the same degree.
;         k is then taken from the a/b_coeffs_range degrees (len(a/b_coeffs_range[0])), and all (n choose k) placements
;         of k non-zero coefficients in a degree-n polynomial are enumerated, with coefficient values taken from the
;         regularly enumerated a_coeffs, b_coeffs.
ab_polys_special_params=none
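; for example (hypothetical values): ab_polys_special_params=[3, 2] would set the maximal degree of a to 3 and of b
; to 2, with the number of non-zero coefficients determined by the a/b_coeffs_range lengths as described above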
; a, b contfrac polynomials' ranges of enumeration.
; For example: [ [[], []], [[], [], []], [[], [], [], []], [[], []] ] is a 4-interlace with degrees of 2,3,4,2.
; [m, n] means enumerating coefficients from m to n-1
a_coeffs_range=[[[-4, -4], [-4, -4], [-4, -4]]]
b_coeffs_range=[[[-4, -4], [-4, -4], [-4, -4]]]
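; for illustration (hypothetical value): [[[0, 3], [1, 4]]] is a single interlace with two coefficients, the first
; enumerated over 0..2 and the second over 1..3 (since [m, n] covers m to n-1)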
; validate that no entry repetition occurs (e.g. if the hashtable is expanded and some of the existing parameters are
; re-enumerated). Such double occurrences are avoided in any case, but when such re-occurrences are expected,
; setting this to true can save runtime
validate_hashtable_extend_no_replication=true
; run just for hashtable generation - not looking for clicks. Options: 'true', 'false', 'auto'.
; 'auto' is translated to True if hashtable_file_operation is 'generate'/'expand', and to False if it's 'use'
gen_hashtable_only=auto
; constant number of iterations for contfracs when building the hashtable.
; probably enough for the exponentially converging cases; problematic when the discriminant is <= 0
; (which may create false entries in the hashtable that will be filtered out after clicks)
hashtable_num_of_iterations=200
; number of digits saved in the hashtable
;; 8 = -26.6 bits
hashtable_precision=8
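;; (a precision of d decimal digits corresponds to roughly d*log2(10) ~ 3.32*d bits, hence 8 digits ~ 26.6 bits;
;; the same conversion applies to the similar notes below)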
; constant number of iterations for contfracs at the first filtering round of refining clicks results.
first_filtering_num_of_iterations=1000
; number of digits to compare at the first filtering
;; 10 = -33.2 bits
first_filtering_precision=10
; required precision for the second filtering round. This filtering is done with a fitted number of iterations, chosen
; after estimating the speed of approach, with a 'max_precision' upper boundary.
;; 13 = -43.2 bits
second_filtering_max_num_of_iterations=1200
second_filtering_precision=13
; similarly, for the third filtering round.
;; 16 = -53.15 bits, 20 = -66.43 bits
third_filtering_max_num_of_iterations=1500
third_filtering_precision=20
; type of LHS: ulcd = (u/const+const/l+c)/d ; rationalfunc = p(const)/q(const)
;lhs_type=ulcd
; u, l, c, d = lhs_params
;lhs_params=[[1, 3], [2, 4], [3, 5], [4, 6]]
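; for illustration of the ulcd form above with const=pi: u=1, l=2, c=3, d=4 gives an LHS of (1/pi + pi/2 + 3)/4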
lhs_type=rationalfunc
; same parsing as the ab polynomials, with the interlace being replaced by: p, q, force_bigger_numerator = lhs_params
; if force_bigger_numerator is True, only p, q with deg(p) >= deg(q) are evaluated. This is useful for saving runtime
; if a 'safe_inverse' postproc function is applied and the p, q ranges are symmetric
lhs_params=[[[-3, 3], [-3, 3], [-3, 3]], [[-3, 3], [-3, 3], [-3, 3]], true]
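; for illustration: the value above enumerates p and q each with three coefficients, every coefficient running over
; -3..2 (per the [m, n] convention), so candidate LHS values are p(pi)/q(pi); the trailing true
; (force_bigger_numerator) keeps only pairs with deg(p) >= deg(q)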
; the subset of postprocessing functions to use
postproc_funcs_filter=["safe_inverse", "lambda x: x", "lambda x: x**2", "lambda x: safe_inverse(x**2)", "safe_sqrt", "lambda x: safe_inverse(safe_sqrt(x))"]
; options for hashtable_file_operation: use, generate, expand
; generate will create a new file even if using an existing name (v2, v3,...)
; expand will first load the most updated file (v_latest) and then add the new entries and save to v_latest+1
; use will load the v_latest and use it (no generation of hash table)
hashtable_file_operation=generate
; file name without the v_number part
hashtable_file=hashtable_general.pkl
; print slow-convergence cases that were expected to be fast (such cases can be identified by the discriminant)
print_surprising_nonexp_contfracs=False