From f81ed4500c41a91e464010e9c8b4f7d20925cfbc Mon Sep 17 00:00:00 2001 From: inthar-raven Date: Thu, 12 Sep 2024 12:02:47 -0400 Subject: [PATCH] Fix issues #31 and #40; remove console_log messages --- index.js | 2 - package-lock.json | 114 ++++++++--- src/guide.rs | 494 +++++++++++++++++++++++++--------------------- src/lib.rs | 72 ++++--- 4 files changed, 394 insertions(+), 288 deletions(-) diff --git a/index.js b/index.js index d523459..bd2cdf3 100644 --- a/index.js +++ b/index.js @@ -646,8 +646,6 @@ stack()` const latticeBases = sigResult["profiles"].map( (j) => j["lattice_basis"], ); - console.log(latticeBases); - const profiles = sigResult["profiles"]; const jiTunings = sigResult["ji_tunings"]; diff --git a/package-lock.json b/package-lock.json index f623357..3f5e73a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -4,6 +4,7 @@ "requires": true, "packages": { "": { + "name": "ternary", "devDependencies": { "@wasm-tool/wasm-pack-plugin": "1.7.0", "html-webpack-plugin": "^5.6.0", @@ -772,9 +773,9 @@ } }, "node_modules/body-parser": { - "version": "1.20.2", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz", - "integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==", + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", "dev": true, "license": "MIT", "dependencies": { @@ -786,7 +787,7 @@ "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", - "qs": "6.11.0", + "qs": "6.13.0", "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" @@ -806,6 +807,22 @@ "node": ">= 0.8" } }, + "node_modules/body-parser/node_modules/qs": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/bonjour-service": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/bonjour-service/-/bonjour-service-1.2.1.tgz", @@ -1600,38 +1617,38 @@ } }, "node_modules/express": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz", - "integrity": "sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==", + "version": "4.20.0", + "resolved": "https://registry.npmjs.org/express/-/express-4.20.0.tgz", + "integrity": "sha512-pLdae7I6QqShF5PnNTCVn4hI91Dx0Grkn2+IAsMTgMIKuQVte2dN9PeGSSAME2FR8anOhVA62QDIUaWVfEXVLw==", "dev": true, "license": "MIT", "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", - "body-parser": "1.20.2", + "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", "cookie": "0.6.0", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", - "encodeurl": "~1.0.2", + "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", "finalhandler": "1.2.0", "fresh": "0.5.2", "http-errors": "2.0.0", - "merge-descriptors": "1.0.1", + "merge-descriptors": "1.0.3", "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", - "path-to-regexp": "0.1.7", + "path-to-regexp": "0.1.10", "proxy-addr": "~2.0.7", "qs": "6.11.0", "range-parser": "~1.2.1", 
"safe-buffer": "5.2.1", - "send": "0.18.0", - "serve-static": "1.15.0", + "send": "0.19.0", + "serve-static": "1.16.0", "setprototypeof": "1.2.0", "statuses": "2.0.1", "type-is": "~1.6.18", @@ -1642,6 +1659,16 @@ "node": ">= 0.10.0" } }, + "node_modules/express/node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", @@ -2547,11 +2574,14 @@ } }, "node_modules/merge-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", "dev": true, - "license": "MIT" + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } }, "node_modules/merge-stream": { "version": "2.0.0", @@ -2895,9 +2925,9 @@ "license": "MIT" }, "node_modules/path-to-regexp": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", - "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==", + "version": "0.1.10", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.10.tgz", + "integrity": "sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w==", "dev": true, "license": "MIT" }, @@ -3279,9 +3309,9 @@ } }, "node_modules/send": { - "version": "0.18.0", - "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", - "integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", "dev": true, "license": "MIT", "dependencies": { @@ -3390,9 +3420,9 @@ } }, "node_modules/serve-static": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz", - "integrity": "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==", + "version": "1.16.0", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.0.tgz", + "integrity": "sha512-pDLK8zwl2eKaYrs8mrPZBJua4hMplRWJ1tIFksVC3FtBEBnl8dxgeHtsaMS8DhS9i4fLObaon6ABoc4/hQGdPA==", "dev": true, "license": "MIT", "dependencies": { @@ -3405,6 +3435,38 @@ "node": ">= 0.8.0" } }, + "node_modules/serve-static/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/serve-static/node_modules/send": { + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", + "integrity": 
"sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "2.4.1", + "range-parser": "~1.2.1", + "statuses": "2.0.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, "node_modules/set-function-length": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", diff --git a/src/guide.rs b/src/guide.rs index f7a0beb..298124d 100644 --- a/src/guide.rs +++ b/src/guide.rs @@ -1,11 +1,24 @@ use std::collections::BTreeSet; use itertools::Itertools; +use wasm_bindgen::prelude::*; + +#[wasm_bindgen] +extern "C" { + fn alert(s: &str); + // Use `js_namespace` here to bind `console.log(..)` instead of just + // `log(..)` + #[wasm_bindgen(js_namespace = console)] + fn log(s: &str); +} use crate::{ helpers::gcd, - primes::{factorize, is_prime}, - words::{offset_vec, rotate, rotations, weak_period, word_on_degree, CountVector, Subtendable}, + primes::factorize, + words::{ + dyad_on_degree, offset_vec, rotate, rotations, weak_period, word_on_degree, CountVector, + Letter, Subtendable, + }, }; // Given a necklace of stacked k-steps, where k is fixed, @@ -14,10 +27,14 @@ fn guided_gs_chains(chain: &[T]) -> Vec> where T: core::fmt::Debug + PartialEq + Clone + Eq + Send, { + // println!("chain: {:?}", chain); let len = chain.len(); rotations(chain) .into_iter() - .filter(|list| !(list[..len - 1].contains(&list[len - 1]))) + .filter(|list| { + // println!("list: {:?}, {}", list, !(list[..len - 1].contains(&list[len - 1]))); + !(list[..len - 1].contains(&list[len - 1])) + }) .map(|chain| weak_period(&chain[..len - 1])) .collect::>() } @@ -36,6 +53,7 @@ pub fn stacked_k_steps(k: usize, neck: &[T]) -> Vec where T: Subtendable + std::fmt::Debug, { + // println!("scale: {:?}", neck); (0..neck.len()) .map(|i| word_on_degree(neck, k * i, k)) .map(|subword| ::interval_from_slice(&subword)) @@ -62,6 +80,14 @@ pub fn guided_gs_list_of_len(l: usize, neck: &[usize]) -> Vec], +) -> Vec>> { + guided_gs_chains(&stacked_k_steps(k, subscale)) +} + // Guided GS of a chain which is represented as `Vec>` rather than `Vec`. fn guided_gs_list_for_subscale(subscale: &[CountVector]) -> Vec>> { if subscale.len() == 2 { @@ -74,14 +100,6 @@ fn guided_gs_list_for_subscale(subscale: &[CountVector]) -> Vec], -) -> Vec>> { - guided_gs_chains(&stacked_k_steps(k, subscale)) -} - /// A guide frame structure for a scale word, consisting of a generator sequence together with a set of offsets or a multiplicity. /// Multiplicity greater than 1 is a generalization of diregular MV3s; /// scales of this type always have the number of notes divisible by the multiplicity. @@ -100,152 +118,195 @@ pub struct GuideFrame { pub gs: Vec>, /// `polyoffset` is the set of intervals that each guided generator sequence chain is based on. Always includes `CountVector::ZERO`. pub polyoffset: Vec>, - /// The base GS chains in a multiple GS structure don't form interleaved scales. Instead they form a detempered copy of m-edo. 
-    pub multiplicity: usize,
 }
 
 impl GuideFrame {
-    pub fn new_multiple(gs: Vec<CountVector<usize>>, multiplicity: usize) -> Self {
+    pub fn new_simple(gs: Vec<CountVector<usize>>) -> Self {
         Self {
             gs,
-            multiplicity,
             polyoffset: vec![CountVector::ZERO],
         }
     }
-    pub fn new_simple(gs: Vec<CountVector<usize>>, polyoffset: Vec<CountVector<usize>>) -> Self {
-        Self {
-            gs,
-            polyoffset,
-            multiplicity: 1,
-        }
+    pub fn new_multiple(gs: Vec<CountVector<usize>>, polyoffset: Vec<CountVector<usize>>) -> Self {
+        Self { gs, polyoffset }
     }
-    // The comoplexity of a guide frame.
+    // The complexity of a guide frame.
     pub fn complexity(&self) -> usize {
-        self.gs.len() * self.polyoffset.len() * self.multiplicity
+        self.gs.len() * self.polyoffset.len()
+    }
+    // The multiplicity of a guide frame.
+    pub fn multiplicity(&self) -> usize {
+        self.polyoffset.len()
     }
-    // Try to get simple or interleaved guide frames with k-step generators.
-    pub fn try_simple_or_interleaved(scale: &[usize], k: usize) -> Vec<GuideFrame> {
-        if scale.is_empty() {
+    // Try to get simple guide frames with k-step generators.
+    pub fn try_simple(scale: &[usize], k: usize) -> Vec<GuideFrame> {
+        if scale.is_empty() || gcd(scale.len() as u64, k as u64) != 1 {
             vec![]
         } else {
-            let d = gcd(scale.len() as u64, k as u64) as usize;
-            let subscales = (0..d)
-                .map(|degree| rotate(scale, degree))
-                .map(|rotation| {
-                    stacked_k_steps(d, &rotation[..scale.len()])[..scale.len() / d].to_vec()
-                })
-                .collect::<Vec<_>>();
-            let subscales_cloned = subscales.clone();
-            let subscale_on_root = subscales_cloned
-                .split_first()
-                .expect("since we checked that `scale` is nonempty, this operation should be infallible")
-                .0;
-            // println!("subscale_on_root: {:?}", subscale_on_root);
-
-            // All of the subscales must be rotations of one another.
-            // `offset_vec()` returns a witness to rotational equivalence (an offset) if there is any;
-            // the offsets are combined to form the polyoffset.
-            // If it returns `None` for any subscale, the whole procedure fails.
-            let maybe_offsets = subscales
+            k_step_guided_gs_list(k, scale)
                 .into_iter()
-                .enumerate()
-                .map(|(i, subscale)| {
-                    offset_vec(subscale_on_root, &subscale).map(|offset| {
-                        // `.map()` returns `None` if the previous result is `None` and functorially applies the closure to `Some`s.
-                        CountVector::from_slice(&word_on_degree(scale, 0, offset * d + i))
-                    })
+                .map(|gs| Self {
+                    gs,
+                    polyoffset: vec![CountVector::ZERO],
                 })
-                // collect returns `None` if there is any `None` returned by `map`.
-                .collect::<Option<Vec<CountVector<usize>>>>();
-            // println!("subscale_on_root: {:?}", subscale_on_root);
-            // println!("maybe_offsets: {:?}", maybe_offsets);
-            if let Some(offsets) = maybe_offsets {
-                // sort list of offsets by step class
-                let offsets: Vec<CountVector<usize>> =
-                    offsets.into_iter().sorted_by_key(|v| v.len()).collect();
-                // If polyoffset is {0} use multiplicity 1
-                if offsets == [CountVector::ZERO] {
-                    guided_gs_list(scale)
-                        .into_iter()
-                        .map(|gs| Self {
-                            gs,
-                            polyoffset: offsets.to_owned(),
-                            multiplicity: 1,
-                        })
-                        .sorted()
-                        .dedup()
-                        .collect::<Vec<_>>()
-                } else {
-                    guided_gs_list_for_subscale(subscale_on_root)
-                        .into_iter()
-                        .map(|gs| Self {
-                            gs,
-                            polyoffset: offsets.to_owned(),
-                            multiplicity: 1,
-                        })
-                        .sorted()
-                        .dedup()
-                        .collect::<Vec<_>>()
-                }
-            } else {
-                vec![]
-            }
+                .sorted()
+                .dedup()
+                .collect::<Vec<_>>()
         }
     }
     pub fn try_multiple(scale: &[usize], multiplicity: usize, k: usize) -> Vec<GuideFrame> {
         // The scale cannot be empty and its size must be divisible by `multiplicity`.
-        if is_prime(scale.len() as u64) || scale.is_empty() || scale.len() % multiplicity != 0 {
+        if multiplicity == 1 || scale.is_empty() || scale.len() % multiplicity != 0 {
             vec![]
         } else {
             let d = gcd(k as u64, scale.len() as u64) as usize;
-            if d == 1 {
-                // One-strand multi-GS scales.
- // For each rotation (there are `scale.len() / multiplicity` of them we want to consider), - // we will get a collection with `multiplicity` elements - // all of which have to be equal. - let valid_gses: Vec>> = (0..scale.len() / multiplicity) - .map(|degree| { - // Stack k-steps and split the result into `multiplicity` vecs of equal length - let s = stacked_k_steps(k, &rotate(scale, degree)); - (0..multiplicity) - .map(|i| { - s[scale.len() / multiplicity * i - ..scale.len() / multiplicity * (i + 1)] - .to_vec() + let co_d = scale.len() / d; + if co_d % multiplicity != 0 { + if d == multiplicity { + // It's an interleaved scale. + let subscales = (0..d) + .map(|degree| rotate(scale, degree)) + .map(|rotation| { + stacked_k_steps(d, &rotation[..scale.len()])[..scale.len() / d].to_vec() + }) + .collect::>(); + let subscales_cloned = subscales.clone(); + let subscale_on_root = subscales_cloned + .split_first() + .expect("since we checked that `scale` is nonempty, this operation should be infallible") + .0; + // println!("subscale_on_root: {:?}", subscale_on_root); + + // All of the subscales must be rotations of one another. + // `offset_vec()` returns a witness to rotational equivalence (an offset) if there is any; + // the offsets are combined to form the polyoffset. + // If it returns `None` for any subscale, the whole procedure fails. + let maybe_offsets = subscales + .into_iter() + .enumerate() + .map(|(i, subscale)| { + offset_vec(subscale_on_root, &subscale).map(|offset| { + // `.map()` returns `None` if the previous result is `None` and functorially applies the closure to `Some`s. + CountVector::from_slice(&word_on_degree(scale, 0, offset * d + i)) }) - .collect::>() - }) - .filter(|gses| { - // To qualify as a valid GS, the last element cannot be in the GS. - !gses[0][0..scale.len() / multiplicity - 1] - .iter() - .contains(&gses[0][scale.len() / multiplicity - 1]) - // Ignoring the last element, all of the vecs have to be equal. - && gses.iter() - .map(|gs| - gs[0..scale.len() / multiplicity - 1].to_vec() - ) - .all_equal() - }) - // Get the first generator chain, which should exist and be equal to all the other GSes in the list. - .map(|gses| gses[0][0..scale.len() / multiplicity - 1].to_vec()) - // Turn the chain into a generator sequence recipe. - .map(|gs| weak_period(&gs)) - .collect(); - // Convert each valid multi-GS into a `GuideFrame` struct. - valid_gses - .into_iter() - .map(|gs| Self { - gs, - polyoffset: vec![CountVector::ZERO], - multiplicity, - }) - .sorted() - .dedup() - .collect() + }) + // collect returns `None` if there is any `None` returned by `map`. + .collect::>>>(); + // println!("subscale_on_root: {:?}", subscale_on_root); + // println!("maybe_offsets: {:?}", maybe_offsets); + if let Some(offsets) = maybe_offsets { + // sort list of offsets by step class + let offsets: Vec> = + offsets.into_iter().sorted_by_key(|v| v.len()).collect(); + // If polyoffset is {0} use multiplicity 1 + if offsets == [CountVector::ZERO] { + guided_gs_list(scale) + .into_iter() + .map(|gs| Self { + gs, + polyoffset: offsets.to_owned(), + }) + .sorted() + .dedup() + .collect::>() + } else { + guided_gs_list_for_subscale(subscale_on_root) + .into_iter() + .map(|gs| Self { + gs, + polyoffset: offsets.to_owned(), + }) + .sorted() + .dedup() + .collect::>() + } + } else { + vec![] + } + } else { + vec![] + } } else { - // Interleaved multi-GS scales are not handled yet. 
-                vec![]
+                // stack at most this many k-steps
+                let chain_length: usize = co_d / multiplicity;
+                if chain_length == 1 {
+                    // check that all chain lengths of 1 are equal
+                    vec![]
+                } else {
+                    // For every degree of `scale`, get the stack of `chain_length`-many k-steps on that degree.
+                    let gen_chains_enumerated = (0..scale.len())
+                        .map(|degree| {
+                            let mode = rotate(scale, degree);
+                            stacked_k_steps(k, &mode)[0..chain_length].to_vec()
+                        })
+                        .enumerate()
+                        .filter(|(_, stack)| {
+                            // Each stack is generated by a GS,
+                            // but for the GS to be a guided GS, the last element must differ from all previous elements.
+                            let mut init = stack.iter().take(chain_length - 1);
+                            let last = stack
+                                .last()
+                                .expect("last exists because chain_length >= 2");
+                            // `init` will be nonempty, so the following check won't be vacuous.
+                            init.all(|k_step| *k_step != *last)
+                        });
+                    let gses: Vec<Vec<CountVector<usize>>> = gen_chains_enumerated
+                        .clone()
+                        // Take the prefix of chain_length - 1 elements and get the GS it is generated by.
+                        .map(|(_, chain)| weak_period(&chain[0..chain_length - 1]))
+                        .sorted()
+                        .dedup()
+                        .collect();
+                    gses.iter()
+                        .map(|gs| {
+                            (
+                                gs,
+                                gen_chains_enumerated
+                                    .clone()
+                                    // Check only the prefix of chain_length - 1 elements, because that's what the guided GS is based on.
+                                    .filter(|(_, gen_chain)| {
+                                        weak_period(&gen_chain[..chain_length - 1]) == *gs.clone()
+                                    })
+                                    // Get all indices on which this particular GS occurs.
+                                    .map(|(i, _)| i)
+                                    .collect::<Vec<_>>(),
+                            )
+                        })
+                        .filter(|(_, polyoffset_indices)| {
+                            // Filter by whether the number of gen chains each GS occurs on is equal to the multiplicity.
+                            // We also need to check that all of the chains are disjoint.
+                            let mut union_of_chains: Vec<_> = polyoffset_indices
+                                .iter()
+                                .flat_map(|first| {
+                                    (0..scale.len() / multiplicity)
+                                        .map(|i| (first + i * k) % scale.len())
+                                        .collect::<Vec<_>>()
+                                })
+                                .collect();
+                            union_of_chains.sort();
+                            union_of_chains.dedup();
+                            let chains_are_disjoint: bool = union_of_chains.len() == scale.len();
+                            chains_are_disjoint && polyoffset_indices.len() == multiplicity
+                        })
+                        .map(|(gs, polyoffset_indices)| {
+                            let first_deg = polyoffset_indices[0];
+                            let polyoffset: Vec<CountVector<usize>> = polyoffset_indices
+                                .iter()
+                                .map(|degree| {
+                                    dyad_on_degree(
+                                        &rotate(scale, first_deg),
+                                        first_deg,
+                                        degree - first_deg,
+                                    )
+                                })
+                                .collect();
+                            Self {
+                                gs: gs.clone(),
+                                polyoffset,
+                            }
+                        })
+                        .collect()
+                }
             }
         }
     }
@@ -257,7 +318,7 @@ impl GuideFrame {
             .dedup()
             .map(|p| p as usize)
             .collect();
-        let simple_guide_moses: Vec<GuideFrame> = Self::try_simple_or_interleaved(scale, k);
+        let simple_guide_moses: Vec<GuideFrame> = Self::try_simple(scale, k);
         let multiple_guide_moses: Vec<GuideFrame> = if BTreeSet::from_iter(scale.iter()).len() > 1 {
             prime_factors
                 .into_iter()
@@ -266,13 +327,17 @@ impl GuideFrame {
         } else {
             vec![]
         };
-        [simple_guide_moses, multiple_guide_moses].concat()
+
+        let mut guide_frames = [simple_guide_moses, multiple_guide_moses].concat();
+        guide_frames.sort_by_key(GuideFrame::complexity);
+        println!("{:?}", guide_frames);
+        guide_frames
     }
 }

/// Return the collection of guide frames for the given scale word, sorted by complexity.
pub fn guide_frames(scale: &[usize]) -> Vec { - (2..=scale.len() - 2) // steps subtended by generator used for the guided generator sequence + (2..=scale.len() / 2) // steps subtended by generator used for the guided generator sequence .flat_map(|k| GuideFrame::try_all_variants(scale, k)) .sorted_by_key(GuideFrame::complexity) .collect() @@ -285,7 +350,12 @@ mod tests { use crate::words::{CountVector, Letter}; use super::*; - + #[test] + fn test_blackdye() { + let blackdye: [usize; 10] = [0, 1, 0, 2, 0, 1, 0, 2, 0, 2]; + let should_have_mult_2 = GuideFrame::try_multiple(&blackdye, 2, 4); + assert!(!should_have_mult_2.is_empty()); + } #[test] fn test_fix_bug_for_4sr() { let diamech_4sr: [Letter; 11] = [0, 2, 0, 1, 0, 2, 0, 2, 0, 1, 2]; @@ -300,118 +370,100 @@ mod tests { CountVector::from_slice(&[0, 1]), CountVector::from_slice(&[0, 2]), ])); - let guide_frames = GuideFrame::try_simple_or_interleaved(&diamech_4sr, 2); - println!("{:?}", guide_frames); - assert!(guide_frames.contains(&GuideFrame::new_simple( - vec![ - CountVector::from_slice(&[0, 2]), - CountVector::from_slice(&[0, 1]), - CountVector::from_slice(&[0, 2]), - ], - vec![CountVector::ZERO], - ))); - } - - #[test] - fn test_try_simple_or_interleaved() { - let diachrome_5sc = [0, 2, 0, 2, 0, 1, 2, 0, 2, 0, 2, 1]; - let should_be_nonempty = GuideFrame::try_simple_or_interleaved(&diachrome_5sc, 6); - assert_ne!(should_be_nonempty, vec![]); - println!("{:?}", should_be_nonempty); - println!( - "{:?}", - should_be_nonempty - .into_iter() - .map(|gf| gf.complexity()) - .collect::>() - ); + let guide_frames = GuideFrame::try_simple(&diamech_4sr, 2); + // println!("{:?}", guide_frames); + assert!(guide_frames.contains(&GuideFrame::new_simple(vec![ + CountVector::from_slice(&[0, 2]), + CountVector::from_slice(&[0, 1]), + CountVector::from_slice(&[0, 2]), + ]))); } #[test] fn test_guided_gs_based_guide_frame() { let pinedye = [0, 0, 1, 0, 1, 0, 0, 2]; - let guide_moses = guide_frames(&pinedye); - println!("Pinedye has guide MOS structures: {:?}", guide_moses); - assert!(guide_moses.contains(&GuideFrame::new_simple( - vec![ - CountVector::from_slice(&[0, 0, 2]), - CountVector::from_slice(&[0, 0, 1]), - CountVector::from_slice(&[0, 0, 1]), - ], - vec![CountVector::ZERO], - ))); + let pinedye_guide_moses = guide_frames(&pinedye); + assert!(pinedye_guide_moses.contains(&GuideFrame::new_simple(vec![ + CountVector::from_slice(&[0, 0, 2]), + CountVector::from_slice(&[0, 0, 1]), + CountVector::from_slice(&[0, 0, 1]), + ]))); let diasem = [0, 1, 0, 2, 0, 1, 0, 2, 0]; - let guide_moses = guide_frames(&diasem); - println!("Diasem has guide MOS structures: {:?}", guide_moses); - assert!(guide_moses.contains(&GuideFrame::new_simple( - vec![ + let diasem_guide_moses = guide_frames(&diasem); + assert!(diasem_guide_moses.contains(&GuideFrame::new_simple(vec![ + CountVector::from_slice(&[0, 1]), + CountVector::from_slice(&[0, 2]) + ]))); + assert_eq!( + GuideFrame::new_simple(vec![ CountVector::from_slice(&[0, 1]), CountVector::from_slice(&[0, 2]) - ], - vec![CountVector::ZERO], - ))); - assert_eq!( - GuideFrame::new_simple( - vec![ - CountVector::from_slice(&[0, 1]), - CountVector::from_slice(&[0, 2]) - ], - vec![CountVector::ZERO], - ) + ],) .complexity(), 2 ); - let blackdye: [usize; 10] = [0, 1, 0, 2, 0, 1, 0, 2, 0, 2]; - let guide_moses = guide_frames(&blackdye); - println!("Blackdye has guide MOS structures: {:?}", guide_moses); - assert!(guide_moses.contains(&GuideFrame::new_simple( - vec![CountVector::from_slice(&[0, 0, 1, 2])], - 
vec![CountVector::ZERO, CountVector::from_slice(&[0])], + let blackdye_guide_moses = guide_frames(&blackdye); + assert!(blackdye_guide_moses.contains(&GuideFrame::new_multiple( + vec![CountVector::from_slice(&[0, 0, 1, 2]),], + vec![CountVector::ZERO, CountVector::from_slice(&[0]),] ))); - assert_eq!( - GuideFrame::new_simple( - vec![CountVector::from_slice(&[0, 0, 1, 2])], - vec![CountVector::ZERO, CountVector::from_slice(&[0])], - ) - .complexity(), - 2 - ); let diamech_4sl: [usize; 11] = [1, 0, 2, 0, 2, 0, 1, 0, 2, 0, 2]; - let guide_moses = guide_frames(&diamech_4sl); - println!("Diamech has guide MOS structures: {:?}", guide_moses); - assert!(guide_moses.contains(&GuideFrame::new_simple( - vec![ + let diamech_guide_moses = guide_frames(&diamech_4sl); + assert!(diamech_guide_moses.contains(&GuideFrame::new_simple(vec![ + CountVector::from_slice(&[0, 2]), + CountVector::from_slice(&[0, 2]), + CountVector::from_slice(&[0, 1]), + ],))); + assert_eq!( + GuideFrame::new_simple(vec![ CountVector::from_slice(&[0, 2]), CountVector::from_slice(&[0, 2]), CountVector::from_slice(&[0, 1]), - ], - vec![CountVector::ZERO], - ))); + ]) + .complexity(), + 3 + ); + + let diachrome_5sc = [0, 2, 0, 2, 0, 1, 2, 0, 2, 0, 2, 1]; + let diachrome_guide_moses = guide_frames(&diachrome_5sc); + assert!( + diachrome_guide_moses.contains(&GuideFrame::new_multiple( + vec![CountVector::from_slice(&[0, 0, 1, 2, 2]),], + vec![ + CountVector::ZERO, + CountVector::from_slice(&[0, 0, 0, 1, 2, 2]), + ], + )) || diachrome_guide_moses.contains(&GuideFrame::new_multiple( + vec![CountVector::from_slice(&[0, 0, 1, 2, 2]),], + vec![ + CountVector::ZERO, + CountVector::from_slice(&[0, 0, 1, 2, 2, 2]), + ], + )) + ); assert_eq!( - GuideFrame::new_simple( + GuideFrame::new_multiple( + vec![CountVector::from_slice(&[0, 0, 1, 2, 2])], vec![ - CountVector::from_slice(&[0, 2]), - CountVector::from_slice(&[0, 2]), - CountVector::from_slice(&[0, 1]), + CountVector::ZERO, + CountVector::from_slice(&[0, 0, 1, 2, 2, 2]), ], - vec![CountVector::ZERO], ) .complexity(), - 3 + 2 ); - let diachrome_5sc = [0, 2, 0, 2, 0, 1, 2, 0, 2, 0, 2, 1]; - let guide_moses = guide_frames(&diachrome_5sc); - println!("Diachrome has guide MOS structures: {:?}", guide_moses); - assert!(guide_moses.contains(&GuideFrame::new_multiple( - vec![CountVector::from_slice(&[0, 0, 1, 2, 2]),], - 2, - ))); assert_eq!( - GuideFrame::new_multiple(vec![CountVector::from_slice(&[0, 0, 1, 2, 2])], 2) - .complexity(), + GuideFrame::new_multiple( + vec![CountVector::from_slice(&[0, 0, 1, 2, 2])], + vec![ + CountVector::ZERO, + CountVector::from_slice(&[0, 0, 1, 2, 2, 2]), + ], + ) + .multiplicity(), 2 ); } diff --git a/src/lib.rs b/src/lib.rs index 4d157dd..a094617 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -14,7 +14,7 @@ pub mod words; use itertools::Itertools; use wasm_bindgen::prelude::*; -use words::{chirality, countvector_to_slice, dyad_on_degree}; +use words::chirality; use words::{Chirality, Letter}; #[wasm_bindgen] @@ -26,6 +26,7 @@ extern "C" { fn log(s: &str); } +#[allow(unused_macros)] macro_rules! 
console_log { // Note that this is using the `log` function imported above during // `bare_bones` @@ -163,9 +164,8 @@ fn guide_frame_to_result(structure: &GuideFrame) -> GuideResult { let GuideFrame { ref gs, ref polyoffset, - ref multiplicity, } = structure; - if *multiplicity == 1 { + if structure.multiplicity() == 1 { GuideResult { gs: gs .iter() @@ -200,7 +200,7 @@ fn guide_frame_to_result(structure: &GuideFrame) -> GuideResult { ] }) .collect(), - multiplicity: 1, + multiplicity: structure.multiplicity() as u8, complexity: structure.complexity() as u8, } } else { @@ -227,8 +227,18 @@ fn guide_frame_to_result(structure: &GuideFrame) -> GuideResult { *btreemap.get(&2).unwrap_or(&0) as u8, ] }, - polyoffset: vec![vec![0, 0, 0]], - multiplicity: *multiplicity as u8, + polyoffset: polyoffset + .iter() + .map(|cv| { + let btreemap = cv.into_inner(); + vec![ + *btreemap.get(&0).unwrap_or(&0) as u8, + *btreemap.get(&1).unwrap_or(&0) as u8, + *btreemap.get(&2).unwrap_or(&0) as u8, + ] + }) + .collect(), + multiplicity: structure.multiplicity() as u8, complexity: structure.clone().complexity() as u8, } } @@ -236,7 +246,6 @@ fn guide_frame_to_result(structure: &GuideFrame) -> GuideResult { fn get_unimodular_basis( structures: &[GuideFrame], - scale: &[usize], step_sig: &[u8], ) -> Option<(Vec>, GuideResult)> { /* @@ -266,13 +275,12 @@ fn get_unimodular_basis( } */ for structure in structures { - if structure.multiplicity == 1 { + if structure.multiplicity() == 1 { let structure = guide_frame_to_result(structure); let gs = structure.gs.clone(); for i in 0..gs.clone().len() { for j in i..gs.clone().len() { if det3(step_sig, &gs[i], &gs[j]).abs() == 1 { - console_log!("first branch"); return Some((vec![gs[i].clone(), gs[j].clone()], structure)); } } @@ -281,29 +289,17 @@ fn get_unimodular_basis( for v in polyoffset { for w in structure.clone().gs { if det3(step_sig, &v, &w).abs() == 1 { - console_log!("second branch"); return Some((vec![v, w], structure)); } } } } else { - // this branch handles diregular scales + // this branch handles multiplicity > 1 scales let structure = guide_frame_to_result(structure); let vec_for_gs_element = structure.gs[0].clone(); - let vec_for_detempered_period = countvector_to_slice(dyad_on_degree( - scale, - 0, - scale.len() / structure.clone().multiplicity as usize, - )) - .iter() - .map(|x| *x as u8) - .collect::>(); - if det3(step_sig, &vec_for_gs_element, &vec_for_detempered_period) == 1 { - console_log!("third branch"); - return Some(( - vec![vec_for_gs_element, vec_for_detempered_period], - structure, - )); + let vec_for_offset = structure.polyoffset.last().unwrap(); + if det3(step_sig, &vec_for_gs_element, vec_for_offset).abs() == 1 { + return Some((vec![vec_for_gs_element, vec_for_offset.clone()], structure)); } } } @@ -323,12 +319,7 @@ pub fn word_to_profile(query: &[usize]) -> ScaleProfile { .iter() .map(|x| *x as u8) .collect::>(); - if let Some(pair) = get_unimodular_basis( - &guide_frames(query), - &string_to_numbers(&brightest), - &step_sig, - ) { - console_log!("{:?}: {:?}", query, guide_frames(query)); + if let Some(pair) = get_unimodular_basis(&guide_frames(query), &step_sig) { let (lattice_basis, structure) = pair; ScaleProfile { word: brightest, @@ -479,16 +470,19 @@ pub fn sig_result( let scales = scales .into_iter() .filter(|scale| filtering_cond(scale)) - .sorted_unstable_by_key(|scale| { - if let Some(first) = guide_frames(scale).first() { - first.complexity() - } else { - usize::MAX - } - }) .collect::>(); Ok(to_value(&SigResult { - profiles: 
scales.iter().map(|scale| word_to_profile(scale)).collect(), + profiles: scales + .iter() + .map(|scale| word_to_profile(scale)) + .sorted_by_key(|profile| { + if let Some(guide) = &profile.structure { + guide.complexity + } else { + u8::MAX + } + }) + .collect(), ji_tunings: sig_to_ji_tunings(&step_sig), ed_tunings: sig_to_ed_tunings(&step_sig), })?)
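Illustration (editor's sketch, not part of the patch): after this commit, `GuideFrame` no longer stores a `multiplicity` field; multiplicity is read off the polyoffset, complexity is the GS length times the number of offsets, and `sig_result` orders profiles by that complexity. The self-contained Rust sketch below mirrors those relationships and the values asserted in the updated tests; `Frame` and `Interval` are simplified stand-ins and do not match the crate's actual types or API.

// Simplified stand-ins for `GuideFrame` and `CountVector<usize>`; names are illustrative only.
#[derive(Clone, Debug, PartialEq, Eq)]
struct Interval(Vec<usize>);

#[derive(Clone, Debug, PartialEq, Eq)]
struct Frame {
    gs: Vec<Interval>,
    polyoffset: Vec<Interval>,
}

impl Frame {
    // Simple frame: one GS chain; the polyoffset is just the unison offset.
    fn new_simple(gs: Vec<Interval>) -> Self {
        Self {
            gs,
            polyoffset: vec![Interval(vec![])],
        }
    }

    // Multiple/interleaved frame: one GS chain per offset in `polyoffset`.
    fn new_multiple(gs: Vec<Interval>, polyoffset: Vec<Interval>) -> Self {
        Self { gs, polyoffset }
    }

    // Multiplicity is no longer stored; it is the number of offsets.
    fn multiplicity(&self) -> usize {
        self.polyoffset.len()
    }

    // Complexity drops the separate multiplicity factor: GS length times offset count.
    fn complexity(&self) -> usize {
        self.gs.len() * self.polyoffset.len()
    }
}

fn main() {
    // Mirrors the diasem test: a two-element GS, multiplicity 1, complexity 2.
    let simple = Frame::new_simple(vec![Interval(vec![0, 1]), Interval(vec![0, 2])]);
    assert_eq!(simple.multiplicity(), 1);
    assert_eq!(simple.complexity(), 2);

    // Mirrors the blackdye test: one GS element and two offsets, so multiplicity 2, complexity 2.
    let multiple = Frame::new_multiple(
        vec![Interval(vec![0, 0, 1, 2])],
        vec![Interval(vec![]), Interval(vec![0])],
    );
    assert_eq!(multiple.multiplicity(), 2);
    assert_eq!(multiple.complexity(), 2);

    // Mirrors the change to `sig_result`: results are ordered by ascending complexity.
    let diamech = Frame::new_simple(vec![
        Interval(vec![0, 2]),
        Interval(vec![0, 2]),
        Interval(vec![0, 1]),
    ]);
    let mut frames = vec![diamech, multiple];
    frames.sort_by_key(Frame::complexity);
    assert_eq!(
        frames.iter().map(Frame::complexity).collect::<Vec<_>>(),
        vec![2, 3]
    );
}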