Skip to content

Commit

Permalink
Disallow iteration over hash types (powdr-labs#2167)
Browse files Browse the repository at this point in the history
Iterating over hash types introduces non-determinism.
Ban it generally, and require turning the lint off locally in the cases
where such iteration is fine.
Note that this
[lint](https://rust-lang.github.io/rust-clippy/master/index.html#/iter_over_hash_type)
only applies to `for` loops, not to other forms of iteration.
  • Loading branch information
Schaeff authored Dec 9, 2024
1 parent d98cf9f commit 576bdd7
Show file tree
Hide file tree
Showing 11 changed files with 30 additions and 7 deletions.
5 changes: 3 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ members = [
"executor-utils",
]

exclude = [ "riscv-runtime" ]
exclude = ["riscv-runtime"]

[workspace.package]
version = "0.1.3"
Expand Down Expand Up @@ -86,4 +86,5 @@ debug = true

[workspace.lints.clippy]
print_stdout = "deny"
uninlined_format_args = "deny"
uninlined_format_args = "deny"
iter_over_hash_type = "deny"
2 changes: 1 addition & 1 deletion ast/src/analyzed/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -648,7 +648,7 @@ pub fn type_from_definition(
/// given a list of type arguments.
#[derive(Default, Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct SolvedTraitImpls {
impls: HashMap<String, HashMap<Vec<Type>, ImplData>>,
impls: BTreeMap<String, HashMap<Vec<Type>, ImplData>>,
}

#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
Expand Down
2 changes: 2 additions & 0 deletions executor/src/witgen/machines/block_machine.rs
Original file line number Diff line number Diff line change
Expand Up @@ -320,6 +320,8 @@ impl<'a, T: FieldElement> BlockMachine<'a, T> {
/// _operation_id_no_change = ((1 - _block_enforcer_last_step) * (1 - <Latch>));
/// This function fixes this exception by setting _operation_id_no_change to 0.
fn handle_last_row(&self, data: &mut HashMap<PolyID, Vec<T>>) {
#[allow(clippy::iter_over_hash_type)]
// This is deterministic because there is no shared state.
for (poly_id, col) in data.iter_mut() {
if self
.parts
Expand Down
2 changes: 2 additions & 0 deletions executor/src/witgen/machines/sorted_witness_machine.rs
Original file line number Diff line number Diff line change
Expand Up @@ -228,6 +228,8 @@ impl<'a, T: FieldElement> Machine<'a, T> for SortedWitnesses<'a, T> {
}
result.insert(self.fixed_data.column_name(&self.key_col).to_string(), keys);

#[allow(clippy::iter_over_hash_type)]
// TODO: Is this deterministic?
for (col, &i) in &self.witness_positions {
let mut col_values = values
.iter_mut()
Expand Down
8 changes: 4 additions & 4 deletions pil-analyzer/src/call_graph.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use std::collections::{HashMap, HashSet};
use std::collections::{BTreeMap, BTreeSet, HashSet};

use powdr_ast::{
analyzed::{Expression, Reference},
Expand All @@ -22,7 +22,7 @@ pub fn sort_called_first<'a, I: Iterator<Item = (&'a str, Option<&'a Expression>

fn topo_sort_visit<'a, 'b>(
name: &'a str,
graph: &'b HashMap<&'a str, HashSet<&'a str>>,
graph: &'b BTreeMap<&'a str, BTreeSet<&'a str>>,
visited: &'b mut HashSet<&'a str>,
result: &'b mut Vec<String>,
) {
Expand All @@ -39,10 +39,10 @@ fn topo_sort_visit<'a, 'b>(

fn call_graph<'a, I: Iterator<Item = (&'a str, Option<&'a Expression>)>>(
symbols: I,
) -> HashMap<&'a str, HashSet<&'a str>> {
) -> BTreeMap<&'a str, BTreeSet<&'a str>> {
symbols
.map(|(name, expr)| {
let mut called: HashSet<&str> = HashSet::new();
let mut called: BTreeSet<&str> = BTreeSet::new();
if let Some(e) = expr {
e.all_children().for_each(|e| {
if let Expression::Reference(_, Reference::Poly(r)) = e {
Expand Down
6 changes: 6 additions & 0 deletions pil-analyzer/src/condenser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -147,6 +147,8 @@ pub fn condense<T: FieldElement>(
})
.collect::<Vec<_>>();

#[allow(clippy::iter_over_hash_type)]
// TODO: is this deterministic?
for (name, value) in condenser.extract_new_column_values() {
if new_values.insert(name.clone(), value).is_some() {
panic!("Column {name} already has a hint set, but tried to add another one.",)
Expand All @@ -162,9 +164,13 @@ pub fn condense<T: FieldElement>(
.collect();

definitions.retain(|name, _| !intermediate_columns.contains_key(name));
#[allow(clippy::iter_over_hash_type)]
// This is deterministic because insertion order does not matter.
for symbol in new_columns {
definitions.insert(symbol.absolute_name.clone(), (symbol, None));
}
#[allow(clippy::iter_over_hash_type)]
// This is deterministic because definitions can be updated in any order.
for (name, new_value) in new_values {
if let Some((_, value)) = definitions.get_mut(&name) {
if !value.is_none() {
Expand Down
2 changes: 2 additions & 0 deletions pil-analyzer/src/pil_analyzer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -204,6 +204,8 @@ impl PILAnalyzer {
/// Check that query and constr functions are used in the correct contexts.
pub fn side_effect_check(&self) -> Result<(), Vec<Error>> {
let mut errors = vec![];
#[allow(clippy::iter_over_hash_type)]
// TODO: This is not deterministic to the extent that the errors are added in arbitrary order. Source order would be better.
for (symbol, value) in self.definitions.values() {
let Some(value) = value else { continue };
let context = match symbol.kind {
Expand Down
2 changes: 2 additions & 0 deletions pil-analyzer/src/structural_checks.rs
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,8 @@ fn check_struct_declarations(
definitions: &HashMap<String, (Symbol, Option<FunctionValueDefinition>)>,
) -> Vec<Error> {
let mut errors = Vec::new();
#[allow(clippy::iter_over_hash_type)]
// TODO: This is not deterministic, because the errors are inserted in arbitrary order. Source order would be better.
for (symbol, def) in definitions.values() {
let Some(FunctionValueDefinition::TypeDeclaration(TypeDeclaration::Struct(struct_decl))) =
def
Expand Down
4 changes: 4 additions & 0 deletions pil-analyzer/src/type_inference.rs
Original file line number Diff line number Diff line change
Expand Up @@ -168,6 +168,8 @@ impl TypeChecker {

// Now we check for all symbols that are not declared as a type scheme that they
// can resolve to a concrete type.
#[allow(clippy::iter_over_hash_type)]
// TODO: This is not deterministic, because it returns the first error in an arbitrary order. Source order would be better.
for (name, (source_ref, declared_type)) in &self.declared_types {
if declared_type.vars.is_empty() {
// It is not a type scheme, see if we were able to derive a concrete type.
Expand Down Expand Up @@ -234,6 +236,8 @@ impl TypeChecker {

// Add builtin schemes if they are not already there and also remove them from the definitions
// (because we ignore the defined value).
#[allow(clippy::iter_over_hash_type)]
// This is deterministic, because the order does not matter.
for (name, scheme) in builtin_schemes() {
self.declared_types
.entry(name.clone())
Expand Down
2 changes: 2 additions & 0 deletions pil-analyzer/src/type_unifier.rs
Original file line number Diff line number Diff line change
Expand Up @@ -172,6 +172,8 @@ impl Unifier {
));
}

#[allow(clippy::iter_over_hash_type)]
// TODO: Is this deterministic?
for bound in self.type_var_bounds(&type_var) {
self.ensure_bound(&ty, bound)?;
}
Expand Down
2 changes: 2 additions & 0 deletions pilopt/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -126,13 +126,15 @@ fn build_poly_id_to_definition_name_lookup(
pil_file: &Analyzed<impl FieldElement>,
) -> BTreeMap<PolyID, &String> {
let mut poly_id_to_definition_name = BTreeMap::new();
#[allow(clippy::iter_over_hash_type)]
for (name, (symbol, _)) in &pil_file.definitions {
if matches!(symbol.kind, SymbolKind::Poly(_)) {
symbol.array_elements().for_each(|(_, id)| {
poly_id_to_definition_name.insert(id, name);
});
}
}
#[allow(clippy::iter_over_hash_type)]
for (name, (symbol, _)) in &pil_file.intermediate_columns {
symbol.array_elements().for_each(|(_, id)| {
poly_id_to_definition_name.insert(id, name);
Expand Down

0 comments on commit 576bdd7

Please sign in to comment.