Skip to content

Commit

Permalink
DEV: Quick fix for issue CederGroupHub#385.
Browse files Browse the repository at this point in the history
  • Loading branch information
qchempku2017 committed Jul 21, 2023
1 parent bf39e19 commit e3ef15b
Show file tree
Hide file tree
Showing 3 changed files with 20 additions and 9 deletions.
2 changes: 1 addition & 1 deletion smol/cofe/expansion.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,7 +179,7 @@ def eci(self):
num_ext_terms = len(self._subspace.external_terms) # check for extra terms
coefs = self.coefs[:-num_ext_terms] if num_ext_terms else self.coefs[:]
eci = coefs.copy()
eci /= self._subspace.function_total_multiplicities
eci = eci / self._subspace.function_total_multiplicities
return eci

@cached_property
Expand Down
2 changes: 1 addition & 1 deletion smol/cofe/wrangling/wrangler.py
Original file line number Diff line number Diff line change
Expand Up @@ -426,7 +426,7 @@ def get_property_vector(self, key, normalize=True):
) from error

if normalize:
properties /= self.sizes
properties = properties / self.sizes

return properties

Expand Down
25 changes: 18 additions & 7 deletions tests/test_utils/test_cluster_utils.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import numpy as np
import pytest

from smol.utils.cluster.container import (
Expand All @@ -13,31 +14,35 @@
"IntArrayContainer, dim", [(IntArray1DContainer, 1), (IntArray2DContainer, 2)]
)
def test_int_container(IntArrayContainer, dim, rng):
# Use np.int_ to ensure cross-platform compatibility.
arrays = tuple(
rng.integers(1, 5, size=dim * (rng.integers(1, 5),)) for _ in range(10)
rng.integers(1, 5, size=dim * (rng.integers(1, 5),)).astype(np.int_)
for _ in range(10)
)
container = IntArrayContainer(arrays)
assert len(container) == 10

# test setting the same sized list (no memory reallocation)
new_arrays = tuple(
rng.integers(1, 5, size=dim * (rng.integers(1, 5),)) for _ in range(10)
rng.integers(1, 5, size=dim * (rng.integers(1, 5),)).astype(np.int_)
for _ in range(10)
)
container.set_arrays(new_arrays)

assert len(container) == 10

# test setting a different sized list (needs memory reallocation)
new_arrays = tuple(
rng.integers(1, 5, size=dim * (rng.integers(1, 5),)) for _ in range(12)
rng.integers(1, 5, size=dim * (rng.integers(1, 5),)).astype(np.int_)
for _ in range(12)
)
container.set_arrays(new_arrays)

assert len(container) == 12

with pytest.raises(ValueError):
new_arrays = tuple(
rng.integers(1, 5, size=(dim + 1) * (rng.integers(1, 5),))
rng.integers(1, 5, size=(dim + 1) * (rng.integers(1, 5),)).astype(np.int_)
for _ in range(12)
)
container.set_arrays(new_arrays)
Expand Down Expand Up @@ -79,7 +84,9 @@ def test_orbit_container(rng):
orbit_id = i
bit_id = rng.integers(1, 5, dtype=int)
correlation_tensors = rng.random(tuple(rng.integers(1, 5, size=2)))
tensor_indices = rng.integers(1, 5, size=correlation_tensors.shape[0])
tensor_indices = rng.integers(1, 5, size=correlation_tensors.shape[0]).astype(
np.int_
)
orbit_data.append((orbit_id, bit_id, correlation_tensors, tensor_indices))

container = OrbitContainer(tuple(orbit_data))
Expand All @@ -91,7 +98,9 @@ def test_orbit_container(rng):
orbit_id = i
bit_id = rng.integers(1, 5, dtype=int)
correlation_tensors = rng.random(tuple(rng.integers(1, 5, size=2)))
tensor_indices = rng.integers(1, 5, size=correlation_tensors.shape[0])
tensor_indices = rng.integers(1, 5, size=correlation_tensors.shape[0]).astype(
np.int_
)
new_orbit_data.append((orbit_id, bit_id, correlation_tensors, tensor_indices))

container.set_orbits(tuple(new_orbit_data))
Expand All @@ -103,7 +112,9 @@ def test_orbit_container(rng):
orbit_id = i
bit_id = rng.integers(1, 5, dtype=int)
correlation_tensors = rng.random(tuple(rng.integers(1, 5, size=2)))
tensor_indices = rng.integers(1, 5, size=correlation_tensors.shape[0])
tensor_indices = rng.integers(1, 5, size=correlation_tensors.shape[0]).astype(
np.int_
)
new_orbit_data.append((orbit_id, bit_id, correlation_tensors, tensor_indices))

container.set_orbits(tuple(new_orbit_data))
Expand Down

0 comments on commit e3ef15b

Please sign in to comment.