diff --git a/Dockerfile b/Dockerfile
index 011ddbeb..c6479ace 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -23,7 +23,8 @@ ARG SQLX_OFFLINE=true
 
 RUN cargo build --release
 
-FROM debian:bullseye-slim
+# Final Stage
+FROM ubuntu:latest
 
 RUN apt-get update \
  && apt-get install -y --no-install-recommends ca-certificates \
diff --git a/src/search/indexing/local_import.rs b/src/search/indexing/local_import.rs
index 28a8d36a..d8bdd0d8 100644
--- a/src/search/indexing/local_import.rs
+++ b/src/search/indexing/local_import.rs
@@ -1,9 +1,6 @@
-use std::collections::HashMap;
-use std::sync::Arc;
-
-use dashmap::DashSet;
 use futures::TryStreamExt;
 use log::info;
+use std::collections::HashMap;
 
 use super::IndexingError;
 use crate::database::models::{project_item, version_item, ProjectId, VersionId};
@@ -54,10 +51,8 @@ pub async fn index_local(
     pool: &PgPool,
     redis: &RedisPool,
     visible_ids: HashMap<VersionId, (ProjectId, String)>,
-) -> Result<(Vec<UploadSearchProject>, Vec<String>), IndexingError> {
+) -> Result<Vec<UploadSearchProject>, IndexingError> {
     info!("Indexing local projects!");
-    let loader_field_keys: Arc<DashSet<String>> = Arc::new(DashSet::new());
-
     let project_ids = visible_ids
         .values()
         .map(|(project_id, _)| project_id)
@@ -120,10 +115,6 @@ pub async fn index_local(
         let version_fields = v.version_fields.clone();
         let loader_fields = models::projects::from_duplicate_version_fields(version_fields);
-        for v in loader_fields.keys().cloned() {
-            loader_field_keys.insert(v);
-        }
-
         let license = match m.inner.license.split(' ').next() {
             Some(license) => license.to_string(),
             None => m.inner.license.clone(),
         };
@@ -223,11 +214,5 @@ pub async fn index_local(
         uploads.push(usp);
     }
 
-    Ok((
-        uploads,
-        Arc::try_unwrap(loader_field_keys)
-            .unwrap_or_default()
-            .into_iter()
-            .collect(),
-    ))
+    Ok(uploads)
 }
diff --git a/src/search/indexing/mod.rs b/src/search/indexing/mod.rs
index b669837a..63166908 100644
--- a/src/search/indexing/mod.rs
+++ b/src/search/indexing/mod.rs
@@ -65,6 +65,13 @@ pub async fn index_projects(
 
     let indices = get_indexes(config).await?;
 
+    let all_loader_fields =
+        crate::database::models::loader_fields::LoaderField::get_fields_all(&pool, &redis)
+            .await?
+            .into_iter()
+            .map(|x| x.field)
+            .collect::<Vec<_>>();
+
     let all_ids = get_all_ids(pool.clone()).await?;
     let all_ids_len = all_ids.len();
     info!("Got all ids, indexing {} projects", all_ids_len);
@@ -93,10 +100,10 @@ pub async fn index_projects(
             (version_id, (project_id, owner_username.to_lowercase()))
         })
         .collect::<HashMap<_, _>>();
-        let (uploads, loader_fields) = index_local(&pool, &redis, id_chunk).await?;
+        let uploads = index_local(&pool, &redis, id_chunk).await?;
         info!("Got chunk, adding to docs_to_add");
 
-        add_projects(&indices, uploads, loader_fields, config).await?;
+        add_projects(&indices, uploads, all_loader_fields.clone(), config).await?;
     }
 
     info!("Done adding projects.");