Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
34 commits
Select commit Hold shift + click to select a range
22be5a9
initial elasticsearch impl
aecsocket Feb 12, 2026
31362c2
working elastic cluster
aecsocket Feb 12, 2026
d3c76fa
replace SearchError with ApiError for preparation of search backend
aecsocket Feb 19, 2026
da44fe9
start factoring meili out to trait
aecsocket Feb 13, 2026
1d17f61
move meili to backend
aecsocket Feb 13, 2026
5e112e9
update routes to use search backend trait
aecsocket Feb 13, 2026
4ffafc1
wip
aecsocket Feb 23, 2026
71e3256
Update projects.rs
aecsocket Feb 23, 2026
badb907
search backend is only init'd once in config
aecsocket Feb 15, 2026
72eb548
wip
aecsocket Feb 16, 2026
def1f6a
wip: backend agnostic
aecsocket Feb 16, 2026
9ccc391
change search internal routes to delegate to backend
aecsocket Feb 19, 2026
bf1f64d
initial elasticsearch impl
aecsocket Feb 19, 2026
e72e2fb
fix filtering
aecsocket Feb 19, 2026
495f6be
elastic impl
aecsocket Feb 19, 2026
e4c4691
refactor indexing into its own module
aecsocket Feb 19, 2026
2a8bed1
clean up elastic code
aecsocket Feb 19, 2026
8281def
fix ci
aecsocket Feb 19, 2026
9d38af8
fix tests
aecsocket Feb 19, 2026
58f432b
fix elastic health check
aecsocket Feb 19, 2026
74f350f
fix up env rebase
aecsocket Feb 19, 2026
77383fb
fix compile
aecsocket Feb 19, 2026
0520360
dummy commit to update github pr
aecsocket Feb 19, 2026
347de69
Fix rebase
aecsocket Feb 23, 2026
225c227
Elastic basic HTTPS auth
aecsocket Feb 23, 2026
0a3414e
Fix duplicate projects showing up
aecsocket Feb 23, 2026
7ba4dc1
Fix up tests
aecsocket Feb 24, 2026
04a46e6
Replace search `ApiErrors` with `eyre::Reports`, propagate background…
aecsocket Feb 24, 2026
5fd8c00
clean up agents files
aecsocket Feb 24, 2026
b7ff993
make index chunk size configurable
aecsocket Feb 24, 2026
abbab62
make `match_phrase` in elastic case-insensitive
aecsocket Feb 26, 2026
ea2e0a4
use current/next indices and swap between them
aecsocket Feb 26, 2026
110f004
test case for error body
aecsocket Feb 26, 2026
448b74b
Fix failing case
aecsocket Feb 26, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 24 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,7 @@ dotenv-build = "0.1.1"
dotenvy = "0.15.7"
dunce = "1.0.5"
either = "1.15.0"
elasticsearch = "9.1.0-alpha.1"
encoding_rs = "0.8.35"
enumset = "1.1.10"
eyre = "0.6.12"
Expand Down
5 changes: 5 additions & 0 deletions apps/labrinth/.env.docker-compose
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,14 @@ DATABASE_URL=postgresql://labrinth:labrinth@labrinth-postgres/labrinth
DATABASE_MIN_CONNECTIONS=0
DATABASE_MAX_CONNECTIONS=16

SEARCH_BACKEND=meilisearch
MEILISEARCH_READ_ADDR=http://localhost:7700
MEILISEARCH_WRITE_ADDRS=http://localhost:7700
MEILISEARCH_KEY=modrinth
ELASTICSEARCH_URL=http://localhost:9200
ELASTICSEARCH_INDEX_PREFIX=labrinth
ELASTICSEARCH_USERNAME=elastic
ELASTICSEARCH_PASSWORD=elastic

REDIS_URL=redis://labrinth-redis
REDIS_MIN_CONNECTIONS=0
Expand Down
13 changes: 13 additions & 0 deletions apps/labrinth/.env.local
Original file line number Diff line number Diff line change
Expand Up @@ -16,16 +16,29 @@ DATABASE_URL=postgresql://labrinth:labrinth@localhost/labrinth
DATABASE_MIN_CONNECTIONS=0
DATABASE_MAX_CONNECTIONS=16

SEARCH_BACKEND=meilisearch

# Meilisearch configuration
MEILISEARCH_READ_ADDR=http://localhost:7700
MEILISEARCH_WRITE_ADDRS=http://localhost:7700
# 5 minutes in milliseconds
SEARCH_OPERATION_TIMEOUT=300000

ELASTICSEARCH_URL=http://localhost:9200
ELASTICSEARCH_INDEX_PREFIX=labrinth

# # For a sharded Meilisearch setup (sharded-meilisearch docker compose profile)
# MEILISEARCH_READ_ADDR=http://localhost:7710
# MEILISEARCH_WRITE_ADDRS=http://localhost:7700,http://localhost:7701

MEILISEARCH_KEY=modrinth
MEILISEARCH_META_NAMESPACE=

# Elasticsearch configuration
ELASTICSEARCH_URL=http://localhost:9200
ELASTICSEARCH_INDEX_PREFIX=labrinth
ELASTICSEARCH_USERNAME=
ELASTICSEARCH_PASSWORD=

REDIS_URL=redis://localhost
REDIS_MIN_CONNECTIONS=0
Expand Down
7 changes: 4 additions & 3 deletions apps/labrinth/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ async-stripe = { workspace = true, features = [
"billing",
"checkout",
"connect",
"webhook-events",
"webhook-events"
] }
async-trait = { workspace = true }
base64 = { workspace = true }
Expand All @@ -43,6 +43,7 @@ deadpool-redis.workspace = true
derive_more = { workspace = true, features = ["deref", "deref_mut"] }
dotenvy = { workspace = true }
either = { workspace = true }
elasticsearch = { workspace = true, features = ["experimental-apis"] }
eyre = { workspace = true }
futures = { workspace = true }
futures-util = { workspace = true }
Expand Down Expand Up @@ -85,11 +86,11 @@ reqwest = { workspace = true, features = [
"http2",
"json",
"multipart",
"rustls-tls-webpki-roots",
"rustls-tls-webpki-roots"
] }
rust_decimal = { workspace = true, features = [
"serde-with-float",
"serde-with-str",
"serde-with-str"
] }
rust_iso3166 = { workspace = true }
rust-s3 = { workspace = true }
Expand Down
4 changes: 0 additions & 4 deletions apps/labrinth/src/auth/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,6 @@ use thiserror::Error;
pub enum AuthenticationError {
#[error(transparent)]
Internal(#[from] eyre::Report),
#[error("Environment Error")]
Env(#[from] dotenvy::Error),
#[error("An unknown database error occurred: {0}")]
Sqlx(#[from] sqlx::Error),
#[error("Database Error: {0}")]
Expand Down Expand Up @@ -58,7 +56,6 @@ impl actix_web::ResponseError for AuthenticationError {
AuthenticationError::Internal(..) => {
StatusCode::INTERNAL_SERVER_ERROR
}
AuthenticationError::Env(..) => StatusCode::INTERNAL_SERVER_ERROR,
AuthenticationError::Sqlx(..) => StatusCode::INTERNAL_SERVER_ERROR,
AuthenticationError::Database(..) => {
StatusCode::INTERNAL_SERVER_ERROR
Expand Down Expand Up @@ -94,7 +91,6 @@ impl AuthenticationError {
pub fn error_name(&self) -> &'static str {
match self {
AuthenticationError::Internal(..) => "internal_error",
AuthenticationError::Env(..) => "environment_error",
AuthenticationError::Sqlx(..) => "database_error",
AuthenticationError::Database(..) => "database_error",
AuthenticationError::SerDe(..) => "invalid_input",
Expand Down
23 changes: 11 additions & 12 deletions apps/labrinth/src/background_task.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
use crate::database;
use crate::database::PgPool;
use crate::database::redis::RedisPool;
use crate::queue::billing::{index_billing, index_subscriptions};
Expand All @@ -7,9 +8,9 @@ use crate::queue::payouts::{
insert_bank_balances_and_webhook, process_affiliate_payouts,
process_payout, remove_payouts_for_refunded_charges,
};
use crate::search::indexing::index_projects;
use crate::search::SearchBackend;
use crate::util::anrok;
use crate::{database, search};
use actix_web::web;
use clap::ValueEnum;
use tracing::{error, info, warn};

Expand All @@ -34,18 +35,19 @@ impl BackgroundTask {
pool: PgPool,
ro_pool: PgPool,
redis_pool: RedisPool,
search_config: search::SearchConfig,
search_backend: web::Data<dyn SearchBackend>,
clickhouse: clickhouse::Client,
stripe_client: stripe::Client,
anrok_client: anrok::Client,
email_queue: EmailQueue,
mural_client: muralpay::Client,
) {
) -> eyre::Result<()> {
use BackgroundTask::*;
// TODO: all of these tasks should return `eyre::Result`s
match self {
Migrations => run_migrations().await,
IndexSearch => {
index_search(ro_pool, redis_pool, search_config).await
return index_search(ro_pool, redis_pool, search_backend).await;
}
ReleaseScheduled => release_scheduled(pool).await,
UpdateVersions => update_versions(pool, redis_pool).await,
Expand Down Expand Up @@ -77,6 +79,7 @@ impl BackgroundTask {
run_email(email_queue).await;
}
}
Ok(())
}
}

Expand Down Expand Up @@ -122,14 +125,10 @@ pub async fn run_migrations() {
pub async fn index_search(
ro_pool: PgPool,
redis_pool: RedisPool,
search_config: search::SearchConfig,
) {
search_backend: web::Data<dyn SearchBackend>,
) -> eyre::Result<()> {
info!("Indexing local database");
let result = index_projects(ro_pool, redis_pool, &search_config).await;
if let Err(e) = result {
warn!("Local project indexing failed: {:?}", e);
}
info!("Done indexing local database");
search_backend.index_projects(ro_pool, redis_pool).await
}

pub async fn release_scheduled(pool: PgPool) {
Expand Down
15 changes: 12 additions & 3 deletions apps/labrinth/src/env.rs
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,7 @@ where
}

pub fn init() -> eyre::Result<()> {
dotenvy::dotenv().ok();
EnvVars::from_env()?;
LazyLock::force(&ENV);
Ok(())
Expand Down Expand Up @@ -128,9 +129,6 @@ vars! {
LABRINTH_EXTERNAL_NOTIFICATION_KEY: String;
RATE_LIMIT_IGNORE_KEY: String;
DATABASE_URL: String;
MEILISEARCH_READ_ADDR: String;
MEILISEARCH_WRITE_ADDRS: StringCsv;
MEILISEARCH_KEY: String;
REDIS_URL: String;
BIND_ADDR: String;
SELF_ADDR: String;
Expand All @@ -142,6 +140,17 @@ vars! {
ALLOWED_CALLBACK_URLS: Json<Vec<String>>;
ANALYTICS_ALLOWED_ORIGINS: Json<Vec<String>>;

// search
SEARCH_BACKEND: crate::search::SearchBackendKind;
MEILISEARCH_READ_ADDR: String;
MEILISEARCH_WRITE_ADDRS: StringCsv;
MEILISEARCH_KEY: String;
ELASTICSEARCH_URL: String;
ELASTICSEARCH_INDEX_PREFIX: String;
ELASTICSEARCH_USERNAME: String = "";
ELASTICSEARCH_PASSWORD: String = "";
ELASTICSEARCH_INDEX_CHUNK_SIZE: i64 = 5000i64;

// storage
STORAGE_BACKEND: crate::file_hosting::FileHostKind;

Expand Down
21 changes: 12 additions & 9 deletions apps/labrinth/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ pub struct LabrinthConfig {
pub file_host: Arc<dyn file_hosting::FileHost + Send + Sync>,
pub scheduler: Arc<scheduler::Scheduler>,
pub ip_salt: Pepper,
pub search_config: search::SearchConfig,
pub search_backend: web::Data<dyn search::SearchBackend>,
pub session_queue: web::Data<AuthQueue>,
pub payouts_queue: web::Data<PayoutsQueue>,
pub analytics_queue: Arc<AnalyticsQueue>,
Expand All @@ -75,7 +75,7 @@ pub fn app_setup(
pool: PgPool,
ro_pool: ReadOnlyPgPool,
redis_pool: RedisPool,
search_config: search::SearchConfig,
search_backend: actix_web::web::Data<dyn search::SearchBackend>,
clickhouse: &mut Client,
file_host: Arc<dyn file_hosting::FileHost + Send + Sync>,
stripe_client: stripe::Client,
Expand Down Expand Up @@ -113,19 +113,22 @@ pub fn app_setup(
let local_index_interval =
Duration::from_secs(ENV.LOCAL_INDEX_INTERVAL);
let pool_ref = pool.clone();
let search_config_ref = search_config.clone();
let redis_pool_ref = redis_pool.clone();
let search_backend_ref = search_backend.clone();
scheduler.run(local_index_interval, move || {
let pool_ref = pool_ref.clone();
let redis_pool_ref = redis_pool_ref.clone();
let search_config_ref = search_config_ref.clone();
let search_backend = search_backend_ref.clone();
async move {
background_task::index_search(
if let Err(err) = background_task::index_search(
pool_ref,
redis_pool_ref,
search_config_ref,
search_backend,
)
.await;
.await
{
warn!("Failed to index search: {err:?}");
}
}
});

Expand Down Expand Up @@ -269,7 +272,7 @@ pub fn app_setup(
file_host,
scheduler: Arc::new(scheduler),
ip_salt,
search_config,
search_backend,
session_queue,
payouts_queue: web::Data::new(PayoutsQueue::new()),
analytics_queue,
Expand Down Expand Up @@ -307,7 +310,7 @@ pub fn app_config(
.app_data(web::Data::new(labrinth_config.pool.clone()))
.app_data(web::Data::new(labrinth_config.ro_pool.clone()))
.app_data(web::Data::new(labrinth_config.file_host.clone()))
.app_data(web::Data::new(labrinth_config.search_config.clone()))
.app_data(labrinth_config.search_backend.clone())
.app_data(web::Data::new(labrinth_config.gotenberg_client.clone()))
.app_data(labrinth_config.session_queue.clone())
.app_data(labrinth_config.payouts_queue.clone())
Expand Down
34 changes: 18 additions & 16 deletions apps/labrinth/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ use labrinth::utoipa_app_config;
use labrinth::{app_config, env};
use labrinth::{clickhouse, database, file_hosting};
use std::ffi::CStr;
use std::io;
use std::sync::Arc;
use tracing::{Instrument, info, info_span};
use tracing_actix_web::TracingLogger;
Expand Down Expand Up @@ -56,7 +57,6 @@ struct Args {

fn main() -> std::io::Result<()> {
color_eyre::install().expect("failed to install `color-eyre`");
dotenvy::dotenv().ok();
modrinth_util::log::init().expect("failed to initialize logging");
env::init().expect("failed to initialize environment variables");

Expand Down Expand Up @@ -152,7 +152,8 @@ async fn app() -> std::io::Result<()> {
info!("Initializing clickhouse connection");
let mut clickhouse = clickhouse::init_client().await.unwrap();

let search_config = search::SearchConfig::new(None);
let search_backend =
actix_web::web::Data::from(Arc::from(search::backend(None)));

let stripe_client = stripe::Client::new(ENV.STRIPE_API_KEY.clone());

Expand All @@ -167,19 +168,20 @@ async fn app() -> std::io::Result<()> {

if let Some(task) = args.run_background_task {
info!("Running task {task:?} and exiting");
task.run(
pool,
ro_pool.into_inner(),
redis_pool,
search_config,
clickhouse,
stripe_client,
anrok_client.clone(),
email_queue,
muralpay,
)
.await;
return Ok(());
return task
.run(
pool,
ro_pool.into_inner(),
redis_pool,
search_backend,
clickhouse,
stripe_client,
anrok_client.clone(),
email_queue,
muralpay,
)
.await
.map_err(io::Error::other);
}

let prometheus = PrometheusMetricsBuilder::new("labrinth")
Expand All @@ -206,7 +208,7 @@ async fn app() -> std::io::Result<()> {
pool.clone(),
ro_pool.clone(),
redis_pool.clone(),
search_config.clone(),
search_backend.clone(),
&mut clickhouse,
file_host.clone(),
stripe_client,
Expand Down
4 changes: 1 addition & 3 deletions apps/labrinth/src/queue/email.rs
Original file line number Diff line number Diff line change
Expand Up @@ -102,8 +102,6 @@ impl Mailer {

#[derive(Error, Debug)]
pub enum MailError {
#[error("Environment Error")]
Env(#[from] dotenvy::Error),
#[error("Mail Error: {0}")]
Mail(#[from] lettre::error::Error),
#[error("Address Parse Error: {0}")]
Expand Down Expand Up @@ -136,7 +134,7 @@ impl EmailQueue {
pg,
redis,
mailer: Arc::new(TokioMutex::new(Mailer::Uninitialized)),
identity: templates::MailingIdentity::from_env()?,
identity: templates::MailingIdentity::from_env(),
client: Client::builder()
.user_agent("Modrinth")
.build()
Expand Down
Loading