Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,10 @@ Install Rust, Bitcoin Core (no `txindex` needed) and the `clang` and `cmake` pac
```bash
$ git clone https://github.com/blockstream/electrs && cd electrs
$ git checkout new-index
$ cargo run --release --bin electrs -- -vvvv --daemon-dir ~/.bitcoin
$ cargo run --release --bin electrs -- --daemon-dir ~/.bitcoin

# Or for liquid:
$ cargo run --features liquid --release --bin electrs -- -vvvv --network liquid --daemon-dir ~/.liquid
$ cargo run --features liquid --release --bin electrs -- --network liquid --daemon-dir ~/.liquid
```

See [electrs's original documentation](https://github.com/romanz/electrs/blob/master/doc/usage.md) for more detailed instructions.
Expand Down
2 changes: 1 addition & 1 deletion contrib/electrs.service
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ Description=Electrum Rust Server

[Service]
Type=simple
ExecStart=/path/to/electrs/target/release/electrs -vvvv --db-dir /path/to/electrs/db/
ExecStart=/path/to/electrs/target/release/electrs --db-dir /path/to/electrs/db/
Restart=on-failure
RestartSec=60
Environment="RUST_BACKTRACE=1"
Expand Down
2 changes: 1 addition & 1 deletion doc/usage.md
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ Otherwise, [`~/.bitcoin/.cookie`](https://github.com/bitcoin/bitcoin/blob/021218

First index sync should take ~1.5 hours:
```bash
$ cargo run --release -- -vvv --timestamp --db-dir ./db [--cookie="USER:PASSWORD"]
$ cargo run --release -- --timestamp --db-dir ./db [--cookie="USER:PASSWORD"]
2018-08-17T18:27:42 - INFO - NetworkInfo { version: 179900, subversion: "/Satoshi:0.17.99/" }
2018-08-17T18:27:42 - INFO - BlockchainInfo { chain: "main", blocks: 537204, headers: 537204, bestblockhash: "0000000000000000002956768ca9421a8ddf4e53b1d81e429bd0125a383e3636", pruned: false, initialblockdownload: false }
2018-08-17T18:27:42 - DEBUG - opening DB at "./db/mainnet"
Expand Down
9 changes: 9 additions & 0 deletions src/bin/electrs.rs
Original file line number Diff line number Diff line change
Expand Up @@ -57,10 +57,13 @@ fn run_server(config: Arc<Config>, salt_rwlock: Arc<RwLock<String>>) -> Result<(
let metrics = Metrics::new(config.monitoring_addr);
metrics.start();

info!("starting electrs");

if let Some(zmq_addr) = config.zmq_addr.as_ref() {
zmq::start(&format!("tcp://{zmq_addr}"), block_hash_notify);
}

info!("connecting to daemon at {}", config.daemon_rpc_addr);
let daemon = Arc::new(Daemon::new(
&config.daemon_dir,
&config.blocks_dir,
Expand All @@ -71,14 +74,17 @@ fn run_server(config: Arc<Config>, salt_rwlock: Arc<RwLock<String>>) -> Result<(
signal.clone(),
&metrics,
)?);
info!("opening database at {}", config.db_path.display());
let store = Arc::new(Store::open(&config, &metrics, true));
let mut indexer = Indexer::open(
Arc::clone(&store),
fetch_from(&config, &store),
&config,
&metrics,
);
info!("starting initial sync");
let mut tip = indexer.update(&daemon)?;
info!("initial sync complete, tip at {}", tip);

let chain = Arc::new(ChainQuery::new(
Arc::clone(&store),
Expand All @@ -93,6 +99,7 @@ fn run_server(config: Arc<Config>, salt_rwlock: Arc<RwLock<String>>) -> Result<(
precache::precache(&chain, precache_scripthashes);
}

info!("loading mempool");
let mempool = Arc::new(RwLock::new(Mempool::new(
Arc::clone(&chain),
&metrics,
Expand Down Expand Up @@ -130,6 +137,8 @@ fn run_server(config: Arc<Config>, salt_rwlock: Arc<RwLock<String>>) -> Result<(
Arc::clone(&salt_rwlock),
);

info!("startup complete");

let main_loop_count = metrics.gauge(MetricOpts::new(
"electrs_main_loop_count",
"count of iterations of electrs main loop each 5 seconds or after interrupts",
Expand Down
6 changes: 4 additions & 2 deletions src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,7 @@ impl Config {
Arg::with_name("verbosity")
.short("v")
.multiple(true)
.help("Increase logging verbosity"),
.help("Increase logging verbosity (default: info, -v: debug, -vv: trace)"),
)
.arg(
Arg::with_name("timestamp")
Expand Down Expand Up @@ -464,7 +464,9 @@ impl Config {
.map(|s| serde_json::from_str(s).expect("invalid --electrum-public-hosts"));

let mut log = stderrlog::new();
log.verbosity(m.occurrences_of("verbosity") as usize);
// Base verbosity is 2 (Info), each -v flag adds one level:
// no flags = Info, -v = Debug, -vv = Trace
log.verbosity(2 + m.occurrences_of("verbosity") as usize);
log.timestamp(if m.is_present("timestamp") {
stderrlog::Timestamp::Millisecond
} else {
Expand Down
2 changes: 1 addition & 1 deletion src/daemon.rs
Original file line number Diff line number Diff line change
Expand Up @@ -789,7 +789,7 @@ impl Daemon {

result.append(&mut headers);

info!(
debug!(
"downloaded {}/{} block headers ({:.0}%)",
result.len(),
tip_height + 1,
Expand Down
2 changes: 1 addition & 1 deletion src/electrum/discovery.rs
Original file line number Diff line number Diff line change
Expand Up @@ -588,7 +588,7 @@ mod tests {

debug!("{:#?}", discovery);

info!("{}", json!(discovery.get_servers()));
debug!("{}", json!(discovery.get_servers()));

Ok(())
}
Expand Down
4 changes: 2 additions & 2 deletions src/electrum/server.rs
Original file line number Diff line number Diff line change
Expand Up @@ -870,7 +870,7 @@ impl RPC {
let salt = salt_rwlock.read().unwrap().clone();

let spawned = spawn_thread("peer", move || {
info!("[{}] connected peer", addr);
debug!("[{}] connected peer", addr);
let conn = Connection::new(
query,
stream,
Expand All @@ -884,7 +884,7 @@ impl RPC {
salt,
);
conn.run(receiver);
info!("[{}] disconnected peer", addr);
debug!("[{}] disconnected peer", addr);
let _ = garbage_sender.send(std::thread::current().id());
});

Expand Down
2 changes: 1 addition & 1 deletion src/new_index/db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ pub enum DBFlush {

impl DB {
pub fn open(path: &Path, config: &Config, verify_compat: bool) -> DB {
debug!("opening DB at {:?}", path);
info!("opening DB at {:?}", path);
let mut db_opts = rocksdb::Options::default();
db_opts.create_if_missing(true);
db_opts.set_max_open_files(100_000); // TODO: make sure to `ulimit -n` this process correctly
Expand Down
6 changes: 3 additions & 3 deletions src/new_index/schema.rs
Original file line number Diff line number Diff line change
Expand Up @@ -63,11 +63,11 @@ impl Store {

let txstore_db = DB::open(&path.join("txstore"), config, verify_compat);
let added_blockhashes = load_blockhashes(&txstore_db, &BlockRow::done_filter());
debug!("{} blocks were added", added_blockhashes.len());
info!("{} blocks were added", added_blockhashes.len());

let history_db = DB::open(&path.join("history"), config, verify_compat);
let indexed_blockhashes = load_blockhashes(&history_db, &BlockRow::done_filter());
debug!("{} blocks were indexed", indexed_blockhashes.len());
info!("{} blocks were indexed", indexed_blockhashes.len());

let cache_db = DB::open(&path.join("cache"), config, verify_compat);

Expand All @@ -92,7 +92,7 @@ impl Store {
.expect("invalid header chain")
.prev_blockhash;
}
debug!(
info!(
"{} headers were loaded, tip at {:?}",
headers_map.len(),
tip_hash
Expand Down
2 changes: 1 addition & 1 deletion src/rest.rs
Original file line number Diff line number Diff line change
Expand Up @@ -685,7 +685,7 @@ fn handle_request(
None => HashMap::new(),
};

info!("handle {:?} {:?}", method, uri);
debug!("handle {:?} {:?}", method, uri);
match (
&method,
path.get(0),
Expand Down
Loading