Skip to content

Commit 5f14ca2

Browse files
committed
ACME: use increasing intervals with timeout when polling.
Previously, we limited polling for challenge and order status to a fixed number of tries. The updated algorithm will use increasing intervals and give up when the total wait timeout has elapsed.
1 parent 23bd138 commit 5f14ca2

File tree

1 file changed

+56
-31
lines changed

1 file changed

+56
-31
lines changed

src/acme.rs

Lines changed: 56 additions & 31 deletions
Original file line number · Diff line number · Diff line change
@@ -32,6 +32,7 @@ pub mod solvers;
3232
pub mod types;
3333

3434
const DEFAULT_RETRY_INTERVAL: Duration = Duration::from_secs(1);
35+
const MAX_RETRY_INTERVAL: Duration = Duration::from_secs(8);
3536
static REPLAY_NONCE: http::HeaderName = http::HeaderName::from_static("replay-nonce");
3637

3738
pub struct NewCertificateOutput {
@@ -55,6 +56,9 @@ where
5556
nonce: NoncePool,
5657
directory: types::Directory,
5758
solvers: Vec<Box<dyn solvers::ChallengeSolver + Send + 'a>>,
59+
authorization_timeout: Duration,
60+
finalize_timeout: Duration,
61+
network_error_retries: usize,
5862
}
5963

6064
#[derive(Default)]
@@ -106,6 +110,9 @@ where
106110
nonce: Default::default(),
107111
directory: Default::default(),
108112
solvers: Vec::new(),
113+
authorization_timeout: Duration::from_secs(60),
114+
finalize_timeout: Duration::from_secs(60),
115+
network_error_retries: 3,
109116
})
110117
}
111118

@@ -152,14 +159,14 @@ where
152159
url: &Uri,
153160
payload: P,
154161
) -> Result<http::Response<Bytes>> {
155-
let mut fails = 0;
156-
157162
let mut nonce = if let Some(nonce) = self.nonce.get() {
158163
nonce
159164
} else {
160165
self.get_nonce().await?
161166
};
162167

168+
let mut tries = core::iter::repeat(DEFAULT_RETRY_INTERVAL).take(self.network_error_retries);
169+
163170
ngx_log_debug!(self.log.as_ptr(), "sending request to {url:?}");
164171
let res = loop {
165172
let body = crate::jws::sign_jws(
@@ -183,13 +190,15 @@ where
183190

184191
let res = match self.http.request(req).await {
185192
Ok(res) => res,
186-
Err(e) if fails >= 3 => return Err(e.into()),
187-
// TODO: limit retries to connection errors
188-
Err(_) => {
189-
fails += 1;
190-
sleep(DEFAULT_RETRY_INTERVAL).await;
191-
ngx_log_debug!(self.log.as_ptr(), "retrying: {} of 3", fails + 1);
192-
continue;
193+
Err(err) => {
194+
// TODO: limit retries to connection errors
195+
if let Some(tm) = tries.next() {
196+
sleep(tm).await;
197+
ngx_log_debug!(self.log.as_ptr(), "retrying failed request ({err})");
198+
continue;
199+
} else {
200+
return Err(err.into());
201+
}
193202
}
194203
};
195204

@@ -210,15 +219,13 @@ where
210219
types::ErrorKind::BadNonce | types::ErrorKind::RateLimited
211220
);
212221

213-
if !retriable || fails >= 3 {
214-
self.nonce.add(nonce);
215-
return Err(err.into());
222+
if retriable && wait_for_retry(&res, &mut tries).await {
223+
ngx_log_debug!(self.log.as_ptr(), "retrying failed request ({err})");
224+
continue;
216225
}
217226

218-
fails += 1;
219-
220-
wait_for_retry(&res).await;
221-
ngx_log_debug!(self.log.as_ptr(), "retrying: {} of 3", fails + 1);
227+
self.nonce.add(nonce);
228+
return Err(err.into());
222229
};
223230

224231
self.nonce.add_from_response(&res);
@@ -381,12 +388,9 @@ where
381388
}
382389
};
383390

384-
let mut tries = 10;
385-
386-
while order.status == OrderStatus::Processing && tries > 0 {
387-
tries -= 1;
388-
wait_for_retry(&res).await;
391+
let mut tries = backoff(MAX_RETRY_INTERVAL, self.finalize_timeout);
389392

393+
while order.status == OrderStatus::Processing && wait_for_retry(&res, &mut tries).await {
390394
drop(order);
391395
res = self.post(&order_url, b"").await?;
392396
order = serde_json::from_slice(res.body())?;
@@ -431,20 +435,18 @@ where
431435
return Err(anyhow!("unexpected challenge status {:?}", result.status));
432436
}
433437

434-
wait_for_retry(&res).await;
435-
436-
let mut tries = 10;
438+
let mut tries = backoff(MAX_RETRY_INTERVAL, self.authorization_timeout);
439+
wait_for_retry(&res, &mut tries).await;
437440

438441
let result = loop {
439442
let res = self.post(&url, b"").await?;
440443
let result: types::Authorization = serde_json::from_slice(res.body())?;
441444

442-
if result.status != AuthorizationStatus::Pending || tries == 0 {
445+
if result.status != AuthorizationStatus::Pending
446+
|| !wait_for_retry(&res, &mut tries).await
447+
{
443448
break result;
444449
}
445-
446-
tries -= 1;
447-
wait_for_retry(&res).await;
448450
};
449451

450452
ngx_log_debug!(
@@ -498,13 +500,36 @@ pub fn make_certificate_request(
498500
}
499501

500502
/// Waits until the next retry attempt is allowed.
501-
async fn wait_for_retry<B>(res: &http::Response<B>) {
503+
async fn wait_for_retry<B>(
504+
res: &http::Response<B>,
505+
policy: &mut impl Iterator<Item = Duration>,
506+
) -> bool {
507+
let Some(interval) = policy.next() else {
508+
return false;
509+
};
510+
502511
let retry_after = res
503512
.headers()
504513
.get(http::header::RETRY_AFTER)
505514
.and_then(parse_retry_after)
506-
.unwrap_or(DEFAULT_RETRY_INTERVAL);
507-
sleep(retry_after).await
515+
.unwrap_or(interval);
516+
517+
sleep(retry_after).await;
518+
true
519+
}
520+
521+
/// Generate increasing intervals saturated at `max` until `timeout` has passed.
522+
fn backoff(max: Duration, timeout: Duration) -> impl Iterator<Item = Duration> {
523+
let first = (Duration::ZERO, Duration::from_secs(1));
524+
let stop = Time::now() + timeout;
525+
526+
core::iter::successors(Some(first), move |prev: &(Duration, Duration)| {
527+
if Time::now() >= stop {
528+
return None;
529+
}
530+
Some((prev.1, prev.0.saturating_add(prev.1)))
531+
})
532+
.map(move |(_, x)| x.min(max))
508533
}
509534

510535
fn parse_retry_after(val: &http::HeaderValue) -> Option<Duration> {

0 commit comments

Comments (0)