LCOV - code coverage report
Current view: top level - libs/utils/src/http - endpoint.rs (source / functions)
Test:      8ac049b474321fdc72ddcb56d7165153a1a900e8.info
Test Date: 2023-09-06 10:18:01
Coverage:     Hit   Total   Rate
  Lines:      329   380     86.6 %
  Functions:  205   365     56.2 %

            Line data    Source code
       1              : use crate::auth::{Claims, JwtAuth};
       2              : use crate::http::error::{api_error_handler, route_error_handler, ApiError};
       3              : use anyhow::Context;
       4              : use hyper::header::{HeaderName, AUTHORIZATION};
       5              : use hyper::http::HeaderValue;
       6              : use hyper::Method;
       7              : use hyper::{header::CONTENT_TYPE, Body, Request, Response};
       8              : use metrics::{register_int_counter, Encoder, IntCounter, TextEncoder};
       9              : use once_cell::sync::Lazy;
      10              : use routerify::ext::RequestExt;
      11              : use routerify::{Middleware, RequestInfo, Router, RouterBuilder};
      12              : use tracing::{self, debug, info, info_span, warn, Instrument};
      13              : 
      14              : use std::future::Future;
      15              : use std::str::FromStr;
      16              : 
      17           69 : static SERVE_METRICS_COUNT: Lazy<IntCounter> = Lazy::new(|| {
      18           69 :     register_int_counter!(
      19           69 :         "libmetrics_metric_handler_requests_total",
      20           69 :         "Number of metric requests made"
      21           69 :     )
      22           69 :     .expect("failed to define a metric")
      23           69 : });
      24              : 
      25              : static X_REQUEST_ID_HEADER_STR: &str = "x-request-id";
      26              : 
      27              : static X_REQUEST_ID_HEADER: HeaderName = HeaderName::from_static(X_REQUEST_ID_HEADER_STR);
      28        15943 : #[derive(Debug, Default, Clone)]
      29              : struct RequestId(String);
      30              : 
       31              : /// Adds tracing `info_span!` instrumentation around the handler's events, and
      32              : /// logs the request start and end events for non-GET requests and non-200 responses.
      33              : ///
      34              : /// Usage: Replace `my_handler` with `|r| request_span(r, my_handler)`
      35              : ///
      36              : /// Use this to distinguish between logs of different HTTP requests: every request handler wrapped
      37              : /// with this will get request info logged in the wrapping span, including the unique request ID.
      38              : ///
      39              : /// This also handles errors, logging them and converting them to an HTTP error response.
      40              : ///
      41              : /// NB: If the client disconnects, Hyper will drop the Future, without polling it to
      42              : /// completion. In other words, the handler must be async cancellation safe! request_span
       43              : /// prints a warning when that happens, so that you have some trace of it in
      44              : /// the log.
      45              : ///
      46              : ///
      47              : /// There could be other ways to implement similar functionality:
      48              : ///
       49              : /// * proc macros placed on top of all handler methods.
       50              : /// With all the drawbacks of proc macros, this brings no implementation difference
       51              : /// and little code reduction compared to the existing approach.
      52              : ///
      53              : /// * Another `TraitExt` with e.g. the `get_with_span`, `post_with_span` methods to do similar logic,
      54              : /// implemented for [`RouterBuilder`].
       55              : /// Could be simpler, but we don't want to depend on [`routerify`] any further, as we aim to switch to another library later.
      56              : ///
      57              : /// * In theory, a span guard could've been created in a pre-request middleware and placed into a global collection, to be dropped
      58              : /// later, in a post-response middleware.
       59              : /// Due to the suspendable nature of futures, this would give contradictory results, which is exactly the opposite of what `tracing-futures`
       60              : /// tries to achieve with its `.instrument`, as used in the current approach.
      61              : ///
      62              : /// If needed, a declarative macro to substitute the |r| ... closure boilerplate could be introduced.
      63         7962 : pub async fn request_span<R, H>(request: Request<Body>, handler: H) -> R::Output
      64         7962 : where
      65         7962 :     R: Future<Output = Result<Response<Body>, ApiError>> + Send + 'static,
      66         7962 :     H: FnOnce(Request<Body>) -> R + Send + Sync + 'static,
      67         7962 : {
      68         7962 :     let request_id = request.context::<RequestId>().unwrap_or_default().0;
      69         7962 :     let method = request.method();
      70         7962 :     let path = request.uri().path();
      71         7962 :     let request_span = info_span!("request", %method, %path, %request_id);
      72              : 
      73         7962 :     let log_quietly = method == Method::GET;
      74         7962 :     async move {
      75         7962 :         let cancellation_guard = RequestCancelled::warn_when_dropped_without_responding();
      76         7962 :         if log_quietly {
      77         4541 :             debug!("Handling request");
      78              :         } else {
      79         3421 :             info!("Handling request");
      80              :         }
      81              : 
      82              :         // No special handling for panics here. There's a `tracing_panic_hook` from another
      83              :         // module to do that globally.
      84         7962 :         let res = handler(request).await;
      85              : 
      86         7960 :         cancellation_guard.disarm();
      87         7960 : 
      88         7960 :         // Log the result if needed.
      89         7960 :         //
      90         7960 :         // We also convert any errors into an Ok response with HTTP error code here.
      91         7960 :         // `make_router` sets a last-resort error handler that would do the same, but
      92         7960 :         // we prefer to do it here, before we exit the request span, so that the error
      93         7960 :         // is still logged with the span.
      94         7960 :         //
      95         7960 :         // (Because we convert errors to Ok response, we never actually return an error,
      96         7960 :         // and we could declare the function to return the never type (`!`). However,
      97         7960 :         // using `routerify::RouterBuilder` requires a proper error type.)
      98         7960 :         match res {
      99         7707 :             Ok(response) => {
     100         7707 :                 let response_status = response.status();
     101         7707 :                 if log_quietly && response_status.is_success() {
     102         4361 :                     debug!("Request handled, status: {response_status}");
     103              :                 } else {
     104         3346 :                     info!("Request handled, status: {response_status}");
     105              :                 }
     106         7707 :                 Ok(response)
     107              :             }
     108          253 :             Err(err) => Ok(api_error_handler(err)),
     109              :         }
     110         7960 :     }
     111         7962 :     .instrument(request_span)
     112         6695 :     .await
     113         7960 : }
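
Editor's example (not part of the coverage listing): a minimal sketch of wrapping a handler with `request_span` when registering a route, following the usage note in the doc comment above. The handler name and the "/v1/status" path are hypothetical.

    async fn my_status_handler(_req: Request<Body>) -> Result<Response<Body>, ApiError> {
        Ok(Response::builder().body(Body::from("ok")).unwrap())
    }

    fn example_router_with_span() -> RouterBuilder<hyper::Body, ApiError> {
        // `make_router()` (defined later in this file) already installs the request-id
        // middlewares; the closure shape is the documented `|r| request_span(r, handler)`.
        make_router().get("/v1/status", |r| request_span(r, my_status_handler))
    }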
     114              : 
     115              : /// Drop guard to WARN in case the request was dropped before completion.
     116              : struct RequestCancelled {
     117              :     warn: Option<tracing::Span>,
     118              : }
     119              : 
     120              : impl RequestCancelled {
     121              :     /// Create the drop guard using the [`tracing::Span::current`] as the span.
     122         7962 :     fn warn_when_dropped_without_responding() -> Self {
     123         7962 :         RequestCancelled {
     124         7962 :             warn: Some(tracing::Span::current()),
     125         7962 :         }
     126         7962 :     }
     127              : 
     128              :     /// Consume the drop guard without logging anything.
     129         7960 :     fn disarm(mut self) {
     130         7960 :         self.warn = None;
     131         7960 :     }
     132              : }
     133              : 
     134              : impl Drop for RequestCancelled {
     135         7962 :     fn drop(&mut self) {
     136         7962 :         if std::thread::panicking() {
     137            0 :             // we are unwinding due to panicking, assume we are not dropped for cancellation
     138         7962 :         } else if let Some(span) = self.warn.take() {
     139              :             // the span has all of the info already, but the outer `.instrument(span)` has already
     140              :             // been dropped, so we need to manually re-enter it for this message.
     141              :             //
     142              :             // this is what the instrument would do before polling so it is fine.
     143            2 :             let _g = span.entered();
     144            2 :             warn!("request was dropped before completing");
     145         7960 :         }
     146         7962 :     }
     147              : }
     148              : 
     149          416 : async fn prometheus_metrics_handler(_req: Request<Body>) -> Result<Response<Body>, ApiError> {
     150          416 :     use bytes::{Bytes, BytesMut};
     151          416 :     use std::io::Write as _;
     152          416 :     use tokio::sync::mpsc;
     153          416 :     use tokio_stream::wrappers::ReceiverStream;
     154          416 : 
     155          416 :     SERVE_METRICS_COUNT.inc();
     156          416 : 
     157          416 :     /// An [`std::io::Write`] implementation on top of a channel sending [`bytes::Bytes`] chunks.
     158          416 :     struct ChannelWriter {
     159          416 :         buffer: BytesMut,
     160          416 :         tx: mpsc::Sender<std::io::Result<Bytes>>,
     161          416 :         written: usize,
     162          416 :     }
     163          416 : 
     164          416 :     impl ChannelWriter {
     165          416 :         fn new(buf_len: usize, tx: mpsc::Sender<std::io::Result<Bytes>>) -> Self {
     166          416 :             assert_ne!(buf_len, 0);
     167          416 :             ChannelWriter {
     168          416 :                 // split about half off the buffer from the start, because we flush depending on
     169          416 :                 // capacity. first flush will come sooner than without this, but now resizes will
      170          416 :                 // have a better chance of picking up the "other" half. not guaranteed, of course.
     171          416 :                 buffer: BytesMut::with_capacity(buf_len).split_off(buf_len / 2),
     172          416 :                 tx,
     173          416 :                 written: 0,
     174          416 :             }
     175          416 :         }
     176          416 : 
     177          584 :         fn flush0(&mut self) -> std::io::Result<usize> {
     178          584 :             let n = self.buffer.len();
     179          584 :             if n == 0 {
     180          416 :                 return Ok(0);
     181          584 :             }
     182          584 : 
     183          584 :             tracing::trace!(n, "flushing");
     184          584 :             let ready = self.buffer.split().freeze();
     185          584 : 
      186          584 :             // not ideal to call block_on from blocking code, but we are sure that this
     187          584 :             // operation does not spawn_blocking other tasks
     188          584 :             let res: Result<(), ()> = tokio::runtime::Handle::current().block_on(async {
     189          584 :                 self.tx.send(Ok(ready)).await.map_err(|_| ())?;
     190          416 : 
     191          416 :                 // throttle sending to allow reuse of our buffer in `write`.
     192          584 :                 self.tx.reserve().await.map_err(|_| ())?;
     193          416 : 
     194          416 :                 // now the response task has picked up the buffer and hopefully started
     195          416 :                 // sending it to the client.
     196          584 :                 Ok(())
     197          584 :             });
     198          584 :             if res.is_err() {
     199          416 :                 return Err(std::io::ErrorKind::BrokenPipe.into());
     200          584 :             }
     201          584 :             self.written += n;
     202          584 :             Ok(n)
     203          584 :         }
     204          416 : 
     205          416 :         fn flushed_bytes(&self) -> usize {
     206          416 :             self.written
     207          416 :         }
     208          416 :     }
     209          416 : 
     210          416 :     impl std::io::Write for ChannelWriter {
     211      3991299 :         fn write(&mut self, mut buf: &[u8]) -> std::io::Result<usize> {
     212      3991299 :             let remaining = self.buffer.capacity() - self.buffer.len();
     213      3991299 : 
     214      3991299 :             let out_of_space = remaining < buf.len();
     215      3991299 : 
     216      3991299 :             let original_len = buf.len();
     217      3991299 : 
     218      3991299 :             if out_of_space {
     219          416 :                 let can_still_fit = buf.len() - remaining;
     220          168 :                 self.buffer.extend_from_slice(&buf[..can_still_fit]);
     221          168 :                 buf = &buf[can_still_fit..];
     222          168 :                 self.flush0()?;
     223      3991131 :             }
     224          416 : 
      225          416 :             // assume that under normal operation this will often just move the pointer back to
      226          416 :             // the beginning of the allocation, because previously split-off parts have already been sent and
     227          416 :             // dropped.
     228      3991299 :             self.buffer.extend_from_slice(buf);
     229      3991299 :             Ok(original_len)
     230      3991299 :         }
     231          416 : 
     232          416 :         fn flush(&mut self) -> std::io::Result<()> {
     233          416 :             self.flush0().map(|_| ())
     234          416 :         }
     235          416 :     }
     236          416 : 
     237          416 :     let started_at = std::time::Instant::now();
     238          416 : 
     239          416 :     let (tx, rx) = mpsc::channel(1);
     240          416 : 
     241          416 :     let body = Body::wrap_stream(ReceiverStream::new(rx));
     242          416 : 
     243          416 :     let mut writer = ChannelWriter::new(128 * 1024, tx);
     244          416 : 
     245          416 :     let encoder = TextEncoder::new();
     246          416 : 
     247          416 :     let response = Response::builder()
     248          416 :         .status(200)
     249          416 :         .header(CONTENT_TYPE, encoder.format_type())
     250          416 :         .body(body)
     251          416 :         .unwrap();
     252              : 
     253          416 :     let span = info_span!("blocking");
     254          416 :     tokio::task::spawn_blocking(move || {
     255          416 :         let _span = span.entered();
     256          416 :         let metrics = metrics::gather();
     257          416 :         let res = encoder
     258          416 :             .encode(&metrics, &mut writer)
     259          416 :             .and_then(|_| writer.flush().map_err(|e| e.into()));
     260          416 : 
     261          416 :         match res {
     262              :             Ok(()) => {
     263          416 :                 tracing::info!(
     264          416 :                     bytes = writer.flushed_bytes(),
     265          416 :                     elapsed_ms = started_at.elapsed().as_millis(),
     266          416 :                     "responded /metrics"
     267          416 :                 );
     268              :             }
     269            0 :             Err(e) => {
     270            0 :                 tracing::warn!("failed to write out /metrics response: {e:#}");
      271              :                 // semantics of this error are quite... unclear. we want to error out the stream to
      272              :                 // abort the response and somehow notify the client that we failed.
     273              :                 //
     274              :                 // though, most likely the reason for failure is that the receiver is already gone.
     275            0 :                 drop(
     276            0 :                     writer
     277            0 :                         .tx
     278            0 :                         .blocking_send(Err(std::io::ErrorKind::BrokenPipe.into())),
     279            0 :                 );
     280              :             }
     281              :         }
     282          416 :     });
     283          416 : 
     284          416 :     Ok(response)
     285          416 : }
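
Editor's sketch (not part of the coverage listing): the streaming pattern used above in minimal form. A blocking task sends `Bytes` chunks over a bounded mpsc channel and hyper streams them to the client as the response body; the chunk contents below are placeholders, and the function assumes it runs inside a tokio runtime, like the handler above.

    fn example_streaming_response() -> Response<Body> {
        use tokio_stream::wrappers::ReceiverStream;

        let (tx, rx) = tokio::sync::mpsc::channel::<std::io::Result<bytes::Bytes>>(1);
        let body = Body::wrap_stream(ReceiverStream::new(rx));

        tokio::task::spawn_blocking(move || {
            for chunk in ["hello ", "world"] {
                // blocking_send applies backpressure: it waits until the response
                // stream has taken the previous chunk.
                if tx.blocking_send(Ok(bytes::Bytes::from(chunk))).is_err() {
                    break; // receiver dropped: the client went away
                }
            }
        });

        Response::builder().body(body).unwrap()
    }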
     286              : 
     287         1108 : pub fn add_request_id_middleware<B: hyper::body::HttpBody + Send + Sync + 'static>(
     288         1108 : ) -> Middleware<B, ApiError> {
     289         7983 :     Middleware::pre(move |req| async move {
     290         7983 :         let request_id = match req.headers().get(&X_REQUEST_ID_HEADER) {
     291            1 :             Some(request_id) => request_id
     292            1 :                 .to_str()
     293            1 :                 .expect("extract request id value")
     294            1 :                 .to_owned(),
     295              :             None => {
     296         7982 :                 let request_id = uuid::Uuid::new_v4();
     297         7982 :                 request_id.to_string()
     298              :             }
     299              :         };
     300         7983 :         req.set_context(RequestId(request_id));
     301         7983 : 
     302         7983 :         Ok(req)
     303         7983 :     })
     304         1108 : }
     305              : 
     306         7981 : async fn add_request_id_header_to_response(
     307         7981 :     mut res: Response<Body>,
     308         7981 :     req_info: RequestInfo,
     309         7981 : ) -> Result<Response<Body>, ApiError> {
     310         7981 :     if let Some(request_id) = req_info.context::<RequestId>() {
     311         7981 :         if let Ok(request_header_value) = HeaderValue::from_str(&request_id.0) {
     312         7981 :             res.headers_mut()
     313         7981 :                 .insert(&X_REQUEST_ID_HEADER, request_header_value);
     314         7981 :         };
     315            0 :     };
     316              : 
     317         7981 :     Ok(res)
     318         7981 : }
     319              : 
     320         1108 : pub fn make_router() -> RouterBuilder<hyper::Body, ApiError> {
     321         1108 :     Router::builder()
     322         1108 :         .middleware(add_request_id_middleware())
     323         1108 :         .middleware(Middleware::post_with_info(
     324         1108 :             add_request_id_header_to_response,
     325         1108 :         ))
     326         1108 :         .get("/metrics", |r| request_span(r, prometheus_metrics_handler))
     327         1108 :         .err_handler(route_error_handler)
     328         1108 : }
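
Editor's sketch (not part of the coverage listing): serving the router built by `make_router` with hyper, assuming routerify's `RouterService` and hyper's server feature are available in the versions in use (the tests below use the equivalent `RequestServiceBuilder`). The bind address is arbitrary.

    async fn example_serve() {
        let router = make_router().build().expect("build router");
        let service = routerify::RouterService::new(router).expect("create router service");
        let addr = std::net::SocketAddr::from(([127, 0, 0, 1], 9898));
        if let Err(e) = hyper::Server::bind(&addr).serve(service).await {
            eprintln!("http server error: {e}");
        }
    }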
     329              : 
     330          575 : pub fn attach_openapi_ui(
     331          575 :     router_builder: RouterBuilder<hyper::Body, ApiError>,
     332          575 :     spec: &'static [u8],
     333          575 :     spec_mount_path: &'static str,
     334          575 :     ui_mount_path: &'static str,
     335          575 : ) -> RouterBuilder<hyper::Body, ApiError> {
     336          575 :     router_builder
     337          575 :         .get(spec_mount_path,
     338          575 :             move |r| request_span(r, move |_| async move {
     339            0 :                 Ok(Response::builder().body(Body::from(spec)).unwrap())
     340          575 :             })
     341          575 :         )
     342          575 :         .get(ui_mount_path,
     343          575 :              move |r| request_span(r, move |_| async move {
     344            0 :                  Ok(Response::builder().body(Body::from(format!(r#"
     345            0 :                 <!DOCTYPE html>
     346            0 :                 <html lang="en">
     347            0 :                 <head>
     348            0 :                 <title>rweb</title>
     349            0 :                 <link href="https://cdn.jsdelivr.net/npm/swagger-ui-dist@3/swagger-ui.css" rel="stylesheet">
     350            0 :                 </head>
     351            0 :                 <body>
     352            0 :                     <div id="swagger-ui"></div>
     353            0 :                     <script src="https://cdn.jsdelivr.net/npm/swagger-ui-dist@3/swagger-ui-bundle.js" charset="UTF-8"> </script>
     354            0 :                     <script>
     355            0 :                         window.onload = function() {{
     356            0 :                         const ui = SwaggerUIBundle({{
     357            0 :                             "dom_id": "\#swagger-ui",
     358            0 :                             presets: [
     359            0 :                             SwaggerUIBundle.presets.apis,
     360            0 :                             SwaggerUIBundle.SwaggerUIStandalonePreset
     361            0 :                             ],
     362            0 :                             layout: "BaseLayout",
     363            0 :                             deepLinking: true,
     364            0 :                             showExtensions: true,
     365            0 :                             showCommonExtensions: true,
     366            0 :                             url: "{}",
     367            0 :                         }})
     368            0 :                         window.ui = ui;
     369            0 :                     }};
     370            0 :                 </script>
     371            0 :                 </body>
     372            0 :                 </html>
     373            0 :             "#, spec_mount_path))).unwrap())
     374          575 :              })
     375          575 :         )
     376          575 : }
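
Editor's sketch (not part of the coverage listing): attaching the OpenAPI spec and Swagger UI to a router. The spec bytes and mount paths are hypothetical; real callers typically embed the spec with `include_bytes!`.

    fn example_router_with_openapi() -> RouterBuilder<hyper::Body, ApiError> {
        static SPEC: &[u8] = b"openapi: \"3.0.2\""; // stand-in for an embedded spec file
        attach_openapi_ui(make_router(), SPEC, "/swagger.yml", "/swagger_ui")
    }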
     377              : 
     378           74 : fn parse_token(header_value: &str) -> Result<&str, ApiError> {
     379              :     // header must be in form Bearer <token>
     380           74 :     let (prefix, token) = header_value
     381           74 :         .split_once(' ')
     382           74 :         .ok_or_else(|| ApiError::Unauthorized("malformed authorization header".to_string()))?;
     383           74 :     if prefix != "Bearer" {
     384            0 :         return Err(ApiError::Unauthorized(
     385            0 :             "malformed authorization header".to_string(),
     386            0 :         ));
     387           74 :     }
     388           74 :     Ok(token)
     389           74 : }
     390              : 
     391           27 : pub fn auth_middleware<B: hyper::body::HttpBody + Send + Sync + 'static>(
     392           27 :     provide_auth: fn(&Request<Body>) -> Option<&JwtAuth>,
     393           27 : ) -> Middleware<B, ApiError> {
     394           27 :     Middleware::pre(move |req| async move {
     395          129 :         if let Some(auth) = provide_auth(&req) {
     396           79 :             match req.headers().get(AUTHORIZATION) {
     397           74 :                 Some(value) => {
     398           74 :                     let header_value = value.to_str().map_err(|_| {
     399            0 :                         ApiError::Unauthorized("malformed authorization header".to_string())
     400           74 :                     })?;
     401           74 :                     let token = parse_token(header_value)?;
     402              : 
     403           74 :                     let data = auth
     404           74 :                         .decode(token)
     405           74 :                         .map_err(|_| ApiError::Unauthorized("malformed jwt token".to_string()))?;
     406           74 :                     req.set_context(data.claims);
     407              :                 }
     408              :                 None => {
     409            5 :                     return Err(ApiError::Unauthorized(
     410            5 :                         "missing authorization header".to_string(),
     411            5 :                     ))
     412              :                 }
     413              :             }
     414           50 :         }
     415          124 :         Ok(req)
     416          129 :     })
     417           27 : }
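
Editor's sketch (not part of the coverage listing): enabling JWT auth on a router. `GLOBAL_AUTH` is a hypothetical static holding the authenticator; real callers usually resolve the `JwtAuth` from per-request state, which is why `provide_auth` receives the request.

    static GLOBAL_AUTH: once_cell::sync::OnceCell<JwtAuth> = once_cell::sync::OnceCell::new();

    fn example_router_with_auth() -> RouterBuilder<hyper::Body, ApiError> {
        // the non-capturing closure coerces to the `fn(&Request<Body>) -> Option<&JwtAuth>`
        // pointer that `auth_middleware` expects
        make_router().middleware(auth_middleware(|_req| GLOBAL_AUTH.get()))
    }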
     418              : 
     419          575 : pub fn add_response_header_middleware<B>(
     420          575 :     header: &str,
     421          575 :     value: &str,
     422          575 : ) -> anyhow::Result<Middleware<B, ApiError>>
     423          575 : where
     424          575 :     B: hyper::body::HttpBody + Send + Sync + 'static,
     425          575 : {
     426          575 :     let name =
     427          575 :         HeaderName::from_str(header).with_context(|| format!("invalid header name: {header}"))?;
     428          575 :     let value =
     429          575 :         HeaderValue::from_str(value).with_context(|| format!("invalid header value: {value}"))?;
     430          575 :     Ok(Middleware::post_with_info(
     431         6660 :         move |mut response, request_info| {
     432         6660 :             let name = name.clone();
     433         6660 :             let value = value.clone();
     434         6660 :             async move {
     435         6660 :                 let headers = response.headers_mut();
     436         6660 :                 if headers.contains_key(&name) {
     437            0 :                     warn!(
     438            0 :                         "{} response already contains header {:?}",
     439            0 :                         request_info.uri(),
     440            0 :                         &name,
     441            0 :                     );
     442         6660 :                 } else {
     443         6660 :                     headers.insert(name, value);
     444         6660 :                 }
     445         6660 :                 Ok(response)
     446         6660 :             }
     447         6660 :         },
     448          575 :     ))
     449          575 : }
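
Editor's sketch (not part of the coverage listing): attaching a fixed header to every response. The header name and value are made up for illustration.

    fn example_router_with_static_header() -> anyhow::Result<RouterBuilder<hyper::Body, ApiError>> {
        Ok(make_router()
            .middleware(add_response_header_middleware("server", "my-service/0.1.0")?))
    }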
     450              : 
     451         7299 : pub fn check_permission_with(
     452         7299 :     req: &Request<Body>,
     453         7299 :     check_permission: impl Fn(&Claims) -> Result<(), anyhow::Error>,
     454         7299 : ) -> Result<(), ApiError> {
     455         7299 :     match req.context::<Claims>() {
     456           74 :         Some(claims) => {
     457           74 :             Ok(check_permission(&claims).map_err(|err| ApiError::Forbidden(err.to_string()))?)
     458              :         }
     459         7225 :         None => Ok(()), // claims is None because auth is disabled
     460              :     }
     461         7299 : }
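
Editor's sketch (not part of the coverage listing): typical use of `check_permission_with` inside a handler. `has_required_scope` is a hypothetical check; when auth is disabled there are no `Claims` in the request context and the call is a no-op, as noted above.

    fn has_required_scope(_claims: &Claims) -> Result<(), anyhow::Error> {
        // hypothetical: inspect the claims and return Err(...) if they are insufficient
        Ok(())
    }

    async fn example_guarded_handler(req: Request<Body>) -> Result<Response<Body>, ApiError> {
        check_permission_with(&req, has_required_scope)?;
        Ok(Response::builder().body(Body::empty()).unwrap())
    }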
     462              : 
     463              : #[cfg(test)]
     464              : mod tests {
     465              :     use super::*;
     466              :     use futures::future::poll_fn;
     467              :     use hyper::service::Service;
     468              :     use routerify::RequestServiceBuilder;
     469              :     use std::net::{IpAddr, SocketAddr};
     470              : 
     471            1 :     #[tokio::test]
     472            1 :     async fn test_request_id_returned() {
     473            1 :         let builder = RequestServiceBuilder::new(make_router().build().unwrap()).unwrap();
     474            1 :         let remote_addr = SocketAddr::new(IpAddr::from_str("127.0.0.1").unwrap(), 80);
     475            1 :         let mut service = builder.build(remote_addr);
     476            1 :         if let Err(e) = poll_fn(|ctx| service.poll_ready(ctx)).await {
     477            0 :             panic!("request service is not ready: {:?}", e);
     478            1 :         }
     479            1 : 
     480            1 :         let mut req: Request<Body> = Request::default();
     481            1 :         req.headers_mut()
     482            1 :             .append(&X_REQUEST_ID_HEADER, HeaderValue::from_str("42").unwrap());
     483              : 
     484            1 :         let resp: Response<hyper::body::Body> = service.call(req).await.unwrap();
     485            1 : 
     486            1 :         let header_val = resp.headers().get(&X_REQUEST_ID_HEADER).unwrap();
     487            1 : 
     488            1 :         assert!(header_val == "42", "response header mismatch");
     489              :     }
     490              : 
     491            1 :     #[tokio::test]
     492            1 :     async fn test_request_id_empty() {
     493            1 :         let builder = RequestServiceBuilder::new(make_router().build().unwrap()).unwrap();
     494            1 :         let remote_addr = SocketAddr::new(IpAddr::from_str("127.0.0.1").unwrap(), 80);
     495            1 :         let mut service = builder.build(remote_addr);
     496            1 :         if let Err(e) = poll_fn(|ctx| service.poll_ready(ctx)).await {
     497            0 :             panic!("request service is not ready: {:?}", e);
     498            1 :         }
     499            1 : 
     500            1 :         let req: Request<Body> = Request::default();
     501            1 :         let resp: Response<hyper::body::Body> = service.call(req).await.unwrap();
     502            1 : 
     503            1 :         let header_val = resp.headers().get(&X_REQUEST_ID_HEADER);
     504            1 : 
     505            1 :         assert_ne!(header_val, None, "response header should NOT be empty");
     506              :     }
     507              : }
        

Generated by: LCOV version 2.1-beta