···
use dropshot::ConfigLogging;
use dropshot::ConfigLoggingLevel;
+use dropshot::HttpResponse;
use dropshot::RequestContext;
use dropshot::ServerBuilder;
+use dropshot::ServerContext;
+    header::{ORIGIN, USER_AGENT},
+use metrics::{counter, describe_counter, describe_histogram, histogram, Unit};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
+use std::future::Future;
+use std::time::Instant;
use std::time::{Duration, SystemTime, UNIX_EPOCH};
+fn describe_metrics() {
+        "server_requests_total",
+        "total requests handled"
+        "server_handler_latency",
+        "time to respond to a request in microseconds, excluding dropshot overhead"
+async fn instrument_handler<T, H, R>(ctx: &RequestContext<T>, handler: H) -> Result<R, HttpError>
+    H: Future<Output = Result<R, HttpError>>,
+    let start = Instant::now();
+    let result = handler.await;
+    let latency = start.elapsed();
+    let status_code = match &result {
+        Ok(response) => response.status_code(),
+        Err(ref e) => e.status_code.as_status(),
+    let endpoint = ctx.endpoint.operation_id.clone();
+    let headers = ctx.request.headers();
+        .and_then(|v| v.to_str().ok())
+        .and_then(|v| v.to_str().ok())
+    if ua.starts_with("Mozilla/5.0 ") {
+    counter!("server_requests_total",
+        "endpoint" => endpoint.clone(),
+        "status_code" => status_code,
+    histogram!("server_handler_latency", "endpoint" => endpoint).record(latency.as_micros() as f64);
pub spec: Arc<serde_json::Value>,
storage: Box<dyn StoreReader>,
···
+async fn index(ctx: RequestContext<Context>) -> Result<Response<Body>, HttpError> {
+    instrument_handler(&ctx, async {
+            .status(StatusCode::OK)
+            .header(http::header::CONTENT_TYPE, "text/html")
+            .body(INDEX_HTML.into())?)
/// Meta: get the openapi spec for this api
···
async fn get_openapi(ctx: RequestContext<Context>) -> OkCorsResponse<serde_json::Value> {
+    instrument_handler(&ctx, async {
+        let spec = (*ctx.context().spec).clone();
#[derive(Debug, Serialize, JsonSchema)]
···
|what| move |e| HttpError::for_internal_error(format!("failed to get {what}: {e:?}"));
+    instrument_handler(&ctx, async {
+        let storage_info = storage
+            .map_err(failed_to_get("storage info"))?;
+            .map_err(failed_to_get("consumer info"))?;
+            storage_name: storage.name(),
// TODO: replace with normal (🙃) multi-qs value somehow
···
collection_query: Query<RecordsCollectionsQuery>,
) -> OkCorsResponse<Vec<ApiRecord>> {
let Context { storage, .. } = ctx.context();
+    instrument_handler(&ctx, async {
+        let query = collection_query.into_inner();
+        let collections = if let Some(provided_collection) = query.collection {
+            to_multiple_nsids(&provided_collection)
+                .map_err(|reason| HttpError::for_bad_request(None, reason))?
+            let min_time_ago = SystemTime::now() - Duration::from_secs(86_400 * 3); // we want at least 3 days of data
+            let since: WeekTruncatedCursor = Cursor::at(min_time_ago).into();
+            let (collections, _) = storage
+                Some(since.try_as().unwrap()),
+                .map_err(|e| HttpError::for_internal_error(e.to_string()))?;
+                .map(|c| Nsid::new(c.nsid).unwrap())
+            .get_records_by_collections(collections, limit, true)
+            .map_err(|e| HttpError::for_internal_error(e.to_string()))?
#[derive(Debug, Deserialize, JsonSchema)]
···
query: Query<CollectionsStatsQuery>,
) -> OkCorsResponse<HashMap<String, JustCount>> {
let Context { storage, .. } = ctx.context();
+    instrument_handler(&ctx, async {
+        let q = query.into_inner();
+        let collections: HashSet<Nsid> = collections_query.try_into()?;
+        let since = q.since.map(dt_to_cursor).transpose()?.unwrap_or_else(|| {
+            let week_ago_secs = 7 * 86_400;
+            let week_ago = SystemTime::now() - Duration::from_secs(week_ago_secs);
+            Cursor::at(week_ago).into()
+        let until = q.until.map(dt_to_cursor).transpose()?;
+        let mut seen_by_collection = HashMap::with_capacity(collections.len());
+        for collection in &collections {
+                .get_collection_counts(collection, since, until)
+                .map_err(|e| HttpError::for_internal_error(format!("boooo: {e:?}")))?;
+            seen_by_collection.insert(collection.to_string(), counts);
+        OkCors(seen_by_collection).into()
#[derive(Debug, Serialize, JsonSchema)]
···
let Context { storage, .. } = ctx.context();
let q = query.into_inner();
+    instrument_handler(&ctx, async {
+        if q.cursor.is_some() && q.order.is_some() {
+                "`cursor` is mutually exclusive with `order`. ordered results cannot be paged.";
+            return Err(HttpError::for_bad_request(None, msg.to_string()));
+        let order = if let Some(ref o) = q.order {
+                .and_then(|c| if c.is_empty() { None } else { Some(c) })
+                .map(|c| URL_SAFE_NO_PAD.decode(&c))
+                .map_err(|e| HttpError::for_bad_request(None, format!("invalid cursor: {e:?}")))?;
+            OrderCollectionsBy::Lexi { cursor }
+        let limit = match (q.limit, q.order) {
+            (Some(limit), _) => limit,
+        if !(1..=200).contains(&limit) {
+            let msg = format!("limit not in 1..=200: {}", limit);
+            return Err(HttpError::for_bad_request(None, msg));
+        let since = q.since.map(dt_to_cursor).transpose()?;
+        let until = q.until.map(dt_to_cursor).transpose()?;
+        let (collections, next_cursor) = storage
+            .get_collections(limit, order, since, until)
+            .map_err(|e| HttpError::for_internal_error(format!("oh shoot: {e:?}")))?;
+        let next_cursor = next_cursor.map(|c| URL_SAFE_NO_PAD.encode(c));
+        OkCors(CollectionsResponse {
#[derive(Debug, Serialize, JsonSchema)]
···
let Context { storage, .. } = ctx.context();
let q = query.into_inner();
+    instrument_handler(&ctx, async {
+        let prefix = NsidPrefix::new(&q.prefix).map_err(|e| {
+            HttpError::for_bad_request(
+                format!("{:?} was not a valid NSID prefix: {e:?}", q.prefix),
+        if q.cursor.is_some() && q.order.is_some() {
+                "`cursor` is mutually exclusive with `order`. ordered results cannot be paged.";
+            return Err(HttpError::for_bad_request(None, msg.to_string()));
+        let order = if let Some(ref o) = q.order {
+                .and_then(|c| if c.is_empty() { None } else { Some(c) })
+                .map(|c| URL_SAFE_NO_PAD.decode(&c))
+                .map_err(|e| HttpError::for_bad_request(None, format!("invalid cursor: {e:?}")))?;
+            OrderCollectionsBy::Lexi { cursor }
+        let limit = match (q.limit, q.order) {
+            (Some(limit), _) => limit,
+        if !(1..=200).contains(&limit) {
+            let msg = format!("limit not in 1..=200: {}", limit);
+            return Err(HttpError::for_bad_request(None, msg));
+        let since = q.since.map(dt_to_cursor).transpose()?;
+        let until = q.until.map(dt_to_cursor).transpose()?;
+        let (total, children, next_cursor) = storage
+            .get_prefix(prefix, limit, order, since, until)
+            .map_err(|e| HttpError::for_internal_error(format!("oh shoot: {e:?}")))?;
+        let next_cursor = next_cursor.map(|c| URL_SAFE_NO_PAD.encode(c));
+        OkCors(PrefixResponse {
#[derive(Debug, Deserialize, JsonSchema)]
···
let Context { storage, .. } = ctx.context();
let q = query.into_inner();
+    instrument_handler(&ctx, async {
+        let since = q.since.map(dt_to_cursor).transpose()?.unwrap_or_else(|| {
+            let week_ago_secs = 7 * 86_400;
+            let week_ago = SystemTime::now() - Duration::from_secs(week_ago_secs);
+            Cursor::at(week_ago).into()
+        let until = q.until.map(dt_to_cursor).transpose()?;
+        let step = if let Some(secs) = q.step {
+                let msg = format!("step is too small: {}", secs);
+                Err(HttpError::for_bad_request(None, msg))?;
+            (secs / 3600) * 3600 // truncate to hour
+        let nsid = Nsid::new(q.collection).map_err(|e| {
+            HttpError::for_bad_request(None, format!("collection was not a valid NSID: {:?}", e))
+        let (range_cursors, series) = storage
+            .get_timeseries(vec![nsid], since, until, step)
+            .map_err(|e| HttpError::for_internal_error(format!("oh shoot: {e:?}")))?;
+        let range = range_cursors
+            .map(|c| DateTime::<Utc>::from_timestamp_micros(c.to_raw_u64() as i64).unwrap())
+            .map(|(k, v)| (k.to_string(), v.iter().map(Into::into).collect()))
+        OkCors(CollectionTimeseriesResponse { range, series }).into()
#[derive(Debug, Deserialize, JsonSchema)]
···
) -> OkCorsResponse<SearchResponse> {
let Context { storage, .. } = ctx.context();
let q = query.into_inner();
+    instrument_handler(&ctx, async {
+        // TODO: query validation
+        // TODO: also handle multi-space stuff (ufos-app tries to on client)
+        let terms: Vec<String> = q.q.split(' ').map(Into::into).collect();
+            .search_collections(terms)
+            .map_err(|e| HttpError::for_internal_error(format!("oh ugh: {e:?}")))?;
+        OkCors(SearchResponse { matches }).into()
pub async fn serve(storage: impl StoreReader + 'static) -> Result<(), String> {
let log = ConfigLogging::StderrTerminal {
+        level: ConfigLoggingLevel::Warn,
.map_err(|e| e.to_string())?;
let mut api = ApiDescription::new();