LCOV - code coverage report
Current view: top level - pageserver/src/tenant/remote_timeline_client - index.rs (source / functions)
Test: ccf45ed1c149555259baec52d6229a81013dcd6a.info
Test Date: 2024-08-21 17:32:46
Coverage: Lines: 86.9 % (518 of 596 hit), Functions: 41.3 % (43 of 104 hit)

            Line data    Source code
       1              : //! In-memory index to track the tenant files on the remote storage.
       2              : //! Able to restore itself from the index parts stored in remote storage, which are located in every timeline's remote directory and contain all data about
       3              : //! the remote timeline layers and their metadata.
       4              : 
       5              : use std::collections::HashMap;
       6              : 
       7              : use chrono::NaiveDateTime;
       8              : use pageserver_api::models::AuxFilePolicy;
       9              : use serde::{Deserialize, Serialize};
      10              : use utils::id::TimelineId;
      11              : 
      12              : use crate::tenant::metadata::TimelineMetadata;
      13              : use crate::tenant::storage_layer::LayerName;
      14              : use crate::tenant::Generation;
      15              : use pageserver_api::shard::ShardIndex;
      16              : 
      17              : use utils::lsn::Lsn;
      18              : 
      19              : /// In-memory representation of an `index_part.json` file
      20              : ///
      21              : /// Contains the data about all files in the timeline that are present remotely, and the timeline's metadata.
      22              : ///
      23              : /// This type needs to be backwards and forwards compatible. When changing the fields,
      24              : /// remember to add a test case for the changed version.
      25         2866 : #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
      26              : pub struct IndexPart {
      27              :     /// Debugging aid describing the version of this type.
      28              :     #[serde(default)]
      29              :     version: usize,
      30              : 
      31              :     #[serde(default)]
      32              :     #[serde(skip_serializing_if = "Option::is_none")]
      33              :     pub deleted_at: Option<NaiveDateTime>,
      34              : 
      35              :     #[serde(default)]
      36              :     #[serde(skip_serializing_if = "Option::is_none")]
      37              :     pub archived_at: Option<NaiveDateTime>,
      38              : 
      39              :     /// Per layer file name metadata, which can exist whether the layer file itself is present or missing.
      40              :     ///
      41              :     /// Older versions of `IndexPart` will not have this property, or will have only a part of the
      42              :     /// metadata that the latest version stores.
      43              :     pub layer_metadata: HashMap<LayerName, LayerFileMetadata>,
      44              : 
      45              :     /// Because of the trouble of eyeballing the legacy "metadata" field, we copied the
      46              :     /// "disk_consistent_lsn" out. After version 7 this is no longer needed, but the name cannot be
      47              :     /// reused.
      48              :     pub(super) disk_consistent_lsn: Lsn,
      49              : 
      50              :     // TODO: rename as "metadata" next week, keep the alias = "metadata_bytes", bump version. Adding
      51              :     // the "alias = metadata" was forgotten in #7693, so we have to use "rename = metadata_bytes"
      52              :     // for backwards compatibility.
      53              :     #[serde(
      54              :         rename = "metadata_bytes",
      55              :         alias = "metadata",
      56              :         with = "crate::tenant::metadata::modern_serde"
      57              :     )]
      58              :     pub metadata: TimelineMetadata,
      59              : 
      60              :     #[serde(default)]
      61              :     pub(crate) lineage: Lineage,
      62              : 
      63              :     #[serde(skip_serializing_if = "Option::is_none", default)]
      64              :     pub(crate) gc_blocking: Option<GcBlocking>,
      65              : 
      66              :     /// Describes the kind of aux files stored in the timeline.
      67              :     ///
      68              :     /// The value is updated during file ingestion, when the latest value requested via the tenant config is applied if it is acceptable.
      69              :     /// A V1 setting after V2 files have been committed is not accepted.
      70              :     ///
      71              :     /// None means no aux files have been written to the storage before the point
      72              :     /// when this flag is introduced.
      73              :     #[serde(skip_serializing_if = "Option::is_none", default)]
      74              :     pub(crate) last_aux_file_policy: Option<AuxFilePolicy>,
      75              : }
      76              : 
      77              : impl IndexPart {
      78              :     /// When adding or modifying any parts of `IndexPart`, increment the version so that it can be
      79              :     /// used to understand later versions.
      80              :     ///
      81              :     /// Version is currently informative only.
      82              :     /// Version history:
      83              :     /// - 2: added `deleted_at`
      84              :     /// - 3: no longer deserialize `timeline_layers` (serialized format is the same, but `timeline_layers`
      85              :     ///      is always generated from the keys of `layer_metadata`)
      86              :     /// - 4: `timeline_layers` is fully removed
      87              :     /// - 5: added `lineage`
      88              :     /// - 6: added `last_aux_file_policy`
      89              :     /// - 7: `metadata_bytes` is no longer written, but still read
      90              :     /// - 8: added `archived_at`
      91              :     /// - 9: added `gc_blocking`
      92              :     const LATEST_VERSION: usize = 9;
      93              : 
      94              :     // Versions we may see when reading from a bucket.
      95              :     pub const KNOWN_VERSIONS: &'static [usize] = &[1, 2, 3, 4, 5, 6, 7, 8, 9];
      96              : 
      97              :     pub const FILE_NAME: &'static str = "index_part.json";
      98              : 
      99          412 :     pub(crate) fn empty(metadata: TimelineMetadata) -> Self {
     100          412 :         IndexPart {
     101          412 :             version: Self::LATEST_VERSION,
     102          412 :             layer_metadata: Default::default(),
     103          412 :             disk_consistent_lsn: metadata.disk_consistent_lsn(),
     104          412 :             metadata,
     105          412 :             deleted_at: None,
     106          412 :             archived_at: None,
     107          412 :             lineage: Default::default(),
     108          412 :             gc_blocking: None,
     109          412 :             last_aux_file_policy: None,
     110          412 :         }
     111          412 :     }
     112              : 
     113            0 :     pub fn version(&self) -> usize {
     114            0 :         self.version
     115            0 :     }
     116              : 
     117              :     /// If you want this under normal operations, read it from self.metadata:
     118              :     /// this method is just for the scrubber to use when validating an index.
     119            0 :     pub fn duplicated_disk_consistent_lsn(&self) -> Lsn {
     120            0 :         self.disk_consistent_lsn
     121            0 :     }
     122              : 
     123           20 :     pub fn from_s3_bytes(bytes: &[u8]) -> Result<Self, serde_json::Error> {
     124           20 :         serde_json::from_slice::<IndexPart>(bytes)
     125           20 :     }
     126              : 
     127         1406 :     pub fn to_s3_bytes(&self) -> serde_json::Result<Vec<u8>> {
     128         1406 :         serde_json::to_vec(self)
     129         1406 :     }
     130              : 
     131              :     #[cfg(test)]
     132           12 :     pub(crate) fn example() -> Self {
     133           12 :         Self::empty(TimelineMetadata::example())
     134           12 :     }
     135              : 
     136           12 :     pub(crate) fn last_aux_file_policy(&self) -> Option<AuxFilePolicy> {
     137           12 :         self.last_aux_file_policy
     138           12 :     }
     139              : }
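
    For illustration, a minimal sketch of how the serialization helpers above could be exercised from another `#[cfg(test)]` module in this file; the module and test names are hypothetical, and `IndexPart::example()` is the test-only constructor shown above.

        #[cfg(test)]
        mod round_trip_sketch {
            use super::*;

            #[test]
            fn example_index_part_round_trips() {
                // Test-only constructor for the smallest valid `IndexPart`.
                let part = IndexPart::example();

                // `to_s3_bytes` produces the JSON that is uploaded as `index_part.json`.
                let bytes = part.to_s3_bytes().unwrap();

                // `from_s3_bytes` must accept anything the current version writes.
                let parsed = IndexPart::from_s3_bytes(&bytes).unwrap();
                assert_eq!(parsed, part);
            }
        }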
     140              : 
     141              : /// Metadata gathered for each of the layer files.
     142              : ///
      143              : /// Fields have to be `Option`s because remote [`IndexPart`]s can come from different versions, which
      144              : /// might have less or more metadata depending on whether we are upgrading or rolling back an upgrade.
     145        12660 : #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
     146              : pub struct LayerFileMetadata {
     147              :     pub file_size: u64,
     148              : 
     149              :     #[serde(default = "Generation::none")]
     150              :     #[serde(skip_serializing_if = "Generation::is_none")]
     151              :     pub generation: Generation,
     152              : 
     153              :     #[serde(default = "ShardIndex::unsharded")]
     154              :     #[serde(skip_serializing_if = "ShardIndex::is_unsharded")]
     155              :     pub shard: ShardIndex,
     156              : }
     157              : 
     158              : impl LayerFileMetadata {
     159         2434 :     pub fn new(file_size: u64, generation: Generation, shard: ShardIndex) -> Self {
     160         2434 :         LayerFileMetadata {
     161         2434 :             file_size,
     162         2434 :             generation,
     163         2434 :             shard,
     164         2434 :         }
     165         2434 :     }
     166              : }
     167              : 
     168              : /// Limited history of earlier ancestors.
     169              : ///
      170              : /// A timeline can have more than one earlier ancestor, in the rare case that it was repeatedly
      171              : /// reparented by having a later timeline detached from its ancestor.
     172           34 : #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize, Default)]
     173              : pub(crate) struct Lineage {
     174              :     /// Has the `reparenting_history` been truncated to [`Lineage::REMEMBER_AT_MOST`].
     175              :     #[serde(skip_serializing_if = "is_false", default)]
     176              :     reparenting_history_truncated: bool,
     177              : 
      178              :     /// Earlier ancestors, truncated when [`Self::reparenting_history_truncated`] is set.
     179              :     ///
     180              :     /// These are stored in case we want to support WAL based DR on the timeline. There can be many
     181              :     /// of these and at most one [`Self::original_ancestor`]. There cannot be more reparentings
     182              :     /// after [`Self::original_ancestor`] has been set.
     183              :     #[serde(skip_serializing_if = "Vec::is_empty", default)]
     184              :     reparenting_history: Vec<TimelineId>,
     185              : 
      186              :     /// The ancestor from which this timeline has been detached, and when.
     187              :     ///
      188              :     /// If you are adding support for detaching from a hierarchy, consider changing the ancestry
      189              :     /// into a `Vec<(TimelineId, Lsn)>` so that it represents a path instead.
     190              :     // FIXME: this is insufficient even for path of two timelines for future wal recovery
     191              :     // purposes:
     192              :     //
      193              :     // assuming an "old main" which has received most of the WAL, and has a branch "new main",
      194              :     // starting a bit before "old main"'s last_record_lsn. the current version works fine,
     195              :     // because we will know to replay wal and branch at the recorded Lsn to do wal recovery.
     196              :     //
     197              :     // then assuming "new main" would similarly receive a branch right before its last_record_lsn,
     198              :     // "new new main". the current implementation would just store ("new main", ancestor_lsn, _)
     199              :     // here. however, we cannot recover from WAL using only that information, we would need the
     200              :     // whole ancestry here:
     201              :     //
     202              :     // ```json
     203              :     // [
     204              :     //   ["old main", ancestor_lsn("new main"), _],
     205              :     //   ["new main", ancestor_lsn("new new main"), _]
     206              :     // ]
     207              :     // ```
     208              :     #[serde(skip_serializing_if = "Option::is_none", default)]
     209              :     original_ancestor: Option<(TimelineId, Lsn, NaiveDateTime)>,
     210              : }
     211              : 
     212         5732 : fn is_false(b: &bool) -> bool {
     213         5732 :     !b
     214         5732 : }
     215              : 
     216              : impl Lineage {
     217              :     const REMEMBER_AT_MOST: usize = 100;
     218              : 
     219            0 :     pub(crate) fn record_previous_ancestor(&mut self, old_ancestor: &TimelineId) -> bool {
     220            0 :         if self.reparenting_history.last() == Some(old_ancestor) {
     221              :             // do not re-record it
     222            0 :             false
     223              :         } else {
     224              :             #[cfg(feature = "testing")]
     225              :             {
     226            0 :                 let existing = self
     227            0 :                     .reparenting_history
     228            0 :                     .iter()
     229            0 :                     .position(|x| x == old_ancestor);
     230            0 :                 assert_eq!(
     231              :                     existing, None,
     232            0 :                     "we cannot reparent onto and off and onto the same timeline twice"
     233              :                 );
     234              :             }
     235            0 :             let drop_oldest = self.reparenting_history.len() + 1 >= Self::REMEMBER_AT_MOST;
     236            0 : 
     237            0 :             self.reparenting_history_truncated |= drop_oldest;
     238            0 :             if drop_oldest {
     239            0 :                 self.reparenting_history.remove(0);
     240            0 :             }
     241            0 :             self.reparenting_history.push(*old_ancestor);
     242            0 :             true
     243              :         }
     244            0 :     }
     245              : 
     246              :     /// Returns true if anything changed.
     247            0 :     pub(crate) fn record_detaching(&mut self, branchpoint: &(TimelineId, Lsn)) -> bool {
     248            0 :         if let Some((id, lsn, _)) = self.original_ancestor {
     249            0 :             assert_eq!(
     250            0 :                 &(id, lsn),
     251              :                 branchpoint,
     252            0 :                 "detaching attempt has to be for the same ancestor we are already detached from"
     253              :             );
     254            0 :             false
     255              :         } else {
     256            0 :             self.original_ancestor =
     257            0 :                 Some((branchpoint.0, branchpoint.1, chrono::Utc::now().naive_utc()));
     258            0 :             true
     259              :         }
     260            0 :     }
     261              : 
      262              :     /// The queried lsn is most likely the basebackup lsn, and this answers the question "is it allowed
      263              :     /// to start a read/write primary at this lsn".
     264              :     ///
     265              :     /// Returns true if the Lsn was previously our branch point.
     266            0 :     pub(crate) fn is_previous_ancestor_lsn(&self, lsn: Lsn) -> bool {
     267            0 :         self.original_ancestor
     268            0 :             .is_some_and(|(_, ancestor_lsn, _)| ancestor_lsn == lsn)
     269            0 :     }
     270              : 
     271              :     /// Returns true if the timeline originally had an ancestor, and no longer has one.
     272            0 :     pub(crate) fn is_detached_from_ancestor(&self) -> bool {
     273            0 :         self.original_ancestor.is_some()
     274            0 :     }
     275              : 
      276              :     /// Returns the original ancestor timeline id and lsn that this timeline has been detached from.
     277            0 :     pub(crate) fn detached_previous_ancestor(&self) -> Option<(TimelineId, Lsn)> {
     278            0 :         self.original_ancestor.map(|(id, lsn, _)| (id, lsn))
     279            0 :     }
     280              : 
     281            0 :     pub(crate) fn is_reparented(&self) -> bool {
     282            0 :         !self.reparenting_history.is_empty()
     283            0 :     }
     284              : }
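
    For illustration, a sketch of the detach bookkeeping provided by `Lineage` above, written as a hypothetical `#[cfg(test)]` module in this file; the timeline id and LSN values are arbitrary.

        #[cfg(test)]
        mod lineage_sketch {
            use super::*;
            use std::str::FromStr;

            #[test]
            fn detach_is_recorded_once() {
                let mut lineage = Lineage::default();
                let ancestor = TimelineId::from_str("e2bfd8c633d713d279e6fcd2bcc15b6d").unwrap();
                let lsn = Lsn::from_str("0/15A7618").unwrap();

                // The first detach records the branch point and reports a change...
                assert!(lineage.record_detaching(&(ancestor, lsn)));
                // ...repeating the same branch point is a no-op.
                assert!(!lineage.record_detaching(&(ancestor, lsn)));

                // The recorded LSN is the one at which a read/write primary may be started.
                assert!(lineage.is_previous_ancestor_lsn(lsn));
                assert!(lineage.is_detached_from_ancestor());
            }
        }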
     285              : 
     286            6 : #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
     287              : pub(crate) struct GcBlocking {
     288              :     pub(crate) started_at: NaiveDateTime,
     289              :     pub(crate) reasons: enumset::EnumSet<GcBlockingReason>,
     290              : }
     291              : 
     292            4 : #[derive(Debug, enumset::EnumSetType, serde::Serialize, serde::Deserialize)]
     293              : #[enumset(serialize_repr = "list")]
     294              : pub(crate) enum GcBlockingReason {
     295              :     Manual,
     296              :     DetachAncestor,
     297              : }
     298              : 
     299              : impl GcBlocking {
     300            0 :     pub(super) fn started_now_for(reason: GcBlockingReason) -> Self {
     301            0 :         GcBlocking {
     302            0 :             started_at: chrono::Utc::now().naive_utc(),
     303            0 :             reasons: enumset::EnumSet::only(reason),
     304            0 :         }
     305            0 :     }
     306              : 
     307              :     /// Returns true if the given reason is one of the reasons why the gc is blocked.
     308            0 :     pub(crate) fn blocked_by(&self, reason: GcBlockingReason) -> bool {
     309            0 :         self.reasons.contains(reason)
     310            0 :     }
     311              : 
     312              :     /// Returns a version of self with the given reason.
     313            0 :     pub(super) fn with_reason(&self, reason: GcBlockingReason) -> Self {
     314            0 :         assert!(!self.blocked_by(reason));
     315            0 :         let mut reasons = self.reasons;
     316            0 :         reasons.insert(reason);
     317            0 : 
     318            0 :         Self {
     319            0 :             started_at: self.started_at,
     320            0 :             reasons,
     321            0 :         }
     322            0 :     }
     323              : 
      324              :     /// Returns a version of self without the given reason. If no reasons would remain,
      325              :     /// returns `None`, signalling that the gc can be unblocked.
     326            0 :     pub(super) fn without_reason(&self, reason: GcBlockingReason) -> Option<Self> {
     327            0 :         assert!(self.blocked_by(reason));
     328              : 
     329            0 :         if self.reasons.len() == 1 {
     330            0 :             None
     331              :         } else {
     332            0 :             let mut reasons = self.reasons;
     333            0 :             assert!(reasons.remove(reason));
     334            0 :             assert!(!reasons.is_empty());
     335              : 
     336            0 :             Some(Self {
     337            0 :                 started_at: self.started_at,
     338            0 :                 reasons,
     339            0 :             })
     340              :         }
     341            0 :     }
     342              : }
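
    For illustration, a sketch of how the `GcBlocking` reason set above behaves, again as a hypothetical `#[cfg(test)]` module in this file.

        #[cfg(test)]
        mod gc_blocking_sketch {
            use super::*;

            #[test]
            fn reasons_accumulate_and_drain() {
                let blocking = GcBlocking::started_now_for(GcBlockingReason::DetachAncestor);
                assert!(blocking.blocked_by(GcBlockingReason::DetachAncestor));

                // Adding a reason keeps the original start time.
                let both = blocking.with_reason(GcBlockingReason::Manual);
                assert_eq!(both.started_at, blocking.started_at);

                // Removing one reason keeps the block while other reasons remain...
                let manual_only = both.without_reason(GcBlockingReason::DetachAncestor).unwrap();
                assert!(manual_only.blocked_by(GcBlockingReason::Manual));
                // ...and removing the last reason yields None, i.e. gc may run again.
                assert!(manual_only.without_reason(GcBlockingReason::Manual).is_none());
            }
        }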
     343              : 
     344              : #[cfg(test)]
     345              : mod tests {
     346              :     use super::*;
     347              :     use std::str::FromStr;
     348              :     use utils::id::TimelineId;
     349              : 
     350              :     #[test]
     351            2 :     fn v1_indexpart_is_parsed() {
     352            2 :         let example = r#"{
     353            2 :             "version":1,
     354            2 :             "timeline_layers":["000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9"],
     355            2 :             "layer_metadata":{
     356            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
     357            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
     358            2 :             },
     359            2 :             "disk_consistent_lsn":"0/16960E8",
     360            2 :             "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
     361            2 :         }"#;
     362            2 : 
     363            2 :         let expected = IndexPart {
     364            2 :             // note this is not verified, could be anything, but exists for humans debugging.. could be the git version instead?
     365            2 :             version: 1,
     366            2 :             layer_metadata: HashMap::from([
     367            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
     368            2 :                     file_size: 25600000,
     369            2 :                     generation: Generation::none(),
     370            2 :                     shard: ShardIndex::unsharded()
     371            2 :                 }),
     372            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
     373            2 :                     // serde_json should always parse this but this might be a double with jq for
     374            2 :                     // example.
     375            2 :                     file_size: 9007199254741001,
     376            2 :                     generation: Generation::none(),
     377            2 :                     shard: ShardIndex::unsharded()
     378            2 :                 })
     379            2 :             ]),
     380            2 :             disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
     381            2 :             metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
     382            2 :             deleted_at: None,
     383            2 :             archived_at: None,
     384            2 :             lineage: Lineage::default(),
     385            2 :             gc_blocking: None,
     386            2 :             last_aux_file_policy: None,
     387            2 :         };
     388            2 : 
     389            2 :         let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
     390            2 :         assert_eq!(part, expected);
     391            2 :     }
     392              : 
     393              :     #[test]
     394            2 :     fn v1_indexpart_is_parsed_with_optional_missing_layers() {
     395            2 :         let example = r#"{
     396            2 :             "version":1,
     397            2 :             "timeline_layers":["000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9"],
     398            2 :             "missing_layers":["This shouldn't fail deserialization"],
     399            2 :             "layer_metadata":{
     400            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
     401            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
     402            2 :             },
     403            2 :             "disk_consistent_lsn":"0/16960E8",
     404            2 :             "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
     405            2 :         }"#;
     406            2 : 
     407            2 :         let expected = IndexPart {
     408            2 :             // note this is not verified, could be anything, but exists for humans debugging.. could be the git version instead?
     409            2 :             version: 1,
     410            2 :             layer_metadata: HashMap::from([
     411            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
     412            2 :                     file_size: 25600000,
     413            2 :                     generation: Generation::none(),
     414            2 :                     shard: ShardIndex::unsharded()
     415            2 :                 }),
     416            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
     417            2 :                     // serde_json should always parse this but this might be a double with jq for
     418            2 :                     // example.
     419            2 :                     file_size: 9007199254741001,
     420            2 :                     generation: Generation::none(),
     421            2 :                     shard: ShardIndex::unsharded()
     422            2 :                 })
     423            2 :             ]),
     424            2 :             disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
     425            2 :             metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
     426            2 :             deleted_at: None,
     427            2 :             archived_at: None,
     428            2 :             lineage: Lineage::default(),
     429            2 :             gc_blocking: None,
     430            2 :             last_aux_file_policy: None,
     431            2 :         };
     432            2 : 
     433            2 :         let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
     434            2 :         assert_eq!(part, expected);
     435            2 :     }
     436              : 
     437              :     #[test]
     438            2 :     fn v2_indexpart_is_parsed_with_deleted_at() {
     439            2 :         let example = r#"{
     440            2 :             "version":2,
     441            2 :             "timeline_layers":["000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9"],
     442            2 :             "missing_layers":["This shouldn't fail deserialization"],
     443            2 :             "layer_metadata":{
     444            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
     445            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
     446            2 :             },
     447            2 :             "disk_consistent_lsn":"0/16960E8",
     448            2 :             "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
     449            2 :             "deleted_at": "2023-07-31T09:00:00.123"
     450            2 :         }"#;
     451            2 : 
     452            2 :         let expected = IndexPart {
     453            2 :             // note this is not verified, could be anything, but exists for humans debugging.. could be the git version instead?
     454            2 :             version: 2,
     455            2 :             layer_metadata: HashMap::from([
     456            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
     457            2 :                     file_size: 25600000,
     458            2 :                     generation: Generation::none(),
     459            2 :                     shard: ShardIndex::unsharded()
     460            2 :                 }),
     461            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
     462            2 :                     // serde_json should always parse this but this might be a double with jq for
     463            2 :                     // example.
     464            2 :                     file_size: 9007199254741001,
     465            2 :                     generation: Generation::none(),
     466            2 :                     shard: ShardIndex::unsharded()
     467            2 :                 })
     468            2 :             ]),
     469            2 :             disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
     470            2 :             metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
     471            2 :             deleted_at: Some(parse_naive_datetime("2023-07-31T09:00:00.123000000")),
     472            2 :             archived_at: None,
     473            2 :             lineage: Lineage::default(),
     474            2 :             gc_blocking: None,
     475            2 :             last_aux_file_policy: None,
     476            2 :         };
     477            2 : 
     478            2 :         let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
     479            2 :         assert_eq!(part, expected);
     480            2 :     }
     481              : 
     482              :     #[test]
     483            2 :     fn empty_layers_are_parsed() {
     484            2 :         let empty_layers_json = r#"{
     485            2 :             "version":1,
     486            2 :             "timeline_layers":[],
     487            2 :             "layer_metadata":{},
     488            2 :             "disk_consistent_lsn":"0/2532648",
     489            2 :             "metadata_bytes":[136,151,49,208,0,70,0,4,0,0,0,0,2,83,38,72,1,0,0,0,0,2,83,38,32,1,87,198,240,135,97,119,45,125,38,29,155,161,140,141,255,210,0,0,0,0,2,83,38,72,0,0,0,0,1,73,240,192,0,0,0,0,1,73,240,192,0,0,0,15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
     490            2 :         }"#;
     491            2 : 
     492            2 :         let expected = IndexPart {
     493            2 :             version: 1,
     494            2 :             layer_metadata: HashMap::new(),
     495            2 :             disk_consistent_lsn: "0/2532648".parse::<Lsn>().unwrap(),
     496            2 :             metadata: TimelineMetadata::from_bytes(&[
     497            2 :                 136, 151, 49, 208, 0, 70, 0, 4, 0, 0, 0, 0, 2, 83, 38, 72, 1, 0, 0, 0, 0, 2, 83,
     498            2 :                 38, 32, 1, 87, 198, 240, 135, 97, 119, 45, 125, 38, 29, 155, 161, 140, 141, 255,
     499            2 :                 210, 0, 0, 0, 0, 2, 83, 38, 72, 0, 0, 0, 0, 1, 73, 240, 192, 0, 0, 0, 0, 1, 73,
     500            2 :                 240, 192, 0, 0, 0, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     501            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     502            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     503            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     504            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     505            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     506            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     507            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     508            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     509            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     510            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     511            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     512            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     513            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     514            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     515            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     516            2 :                 0, 0,
     517            2 :             ])
     518            2 :             .unwrap(),
     519            2 :             deleted_at: None,
     520            2 :             archived_at: None,
     521            2 :             lineage: Lineage::default(),
     522            2 :             gc_blocking: None,
     523            2 :             last_aux_file_policy: None,
     524            2 :         };
     525            2 : 
     526            2 :         let empty_layers_parsed = IndexPart::from_s3_bytes(empty_layers_json.as_bytes()).unwrap();
     527            2 : 
     528            2 :         assert_eq!(empty_layers_parsed, expected);
     529            2 :     }
     530              : 
     531              :     #[test]
     532            2 :     fn v4_indexpart_is_parsed() {
     533            2 :         let example = r#"{
     534            2 :             "version":4,
     535            2 :             "layer_metadata":{
     536            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
     537            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
     538            2 :             },
     539            2 :             "disk_consistent_lsn":"0/16960E8",
     540            2 :             "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
     541            2 :             "deleted_at": "2023-07-31T09:00:00.123"
     542            2 :         }"#;
     543            2 : 
     544            2 :         let expected = IndexPart {
     545            2 :             version: 4,
     546            2 :             layer_metadata: HashMap::from([
     547            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
     548            2 :                     file_size: 25600000,
     549            2 :                     generation: Generation::none(),
     550            2 :                     shard: ShardIndex::unsharded()
     551            2 :                 }),
     552            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
     553            2 :                     // serde_json should always parse this but this might be a double with jq for
     554            2 :                     // example.
     555            2 :                     file_size: 9007199254741001,
     556            2 :                     generation: Generation::none(),
     557            2 :                     shard: ShardIndex::unsharded()
     558            2 :                 })
     559            2 :             ]),
     560            2 :             disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
     561            2 :             metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
     562            2 :             deleted_at: Some(parse_naive_datetime("2023-07-31T09:00:00.123000000")),
     563            2 :             archived_at: None,
     564            2 :             lineage: Lineage::default(),
     565            2 :             gc_blocking: None,
     566            2 :             last_aux_file_policy: None,
     567            2 :         };
     568            2 : 
     569            2 :         let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
     570            2 :         assert_eq!(part, expected);
     571            2 :     }
     572              : 
     573              :     #[test]
     574            2 :     fn v5_indexpart_is_parsed() {
     575            2 :         let example = r#"{
     576            2 :             "version":5,
     577            2 :             "layer_metadata":{
     578            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000014EF420-00000000014EF499":{"file_size":23289856,"generation":1},
     579            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000014EF499-00000000015A7619":{"file_size":1015808,"generation":1}},
     580            2 :                 "disk_consistent_lsn":"0/15A7618",
     581            2 :                 "metadata_bytes":[226,88,25,241,0,46,0,4,0,0,0,0,1,90,118,24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,78,244,32,0,0,0,0,1,78,244,32,0,0,0,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
     582            2 :                 "lineage":{
     583            2 :                     "original_ancestor":["e2bfd8c633d713d279e6fcd2bcc15b6d","0/15A7618","2024-05-07T18:52:36.322426563"],
     584            2 :                     "reparenting_history":["e1bfd8c633d713d279e6fcd2bcc15b6d"]
     585            2 :                 }
     586            2 :         }"#;
     587            2 : 
     588            2 :         let expected = IndexPart {
     589            2 :             version: 5,
     590            2 :             layer_metadata: HashMap::from([
     591            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000014EF420-00000000014EF499".parse().unwrap(), LayerFileMetadata {
     592            2 :                     file_size: 23289856,
     593            2 :                     generation: Generation::new(1),
     594            2 :                     shard: ShardIndex::unsharded(),
     595            2 :                 }),
     596            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000014EF499-00000000015A7619".parse().unwrap(), LayerFileMetadata {
     597            2 :                     file_size: 1015808,
     598            2 :                     generation: Generation::new(1),
     599            2 :                     shard: ShardIndex::unsharded(),
     600            2 :                 })
     601            2 :             ]),
     602            2 :             disk_consistent_lsn: Lsn::from_str("0/15A7618").unwrap(),
     603            2 :             metadata: TimelineMetadata::from_bytes(&[226,88,25,241,0,46,0,4,0,0,0,0,1,90,118,24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,78,244,32,0,0,0,0,1,78,244,32,0,0,0,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
     604            2 :             deleted_at: None,
     605            2 :             archived_at: None,
     606            2 :             lineage: Lineage {
     607            2 :                 reparenting_history_truncated: false,
     608            2 :                 reparenting_history: vec![TimelineId::from_str("e1bfd8c633d713d279e6fcd2bcc15b6d").unwrap()],
     609            2 :                 original_ancestor: Some((TimelineId::from_str("e2bfd8c633d713d279e6fcd2bcc15b6d").unwrap(), Lsn::from_str("0/15A7618").unwrap(), parse_naive_datetime("2024-05-07T18:52:36.322426563"))),
     610            2 :             },
     611            2 :             gc_blocking: None,
     612            2 :             last_aux_file_policy: None,
     613            2 :         };
     614            2 : 
     615            2 :         let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
     616            2 :         assert_eq!(part, expected);
     617            2 :     }
     618              : 
     619              :     #[test]
     620            2 :     fn v6_indexpart_is_parsed() {
     621            2 :         let example = r#"{
     622            2 :             "version":6,
     623            2 :             "layer_metadata":{
     624            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
     625            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
     626            2 :             },
     627            2 :             "disk_consistent_lsn":"0/16960E8",
     628            2 :             "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
     629            2 :             "deleted_at": "2023-07-31T09:00:00.123",
     630            2 :             "lineage":{
     631            2 :                 "original_ancestor":["e2bfd8c633d713d279e6fcd2bcc15b6d","0/15A7618","2024-05-07T18:52:36.322426563"],
     632            2 :                 "reparenting_history":["e1bfd8c633d713d279e6fcd2bcc15b6d"]
     633            2 :             },
     634            2 :             "last_aux_file_policy": "V2"
     635            2 :         }"#;
     636            2 : 
     637            2 :         let expected = IndexPart {
     638            2 :             version: 6,
     639            2 :             layer_metadata: HashMap::from([
     640            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
     641            2 :                     file_size: 25600000,
     642            2 :                     generation: Generation::none(),
     643            2 :                     shard: ShardIndex::unsharded()
     644            2 :                 }),
     645            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
     646            2 :                     // serde_json should always parse this but this might be a double with jq for
     647            2 :                     // example.
     648            2 :                     file_size: 9007199254741001,
     649            2 :                     generation: Generation::none(),
     650            2 :                     shard: ShardIndex::unsharded()
     651            2 :                 })
     652            2 :             ]),
     653            2 :             disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
     654            2 :             metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
     655            2 :             deleted_at: Some(parse_naive_datetime("2023-07-31T09:00:00.123000000")),
     656            2 :             archived_at: None,
     657            2 :             lineage: Lineage {
     658            2 :                 reparenting_history_truncated: false,
     659            2 :                 reparenting_history: vec![TimelineId::from_str("e1bfd8c633d713d279e6fcd2bcc15b6d").unwrap()],
     660            2 :                 original_ancestor: Some((TimelineId::from_str("e2bfd8c633d713d279e6fcd2bcc15b6d").unwrap(), Lsn::from_str("0/15A7618").unwrap(), parse_naive_datetime("2024-05-07T18:52:36.322426563"))),
     661            2 :             },
     662            2 :             gc_blocking: None,
     663            2 :             last_aux_file_policy: Some(AuxFilePolicy::V2),
     664            2 :         };
     665            2 : 
     666            2 :         let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
     667            2 :         assert_eq!(part, expected);
     668            2 :     }
     669              : 
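                       :     // Unlike the v6 payload above, this example carries "metadata" as a structured
                       :     // JSON object rather than the raw "metadata_bytes" array.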
     670              :     #[test]
     671            2 :     fn v7_indexpart_is_parsed() {
     672            2 :         let example = r#"{
     673            2 :             "version": 7,
     674            2 :             "layer_metadata":{
     675            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
     676            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
     677            2 :             },
     678            2 :             "disk_consistent_lsn":"0/16960E8",
     679            2 :             "metadata": {
     680            2 :                 "disk_consistent_lsn": "0/16960E8",
     681            2 :                 "prev_record_lsn": "0/1696070",
     682            2 :                 "ancestor_timeline": "e45a7f37d3ee2ff17dc14bf4f4e3f52e",
     683            2 :                 "ancestor_lsn": "0/0",
     684            2 :                 "latest_gc_cutoff_lsn": "0/1696070",
     685            2 :                 "initdb_lsn": "0/1696070",
     686            2 :                 "pg_version": 14
     687            2 :             },
     688            2 :             "deleted_at": "2023-07-31T09:00:00.123"
     689            2 :         }"#;
     690            2 : 
     691            2 :         let expected = IndexPart {
     692            2 :             version: 7,
     693            2 :             layer_metadata: HashMap::from([
     694            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
     695            2 :                     file_size: 25600000,
     696            2 :                     generation: Generation::none(),
     697            2 :                     shard: ShardIndex::unsharded()
     698            2 :                 }),
     699            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
     700            2 :                     file_size: 9007199254741001,
     701            2 :                     generation: Generation::none(),
     702            2 :                     shard: ShardIndex::unsharded()
     703            2 :                 })
     704            2 :             ]),
     705            2 :             disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
     706            2 :             metadata: TimelineMetadata::new(
     707            2 :                 Lsn::from_str("0/16960E8").unwrap(),
     708            2 :                 Some(Lsn::from_str("0/1696070").unwrap()),
     709            2 :                 Some(TimelineId::from_str("e45a7f37d3ee2ff17dc14bf4f4e3f52e").unwrap()),
     710            2 :                 Lsn::INVALID,
     711            2 :                 Lsn::from_str("0/1696070").unwrap(),
     712            2 :                 Lsn::from_str("0/1696070").unwrap(),
     713            2 :                 14,
     714            2 :             ).with_recalculated_checksum().unwrap(),
     715            2 :             deleted_at: Some(parse_naive_datetime("2023-07-31T09:00:00.123000000")),
     716            2 :             archived_at: None,
     717            2 :             lineage: Default::default(),
     718            2 :             gc_blocking: None,
     719            2 :             last_aux_file_policy: Default::default(),
     720            2 :         };
     721            2 : 
     722            2 :         let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
     723            2 :         assert_eq!(part, expected);
     724            2 :     }
     725              : 
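                       :     // Compared to the v7 payload above, this example additionally carries an
                       :     // "archived_at" timestamp next to "deleted_at".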
     726              :     #[test]
     727            2 :     fn v8_indexpart_is_parsed() {
     728            2 :         let example = r#"{
     729            2 :             "version": 8,
     730            2 :             "layer_metadata":{
     731            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
     732            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
     733            2 :             },
     734            2 :             "disk_consistent_lsn":"0/16960E8",
     735            2 :             "metadata": {
     736            2 :                 "disk_consistent_lsn": "0/16960E8",
     737            2 :                 "prev_record_lsn": "0/1696070",
     738            2 :                 "ancestor_timeline": "e45a7f37d3ee2ff17dc14bf4f4e3f52e",
     739            2 :                 "ancestor_lsn": "0/0",
     740            2 :                 "latest_gc_cutoff_lsn": "0/1696070",
     741            2 :                 "initdb_lsn": "0/1696070",
     742            2 :                 "pg_version": 14
     743            2 :             },
     744            2 :             "deleted_at": "2023-07-31T09:00:00.123",
     745            2 :             "archived_at": "2023-04-29T09:00:00.123"
     746            2 :         }"#;
     747            2 : 
     748            2 :         let expected = IndexPart {
     749            2 :             version: 8,
     750            2 :             layer_metadata: HashMap::from([
     751            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
     752            2 :                     file_size: 25600000,
     753            2 :                     generation: Generation::none(),
     754            2 :                     shard: ShardIndex::unsharded()
     755            2 :                 }),
     756            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
     757            2 :                     file_size: 9007199254741001,
     758            2 :                     generation: Generation::none(),
     759            2 :                     shard: ShardIndex::unsharded()
     760            2 :                 })
     761            2 :             ]),
     762            2 :             disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
     763            2 :             metadata: TimelineMetadata::new(
     764            2 :                 Lsn::from_str("0/16960E8").unwrap(),
     765            2 :                 Some(Lsn::from_str("0/1696070").unwrap()),
     766            2 :                 Some(TimelineId::from_str("e45a7f37d3ee2ff17dc14bf4f4e3f52e").unwrap()),
     767            2 :                 Lsn::INVALID,
     768            2 :                 Lsn::from_str("0/1696070").unwrap(),
     769            2 :                 Lsn::from_str("0/1696070").unwrap(),
     770            2 :                 14,
     771            2 :             ).with_recalculated_checksum().unwrap(),
     772            2 :             deleted_at: Some(parse_naive_datetime("2023-07-31T09:00:00.123000000")),
     773            2 :             archived_at: Some(parse_naive_datetime("2023-04-29T09:00:00.123000000")),
     774            2 :             lineage: Default::default(),
     775            2 :             gc_blocking: None,
     776            2 :             last_aux_file_policy: Default::default(),
     777            2 :         };
     778            2 : 
     779            2 :         let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
     780            2 :         assert_eq!(part, expected);
     781            2 :     }
     782              : 
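                       :     // Compared to the v8 payload above, this example carries a "gc_blocking" entry
                       :     // and omits the "deleted_at"/"archived_at" timestamps.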
     783              :     #[test]
     784            2 :     fn v9_indexpart_is_parsed() {
     785            2 :         let example = r#"{
     786            2 :             "version": 9,
     787            2 :             "layer_metadata":{
     788            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
     789            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
     790            2 :             },
     791            2 :             "disk_consistent_lsn":"0/16960E8",
     792            2 :             "metadata": {
     793            2 :                 "disk_consistent_lsn": "0/16960E8",
     794            2 :                 "prev_record_lsn": "0/1696070",
     795            2 :                 "ancestor_timeline": "e45a7f37d3ee2ff17dc14bf4f4e3f52e",
     796            2 :                 "ancestor_lsn": "0/0",
     797            2 :                 "latest_gc_cutoff_lsn": "0/1696070",
     798            2 :                 "initdb_lsn": "0/1696070",
     799            2 :                 "pg_version": 14
     800            2 :             },
     801            2 :             "gc_blocking": {
     802            2 :                 "started_at": "2024-07-19T09:00:00.123",
     803            2 :                 "reasons": ["DetachAncestor"]
     804            2 :             }
     805            2 :         }"#;
     806            2 : 
     807            2 :         let expected = IndexPart {
     808            2 :             version: 9,
     809            2 :             layer_metadata: HashMap::from([
     810            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
     811            2 :                     file_size: 25600000,
     812            2 :                     generation: Generation::none(),
     813            2 :                     shard: ShardIndex::unsharded()
     814            2 :                 }),
     815            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
     816            2 :                     file_size: 9007199254741001,
     817            2 :                     generation: Generation::none(),
     818            2 :                     shard: ShardIndex::unsharded()
     819            2 :                 })
     820            2 :             ]),
     821            2 :             disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
     822            2 :             metadata: TimelineMetadata::new(
     823            2 :                 Lsn::from_str("0/16960E8").unwrap(),
     824            2 :                 Some(Lsn::from_str("0/1696070").unwrap()),
     825            2 :                 Some(TimelineId::from_str("e45a7f37d3ee2ff17dc14bf4f4e3f52e").unwrap()),
     826            2 :                 Lsn::INVALID,
     827            2 :                 Lsn::from_str("0/1696070").unwrap(),
     828            2 :                 Lsn::from_str("0/1696070").unwrap(),
     829            2 :                 14,
     830            2 :             ).with_recalculated_checksum().unwrap(),
     831            2 :             deleted_at: None,
     832            2 :             lineage: Default::default(),
     833            2 :             gc_blocking: Some(GcBlocking {
     834            2 :                 started_at: parse_naive_datetime("2024-07-19T09:00:00.123000000"),
     835            2 :                 reasons: enumset::EnumSet::from_iter([GcBlockingReason::DetachAncestor]),
     836            2 :             }),
     837            2 :             last_aux_file_policy: Default::default(),
     838            2 :             archived_at: None,
     839            2 :         };
     840            2 : 
     841            2 :         let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
     842            2 :         assert_eq!(part, expected);
     843            2 :     }
     844              : 
     845           18 :     fn parse_naive_datetime(s: &str) -> NaiveDateTime {
     846           18 :         chrono::NaiveDateTime::parse_from_str(s, "%Y-%m-%dT%H:%M:%S.%f").unwrap()
     847           18 :     }
     848              : }
        

Generated by: LCOV version 2.1-beta