LCOV - code coverage report
Current view: top level - pageserver/src/tenant/remote_timeline_client - index.rs (source / functions)
Test: 496e96cdfff2df79370229591d6427cda12fde29.info
Test Date: 2024-05-21 18:28:29
Coverage: Lines: 92.9 % (381 of 410 hit)   Functions: 58.0 % (29 of 50 hit)

            Line data    Source code
       1              : //! In-memory index to track the tenant files on the remote storage.
       2              : //! Able to restore itself from the storage index parts that are located in every timeline's remote directory and contain all data about
       3              : //! the remote timeline layers and their metadata.
       4              : 
       5              : use std::collections::HashMap;
       6              : 
       7              : use chrono::NaiveDateTime;
       8              : use pageserver_api::models::AuxFilePolicy;
       9              : use serde::{Deserialize, Serialize};
      10              : use utils::id::TimelineId;
      11              : 
      12              : use crate::tenant::metadata::TimelineMetadata;
      13              : use crate::tenant::storage_layer::LayerName;
      14              : use crate::tenant::upload_queue::UploadQueueInitialized;
      15              : use crate::tenant::Generation;
      16              : use pageserver_api::shard::ShardIndex;
      17              : 
      18              : use utils::lsn::Lsn;
      19              : 
      20              : /// Metadata gathered for each of the layer files.
      21              : ///
      22              : /// Fields have to be `Option`s because a remote [`IndexPart`] can be from a different version, which
      23              : /// might have more or less metadata depending on whether we are upgrading or rolling back an upgrade.
      24              : #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
      25              : //#[cfg_attr(test, derive(Default))]
      26              : pub struct LayerFileMetadata {
      27              :     file_size: u64,
      28              : 
      29              :     pub(crate) generation: Generation,
      30              : 
      31              :     pub(crate) shard: ShardIndex,
      32              : }
      33              : 
      34              : impl From<&'_ IndexLayerMetadata> for LayerFileMetadata {
      35           32 :     fn from(other: &IndexLayerMetadata) -> Self {
      36           32 :         LayerFileMetadata {
      37           32 :             file_size: other.file_size,
      38           32 :             generation: other.generation,
      39           32 :             shard: other.shard,
      40           32 :         }
      41           32 :     }
      42              : }
      43              : 
      44              : impl LayerFileMetadata {
      45         1661 :     pub fn new(file_size: u64, generation: Generation, shard: ShardIndex) -> Self {
      46         1661 :         LayerFileMetadata {
      47         1661 :             file_size,
      48         1661 :             generation,
      49         1661 :             shard,
      50         1661 :         }
      51         1661 :     }
      52              : 
      53         3699 :     pub fn file_size(&self) -> u64 {
      54         3699 :         self.file_size
      55         3699 :     }
      56              : }
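
The conversion pair between `LayerFileMetadata` (in-memory) and `IndexLayerMetadata` (serialized) is intended to be lossless. A minimal round-trip sketch, assuming it is placed in this module's `tests` submodule (the concrete values are illustrative only):

    #[test]
    fn layer_file_metadata_roundtrip_sketch() {
        // In-memory metadata as created by the upload path.
        let meta = LayerFileMetadata::new(25_600_000, Generation::new(1), ShardIndex::unsharded());

        // `IndexLayerMetadata` is the serialized form; converting back must not lose information.
        let serialized = IndexLayerMetadata::from(&meta);
        assert_eq!(LayerFileMetadata::from(&serialized), meta);
        assert_eq!(meta.file_size(), 25_600_000);
    }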
      57              : 
      58              : // TODO seems like another part of the remote storage file format
      59              : // compatibility issue, see https://github.com/neondatabase/neon/issues/3072
      60              : /// In-memory representation of an `index_part.json` file
      61              : ///
      62              : /// Contains data about all files present remotely for the timeline, and the timeline's metadata.
      63              : ///
      64              : /// This type needs to be backwards and forwards compatible. When changing the fields,
      65              : /// remember to add a test case for the changed version.
      66          224 : #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
      67              : pub struct IndexPart {
      68              :     /// Debugging aid describing the version of this type.
      69              :     #[serde(default)]
      70              :     version: usize,
      71              : 
      72              :     #[serde(default)]
      73              :     #[serde(skip_serializing_if = "Option::is_none")]
      74              :     pub deleted_at: Option<NaiveDateTime>,
      75              : 
      76              : /// Per layer file name metadata, which can exist for a present or missing layer file.
      77              :     ///
      78              : /// Older versions of `IndexPart` will not have this property, or will have only part of the metadata
      79              : /// that the latest version stores.
      80              :     pub layer_metadata: HashMap<LayerName, IndexLayerMetadata>,
      81              : 
      82              :     // 'disk_consistent_lsn' is a copy of the 'disk_consistent_lsn' in the metadata.
      83              :     // It's duplicated for convenience when reading the serialized structure, but is
      84              :     // private because internally we would read from metadata instead.
      85              :     disk_consistent_lsn: Lsn,
      86              : 
      87              :     #[serde(rename = "metadata_bytes")]
      88              :     pub metadata: TimelineMetadata,
      89              : 
      90              :     #[serde(default)]
      91              :     pub(crate) lineage: Lineage,
      92              : 
      93              :     /// Describes the kind of aux files stored in the timeline.
      94              :     ///
      95              : /// The value is modified during file ingestion, when the latest wanted value communicated via the tenant config is applied if it is acceptable.
      96              : /// For example, a V1 setting after V2 files have been committed is not accepted.
      97              : ///
      98              : /// `None` means no aux files had been written to the storage before the point
      99              : /// when this flag was introduced.
     100              :     #[serde(skip_serializing_if = "Option::is_none", default)]
     101              :     pub(crate) last_aux_file_policy: Option<AuxFilePolicy>,
     102              : }
     103              : 
     104              : impl IndexPart {
     105              :     /// When adding or modifying any parts of `IndexPart`, increment the version so that it can be
     106              :     /// used to understand later versions.
     107              :     ///
     108              :     /// Version is currently informative only.
     109              :     /// Version history
     110              :     /// - 2: added `deleted_at`
     111              :     /// - 3: no longer deserialize `timeline_layers` (serialized format is the same, but timeline_layers
     112              :     ///      is always generated from the keys of `layer_metadata`)
     113              :     /// - 4: timeline_layers is fully removed.
     114              :     /// - 5: lineage was added
     115              :     /// - 6: last_aux_file_policy is added.
     116              :     const LATEST_VERSION: usize = 6;
     117              : 
     118              :     // Versions we may see when reading from a bucket.
     119              :     pub const KNOWN_VERSIONS: &'static [usize] = &[1, 2, 3, 4, 5, 6];
     120              : 
     121              :     pub const FILE_NAME: &'static str = "index_part.json";
     122              : 
     123         1314 :     fn new(
     124         1314 :         layers_and_metadata: &HashMap<LayerName, LayerFileMetadata>,
     125         1314 :         disk_consistent_lsn: Lsn,
     126         1314 :         metadata: TimelineMetadata,
     127         1314 :         lineage: Lineage,
     128         1314 :         last_aux_file_policy: Option<AuxFilePolicy>,
     129         1314 :     ) -> Self {
     130         1314 :         let layer_metadata = layers_and_metadata
     131         1314 :             .iter()
     132        14400 :             .map(|(k, v)| (k.to_owned(), IndexLayerMetadata::from(v)))
     133         1314 :             .collect();
     134         1314 : 
     135         1314 :         Self {
     136         1314 :             version: Self::LATEST_VERSION,
     137         1314 :             layer_metadata,
     138         1314 :             disk_consistent_lsn,
     139         1314 :             metadata,
     140         1314 :             deleted_at: None,
     141         1314 :             lineage,
     142         1314 :             last_aux_file_policy,
     143         1314 :         }
     144         1314 :     }
     145              : 
     146            0 :     pub fn get_version(&self) -> usize {
     147            0 :         self.version
     148            0 :     }
     149              : 
     150              :     /// If you want this under normal operations, read it from self.metadata:
     151              :     /// this method is just for the scrubber to use when validating an index.
     152            0 :     pub fn get_disk_consistent_lsn(&self) -> Lsn {
     153            0 :         self.disk_consistent_lsn
     154            0 :     }
     155              : 
     156           14 :     pub fn from_s3_bytes(bytes: &[u8]) -> Result<Self, serde_json::Error> {
     157           14 :         serde_json::from_slice::<IndexPart>(bytes)
     158           14 :     }
     159              : 
     160         1265 :     pub fn to_s3_bytes(&self) -> serde_json::Result<Vec<u8>> {
     161         1265 :         serde_json::to_vec(self)
     162         1265 :     }
     163              : 
     164              :     #[cfg(test)]
     165           12 :     pub(crate) fn example() -> Self {
     166           12 :         let example_metadata = TimelineMetadata::example();
     167           12 :         Self::new(
     168           12 :             &HashMap::new(),
     169           12 :             example_metadata.disk_consistent_lsn(),
     170           12 :             example_metadata,
     171           12 :             Default::default(),
     172           12 :             Some(AuxFilePolicy::V1),
     173           12 :         )
     174           12 :     }
     175              : 
     176           18 :     pub(crate) fn last_aux_file_policy(&self) -> Option<AuxFilePolicy> {
     177           18 :         self.last_aux_file_policy
     178           18 :     }
     179              : }
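
`IndexPart` is what actually travels to and from remote storage as `index_part.json`. A small round-trip sketch, again assuming it lives in this module's `tests` submodule (it relies on the test-only `IndexPart::example()` helper defined above):

    #[test]
    fn index_part_roundtrip_sketch() {
        // Serialize the example the same way the upload path does.
        let part = IndexPart::example();
        let bytes = part.to_s3_bytes().unwrap();

        // Reading it back must yield an equal value, and the version must be one we know about.
        let roundtripped = IndexPart::from_s3_bytes(&bytes).unwrap();
        assert_eq!(roundtripped, part);
        assert!(IndexPart::KNOWN_VERSIONS.contains(&roundtripped.get_version()));
    }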
     180              : 
     181              : impl From<&UploadQueueInitialized> for IndexPart {
     182         1302 :     fn from(uq: &UploadQueueInitialized) -> Self {
     183         1302 :         let disk_consistent_lsn = uq.latest_metadata.disk_consistent_lsn();
     184         1302 :         let metadata = uq.latest_metadata.clone();
     185         1302 :         let lineage = uq.latest_lineage.clone();
     186         1302 : 
     187         1302 :         Self::new(
     188         1302 :             &uq.latest_files,
     189         1302 :             disk_consistent_lsn,
     190         1302 :             metadata,
     191         1302 :             lineage,
     192         1302 :             uq.last_aux_file_policy,
     193         1302 :         )
     194         1302 :     }
     195              : }
     196              : 
     197              : /// Serialized form of [`LayerFileMetadata`].
     198          124 : #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
     199              : pub struct IndexLayerMetadata {
     200              :     pub file_size: u64,
     201              : 
     202              :     #[serde(default = "Generation::none")]
     203              :     #[serde(skip_serializing_if = "Generation::is_none")]
     204              :     pub generation: Generation,
     205              : 
     206              :     #[serde(default = "ShardIndex::unsharded")]
     207              :     #[serde(skip_serializing_if = "ShardIndex::is_unsharded")]
     208              :     pub shard: ShardIndex,
     209              : }
     210              : 
     211              : impl From<&LayerFileMetadata> for IndexLayerMetadata {
     212        14400 :     fn from(other: &LayerFileMetadata) -> Self {
     213        14400 :         IndexLayerMetadata {
     214        14400 :             file_size: other.file_size,
     215        14400 :             generation: other.generation,
     216        14400 :             shard: other.shard,
     217        14400 :         }
     218        14400 :     }
     219              : }
     220              : 
     221              : /// Limited history of earlier ancestors.
     222              : ///
     223              : /// A timeline can have more than one earlier ancestor, in the rare case that it was repeatedly
     224              : /// reparented by having a later timeline be detached from its ancestor.
     225           32 : #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize, Default)]
     226              : pub(crate) struct Lineage {
     227              :     /// Has the `reparenting_history` been truncated to [`Lineage::REMEMBER_AT_MOST`].
     228              :     #[serde(skip_serializing_if = "is_false", default)]
     229              :     reparenting_history_truncated: bool,
     230              : 
     231              : /// Earlier ancestors, truncated when [`Self::reparenting_history_truncated`] is set.
     232              :     ///
     233              :     /// These are stored in case we want to support WAL based DR on the timeline. There can be many
     234              :     /// of these and at most one [`Self::original_ancestor`]. There cannot be more reparentings
     235              :     /// after [`Self::original_ancestor`] has been set.
     236              :     #[serde(skip_serializing_if = "Vec::is_empty", default)]
     237              :     reparenting_history: Vec<TimelineId>,
     238              : 
     239              : /// The ancestor from which this timeline was detached, and when.
     240              :     ///
     241              :     /// If you are adding support for detaching from a hierarchy, consider changing the ancestry
     242              :     /// into a `Vec<(TimelineId, Lsn)>` to be a path instead.
     243              :     #[serde(skip_serializing_if = "Option::is_none", default)]
     244              :     original_ancestor: Option<(TimelineId, Lsn, NaiveDateTime)>,
     245              : }
     246              : 
     247         2554 : fn is_false(b: &bool) -> bool {
     248         2554 :     !b
     249         2554 : }
     250              : 
     251              : impl Lineage {
     252              :     const REMEMBER_AT_MOST: usize = 100;
     253              : 
     254            0 :     pub(crate) fn record_previous_ancestor(&mut self, old_ancestor: &TimelineId) {
     255            0 :         if self.reparenting_history.last() == Some(old_ancestor) {
     256              :             // do not re-record it
     257            0 :             return;
     258            0 :         }
     259            0 : 
     260            0 :         let drop_oldest = self.reparenting_history.len() + 1 >= Self::REMEMBER_AT_MOST;
     261            0 : 
     262            0 :         self.reparenting_history_truncated |= drop_oldest;
     263            0 :         if drop_oldest {
     264            0 :             self.reparenting_history.remove(0);
     265            0 :         }
     266            0 :         self.reparenting_history.push(*old_ancestor);
     267            0 :     }
     268              : 
     269            0 :     pub(crate) fn record_detaching(&mut self, branchpoint: &(TimelineId, Lsn)) {
     270            0 :         assert!(self.original_ancestor.is_none());
     271              : 
     272            0 :         self.original_ancestor =
     273            0 :             Some((branchpoint.0, branchpoint.1, chrono::Utc::now().naive_utc()));
     274            0 :     }
     275              : 
     276              : /// The queried lsn is most likely the basebackup lsn, and this answers the question "is it allowed
     277              :     /// to start a read/write primary at this lsn".
     278              :     ///
     279              :     /// Returns true if the Lsn was previously a branch point.
     280            0 :     pub(crate) fn is_previous_ancestor_lsn(&self, lsn: Lsn) -> bool {
     281            0 :         self.original_ancestor
     282            0 :             .as_ref()
     283            0 :             .is_some_and(|(_, ancestor_lsn, _)| lsn == *ancestor_lsn)
     284            0 :     }
     285              : }
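
Putting the `Lineage` pieces together: repeated recordings of the same ancestor are deduplicated, a recorded branch point answers `is_previous_ancestor_lsn`, and a default `Lineage` serializes to an empty object because every field is skipped when empty. A sketch under the assumption that it runs inside this module's `tests` submodule (IDs and LSNs are illustrative):

    #[test]
    fn lineage_usage_sketch() {
        let mut lineage = Lineage::default();
        // All fields are skipped when empty, so the default serializes to "{}".
        assert_eq!(serde_json::to_string(&lineage).unwrap(), "{}");

        // Re-recording the same ancestor is a no-op; only distinct reparentings accumulate.
        let ancestor = TimelineId::from_str("e1bfd8c633d713d279e6fcd2bcc15b6d").unwrap();
        lineage.record_previous_ancestor(&ancestor);
        lineage.record_previous_ancestor(&ancestor);
        assert_eq!(lineage.reparenting_history.len(), 1);

        // After detaching, the branch point LSN is recognized as a previous ancestor LSN.
        let branchpoint = (ancestor, Lsn::from_str("0/15A7618").unwrap());
        lineage.record_detaching(&branchpoint);
        assert!(lineage.is_previous_ancestor_lsn(branchpoint.1));
    }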
     286              : 
     287              : #[cfg(test)]
     288              : mod tests {
     289              :     use std::str::FromStr;
     290              : 
     291              :     use super::*;
     292              : 
     293              :     #[test]
     294            2 :     fn v1_indexpart_is_parsed() {
     295            2 :         let example = r#"{
     296            2 :             "version":1,
     297            2 :             "timeline_layers":["000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9"],
     298            2 :             "layer_metadata":{
     299            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
     300            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
     301            2 :             },
     302            2 :             "disk_consistent_lsn":"0/16960E8",
     303            2 :             "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
     304            2 :         }"#;
     305            2 : 
     306            2 :         let expected = IndexPart {
     307            2 :             // note this is not verified, could be anything, but exists for humans debugging.. could be the git version instead?
     308            2 :             version: 1,
     309            2 :             layer_metadata: HashMap::from([
     310            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), IndexLayerMetadata {
     311            2 :                     file_size: 25600000,
     312            2 :                     generation: Generation::none(),
     313            2 :                     shard: ShardIndex::unsharded()
     314            2 :                 }),
     315            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), IndexLayerMetadata {
     316            2 :                     // serde_json should always parse this but this might be a double with jq for
     317            2 :                     // example.
     318            2 :                     file_size: 9007199254741001,
     319            2 :                     generation: Generation::none(),
     320            2 :                     shard: ShardIndex::unsharded()
     321            2 :                 })
     322            2 :             ]),
     323            2 :             disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
     324            2 :             metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
     325            2 :             deleted_at: None,
     326            2 :             lineage: Lineage::default(),
     327            2 :             last_aux_file_policy: None,
     328            2 :         };
     329            2 : 
     330            2 :         let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
     331            2 :         assert_eq!(part, expected);
     332            2 :     }
     333              : 
     334              :     #[test]
     335            2 :     fn v1_indexpart_is_parsed_with_optional_missing_layers() {
     336            2 :         let example = r#"{
     337            2 :             "version":1,
     338            2 :             "timeline_layers":["000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9"],
     339            2 :             "missing_layers":["This shouldn't fail deserialization"],
     340            2 :             "layer_metadata":{
     341            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
     342            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
     343            2 :             },
     344            2 :             "disk_consistent_lsn":"0/16960E8",
     345            2 :             "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
     346            2 :         }"#;
     347            2 : 
     348            2 :         let expected = IndexPart {
     349            2 :             // note this is not verified, could be anything, but exists for humans debugging.. could be the git version instead?
     350            2 :             version: 1,
     351            2 :             layer_metadata: HashMap::from([
     352            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), IndexLayerMetadata {
     353            2 :                     file_size: 25600000,
     354            2 :                     generation: Generation::none(),
     355            2 :                     shard: ShardIndex::unsharded()
     356            2 :                 }),
     357            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), IndexLayerMetadata {
     358            2 :                     // serde_json should always parse this but this might be a double with jq for
     359            2 :                     // example.
     360            2 :                     file_size: 9007199254741001,
     361            2 :                     generation: Generation::none(),
     362            2 :                     shard: ShardIndex::unsharded()
     363            2 :                 })
     364            2 :             ]),
     365            2 :             disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
     366            2 :             metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
     367            2 :             deleted_at: None,
     368            2 :             lineage: Lineage::default(),
     369            2 :             last_aux_file_policy: None,
     370            2 :         };
     371            2 : 
     372            2 :         let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
     373            2 :         assert_eq!(part, expected);
     374            2 :     }
     375              : 
     376              :     #[test]
     377            2 :     fn v2_indexpart_is_parsed_with_deleted_at() {
     378            2 :         let example = r#"{
     379            2 :             "version":2,
     380            2 :             "timeline_layers":["000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9"],
     381            2 :             "missing_layers":["This shouldn't fail deserialization"],
     382            2 :             "layer_metadata":{
     383            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
     384            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
     385            2 :             },
     386            2 :             "disk_consistent_lsn":"0/16960E8",
     387            2 :             "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
     388            2 :             "deleted_at": "2023-07-31T09:00:00.123"
     389            2 :         }"#;
     390            2 : 
     391            2 :         let expected = IndexPart {
     392            2 :             // note this is not verified, could be anything, but exists for humans debugging.. could be the git version instead?
     393            2 :             version: 2,
     394            2 :             layer_metadata: HashMap::from([
     395            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), IndexLayerMetadata {
     396            2 :                     file_size: 25600000,
     397            2 :                     generation: Generation::none(),
     398            2 :                     shard: ShardIndex::unsharded()
     399            2 :                 }),
     400            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), IndexLayerMetadata {
     401            2 :                     // serde_json should always parse this but this might be a double with jq for
     402            2 :                     // example.
     403            2 :                     file_size: 9007199254741001,
     404            2 :                     generation: Generation::none(),
     405            2 :                     shard: ShardIndex::unsharded()
     406            2 :                 })
     407            2 :             ]),
     408            2 :             disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
     409            2 :             metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
     410            2 :             deleted_at: Some(chrono::NaiveDateTime::parse_from_str(
     411            2 :                 "2023-07-31T09:00:00.123000000", "%Y-%m-%dT%H:%M:%S.%f").unwrap()),
     412            2 :             lineage: Lineage::default(),
     413            2 :             last_aux_file_policy: None,
     414            2 :         };
     415            2 : 
     416            2 :         let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
     417            2 :         assert_eq!(part, expected);
     418            2 :     }
     419              : 
     420              :     #[test]
     421            2 :     fn empty_layers_are_parsed() {
     422            2 :         let empty_layers_json = r#"{
     423            2 :             "version":1,
     424            2 :             "timeline_layers":[],
     425            2 :             "layer_metadata":{},
     426            2 :             "disk_consistent_lsn":"0/2532648",
     427            2 :             "metadata_bytes":[136,151,49,208,0,70,0,4,0,0,0,0,2,83,38,72,1,0,0,0,0,2,83,38,32,1,87,198,240,135,97,119,45,125,38,29,155,161,140,141,255,210,0,0,0,0,2,83,38,72,0,0,0,0,1,73,240,192,0,0,0,0,1,73,240,192,0,0,0,15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
     428            2 :         }"#;
     429            2 : 
     430            2 :         let expected = IndexPart {
     431            2 :             version: 1,
     432            2 :             layer_metadata: HashMap::new(),
     433            2 :             disk_consistent_lsn: "0/2532648".parse::<Lsn>().unwrap(),
     434            2 :             metadata: TimelineMetadata::from_bytes(&[
     435            2 :                 136, 151, 49, 208, 0, 70, 0, 4, 0, 0, 0, 0, 2, 83, 38, 72, 1, 0, 0, 0, 0, 2, 83,
     436            2 :                 38, 32, 1, 87, 198, 240, 135, 97, 119, 45, 125, 38, 29, 155, 161, 140, 141, 255,
     437            2 :                 210, 0, 0, 0, 0, 2, 83, 38, 72, 0, 0, 0, 0, 1, 73, 240, 192, 0, 0, 0, 0, 1, 73,
     438            2 :                 240, 192, 0, 0, 0, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     439            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     440            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     441            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     442            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     443            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     444            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     445            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     446            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     447            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     448            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     449            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     450            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     451            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     452            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     453            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     454            2 :                 0, 0,
     455            2 :             ])
     456            2 :             .unwrap(),
     457            2 :             deleted_at: None,
     458            2 :             lineage: Lineage::default(),
     459            2 :             last_aux_file_policy: None,
     460            2 :         };
     461            2 : 
     462            2 :         let empty_layers_parsed = IndexPart::from_s3_bytes(empty_layers_json.as_bytes()).unwrap();
     463            2 : 
     464            2 :         assert_eq!(empty_layers_parsed, expected);
     465            2 :     }
     466              : 
     467              :     #[test]
     468            2 :     fn v4_indexpart_is_parsed() {
     469            2 :         let example = r#"{
     470            2 :             "version":4,
     471            2 :             "layer_metadata":{
     472            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
     473            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
     474            2 :             },
     475            2 :             "disk_consistent_lsn":"0/16960E8",
     476            2 :             "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
     477            2 :             "deleted_at": "2023-07-31T09:00:00.123"
     478            2 :         }"#;
     479            2 : 
     480            2 :         let expected = IndexPart {
     481            2 :             version: 4,
     482            2 :             layer_metadata: HashMap::from([
     483            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), IndexLayerMetadata {
     484            2 :                     file_size: 25600000,
     485            2 :                     generation: Generation::none(),
     486            2 :                     shard: ShardIndex::unsharded()
     487            2 :                 }),
     488            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), IndexLayerMetadata {
     489            2 :                     // serde_json should always parse this but this might be a double with jq for
     490            2 :                     // example.
     491            2 :                     file_size: 9007199254741001,
     492            2 :                     generation: Generation::none(),
     493            2 :                     shard: ShardIndex::unsharded()
     494            2 :                 })
     495            2 :             ]),
     496            2 :             disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
     497            2 :             metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
     498            2 :             deleted_at: Some(parse_naive_datetime("2023-07-31T09:00:00.123000000")),
     499            2 :             lineage: Lineage::default(),
     500            2 :             last_aux_file_policy: None,
     501            2 :         };
     502            2 : 
     503            2 :         let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
     504            2 :         assert_eq!(part, expected);
     505            2 :     }
     506              : 
     507              :     #[test]
     508            2 :     fn v5_indexpart_is_parsed() {
     509            2 :         let example = r#"{
     510            2 :             "version":5,
     511            2 :             "layer_metadata":{
     512            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000014EF420-00000000014EF499":{"file_size":23289856,"generation":1},
     513            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000014EF499-00000000015A7619":{"file_size":1015808,"generation":1}},
     514            2 :                 "disk_consistent_lsn":"0/15A7618",
     515            2 :                 "metadata_bytes":[226,88,25,241,0,46,0,4,0,0,0,0,1,90,118,24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,78,244,32,0,0,0,0,1,78,244,32,0,0,0,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
     516            2 :                 "lineage":{
     517            2 :                     "original_ancestor":["e2bfd8c633d713d279e6fcd2bcc15b6d","0/15A7618","2024-05-07T18:52:36.322426563"],
     518            2 :                     "reparenting_history":["e1bfd8c633d713d279e6fcd2bcc15b6d"]
     519            2 :                 }
     520            2 :         }"#;
     521            2 : 
     522            2 :         let expected = IndexPart {
     523            2 :             version: 5,
     524            2 :             layer_metadata: HashMap::from([
     525            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000014EF420-00000000014EF499".parse().unwrap(), IndexLayerMetadata {
     526            2 :                     file_size: 23289856,
     527            2 :                     generation: Generation::new(1),
     528            2 :                     shard: ShardIndex::unsharded(),
     529            2 :                 }),
     530            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000014EF499-00000000015A7619".parse().unwrap(), IndexLayerMetadata {
     531            2 :                     file_size: 1015808,
     532            2 :                     generation: Generation::new(1),
     533            2 :                     shard: ShardIndex::unsharded(),
     534            2 :                 })
     535            2 :             ]),
     536            2 :             disk_consistent_lsn: Lsn::from_str("0/15A7618").unwrap(),
     537            2 :             metadata: TimelineMetadata::from_bytes(&[226,88,25,241,0,46,0,4,0,0,0,0,1,90,118,24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,78,244,32,0,0,0,0,1,78,244,32,0,0,0,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
     538            2 :             deleted_at: None,
     539            2 :             lineage: Lineage {
     540            2 :                 reparenting_history_truncated: false,
     541            2 :                 reparenting_history: vec![TimelineId::from_str("e1bfd8c633d713d279e6fcd2bcc15b6d").unwrap()],
     542            2 :                 original_ancestor: Some((TimelineId::from_str("e2bfd8c633d713d279e6fcd2bcc15b6d").unwrap(), Lsn::from_str("0/15A7618").unwrap(), parse_naive_datetime("2024-05-07T18:52:36.322426563"))),
     543            2 :             },
     544            2 :             last_aux_file_policy: None,
     545            2 :         };
     546            2 : 
     547            2 :         let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
     548            2 :         assert_eq!(part, expected);
     549            2 :     }
     550              : 
     551              :     #[test]
     552            2 :     fn v6_indexpart_is_parsed() {
     553            2 :         let example = r#"{
     554            2 :             "version":6,
     555            2 :             "layer_metadata":{
     556            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
     557            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
     558            2 :             },
     559            2 :             "disk_consistent_lsn":"0/16960E8",
     560            2 :             "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
     561            2 :             "deleted_at": "2023-07-31T09:00:00.123",
     562            2 :             "lineage":{
     563            2 :                 "original_ancestor":["e2bfd8c633d713d279e6fcd2bcc15b6d","0/15A7618","2024-05-07T18:52:36.322426563"],
     564            2 :                 "reparenting_history":["e1bfd8c633d713d279e6fcd2bcc15b6d"]
     565            2 :             },
     566            2 :             "last_aux_file_policy": "V2"
     567            2 :         }"#;
     568            2 : 
     569            2 :         let expected = IndexPart {
     570            2 :             version: 6,
     571            2 :             layer_metadata: HashMap::from([
     572            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), IndexLayerMetadata {
     573            2 :                     file_size: 25600000,
     574            2 :                     generation: Generation::none(),
     575            2 :                     shard: ShardIndex::unsharded()
     576            2 :                 }),
     577            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), IndexLayerMetadata {
     578            2 :                     // serde_json should always parse this but this might be a double with jq for
     579            2 :                     // example.
     580            2 :                     file_size: 9007199254741001,
     581            2 :                     generation: Generation::none(),
     582            2 :                     shard: ShardIndex::unsharded()
     583            2 :                 })
     584            2 :             ]),
     585            2 :             disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
     586            2 :             metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
     587            2 :             deleted_at: Some(chrono::NaiveDateTime::parse_from_str(
     588            2 :                 "2023-07-31T09:00:00.123000000", "%Y-%m-%dT%H:%M:%S.%f").unwrap()),
     589            2 :             lineage: Lineage {
     590            2 :                 reparenting_history_truncated: false,
     591            2 :                 reparenting_history: vec![TimelineId::from_str("e1bfd8c633d713d279e6fcd2bcc15b6d").unwrap()],
     592            2 :                 original_ancestor: Some((TimelineId::from_str("e2bfd8c633d713d279e6fcd2bcc15b6d").unwrap(), Lsn::from_str("0/15A7618").unwrap(), parse_naive_datetime("2024-05-07T18:52:36.322426563"))),
     593            2 :             },
     594            2 :             last_aux_file_policy: Some(AuxFilePolicy::V2),
     595            2 :         };
     596            2 : 
     597            2 :         let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
     598            2 :         assert_eq!(part, expected);
     599            2 :     }
     600              : 
     601            6 :     fn parse_naive_datetime(s: &str) -> NaiveDateTime {
     602            6 :         chrono::NaiveDateTime::parse_from_str(s, "%Y-%m-%dT%H:%M:%S.%f").unwrap()
     603            6 :     }
     604              : }
        

Generated by: LCOV version 2.1-beta