LCOV - code coverage report
Current view: top level - pageserver/src/tenant/remote_timeline_client - index.rs (source / functions)
Test:      691a4c28fe7169edd60b367c52d448a0a6605f1f.info
Test Date: 2024-05-10 13:18:37
Coverage:  Lines: 97.7 % (259 of 265 hit)    Functions: 63.6 % (21 of 33 hit)

            Line data    Source code
       1              : //! In-memory index to track the tenant files on the remote storage.
        2              : //! Able to restore itself from the index parts in remote storage, which are located in every timeline's remote directory and contain all data about
        3              : //! the remote timeline layers and their metadata.
       4              : 
       5              : use std::collections::HashMap;
       6              : 
       7              : use chrono::NaiveDateTime;
       8              : use serde::{Deserialize, Serialize};
       9              : 
      10              : use crate::tenant::metadata::TimelineMetadata;
      11              : use crate::tenant::storage_layer::LayerName;
      12              : use crate::tenant::upload_queue::UploadQueueInitialized;
      13              : use crate::tenant::Generation;
      14              : use pageserver_api::shard::ShardIndex;
      15              : 
      16              : use utils::lsn::Lsn;
      17              : 
      18              : /// Metadata gathered for each of the layer files.
      19              : ///
       20              : /// Fields have to be `Option`s because remote [`IndexPart`]s can come from different versions, which
       21              : /// might carry less or more metadata depending on whether we are upgrading or rolling back an upgrade.
      22              : #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
      23              : //#[cfg_attr(test, derive(Default))]
      24              : pub struct LayerFileMetadata {
      25              :     file_size: u64,
      26              : 
      27              :     pub(crate) generation: Generation,
      28              : 
      29              :     pub(crate) shard: ShardIndex,
      30              : }
      31              : 
      32              : impl From<&'_ IndexLayerMetadata> for LayerFileMetadata {
      33           32 :     fn from(other: &IndexLayerMetadata) -> Self {
      34           32 :         LayerFileMetadata {
      35           32 :             file_size: other.file_size,
      36           32 :             generation: other.generation,
      37           32 :             shard: other.shard,
      38           32 :         }
      39           32 :     }
      40              : }
      41              : 
      42              : impl LayerFileMetadata {
      43         1480 :     pub fn new(file_size: u64, generation: Generation, shard: ShardIndex) -> Self {
      44         1480 :         LayerFileMetadata {
      45         1480 :             file_size,
      46         1480 :             generation,
      47         1480 :             shard,
      48         1480 :         }
      49         1480 :     }
      50              : 
      51         3275 :     pub fn file_size(&self) -> u64 {
      52         3275 :         self.file_size
      53         3275 :     }
      54              : }
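
// Editor's sketch, not part of the covered source above: a minimal round trip through the
// conversions defined in this file. It assumes `Generation::none()` and
// `ShardIndex::unsharded()` are the appropriate "no generation / not sharded" values,
// matching the serde defaults declared on `IndexLayerMetadata` further below.
#[cfg(test)]
#[test]
fn layer_file_metadata_conversion_sketch() {
    // Build the in-memory form, then convert to the serialized form and back.
    let meta = LayerFileMetadata::new(25_600_000, Generation::none(), ShardIndex::unsharded());
    assert_eq!(meta.file_size(), 25_600_000);

    let on_disk = IndexLayerMetadata::from(&meta);
    assert_eq!(on_disk.file_size, 25_600_000);

    // No information is lost in either direction.
    assert_eq!(LayerFileMetadata::from(&on_disk), meta);
}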
      55              : 
       56              : // TODO: seems like another part of the remote storage file format
      57              : // compatibility issue, see https://github.com/neondatabase/neon/issues/3072
      58              : /// In-memory representation of an `index_part.json` file
      59              : ///
       60              : /// Contains the data about all layer files present in the remote timeline, together with the timeline's metadata.
      61              : ///
      62              : /// This type needs to be backwards and forwards compatible. When changing the fields,
      63              : /// remember to add a test case for the changed version.
      64          166 : #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
      65              : pub struct IndexPart {
      66              :     /// Debugging aid describing the version of this type.
      67              :     #[serde(default)]
      68              :     version: usize,
      69              : 
      70              :     #[serde(default)]
      71              :     #[serde(skip_serializing_if = "Option::is_none")]
      72              :     pub deleted_at: Option<NaiveDateTime>,
      73              : 
       74              :     /// Metadata for each layer file, keyed by layer file name; an entry can exist for a layer file that is present or missing remotely.
       75              :     ///
       76              :     /// Older versions of `IndexPart` will not have this property, or will have only a part of the metadata
       77              :     ///     that the latest version stores.
      78              :     pub layer_metadata: HashMap<LayerName, IndexLayerMetadata>,
      79              : 
      80              :     // 'disk_consistent_lsn' is a copy of the 'disk_consistent_lsn' in the metadata.
      81              :     // It's duplicated for convenience when reading the serialized structure, but is
      82              :     // private because internally we would read from metadata instead.
      83              :     disk_consistent_lsn: Lsn,
      84              : 
      85              :     #[serde(rename = "metadata_bytes")]
      86              :     pub metadata: TimelineMetadata,
      87              : }
      88              : 
      89              : impl IndexPart {
       90              :     /// When adding or modifying any parts of `IndexPart`, increment the version so that it can later
       91              :     /// be used to tell the different layouts apart.
      92              :     ///
      93              :     /// Version is currently informative only.
       94              :     /// Version history:
       95              :     /// - 2: added `deleted_at`
       96              :     /// - 3: no longer deserialize `timeline_layers` (serialized format is the same, but `timeline_layers`
       97              :     ///      is always generated from the keys of `layer_metadata`)
       98              :     /// - 4: `timeline_layers` is fully removed.
      99              :     const LATEST_VERSION: usize = 4;
     100              : 
     101              :     // Versions we may see when reading from a bucket.
     102              :     pub const KNOWN_VERSIONS: &'static [usize] = &[1, 2, 3, 4];
     103              : 
     104              :     pub const FILE_NAME: &'static str = "index_part.json";
     105              : 
     106         1188 :     fn new(
     107         1188 :         layers_and_metadata: &HashMap<LayerName, LayerFileMetadata>,
     108         1188 :         disk_consistent_lsn: Lsn,
     109         1188 :         metadata: TimelineMetadata,
     110         1188 :     ) -> Self {
     111         1188 :         let layer_metadata = layers_and_metadata
     112         1188 :             .iter()
     113        13570 :             .map(|(k, v)| (k.to_owned(), IndexLayerMetadata::from(v)))
     114         1188 :             .collect();
     115         1188 : 
     116         1188 :         Self {
     117         1188 :             version: Self::LATEST_VERSION,
     118         1188 :             layer_metadata,
     119         1188 :             disk_consistent_lsn,
     120         1188 :             metadata,
     121         1188 :             deleted_at: None,
     122         1188 :         }
     123         1188 :     }
     124              : 
     125            0 :     pub fn get_version(&self) -> usize {
     126            0 :         self.version
     127            0 :     }
     128              : 
     129              :     /// If you want this under normal operations, read it from self.metadata:
     130              :     /// this method is just for the scrubber to use when validating an index.
     131            0 :     pub fn get_disk_consistent_lsn(&self) -> Lsn {
     132            0 :         self.disk_consistent_lsn
     133            0 :     }
     134              : 
     135           10 :     pub fn from_s3_bytes(bytes: &[u8]) -> Result<Self, serde_json::Error> {
     136           10 :         serde_json::from_slice::<IndexPart>(bytes)
     137           10 :     }
     138              : 
     139         1150 :     pub fn to_s3_bytes(&self) -> serde_json::Result<Vec<u8>> {
     140         1150 :         serde_json::to_vec(self)
     141         1150 :     }
     142              : 
     143              :     #[cfg(test)]
     144           12 :     pub(crate) fn example() -> Self {
     145           12 :         let example_metadata = TimelineMetadata::example();
     146           12 :         Self::new(
     147           12 :             &HashMap::new(),
     148           12 :             example_metadata.disk_consistent_lsn(),
     149           12 :             example_metadata,
     150           12 :         )
     151           12 :     }
     152              : }
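
// Editor's sketch, not part of the covered source above: the upload path serializes an
// `IndexPart` with `to_s3_bytes` and the download path parses it back with `from_s3_bytes`;
// the two are expected to round-trip losslessly. `IndexPart::example()` is only available
// under `cfg(test)`, so the sketch is gated the same way.
#[cfg(test)]
#[test]
fn index_part_s3_roundtrip_sketch() {
    let original = IndexPart::example();
    let bytes = original.to_s3_bytes().expect("serializing an IndexPart should not fail");
    let parsed = IndexPart::from_s3_bytes(&bytes).expect("round-trip parse should succeed");

    assert_eq!(parsed, original);
    // Anything we write ourselves carries the latest version, which must stay listed as known.
    assert_eq!(parsed.get_version(), IndexPart::LATEST_VERSION);
    assert!(IndexPart::KNOWN_VERSIONS.contains(&IndexPart::LATEST_VERSION));
}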
     153              : 
     154              : impl From<&UploadQueueInitialized> for IndexPart {
     155         1176 :     fn from(uq: &UploadQueueInitialized) -> Self {
     156         1176 :         let disk_consistent_lsn = uq.latest_metadata.disk_consistent_lsn();
     157         1176 :         let metadata = uq.latest_metadata.clone();
     158         1176 : 
     159         1176 :         Self::new(&uq.latest_files, disk_consistent_lsn, metadata)
     160         1176 :     }
     161              : }
     162              : 
     163              : /// Serialized form of [`LayerFileMetadata`].
     164          104 : #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
     165              : pub struct IndexLayerMetadata {
     166              :     pub file_size: u64,
     167              : 
     168              :     #[serde(default = "Generation::none")]
     169              :     #[serde(skip_serializing_if = "Generation::is_none")]
     170              :     pub generation: Generation,
     171              : 
     172              :     #[serde(default = "ShardIndex::unsharded")]
     173              :     #[serde(skip_serializing_if = "ShardIndex::is_unsharded")]
     174              :     pub shard: ShardIndex,
     175              : }
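
// Editor's sketch, not part of the covered source above: the `serde(default)` and
// `skip_serializing_if` attributes above are what keep v1-style entries readable. An entry
// carrying only `file_size` parses with the generation-less, unsharded defaults, and those
// defaults are omitted again on serialization, so the legacy shape is preserved.
#[cfg(test)]
#[test]
fn index_layer_metadata_defaults_sketch() {
    let legacy: IndexLayerMetadata =
        serde_json::from_str(r#"{ "file_size": 25600000 }"#).unwrap();
    assert_eq!(legacy.generation, Generation::none());
    assert_eq!(legacy.shard, ShardIndex::unsharded());

    // Default generation and shard values are skipped when serializing.
    assert_eq!(
        serde_json::to_string(&legacy).unwrap(),
        r#"{"file_size":25600000}"#
    );
}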
     176              : 
     177              : impl From<&LayerFileMetadata> for IndexLayerMetadata {
     178        13570 :     fn from(other: &LayerFileMetadata) -> Self {
     179        13570 :         IndexLayerMetadata {
     180        13570 :             file_size: other.file_size,
     181        13570 :             generation: other.generation,
     182        13570 :             shard: other.shard,
     183        13570 :         }
     184        13570 :     }
     185              : }
     186              : 
     187              : #[cfg(test)]
     188              : mod tests {
     189              :     use super::*;
     190              : 
     191              :     #[test]
     192            2 :     fn v1_indexpart_is_parsed() {
     193            2 :         let example = r#"{
     194            2 :             "version":1,
     195            2 :             "timeline_layers":["000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9"],
     196            2 :             "layer_metadata":{
     197            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
     198            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
     199            2 :             },
     200            2 :             "disk_consistent_lsn":"0/16960E8",
     201            2 :             "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
     202            2 :         }"#;
     203            2 : 
     204            2 :         let expected = IndexPart {
      205            2 :             // Note: this is not verified and could be anything, but it exists for humans debugging. Could it be the git version instead?
     206            2 :             version: 1,
     207            2 :             layer_metadata: HashMap::from([
     208            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), IndexLayerMetadata {
     209            2 :                     file_size: 25600000,
     210            2 :                     generation: Generation::none(),
     211            2 :                     shard: ShardIndex::unsharded()
     212            2 :                 }),
     213            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), IndexLayerMetadata {
      214            2 :                     // serde_json should always parse this exactly, but a tool such as jq
      215            2 :                     // would read it as a double and lose precision.
     216            2 :                     file_size: 9007199254741001,
     217            2 :                     generation: Generation::none(),
     218            2 :                     shard: ShardIndex::unsharded()
     219            2 :                 })
     220            2 :             ]),
     221            2 :             disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
     222            2 :             metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
     223            2 :             deleted_at: None,
     224            2 :         };
     225            2 : 
     226            2 :         let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
     227            2 :         assert_eq!(part, expected);
     228            2 :     }
     229              : 
     230              :     #[test]
     231            2 :     fn v1_indexpart_is_parsed_with_optional_missing_layers() {
     232            2 :         let example = r#"{
     233            2 :             "version":1,
     234            2 :             "timeline_layers":["000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9"],
     235            2 :             "missing_layers":["This shouldn't fail deserialization"],
     236            2 :             "layer_metadata":{
     237            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
     238            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
     239            2 :             },
     240            2 :             "disk_consistent_lsn":"0/16960E8",
     241            2 :             "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
     242            2 :         }"#;
     243            2 : 
     244            2 :         let expected = IndexPart {
      245            2 :             // Note: this is not verified and could be anything, but it exists for humans debugging. Could it be the git version instead?
     246            2 :             version: 1,
     247            2 :             layer_metadata: HashMap::from([
     248            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), IndexLayerMetadata {
     249            2 :                     file_size: 25600000,
     250            2 :                     generation: Generation::none(),
     251            2 :                     shard: ShardIndex::unsharded()
     252            2 :                 }),
     253            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), IndexLayerMetadata {
      254            2 :                     // serde_json should always parse this exactly, but a tool such as jq
      255            2 :                     // would read it as a double and lose precision.
     256            2 :                     file_size: 9007199254741001,
     257            2 :                     generation: Generation::none(),
     258            2 :                     shard: ShardIndex::unsharded()
     259            2 :                 })
     260            2 :             ]),
     261            2 :             disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
     262            2 :             metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
     263            2 :             deleted_at: None,
     264            2 :         };
     265            2 : 
     266            2 :         let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
     267            2 :         assert_eq!(part, expected);
     268            2 :     }
     269              : 
     270              :     #[test]
     271            2 :     fn v2_indexpart_is_parsed_with_deleted_at() {
     272            2 :         let example = r#"{
     273            2 :             "version":2,
     274            2 :             "timeline_layers":["000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9"],
     275            2 :             "missing_layers":["This shouldn't fail deserialization"],
     276            2 :             "layer_metadata":{
     277            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
     278            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
     279            2 :             },
     280            2 :             "disk_consistent_lsn":"0/16960E8",
     281            2 :             "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
     282            2 :             "deleted_at": "2023-07-31T09:00:00.123"
     283            2 :         }"#;
     284            2 : 
     285            2 :         let expected = IndexPart {
      286            2 :             // Note: this is not verified and could be anything, but it exists for humans debugging. Could it be the git version instead?
     287            2 :             version: 2,
     288            2 :             layer_metadata: HashMap::from([
     289            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), IndexLayerMetadata {
     290            2 :                     file_size: 25600000,
     291            2 :                     generation: Generation::none(),
     292            2 :                     shard: ShardIndex::unsharded()
     293            2 :                 }),
     294            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), IndexLayerMetadata {
      295            2 :                     // serde_json should always parse this exactly, but a tool such as jq
      296            2 :                     // would read it as a double and lose precision.
     297            2 :                     file_size: 9007199254741001,
     298            2 :                     generation: Generation::none(),
     299            2 :                     shard: ShardIndex::unsharded()
     300            2 :                 })
     301            2 :             ]),
     302            2 :             disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
     303            2 :             metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
     304            2 :             deleted_at: Some(chrono::NaiveDateTime::parse_from_str(
     305            2 :                 "2023-07-31T09:00:00.123000000", "%Y-%m-%dT%H:%M:%S.%f").unwrap())
     306            2 :         };
     307            2 : 
     308            2 :         let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
     309            2 :         assert_eq!(part, expected);
     310            2 :     }
     311              : 
     312              :     #[test]
     313            2 :     fn empty_layers_are_parsed() {
     314            2 :         let empty_layers_json = r#"{
     315            2 :             "version":1,
     316            2 :             "timeline_layers":[],
     317            2 :             "layer_metadata":{},
     318            2 :             "disk_consistent_lsn":"0/2532648",
     319            2 :             "metadata_bytes":[136,151,49,208,0,70,0,4,0,0,0,0,2,83,38,72,1,0,0,0,0,2,83,38,32,1,87,198,240,135,97,119,45,125,38,29,155,161,140,141,255,210,0,0,0,0,2,83,38,72,0,0,0,0,1,73,240,192,0,0,0,0,1,73,240,192,0,0,0,15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
     320            2 :         }"#;
     321            2 : 
     322            2 :         let expected = IndexPart {
     323            2 :             version: 1,
     324            2 :             layer_metadata: HashMap::new(),
     325            2 :             disk_consistent_lsn: "0/2532648".parse::<Lsn>().unwrap(),
     326            2 :             metadata: TimelineMetadata::from_bytes(&[
     327            2 :                 136, 151, 49, 208, 0, 70, 0, 4, 0, 0, 0, 0, 2, 83, 38, 72, 1, 0, 0, 0, 0, 2, 83,
     328            2 :                 38, 32, 1, 87, 198, 240, 135, 97, 119, 45, 125, 38, 29, 155, 161, 140, 141, 255,
     329            2 :                 210, 0, 0, 0, 0, 2, 83, 38, 72, 0, 0, 0, 0, 1, 73, 240, 192, 0, 0, 0, 0, 1, 73,
     330            2 :                 240, 192, 0, 0, 0, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     331            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     332            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     333            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     334            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     335            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     336            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     337            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     338            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     339            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     340            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     341            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     342            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     343            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     344            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     345            2 :                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     346            2 :                 0, 0,
     347            2 :             ])
     348            2 :             .unwrap(),
     349            2 :             deleted_at: None,
     350            2 :         };
     351            2 : 
     352            2 :         let empty_layers_parsed = IndexPart::from_s3_bytes(empty_layers_json.as_bytes()).unwrap();
     353            2 : 
     354            2 :         assert_eq!(empty_layers_parsed, expected);
     355            2 :     }
     356              : 
     357              :     #[test]
     358            2 :     fn v4_indexpart_is_parsed() {
     359            2 :         let example = r#"{
     360            2 :             "version":4,
     361            2 :             "layer_metadata":{
     362            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
     363            2 :                 "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
     364            2 :             },
     365            2 :             "disk_consistent_lsn":"0/16960E8",
     366            2 :             "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
     367            2 :             "deleted_at": "2023-07-31T09:00:00.123"
     368            2 :         }"#;
     369            2 : 
     370            2 :         let expected = IndexPart {
     371            2 :             version: 4,
     372            2 :             layer_metadata: HashMap::from([
     373            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), IndexLayerMetadata {
     374            2 :                     file_size: 25600000,
     375            2 :                     generation: Generation::none(),
     376            2 :                     shard: ShardIndex::unsharded()
     377            2 :                 }),
     378            2 :                 ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), IndexLayerMetadata {
      379            2 :                     // serde_json should always parse this exactly, but a tool such as jq
      380            2 :                     // would read it as a double and lose precision.
     381            2 :                     file_size: 9007199254741001,
     382            2 :                     generation: Generation::none(),
     383            2 :                     shard: ShardIndex::unsharded()
     384            2 :                 })
     385            2 :             ]),
     386            2 :             disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
     387            2 :             metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
     388            2 :             deleted_at: Some(chrono::NaiveDateTime::parse_from_str(
     389            2 :                 "2023-07-31T09:00:00.123000000", "%Y-%m-%dT%H:%M:%S.%f").unwrap()),
     390            2 :         };
     391            2 : 
     392            2 :         let part = IndexPart::from_s3_bytes(example.as_bytes()).unwrap();
     393            2 :         assert_eq!(part, expected);
     394            2 :     }
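
    // Editor's sketch, not part of the covered source above: `IndexPart` does not enable
    // `deny_unknown_fields`, so an index written by a hypothetical newer pageserver with a
    // higher version number and extra fields should still deserialize here; that is the
    // forward compatibility the type-level documentation asks for. The version bump to 5 and
    // the `"some_future_field"` key are made up for illustration.
    #[test]
    fn hypothetical_future_version_is_tolerated_sketch() {
        // Start from a valid serialized index and graft hypothetical future data onto it.
        let mut doc: serde_json::Value =
            serde_json::from_slice(&IndexPart::example().to_s3_bytes().unwrap()).unwrap();
        doc["version"] = serde_json::Value::from(5);
        doc["some_future_field"] = serde_json::Value::from("ignored by older readers");

        let part = IndexPart::from_s3_bytes(&serde_json::to_vec(&doc).unwrap()).unwrap();
        assert_eq!(part.get_version(), 5);
        assert!(!IndexPart::KNOWN_VERSIONS.contains(&part.get_version()));
    }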
     395              : }
        

Generated by: LCOV version 2.1-beta