Line data Source code
1 : //! In-memory index to track the tenant files on the remote storage.
2 : //!
3 : //! Able to restore itself from the storage index parts, which are located in every timeline's remote directory and contain all data about
4 : //! remote timeline layers and their metadata.
5 :
6 : use std::collections::HashMap;
7 :
8 : use chrono::NaiveDateTime;
9 : use pageserver_api::models::AuxFilePolicy;
10 : use pageserver_api::shard::ShardIndex;
11 : use serde::{Deserialize, Serialize};
12 : use utils::id::TimelineId;
13 : use utils::lsn::Lsn;
14 :
15 : use super::is_same_remote_layer_path;
16 : use crate::tenant::Generation;
17 : use crate::tenant::metadata::TimelineMetadata;
18 : use crate::tenant::storage_layer::LayerName;
19 : use crate::tenant::timeline::import_pgdata;
20 :
21 : /// In-memory representation of an `index_part.json` file
22 : ///
23 : /// Contains the data about all of the timeline's files present in remote storage, and the timeline's metadata.
24 : ///
25 : /// This type needs to be backwards and forwards compatible. When changing the fields,
26 : /// remember to add a test case for the changed version.
27 6094 : #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
28 : pub struct IndexPart {
29 : /// Debugging aid describing the version of this type.
30 : #[serde(default)]
31 : version: usize,
32 :
33 : #[serde(default)]
34 : #[serde(skip_serializing_if = "Option::is_none")]
35 : pub deleted_at: Option<NaiveDateTime>,
36 :
37 : #[serde(default)]
38 : #[serde(skip_serializing_if = "Option::is_none")]
39 : pub archived_at: Option<NaiveDateTime>,
40 :
41 : /// This field supports import-from-pgdata ("fast imports" platform feature).
42 : /// We don't currently use fast imports, so this field is None for all production timelines.
43 : /// See <https://github.com/neondatabase/neon/pull/9218> for more information.
44 : #[serde(default)]
45 : #[serde(skip_serializing_if = "Option::is_none")]
46 : pub import_pgdata: Option<import_pgdata::index_part_format::Root>,
47 :
48 : /// Layer filenames and metadata. For an index persisted in remote storage, all layers must
49 : /// exist in remote storage.
50 : pub layer_metadata: HashMap<LayerName, LayerFileMetadata>,
51 :
52 : /// Because the legacy "metadata" field was troublesome to eyeball, we copied the
53 : /// "disk_consistent_lsn" out of it. After version 7 this is no longer needed, but the name cannot be
54 : /// reused.
55 : pub(super) disk_consistent_lsn: Lsn,
56 :
57 : // TODO: rename as "metadata" next week, keep the alias = "metadata_bytes", bump the version.
58 : // Adding the "alias = metadata" was forgotten in #7693, so we have to use "rename = metadata_bytes"
59 : // for backwards compatibility.
60 : #[serde(
61 : rename = "metadata_bytes",
62 : alias = "metadata",
63 : with = "crate::tenant::metadata::modern_serde"
64 : )]
65 : pub metadata: TimelineMetadata,
66 :
67 : #[serde(default)]
68 : pub(crate) lineage: Lineage,
69 :
70 : #[serde(skip_serializing_if = "Option::is_none", default)]
71 : pub(crate) gc_blocking: Option<GcBlocking>,
72 :
73 : /// Describes the kind of aux files stored in the timeline.
74 : ///
75 : /// The value is modified during file ingestion when the latest value requested via the tenant config is applied, if it is acceptable:
76 : /// a V1 setting after V2 files have been committed is not accepted.
77 : ///
78 : /// None means no aux files have been written to the storage before the point
79 : /// when this flag is introduced.
80 : ///
81 : /// This flag is no longer used, as all tenants have been transitioned to the new aux file policy.
82 : #[serde(skip_serializing_if = "Option::is_none", default)]
83 : pub(crate) last_aux_file_policy: Option<AuxFilePolicy>,
84 :
85 : #[serde(skip_serializing_if = "Option::is_none", default)]
86 : pub(crate) rel_size_migration: Option<RelSizeMigration>,
87 :
88 : /// Not used anymore -- kept here for backwards compatibility. Merged into the `gc_compaction` field.
89 : #[serde(skip_serializing_if = "Option::is_none", default)]
90 : l2_lsn: Option<Lsn>,
91 :
92 : /// State for the garbage-collecting compaction pass.
93 : ///
94 : /// Garbage-collecting compaction (gc-compaction) prunes `Value`s that are outside
95 : /// the PITR window and not needed by child timelines.
96 : ///
97 : /// A commonly used synonym for this compaction pass is
98 : /// "bottommost-compaction" because the affected LSN range
99 : /// is the "bottom" of the (key,lsn) map.
100 : ///
101 : /// Gc-compaction is quite an expensive operation; that's why we use a
102 : /// trigger condition.
103 : /// This field holds the state pertaining to that trigger condition
104 : /// and (in the future) to the progress of the gc-compaction, so that it's
105 : /// resumable across restarts & migrations.
106 : ///
107 : /// Note that the underlying algorithm is _also_ called `gc-compaction`
108 : /// in most places & design docs, but in fact it is more flexible than
109 : /// just the specific use case here; it needs a new name.
110 : #[serde(skip_serializing_if = "Option::is_none", default)]
111 : pub(crate) gc_compaction: Option<GcCompactionState>,
112 : }
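
// The struct above relies on serde's default behavior of ignoring unknown JSON keys, which is what
// gives `IndexPart` its forward compatibility (the "missing_layers" test below shows a historical
// instance of this). A minimal sketch of that property, using only items defined in this file; the
// module name and the extra JSON key are invented for illustration:
#[cfg(test)]
mod forward_compat_sketch {
    use super::*;

    #[test]
    fn unknown_fields_are_ignored_on_deserialization() {
        // Serialize a known-good index, then splice in a key this version does not know about.
        let bytes = IndexPart::example().to_json_bytes().unwrap();
        let mut value: serde_json::Value = serde_json::from_slice(&bytes).unwrap();
        value
            .as_object_mut()
            .unwrap()
            .insert("hypothetical_future_field".to_string(), serde_json::Value::Bool(true));

        // Deserialization still succeeds; the unknown key is simply dropped.
        let reparsed = IndexPart::from_json_bytes(value.to_string().as_bytes()).unwrap();
        assert_eq!(reparsed.version(), IndexPart::LATEST_VERSION);
    }
}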
113 :
114 4 : #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
115 : pub struct GcCompactionState {
116 : /// The upper bound of the last completed garbage-collecting compaction, aka. L2 LSN.
117 : pub(crate) last_completed_lsn: Lsn,
118 : }
119 :
120 8 : #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
121 : #[serde(rename_all = "camelCase")]
122 : pub enum RelSizeMigration {
123 : /// The tenant is using the old rel_size format.
124 : /// Note that this enum is persisted as `Option<RelSizeMigration>` in the index part, so
125 : /// `None` is the same as `Some(RelSizeMigration::Legacy)`.
126 : Legacy,
127 : /// The tenant is migrating to the new rel_size format. Both old and new rel_size format are
128 : /// persisted in the index part. The read path will read both formats and merge them.
129 : Migrating,
130 : /// The tenant has migrated to the new rel_size format. Only the new rel_size format is persisted
131 : /// in the index part, and the read path will not read the old format.
132 : Migrated,
133 : }
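
// Since `None` and `Some(RelSizeMigration::Legacy)` are documented above to mean the same thing, a
// read path typically wants to normalize the optional before matching on it. A minimal sketch of
// such a normalization; `effective_rel_size_migration` is a hypothetical helper, not an API defined
// elsewhere in the codebase:
#[cfg(test)]
mod rel_size_migration_sketch {
    use super::*;

    /// Collapse the `Option` persisted in the index into the state it effectively represents.
    fn effective_rel_size_migration(state: Option<RelSizeMigration>) -> RelSizeMigration {
        state.unwrap_or(RelSizeMigration::Legacy)
    }

    #[test]
    fn none_is_equivalent_to_legacy() {
        assert_eq!(
            effective_rel_size_migration(None),
            effective_rel_size_migration(Some(RelSizeMigration::Legacy))
        );
        assert_eq!(
            effective_rel_size_migration(Some(RelSizeMigration::Migrated)),
            RelSizeMigration::Migrated
        );
    }
}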
134 :
135 : impl IndexPart {
136 : /// When adding or modifying any parts of `IndexPart`, increment the version so that it is
137 : /// possible to tell which format a given index was written with.
138 : ///
139 : /// Version is currently informative only.
140 : /// Version history
141 : /// - 2: added `deleted_at`
142 : /// - 3: no longer deserialize `timeline_layers` (serialized format is the same, but timeline_layers
143 : /// is always generated from the keys of `layer_metadata`)
144 : /// - 4: timeline_layers is fully removed.
145 : /// - 5: lineage was added
146 : /// - 6: last_aux_file_policy is added.
147 : /// - 7: metadata_bytes is no longer written, but still read
148 : /// - 8: added `archived_at`
149 : /// - 9: +gc_blocking
150 : /// - 10: +import_pgdata
151 : /// - 11: +rel_size_migration
152 : /// - 12: +l2_lsn
153 : /// - 13: +gc_compaction
154 : const LATEST_VERSION: usize = 13;
155 :
156 : // Versions we may see when reading from a bucket.
157 : pub const KNOWN_VERSIONS: &'static [usize] = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13];
158 :
159 : pub const FILE_NAME: &'static str = "index_part.json";
160 :
161 1052 : pub fn empty(metadata: TimelineMetadata) -> Self {
162 1052 : IndexPart {
163 1052 : version: Self::LATEST_VERSION,
164 1052 : layer_metadata: Default::default(),
165 1052 : disk_consistent_lsn: metadata.disk_consistent_lsn(),
166 1052 : metadata,
167 1052 : deleted_at: None,
168 1052 : archived_at: None,
169 1052 : lineage: Default::default(),
170 1052 : gc_blocking: None,
171 1052 : last_aux_file_policy: None,
172 1052 : import_pgdata: None,
173 1052 : rel_size_migration: None,
174 1052 : l2_lsn: None,
175 1052 : gc_compaction: None,
176 1052 : }
177 1052 : }
178 :
179 0 : pub fn version(&self) -> usize {
180 0 : self.version
181 0 : }
182 :
183 : /// If you want this under normal operations, read it from self.metadata:
184 : /// this method is just for the scrubber to use when validating an index.
185 0 : pub fn duplicated_disk_consistent_lsn(&self) -> Lsn {
186 0 : self.disk_consistent_lsn
187 0 : }
188 :
189 52 : pub fn from_json_bytes(bytes: &[u8]) -> Result<Self, serde_json::Error> {
190 52 : serde_json::from_slice::<IndexPart>(bytes)
191 52 : }
192 :
193 2986 : pub fn to_json_bytes(&self) -> serde_json::Result<Vec<u8>> {
194 2986 : serde_json::to_vec(self)
195 2986 : }
196 :
197 : #[cfg(test)]
198 56 : pub(crate) fn example() -> Self {
199 56 : Self::empty(TimelineMetadata::example())
200 56 : }
201 :
202 : /// Returns true if the index contains a reference to the given layer (i.e. file path).
203 : ///
204 : /// TODO: there should be a variant of LayerName for the physical remote path that contains
205 : /// information about the shard and generation, to avoid passing in metadata.
206 70419 : pub fn references(&self, name: &LayerName, metadata: &LayerFileMetadata) -> bool {
207 70419 : let Some(index_metadata) = self.layer_metadata.get(name) else {
208 34101 : return false;
209 : };
210 36318 : is_same_remote_layer_path(name, metadata, name, index_metadata)
211 70419 : }
212 :
213 : /// Check for invariants in the index: this is useful when uploading an index to ensure that if
214 : /// we encounter a bug, we do not persist buggy metadata.
215 5434 : pub(crate) fn validate(&self) -> Result<(), String> {
216 5434 : if self.import_pgdata.is_none()
217 5434 : && self.metadata.ancestor_timeline().is_none()
218 4024 : && self.layer_metadata.is_empty()
219 : {
220 : // Unless we're in the middle of a raw pgdata import, or this is a child timeline, the index must
221 : // always have at least one layer.
222 0 : return Err("Index has no ancestor and no layers".to_string());
223 5434 : }
224 5434 :
225 5434 : Ok(())
226 5434 : }
227 : }
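
// A sketch of how `references` behaves, using only public pieces from above; the layer name is
// copied from the test fixtures at the bottom of this file. The expectation that a differing
// generation makes `references` return false assumes the remote layer path incorporates the
// generation (and shard), which is what `is_same_remote_layer_path` is comparing:
#[cfg(test)]
mod references_sketch {
    use super::*;

    #[test]
    fn references_requires_matching_name_and_metadata() {
        let name: LayerName =
            "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9"
                .parse()
                .unwrap();
        let metadata = LayerFileMetadata::new(25600000, Generation::new(1), ShardIndex::unsharded());

        let mut index = IndexPart::example();
        assert!(!index.references(&name, &metadata), "layer not in the index yet");

        index.layer_metadata.insert(name.clone(), metadata.clone());
        assert!(index.references(&name, &metadata));

        // Same name but a different generation resolves to a different remote path.
        let other_generation = LayerFileMetadata::new(25600000, Generation::new(2), ShardIndex::unsharded());
        assert!(!index.references(&name, &other_generation));
    }
}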
228 :
229 : /// Metadata gathered for each of the layer files.
230 : ///
231 : /// Fields have to be `Option`s because remote [`IndexPart`]s can be from a different version, which
232 : /// might have less or more metadata depending on whether we are upgrading or rolling back an upgrade.
233 18968 : #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
234 : pub struct LayerFileMetadata {
235 : pub file_size: u64,
236 :
237 : #[serde(default = "Generation::none")]
238 : #[serde(skip_serializing_if = "Generation::is_none")]
239 : pub generation: Generation,
240 :
241 : #[serde(default = "ShardIndex::unsharded")]
242 : #[serde(skip_serializing_if = "ShardIndex::is_unsharded")]
243 : pub shard: ShardIndex,
244 : }
245 :
246 : impl LayerFileMetadata {
247 6319 : pub fn new(file_size: u64, generation: Generation, shard: ShardIndex) -> Self {
248 6319 : LayerFileMetadata {
249 6319 : file_size,
250 6319 : generation,
251 6319 : shard,
252 6319 : }
253 6319 : }
254 : /// Helper to get both generation and file size in a tuple
255 0 : pub fn generation_file_size(&self) -> (Generation, u64) {
256 0 : (self.generation, self.file_size)
257 0 : }
258 : }
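
// A small sketch of constructing layer metadata the way the tests below do, assuming nothing
// beyond the constructors already used in this file:
#[cfg(test)]
mod layer_file_metadata_sketch {
    use super::*;

    #[test]
    fn generation_and_file_size_round_trip() {
        let meta = LayerFileMetadata::new(25600000, Generation::new(1), ShardIndex::unsharded());
        assert_eq!(meta.generation_file_size(), (Generation::new(1), 25600000));
    }
}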
259 :
260 : /// Limited history of earlier ancestors.
261 : ///
262 : /// A timeline can have more than one earlier ancestor, in the rare case that it was repeatedly
263 : /// reparented by having a later timeline be detached from its ancestor.
264 16 : #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize, Default)]
265 : pub(crate) struct Lineage {
266 : /// Whether the `reparenting_history` has been truncated to [`Lineage::REMEMBER_AT_MOST`] entries.
267 : #[serde(skip_serializing_if = "is_false", default)]
268 : reparenting_history_truncated: bool,
269 :
270 : /// Earlier ancestors, truncated when [`Self::reparenting_history_truncated`] is set.
271 : ///
272 : /// These are stored in case we want to support WAL based DR on the timeline. There can be many
273 : /// of these and at most one [`Self::original_ancestor`]. There cannot be more reparentings
274 : /// after [`Self::original_ancestor`] has been set.
275 : #[serde(skip_serializing_if = "Vec::is_empty", default)]
276 : reparenting_history: Vec<TimelineId>,
277 :
278 : /// The ancestor this timeline has been detached from, and when.
279 : ///
280 : /// If you are adding support for detaching from a hierarchy, consider changing the ancestry
281 : /// into a `Vec<(TimelineId, Lsn)>` to be a path instead.
282 : // FIXME: this is insufficient even for path of two timelines for future wal recovery
283 : // purposes:
284 : //
285 : // assuming a "old main" which has received most of the WAL, and has a branch "new main",
286 : // starting a bit before "old main" last_record_lsn. the current version works fine,
287 : // because we will know to replay wal and branch at the recorded Lsn to do wal recovery.
288 : //
289 : // then assuming "new main" would similarly receive a branch right before its last_record_lsn,
290 : // "new new main". the current implementation would just store ("new main", ancestor_lsn, _)
291 : // here. however, we cannot recover from WAL using only that information, we would need the
292 : // whole ancestry here:
293 : //
294 : // ```json
295 : // [
296 : // ["old main", ancestor_lsn("new main"), _],
297 : // ["new main", ancestor_lsn("new new main"), _]
298 : // ]
299 : // ```
300 : #[serde(skip_serializing_if = "Option::is_none", default)]
301 : original_ancestor: Option<(TimelineId, Lsn, NaiveDateTime)>,
302 : }
303 :
304 12188 : fn is_false(b: &bool) -> bool {
305 12188 : !b
306 12188 : }
307 :
308 : impl Lineage {
309 : const REMEMBER_AT_MOST: usize = 100;
310 :
311 0 : pub(crate) fn record_previous_ancestor(&mut self, old_ancestor: &TimelineId) -> bool {
312 0 : if self.reparenting_history.last() == Some(old_ancestor) {
313 : // do not re-record it
314 0 : false
315 : } else {
316 : #[cfg(feature = "testing")]
317 : {
318 0 : let existing = self
319 0 : .reparenting_history
320 0 : .iter()
321 0 : .position(|x| x == old_ancestor);
322 0 : assert_eq!(
323 : existing, None,
324 0 : "we cannot reparent onto and off and onto the same timeline twice"
325 : );
326 : }
327 0 : let drop_oldest = self.reparenting_history.len() + 1 >= Self::REMEMBER_AT_MOST;
328 0 :
329 0 : self.reparenting_history_truncated |= drop_oldest;
330 0 : if drop_oldest {
331 0 : self.reparenting_history.remove(0);
332 0 : }
333 0 : self.reparenting_history.push(*old_ancestor);
334 0 : true
335 : }
336 0 : }
337 :
338 : /// Returns true if anything changed.
339 0 : pub(crate) fn record_detaching(&mut self, branchpoint: &(TimelineId, Lsn)) -> bool {
340 0 : if let Some((id, lsn, _)) = self.original_ancestor {
341 0 : assert_eq!(
342 0 : &(id, lsn),
343 : branchpoint,
344 0 : "detaching attempt has to be for the same ancestor we are already detached from"
345 : );
346 0 : false
347 : } else {
348 0 : self.original_ancestor =
349 0 : Some((branchpoint.0, branchpoint.1, chrono::Utc::now().naive_utc()));
350 0 : true
351 : }
352 0 : }
353 :
354 : /// The queried lsn is most likely the basebackup lsn, and this answers the question "is it allowed
355 : /// to start a read/write primary at this lsn".
356 : ///
357 : /// Returns true if the Lsn was previously our branch point.
358 0 : pub(crate) fn is_previous_ancestor_lsn(&self, lsn: Lsn) -> bool {
359 0 : self.original_ancestor
360 0 : .is_some_and(|(_, ancestor_lsn, _)| ancestor_lsn == lsn)
361 0 : }
362 :
363 : /// Returns true if the timeline originally had an ancestor, and no longer has one.
364 0 : pub(crate) fn is_detached_from_ancestor(&self) -> bool {
365 0 : self.original_ancestor.is_some()
366 0 : }
367 :
368 : /// Returns the original ancestor timeline id and lsn that this timeline has been detached from.
369 0 : pub(crate) fn detached_previous_ancestor(&self) -> Option<(TimelineId, Lsn)> {
370 0 : self.original_ancestor.map(|(id, lsn, _)| (id, lsn))
371 0 : }
372 :
373 0 : pub(crate) fn is_reparented(&self) -> bool {
374 0 : !self.reparenting_history.is_empty()
375 0 : }
376 : }
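
// A sketch of the detach bookkeeping described above, exercising the crate-internal `Lineage` API.
// The timeline id and LSN are arbitrary values borrowed from the test fixtures below:
#[cfg(test)]
mod lineage_sketch {
    use super::*;

    #[test]
    fn detaching_records_the_original_ancestor() {
        let ancestor: TimelineId = "e2bfd8c633d713d279e6fcd2bcc15b6d".parse().unwrap();
        let branchpoint_lsn: Lsn = "0/15A7618".parse().unwrap();

        let mut lineage = Lineage::default();
        assert!(!lineage.is_detached_from_ancestor());

        // The first detach records the branch point and reports a change...
        assert!(lineage.record_detaching(&(ancestor, branchpoint_lsn)));
        // ...a repeat for the same branch point is a no-op.
        assert!(!lineage.record_detaching(&(ancestor, branchpoint_lsn)));

        assert!(lineage.is_detached_from_ancestor());
        assert!(lineage.is_previous_ancestor_lsn(branchpoint_lsn));
        assert_eq!(
            lineage.detached_previous_ancestor(),
            Some((ancestor, branchpoint_lsn))
        );
    }
}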
377 :
378 32 : #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
379 : pub(crate) struct GcBlocking {
380 : pub(crate) started_at: NaiveDateTime,
381 : pub(crate) reasons: enumset::EnumSet<GcBlockingReason>,
382 : }
383 :
384 16 : #[derive(Debug, enumset::EnumSetType, serde::Serialize, serde::Deserialize)]
385 : #[enumset(serialize_repr = "list")]
386 : pub(crate) enum GcBlockingReason {
387 : Manual,
388 : DetachAncestor,
389 : }
390 :
391 : impl GcBlocking {
392 0 : pub(super) fn started_now_for(reason: GcBlockingReason) -> Self {
393 0 : GcBlocking {
394 0 : started_at: chrono::Utc::now().naive_utc(),
395 0 : reasons: enumset::EnumSet::only(reason),
396 0 : }
397 0 : }
398 :
399 : /// Returns true if the given reason is one of the reasons why the gc is blocked.
400 0 : pub(crate) fn blocked_by(&self, reason: GcBlockingReason) -> bool {
401 0 : self.reasons.contains(reason)
402 0 : }
403 :
404 : /// Returns a version of self with the given reason.
405 0 : pub(super) fn with_reason(&self, reason: GcBlockingReason) -> Self {
406 0 : assert!(!self.blocked_by(reason));
407 0 : let mut reasons = self.reasons;
408 0 : reasons.insert(reason);
409 0 :
410 0 : Self {
411 0 : started_at: self.started_at,
412 0 : reasons,
413 0 : }
414 0 : }
415 :
416 : /// Returns a version of self without the given reason. The assumption is that if
417 : /// there are no more reasons, we can unblock the gc by returning `None`.
418 0 : pub(super) fn without_reason(&self, reason: GcBlockingReason) -> Option<Self> {
419 0 : assert!(self.blocked_by(reason));
420 :
421 0 : if self.reasons.len() == 1 {
422 0 : None
423 : } else {
424 0 : let mut reasons = self.reasons;
425 0 : assert!(reasons.remove(reason));
426 0 : assert!(!reasons.is_empty());
427 :
428 0 : Some(Self {
429 0 : started_at: self.started_at,
430 0 : reasons,
431 0 : })
432 : }
433 0 : }
434 : }
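
// A sketch of the add/remove life cycle for gc-blocking reasons, exercising only the helpers
// defined above; a `None` from `without_reason` is the signal that gc can be unblocked:
#[cfg(test)]
mod gc_blocking_sketch {
    use super::*;

    #[test]
    fn reasons_accumulate_and_drain() {
        let blocking = GcBlocking::started_now_for(GcBlockingReason::Manual);
        assert!(blocking.blocked_by(GcBlockingReason::Manual));
        assert!(!blocking.blocked_by(GcBlockingReason::DetachAncestor));

        // Add a second reason; `started_at` is carried over unchanged.
        let blocking = blocking.with_reason(GcBlockingReason::DetachAncestor);
        assert!(blocking.blocked_by(GcBlockingReason::DetachAncestor));

        // Removing one reason keeps the block in place...
        let blocking = blocking
            .without_reason(GcBlockingReason::Manual)
            .expect("one reason remains");
        assert!(blocking.blocked_by(GcBlockingReason::DetachAncestor));

        // ...removing the last one yields `None`, i.e. gc is no longer blocked.
        assert!(
            blocking
                .without_reason(GcBlockingReason::DetachAncestor)
                .is_none()
        );
    }
}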
435 :
436 : #[cfg(test)]
437 : mod tests {
438 : use std::str::FromStr;
439 :
440 : use utils::id::TimelineId;
441 :
442 : use super::*;
443 :
444 : #[test]
445 4 : fn v1_indexpart_is_parsed() {
446 4 : let example = r#"{
447 4 : "version":1,
448 4 : "timeline_layers":["000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9"],
449 4 : "layer_metadata":{
450 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
451 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
452 4 : },
453 4 : "disk_consistent_lsn":"0/16960E8",
454 4 : "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
455 4 : }"#;
456 4 :
457 4 : let expected = IndexPart {
458 4 : // note this is not verified, could be anything, but exists for humans debugging... could be the git version instead?
459 4 : version: 1,
460 4 : layer_metadata: HashMap::from([
461 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
462 4 : file_size: 25600000,
463 4 : generation: Generation::none(),
464 4 : shard: ShardIndex::unsharded()
465 4 : }),
466 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
467 4 : // serde_json should always parse this but this might be a double with jq for
468 4 : // example.
469 4 : file_size: 9007199254741001,
470 4 : generation: Generation::none(),
471 4 : shard: ShardIndex::unsharded()
472 4 : })
473 4 : ]),
474 4 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
475 4 : metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
476 4 : deleted_at: None,
477 4 : archived_at: None,
478 4 : lineage: Lineage::default(),
479 4 : gc_blocking: None,
480 4 : last_aux_file_policy: None,
481 4 : import_pgdata: None,
482 4 : rel_size_migration: None,
483 4 : l2_lsn: None,
484 4 : gc_compaction: None,
485 4 : };
486 4 :
487 4 : let part = IndexPart::from_json_bytes(example.as_bytes()).unwrap();
488 4 : assert_eq!(part, expected);
489 4 : }
490 :
491 : #[test]
492 4 : fn v1_indexpart_is_parsed_with_optional_missing_layers() {
493 4 : let example = r#"{
494 4 : "version":1,
495 4 : "timeline_layers":["000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9"],
496 4 : "missing_layers":["This shouldn't fail deserialization"],
497 4 : "layer_metadata":{
498 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
499 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
500 4 : },
501 4 : "disk_consistent_lsn":"0/16960E8",
502 4 : "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
503 4 : }"#;
504 4 :
505 4 : let expected = IndexPart {
506 4 : // note this is not verified, could be anything, but exists for humans debugging... could be the git version instead?
507 4 : version: 1,
508 4 : layer_metadata: HashMap::from([
509 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
510 4 : file_size: 25600000,
511 4 : generation: Generation::none(),
512 4 : shard: ShardIndex::unsharded()
513 4 : }),
514 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
515 4 : // serde_json should always parse this but this might be a double with jq for
516 4 : // example.
517 4 : file_size: 9007199254741001,
518 4 : generation: Generation::none(),
519 4 : shard: ShardIndex::unsharded()
520 4 : })
521 4 : ]),
522 4 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
523 4 : metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
524 4 : deleted_at: None,
525 4 : archived_at: None,
526 4 : lineage: Lineage::default(),
527 4 : gc_blocking: None,
528 4 : last_aux_file_policy: None,
529 4 : import_pgdata: None,
530 4 : rel_size_migration: None,
531 4 : l2_lsn: None,
532 4 : gc_compaction: None,
533 4 : };
534 4 :
535 4 : let part = IndexPart::from_json_bytes(example.as_bytes()).unwrap();
536 4 : assert_eq!(part, expected);
537 4 : }
538 :
539 : #[test]
540 4 : fn v2_indexpart_is_parsed_with_deleted_at() {
541 4 : let example = r#"{
542 4 : "version":2,
543 4 : "timeline_layers":["000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9"],
544 4 : "missing_layers":["This shouldn't fail deserialization"],
545 4 : "layer_metadata":{
546 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
547 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
548 4 : },
549 4 : "disk_consistent_lsn":"0/16960E8",
550 4 : "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
551 4 : "deleted_at": "2023-07-31T09:00:00.123"
552 4 : }"#;
553 4 :
554 4 : let expected = IndexPart {
555 4 : // note this is not verified, could be anything, but exists for humans debugging... could be the git version instead?
556 4 : version: 2,
557 4 : layer_metadata: HashMap::from([
558 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
559 4 : file_size: 25600000,
560 4 : generation: Generation::none(),
561 4 : shard: ShardIndex::unsharded()
562 4 : }),
563 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
564 4 : // serde_json should always parse this but this might be a double with jq for
565 4 : // example.
566 4 : file_size: 9007199254741001,
567 4 : generation: Generation::none(),
568 4 : shard: ShardIndex::unsharded()
569 4 : })
570 4 : ]),
571 4 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
572 4 : metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
573 4 : deleted_at: Some(parse_naive_datetime("2023-07-31T09:00:00.123000000")),
574 4 : archived_at: None,
575 4 : lineage: Lineage::default(),
576 4 : gc_blocking: None,
577 4 : last_aux_file_policy: None,
578 4 : import_pgdata: None,
579 4 : rel_size_migration: None,
580 4 : l2_lsn: None,
581 4 : gc_compaction: None,
582 4 : };
583 4 :
584 4 : let part = IndexPart::from_json_bytes(example.as_bytes()).unwrap();
585 4 : assert_eq!(part, expected);
586 4 : }
587 :
588 : #[test]
589 4 : fn empty_layers_are_parsed() {
590 4 : let empty_layers_json = r#"{
591 4 : "version":1,
592 4 : "timeline_layers":[],
593 4 : "layer_metadata":{},
594 4 : "disk_consistent_lsn":"0/2532648",
595 4 : "metadata_bytes":[136,151,49,208,0,70,0,4,0,0,0,0,2,83,38,72,1,0,0,0,0,2,83,38,32,1,87,198,240,135,97,119,45,125,38,29,155,161,140,141,255,210,0,0,0,0,2,83,38,72,0,0,0,0,1,73,240,192,0,0,0,0,1,73,240,192,0,0,0,15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
596 4 : }"#;
597 4 :
598 4 : let expected = IndexPart {
599 4 : version: 1,
600 4 : layer_metadata: HashMap::new(),
601 4 : disk_consistent_lsn: "0/2532648".parse::<Lsn>().unwrap(),
602 4 : metadata: TimelineMetadata::from_bytes(&[
603 4 : 136, 151, 49, 208, 0, 70, 0, 4, 0, 0, 0, 0, 2, 83, 38, 72, 1, 0, 0, 0, 0, 2, 83,
604 4 : 38, 32, 1, 87, 198, 240, 135, 97, 119, 45, 125, 38, 29, 155, 161, 140, 141, 255,
605 4 : 210, 0, 0, 0, 0, 2, 83, 38, 72, 0, 0, 0, 0, 1, 73, 240, 192, 0, 0, 0, 0, 1, 73,
606 4 : 240, 192, 0, 0, 0, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
607 4 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
608 4 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
609 4 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
610 4 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
611 4 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
612 4 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
613 4 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
614 4 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
615 4 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
616 4 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
617 4 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
618 4 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
619 4 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
620 4 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
621 4 : 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
622 4 : 0, 0,
623 4 : ])
624 4 : .unwrap(),
625 4 : deleted_at: None,
626 4 : archived_at: None,
627 4 : lineage: Lineage::default(),
628 4 : gc_blocking: None,
629 4 : last_aux_file_policy: None,
630 4 : import_pgdata: None,
631 4 : rel_size_migration: None,
632 4 : l2_lsn: None,
633 4 : gc_compaction: None,
634 4 : };
635 4 :
636 4 : let empty_layers_parsed = IndexPart::from_json_bytes(empty_layers_json.as_bytes()).unwrap();
637 4 :
638 4 : assert_eq!(empty_layers_parsed, expected);
639 4 : }
640 :
641 : #[test]
642 4 : fn v4_indexpart_is_parsed() {
643 4 : let example = r#"{
644 4 : "version":4,
645 4 : "layer_metadata":{
646 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
647 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
648 4 : },
649 4 : "disk_consistent_lsn":"0/16960E8",
650 4 : "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
651 4 : "deleted_at": "2023-07-31T09:00:00.123"
652 4 : }"#;
653 4 :
654 4 : let expected = IndexPart {
655 4 : version: 4,
656 4 : layer_metadata: HashMap::from([
657 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
658 4 : file_size: 25600000,
659 4 : generation: Generation::none(),
660 4 : shard: ShardIndex::unsharded()
661 4 : }),
662 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
663 4 : // serde_json should always parse this but this might be a double with jq for
664 4 : // example.
665 4 : file_size: 9007199254741001,
666 4 : generation: Generation::none(),
667 4 : shard: ShardIndex::unsharded()
668 4 : })
669 4 : ]),
670 4 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
671 4 : metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
672 4 : deleted_at: Some(parse_naive_datetime("2023-07-31T09:00:00.123000000")),
673 4 : archived_at: None,
674 4 : lineage: Lineage::default(),
675 4 : gc_blocking: None,
676 4 : last_aux_file_policy: None,
677 4 : import_pgdata: None,
678 4 : rel_size_migration: None,
679 4 : l2_lsn: None,
680 4 : gc_compaction: None,
681 4 : };
682 4 :
683 4 : let part = IndexPart::from_json_bytes(example.as_bytes()).unwrap();
684 4 : assert_eq!(part, expected);
685 4 : }
686 :
687 : #[test]
688 4 : fn v5_indexpart_is_parsed() {
689 4 : let example = r#"{
690 4 : "version":5,
691 4 : "layer_metadata":{
692 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000014EF420-00000000014EF499":{"file_size":23289856,"generation":1},
693 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000014EF499-00000000015A7619":{"file_size":1015808,"generation":1}},
694 4 : "disk_consistent_lsn":"0/15A7618",
695 4 : "metadata_bytes":[226,88,25,241,0,46,0,4,0,0,0,0,1,90,118,24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,78,244,32,0,0,0,0,1,78,244,32,0,0,0,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
696 4 : "lineage":{
697 4 : "original_ancestor":["e2bfd8c633d713d279e6fcd2bcc15b6d","0/15A7618","2024-05-07T18:52:36.322426563"],
698 4 : "reparenting_history":["e1bfd8c633d713d279e6fcd2bcc15b6d"]
699 4 : }
700 4 : }"#;
701 4 :
702 4 : let expected = IndexPart {
703 4 : version: 5,
704 4 : layer_metadata: HashMap::from([
705 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000014EF420-00000000014EF499".parse().unwrap(), LayerFileMetadata {
706 4 : file_size: 23289856,
707 4 : generation: Generation::new(1),
708 4 : shard: ShardIndex::unsharded(),
709 4 : }),
710 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000014EF499-00000000015A7619".parse().unwrap(), LayerFileMetadata {
711 4 : file_size: 1015808,
712 4 : generation: Generation::new(1),
713 4 : shard: ShardIndex::unsharded(),
714 4 : })
715 4 : ]),
716 4 : disk_consistent_lsn: Lsn::from_str("0/15A7618").unwrap(),
717 4 : metadata: TimelineMetadata::from_bytes(&[226,88,25,241,0,46,0,4,0,0,0,0,1,90,118,24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,78,244,32,0,0,0,0,1,78,244,32,0,0,0,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
718 4 : deleted_at: None,
719 4 : archived_at: None,
720 4 : lineage: Lineage {
721 4 : reparenting_history_truncated: false,
722 4 : reparenting_history: vec![TimelineId::from_str("e1bfd8c633d713d279e6fcd2bcc15b6d").unwrap()],
723 4 : original_ancestor: Some((TimelineId::from_str("e2bfd8c633d713d279e6fcd2bcc15b6d").unwrap(), Lsn::from_str("0/15A7618").unwrap(), parse_naive_datetime("2024-05-07T18:52:36.322426563"))),
724 4 : },
725 4 : gc_blocking: None,
726 4 : last_aux_file_policy: None,
727 4 : import_pgdata: None,
728 4 : rel_size_migration: None,
729 4 : l2_lsn: None,
730 4 : gc_compaction: None,
731 4 : };
732 4 :
733 4 : let part = IndexPart::from_json_bytes(example.as_bytes()).unwrap();
734 4 : assert_eq!(part, expected);
735 4 : }
736 :
737 : #[test]
738 4 : fn v6_indexpart_is_parsed() {
739 4 : let example = r#"{
740 4 : "version":6,
741 4 : "layer_metadata":{
742 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
743 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
744 4 : },
745 4 : "disk_consistent_lsn":"0/16960E8",
746 4 : "metadata_bytes":[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
747 4 : "deleted_at": "2023-07-31T09:00:00.123",
748 4 : "lineage":{
749 4 : "original_ancestor":["e2bfd8c633d713d279e6fcd2bcc15b6d","0/15A7618","2024-05-07T18:52:36.322426563"],
750 4 : "reparenting_history":["e1bfd8c633d713d279e6fcd2bcc15b6d"]
751 4 : },
752 4 : "last_aux_file_policy": "V2"
753 4 : }"#;
754 4 :
755 4 : let expected = IndexPart {
756 4 : version: 6,
757 4 : layer_metadata: HashMap::from([
758 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
759 4 : file_size: 25600000,
760 4 : generation: Generation::none(),
761 4 : shard: ShardIndex::unsharded()
762 4 : }),
763 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
764 4 : // serde_json should always parse this but this might be a double with jq for
765 4 : // example.
766 4 : file_size: 9007199254741001,
767 4 : generation: Generation::none(),
768 4 : shard: ShardIndex::unsharded()
769 4 : })
770 4 : ]),
771 4 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
772 4 : metadata: TimelineMetadata::from_bytes(&[113,11,159,210,0,54,0,4,0,0,0,0,1,105,96,232,1,0,0,0,0,1,105,96,112,0,0,0,0,0,0,0,0,0,0,0,0,0,1,105,96,112,0,0,0,0,1,105,96,112,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]).unwrap(),
773 4 : deleted_at: Some(parse_naive_datetime("2023-07-31T09:00:00.123000000")),
774 4 : archived_at: None,
775 4 : lineage: Lineage {
776 4 : reparenting_history_truncated: false,
777 4 : reparenting_history: vec![TimelineId::from_str("e1bfd8c633d713d279e6fcd2bcc15b6d").unwrap()],
778 4 : original_ancestor: Some((TimelineId::from_str("e2bfd8c633d713d279e6fcd2bcc15b6d").unwrap(), Lsn::from_str("0/15A7618").unwrap(), parse_naive_datetime("2024-05-07T18:52:36.322426563"))),
779 4 : },
780 4 : gc_blocking: None,
781 4 : last_aux_file_policy: Some(AuxFilePolicy::V2),
782 4 : import_pgdata: None,
783 4 : rel_size_migration: None,
784 4 : l2_lsn: None,
785 4 : gc_compaction: None,
786 4 : };
787 4 :
788 4 : let part = IndexPart::from_json_bytes(example.as_bytes()).unwrap();
789 4 : assert_eq!(part, expected);
790 4 : }
791 :
792 : #[test]
793 4 : fn v7_indexpart_is_parsed() {
794 4 : let example = r#"{
795 4 : "version": 7,
796 4 : "layer_metadata":{
797 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
798 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
799 4 : },
800 4 : "disk_consistent_lsn":"0/16960E8",
801 4 : "metadata": {
802 4 : "disk_consistent_lsn": "0/16960E8",
803 4 : "prev_record_lsn": "0/1696070",
804 4 : "ancestor_timeline": "e45a7f37d3ee2ff17dc14bf4f4e3f52e",
805 4 : "ancestor_lsn": "0/0",
806 4 : "latest_gc_cutoff_lsn": "0/1696070",
807 4 : "initdb_lsn": "0/1696070",
808 4 : "pg_version": 14
809 4 : },
810 4 : "deleted_at": "2023-07-31T09:00:00.123"
811 4 : }"#;
812 4 :
813 4 : let expected = IndexPart {
814 4 : version: 7,
815 4 : layer_metadata: HashMap::from([
816 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
817 4 : file_size: 25600000,
818 4 : generation: Generation::none(),
819 4 : shard: ShardIndex::unsharded()
820 4 : }),
821 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
822 4 : file_size: 9007199254741001,
823 4 : generation: Generation::none(),
824 4 : shard: ShardIndex::unsharded()
825 4 : })
826 4 : ]),
827 4 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
828 4 : metadata: TimelineMetadata::new(
829 4 : Lsn::from_str("0/16960E8").unwrap(),
830 4 : Some(Lsn::from_str("0/1696070").unwrap()),
831 4 : Some(TimelineId::from_str("e45a7f37d3ee2ff17dc14bf4f4e3f52e").unwrap()),
832 4 : Lsn::INVALID,
833 4 : Lsn::from_str("0/1696070").unwrap(),
834 4 : Lsn::from_str("0/1696070").unwrap(),
835 4 : 14,
836 4 : ).with_recalculated_checksum().unwrap(),
837 4 : deleted_at: Some(parse_naive_datetime("2023-07-31T09:00:00.123000000")),
838 4 : archived_at: None,
839 4 : lineage: Default::default(),
840 4 : gc_blocking: None,
841 4 : last_aux_file_policy: Default::default(),
842 4 : import_pgdata: None,
843 4 : rel_size_migration: None,
844 4 : l2_lsn: None,
845 4 : gc_compaction: None,
846 4 : };
847 4 :
848 4 : let part = IndexPart::from_json_bytes(example.as_bytes()).unwrap();
849 4 : assert_eq!(part, expected);
850 4 : }
851 :
852 : #[test]
853 4 : fn v8_indexpart_is_parsed() {
854 4 : let example = r#"{
855 4 : "version": 8,
856 4 : "layer_metadata":{
857 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
858 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
859 4 : },
860 4 : "disk_consistent_lsn":"0/16960E8",
861 4 : "metadata": {
862 4 : "disk_consistent_lsn": "0/16960E8",
863 4 : "prev_record_lsn": "0/1696070",
864 4 : "ancestor_timeline": "e45a7f37d3ee2ff17dc14bf4f4e3f52e",
865 4 : "ancestor_lsn": "0/0",
866 4 : "latest_gc_cutoff_lsn": "0/1696070",
867 4 : "initdb_lsn": "0/1696070",
868 4 : "pg_version": 14
869 4 : },
870 4 : "deleted_at": "2023-07-31T09:00:00.123",
871 4 : "archived_at": "2023-04-29T09:00:00.123"
872 4 : }"#;
873 4 :
874 4 : let expected = IndexPart {
875 4 : version: 8,
876 4 : layer_metadata: HashMap::from([
877 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
878 4 : file_size: 25600000,
879 4 : generation: Generation::none(),
880 4 : shard: ShardIndex::unsharded()
881 4 : }),
882 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
883 4 : file_size: 9007199254741001,
884 4 : generation: Generation::none(),
885 4 : shard: ShardIndex::unsharded()
886 4 : })
887 4 : ]),
888 4 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
889 4 : metadata: TimelineMetadata::new(
890 4 : Lsn::from_str("0/16960E8").unwrap(),
891 4 : Some(Lsn::from_str("0/1696070").unwrap()),
892 4 : Some(TimelineId::from_str("e45a7f37d3ee2ff17dc14bf4f4e3f52e").unwrap()),
893 4 : Lsn::INVALID,
894 4 : Lsn::from_str("0/1696070").unwrap(),
895 4 : Lsn::from_str("0/1696070").unwrap(),
896 4 : 14,
897 4 : ).with_recalculated_checksum().unwrap(),
898 4 : deleted_at: Some(parse_naive_datetime("2023-07-31T09:00:00.123000000")),
899 4 : archived_at: Some(parse_naive_datetime("2023-04-29T09:00:00.123000000")),
900 4 : lineage: Default::default(),
901 4 : gc_blocking: None,
902 4 : last_aux_file_policy: Default::default(),
903 4 : import_pgdata: None,
904 4 : rel_size_migration: None,
905 4 : l2_lsn: None,
906 4 : gc_compaction: None,
907 4 : };
908 4 :
909 4 : let part = IndexPart::from_json_bytes(example.as_bytes()).unwrap();
910 4 : assert_eq!(part, expected);
911 4 : }
912 :
913 : #[test]
914 4 : fn v9_indexpart_is_parsed() {
915 4 : let example = r#"{
916 4 : "version": 9,
917 4 : "layer_metadata":{
918 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
919 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
920 4 : },
921 4 : "disk_consistent_lsn":"0/16960E8",
922 4 : "metadata": {
923 4 : "disk_consistent_lsn": "0/16960E8",
924 4 : "prev_record_lsn": "0/1696070",
925 4 : "ancestor_timeline": "e45a7f37d3ee2ff17dc14bf4f4e3f52e",
926 4 : "ancestor_lsn": "0/0",
927 4 : "latest_gc_cutoff_lsn": "0/1696070",
928 4 : "initdb_lsn": "0/1696070",
929 4 : "pg_version": 14
930 4 : },
931 4 : "gc_blocking": {
932 4 : "started_at": "2024-07-19T09:00:00.123",
933 4 : "reasons": ["DetachAncestor"]
934 4 : }
935 4 : }"#;
936 4 :
937 4 : let expected = IndexPart {
938 4 : version: 9,
939 4 : layer_metadata: HashMap::from([
940 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
941 4 : file_size: 25600000,
942 4 : generation: Generation::none(),
943 4 : shard: ShardIndex::unsharded()
944 4 : }),
945 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
946 4 : file_size: 9007199254741001,
947 4 : generation: Generation::none(),
948 4 : shard: ShardIndex::unsharded()
949 4 : })
950 4 : ]),
951 4 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
952 4 : metadata: TimelineMetadata::new(
953 4 : Lsn::from_str("0/16960E8").unwrap(),
954 4 : Some(Lsn::from_str("0/1696070").unwrap()),
955 4 : Some(TimelineId::from_str("e45a7f37d3ee2ff17dc14bf4f4e3f52e").unwrap()),
956 4 : Lsn::INVALID,
957 4 : Lsn::from_str("0/1696070").unwrap(),
958 4 : Lsn::from_str("0/1696070").unwrap(),
959 4 : 14,
960 4 : ).with_recalculated_checksum().unwrap(),
961 4 : deleted_at: None,
962 4 : lineage: Default::default(),
963 4 : gc_blocking: Some(GcBlocking {
964 4 : started_at: parse_naive_datetime("2024-07-19T09:00:00.123000000"),
965 4 : reasons: enumset::EnumSet::from_iter([GcBlockingReason::DetachAncestor]),
966 4 : }),
967 4 : last_aux_file_policy: Default::default(),
968 4 : archived_at: None,
969 4 : import_pgdata: None,
970 4 : rel_size_migration: None,
971 4 : l2_lsn: None,
972 4 : gc_compaction: None,
973 4 : };
974 4 :
975 4 : let part = IndexPart::from_json_bytes(example.as_bytes()).unwrap();
976 4 : assert_eq!(part, expected);
977 4 : }
978 :
979 : #[test]
980 4 : fn v10_importpgdata_is_parsed() {
981 4 : let example = r#"{
982 4 : "version": 10,
983 4 : "layer_metadata":{
984 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
985 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
986 4 : },
987 4 : "disk_consistent_lsn":"0/16960E8",
988 4 : "metadata": {
989 4 : "disk_consistent_lsn": "0/16960E8",
990 4 : "prev_record_lsn": "0/1696070",
991 4 : "ancestor_timeline": "e45a7f37d3ee2ff17dc14bf4f4e3f52e",
992 4 : "ancestor_lsn": "0/0",
993 4 : "latest_gc_cutoff_lsn": "0/1696070",
994 4 : "initdb_lsn": "0/1696070",
995 4 : "pg_version": 14
996 4 : },
997 4 : "gc_blocking": {
998 4 : "started_at": "2024-07-19T09:00:00.123",
999 4 : "reasons": ["DetachAncestor"]
1000 4 : },
1001 4 : "import_pgdata": {
1002 4 : "V1": {
1003 4 : "Done": {
1004 4 : "idempotency_key": "specified-by-client-218a5213-5044-4562-a28d-d024c5f057f5",
1005 4 : "started_at": "2024-11-13T09:23:42.123",
1006 4 : "finished_at": "2024-11-13T09:42:23.123"
1007 4 : }
1008 4 : }
1009 4 : }
1010 4 : }"#;
1011 4 :
1012 4 : let expected = IndexPart {
1013 4 : version: 10,
1014 4 : layer_metadata: HashMap::from([
1015 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
1016 4 : file_size: 25600000,
1017 4 : generation: Generation::none(),
1018 4 : shard: ShardIndex::unsharded()
1019 4 : }),
1020 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
1021 4 : file_size: 9007199254741001,
1022 4 : generation: Generation::none(),
1023 4 : shard: ShardIndex::unsharded()
1024 4 : })
1025 4 : ]),
1026 4 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
1027 4 : metadata: TimelineMetadata::new(
1028 4 : Lsn::from_str("0/16960E8").unwrap(),
1029 4 : Some(Lsn::from_str("0/1696070").unwrap()),
1030 4 : Some(TimelineId::from_str("e45a7f37d3ee2ff17dc14bf4f4e3f52e").unwrap()),
1031 4 : Lsn::INVALID,
1032 4 : Lsn::from_str("0/1696070").unwrap(),
1033 4 : Lsn::from_str("0/1696070").unwrap(),
1034 4 : 14,
1035 4 : ).with_recalculated_checksum().unwrap(),
1036 4 : deleted_at: None,
1037 4 : lineage: Default::default(),
1038 4 : gc_blocking: Some(GcBlocking {
1039 4 : started_at: parse_naive_datetime("2024-07-19T09:00:00.123000000"),
1040 4 : reasons: enumset::EnumSet::from_iter([GcBlockingReason::DetachAncestor]),
1041 4 : }),
1042 4 : last_aux_file_policy: Default::default(),
1043 4 : archived_at: None,
1044 4 : import_pgdata: Some(import_pgdata::index_part_format::Root::V1(import_pgdata::index_part_format::V1::Done(import_pgdata::index_part_format::Done{
1045 4 : started_at: parse_naive_datetime("2024-11-13T09:23:42.123000000"),
1046 4 : finished_at: parse_naive_datetime("2024-11-13T09:42:23.123000000"),
1047 4 : idempotency_key: import_pgdata::index_part_format::IdempotencyKey::new("specified-by-client-218a5213-5044-4562-a28d-d024c5f057f5".to_string()),
1048 4 : }))),
1049 4 : rel_size_migration: None,
1050 4 : l2_lsn: None,
1051 4 : gc_compaction: None,
1052 4 : };
1053 4 :
1054 4 : let part = IndexPart::from_json_bytes(example.as_bytes()).unwrap();
1055 4 : assert_eq!(part, expected);
1056 4 : }
1057 :
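: /// Checks that a v11 document with `"rel_size_migration": "legacy"` parses into `RelSizeMigration::Legacy`, with the remaining fields handled as in v10.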
1058 : #[test]
1059 4 : fn v11_rel_size_migration_is_parsed() {
1060 4 : let example = r#"{
1061 4 : "version": 11,
1062 4 : "layer_metadata":{
1063 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
1064 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
1065 4 : },
1066 4 : "disk_consistent_lsn":"0/16960E8",
1067 4 : "metadata": {
1068 4 : "disk_consistent_lsn": "0/16960E8",
1069 4 : "prev_record_lsn": "0/1696070",
1070 4 : "ancestor_timeline": "e45a7f37d3ee2ff17dc14bf4f4e3f52e",
1071 4 : "ancestor_lsn": "0/0",
1072 4 : "latest_gc_cutoff_lsn": "0/1696070",
1073 4 : "initdb_lsn": "0/1696070",
1074 4 : "pg_version": 14
1075 4 : },
1076 4 : "gc_blocking": {
1077 4 : "started_at": "2024-07-19T09:00:00.123",
1078 4 : "reasons": ["DetachAncestor"]
1079 4 : },
1080 4 : "import_pgdata": {
1081 4 : "V1": {
1082 4 : "Done": {
1083 4 : "idempotency_key": "specified-by-client-218a5213-5044-4562-a28d-d024c5f057f5",
1084 4 : "started_at": "2024-11-13T09:23:42.123",
1085 4 : "finished_at": "2024-11-13T09:42:23.123"
1086 4 : }
1087 4 : }
1088 4 : },
1089 4 : "rel_size_migration": "legacy"
1090 4 : }"#;
1091 4 :
1092 4 : let expected = IndexPart {
1093 4 : version: 11,
1094 4 : layer_metadata: HashMap::from([
1095 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
1096 4 : file_size: 25600000,
1097 4 : generation: Generation::none(),
1098 4 : shard: ShardIndex::unsharded()
1099 4 : }),
1100 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
1101 4 : file_size: 9007199254741001,
1102 4 : generation: Generation::none(),
1103 4 : shard: ShardIndex::unsharded()
1104 4 : })
1105 4 : ]),
1106 4 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
1107 4 : metadata: TimelineMetadata::new(
1108 4 : Lsn::from_str("0/16960E8").unwrap(),
1109 4 : Some(Lsn::from_str("0/1696070").unwrap()),
1110 4 : Some(TimelineId::from_str("e45a7f37d3ee2ff17dc14bf4f4e3f52e").unwrap()),
1111 4 : Lsn::INVALID,
1112 4 : Lsn::from_str("0/1696070").unwrap(),
1113 4 : Lsn::from_str("0/1696070").unwrap(),
1114 4 : 14,
1115 4 : ).with_recalculated_checksum().unwrap(),
1116 4 : deleted_at: None,
1117 4 : lineage: Default::default(),
1118 4 : gc_blocking: Some(GcBlocking {
1119 4 : started_at: parse_naive_datetime("2024-07-19T09:00:00.123000000"),
1120 4 : reasons: enumset::EnumSet::from_iter([GcBlockingReason::DetachAncestor]),
1121 4 : }),
1122 4 : last_aux_file_policy: Default::default(),
1123 4 : archived_at: None,
1124 4 : import_pgdata: Some(import_pgdata::index_part_format::Root::V1(import_pgdata::index_part_format::V1::Done(import_pgdata::index_part_format::Done{
1125 4 : started_at: parse_naive_datetime("2024-11-13T09:23:42.123000000"),
1126 4 : finished_at: parse_naive_datetime("2024-11-13T09:42:23.123000000"),
1127 4 : idempotency_key: import_pgdata::index_part_format::IdempotencyKey::new("specified-by-client-218a5213-5044-4562-a28d-d024c5f057f5".to_string()),
1128 4 : }))),
1129 4 : rel_size_migration: Some(RelSizeMigration::Legacy),
1130 4 : l2_lsn: None,
1131 4 : gc_compaction: None,
1132 4 : };
1133 4 :
1134 4 : let part = IndexPart::from_json_bytes(example.as_bytes()).unwrap();
1135 4 : assert_eq!(part, expected);
1136 4 : }
1137 :
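: /// Checks that the `l2_lsn` and `gc_compaction` fields are parsed. The fixture below is a version-12 document; the test name suggests v13 shares the same layout for these fields.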
1138 : #[test]
1139 4 : fn v12_v13_l2_gc_compaction_is_parsed() {
1140 4 : let example = r#"{
1141 4 : "version": 12,
1142 4 : "layer_metadata":{
1143 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9": { "file_size": 25600000 },
1144 4 : "000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51": { "file_size": 9007199254741001 }
1145 4 : },
1146 4 : "disk_consistent_lsn":"0/16960E8",
1147 4 : "metadata": {
1148 4 : "disk_consistent_lsn": "0/16960E8",
1149 4 : "prev_record_lsn": "0/1696070",
1150 4 : "ancestor_timeline": "e45a7f37d3ee2ff17dc14bf4f4e3f52e",
1151 4 : "ancestor_lsn": "0/0",
1152 4 : "latest_gc_cutoff_lsn": "0/1696070",
1153 4 : "initdb_lsn": "0/1696070",
1154 4 : "pg_version": 14
1155 4 : },
1156 4 : "gc_blocking": {
1157 4 : "started_at": "2024-07-19T09:00:00.123",
1158 4 : "reasons": ["DetachAncestor"]
1159 4 : },
1160 4 : "import_pgdata": {
1161 4 : "V1": {
1162 4 : "Done": {
1163 4 : "idempotency_key": "specified-by-client-218a5213-5044-4562-a28d-d024c5f057f5",
1164 4 : "started_at": "2024-11-13T09:23:42.123",
1165 4 : "finished_at": "2024-11-13T09:42:23.123"
1166 4 : }
1167 4 : }
1168 4 : },
1169 4 : "rel_size_migration": "legacy",
1170 4 : "l2_lsn": "0/16960E8",
1171 4 : "gc_compaction": {
1172 4 : "last_completed_lsn": "0/16960E8"
1173 4 : }
1174 4 : }"#;
1175 4 :
1176 4 : let expected = IndexPart {
1177 4 : version: 12,
1178 4 : layer_metadata: HashMap::from([
1179 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__0000000001696070-00000000016960E9".parse().unwrap(), LayerFileMetadata {
1180 4 : file_size: 25600000,
1181 4 : generation: Generation::none(),
1182 4 : shard: ShardIndex::unsharded()
1183 4 : }),
1184 4 : ("000000000000000000000000000000000000-FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF__00000000016B59D8-00000000016B5A51".parse().unwrap(), LayerFileMetadata {
1185 4 : file_size: 9007199254741001,
1186 4 : generation: Generation::none(),
1187 4 : shard: ShardIndex::unsharded()
1188 4 : })
1189 4 : ]),
1190 4 : disk_consistent_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
1191 4 : metadata: TimelineMetadata::new(
1192 4 : Lsn::from_str("0/16960E8").unwrap(),
1193 4 : Some(Lsn::from_str("0/1696070").unwrap()),
1194 4 : Some(TimelineId::from_str("e45a7f37d3ee2ff17dc14bf4f4e3f52e").unwrap()),
1195 4 : Lsn::INVALID,
1196 4 : Lsn::from_str("0/1696070").unwrap(),
1197 4 : Lsn::from_str("0/1696070").unwrap(),
1198 4 : 14,
1199 4 : ).with_recalculated_checksum().unwrap(),
1200 4 : deleted_at: None,
1201 4 : lineage: Default::default(),
1202 4 : gc_blocking: Some(GcBlocking {
1203 4 : started_at: parse_naive_datetime("2024-07-19T09:00:00.123000000"),
1204 4 : reasons: enumset::EnumSet::from_iter([GcBlockingReason::DetachAncestor]),
1205 4 : }),
1206 4 : last_aux_file_policy: Default::default(),
1207 4 : archived_at: None,
1208 4 : import_pgdata: Some(import_pgdata::index_part_format::Root::V1(import_pgdata::index_part_format::V1::Done(import_pgdata::index_part_format::Done{
1209 4 : started_at: parse_naive_datetime("2024-11-13T09:23:42.123000000"),
1210 4 : finished_at: parse_naive_datetime("2024-11-13T09:42:23.123000000"),
1211 4 : idempotency_key: import_pgdata::index_part_format::IdempotencyKey::new("specified-by-client-218a5213-5044-4562-a28d-d024c5f057f5".to_string()),
1212 4 : }))),
1213 4 : rel_size_migration: Some(RelSizeMigration::Legacy),
1214 4 : l2_lsn: Some("0/16960E8".parse::<Lsn>().unwrap()),
1215 4 : gc_compaction: Some(GcCompactionState {
1216 4 : last_completed_lsn: "0/16960E8".parse::<Lsn>().unwrap(),
1217 4 : }),
1218 4 : };
1219 4 :
1220 4 : let part = IndexPart::from_json_bytes(example.as_bytes()).unwrap();
1221 4 : assert_eq!(part, expected);
1222 4 : }
1223 :
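: /// Test helper: parses a nanosecond-precision `%Y-%m-%dT%H:%M:%S.%f` timestamp into a `NaiveDateTime`, panicking on malformed input.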
1224 72 : fn parse_naive_datetime(s: &str) -> NaiveDateTime {
1225 72 : chrono::NaiveDateTime::parse_from_str(s, "%Y-%m-%dT%H:%M:%S.%f").unwrap()
1226 72 : }
1227 : }