@@ -197,18 +197,28 @@ def upload_data_part(
197
197
raise StorageError (msg ) from e
198
198
199
199
def upload_cloud_storage_metadata (
200
- self , backup_meta : BackupMetadata , disk : Disk , delete_after_upload : bool = False
200
+ self ,
201
+ backup_meta : BackupMetadata ,
202
+ disk : Disk ,
203
+ table : Table ,
204
+ delete_after_upload : bool = False ,
201
205
) -> bool :
202
206
"""
203
207
Upload specified disk metadata files from given directory path as a tarball.
204
208
Returns: whether backed up disk had data.
205
209
"""
210
+ assert table .path_on_disk , f"Table { table } doesn't store data on disk"
211
+
206
212
backup_name = backup_meta .get_sanitized_name ()
207
213
compression = backup_meta .cloud_storage .compressed
208
214
remote_path = _disk_metadata_path (
209
- self .get_backup_path (backup_name ), disk .name , compression
215
+ self .get_backup_path (backup_name ),
216
+ table .database ,
217
+ table .name ,
218
+ disk .name ,
219
+ compression ,
210
220
)
211
- shadow_path = os . path . join (disk .path , "shadow" , backup_name )
221
+ shadow_path = _table_shadow_path (disk .path , backup_name , table . path_on_disk )
212
222
exclude_file_names = ["frozen_metadata.txt" ]
213
223
if dir_is_empty (shadow_path , exclude_file_names ):
214
224
return False
@@ -219,6 +229,7 @@ def upload_cloud_storage_metadata(
219
229
self ._storage_loader .upload_files_tarball_scan (
220
230
dir_path = shadow_path ,
221
231
remote_path = remote_path ,
232
+ tar_base_dir = table .path_on_disk ,
222
233
exclude_file_names = exclude_file_names ,
223
234
is_async = True ,
224
235
encryption = backup_meta .cloud_storage .encrypted ,
@@ -532,6 +543,28 @@ def check_data_part(self, backup_path: str, part: PartMetadata) -> bool:
532
543
)
533
544
return False
534
545
546
def _get_cloud_storage_metadata_remote_paths(
    self,
    backup_name: str,
    source_disk_name: str,
    compression: bool,
) -> Sequence[str]:
    """
    Resolve the remote path(s) of cloud storage metadata tarballs for a disk.

    Older backups stored a single per-disk tarball (e.g. 'disks/s3.tar.gz');
    newer backups keep one tarball per table under 'disks/<disk_name>/'.
    Returns a list with the single legacy path if it exists, otherwise the
    recursive listing of the per-disk directory.
    """
    backup_path = self.get_backup_path(backup_name)
    # Backwards compatibility: probe the old-style single tarball location first.
    legacy_path = _disk_metadata_path(
        backup_path, None, None, source_disk_name, compression
    )
    if self._storage_loader.path_exists(legacy_path):
        return [legacy_path]
    # New layout: enumerate every per-table tarball under the disk directory.
    disk_dir = str(os.path.join(backup_path, "disks", source_disk_name))
    return self._storage_loader.list_dir(disk_dir, recursive=True, absolute=True)
535
568
def download_cloud_storage_metadata (
536
569
self , backup_meta : BackupMetadata , disk : Disk , source_disk_name : str
537
570
) -> None :
@@ -542,22 +575,23 @@ def download_cloud_storage_metadata(
542
575
compression = backup_meta .cloud_storage .compressed
543
576
disk_path = os .path .join (disk .path , "shadow" , backup_name )
544
577
os .makedirs (disk_path , exist_ok = True )
545
- remote_path = _disk_metadata_path (
546
- self . get_backup_path ( backup_name ) , source_disk_name , compression
578
+ metadata_remote_paths = self . _get_cloud_storage_metadata_remote_paths (
579
+ backup_name , source_disk_name , compression
547
580
)
548
581
549
- logging .debug (f'Downloading "{ disk_path } " files from "{ remote_path } "' )
550
- try :
551
- self ._storage_loader .download_files (
552
- remote_path = remote_path ,
553
- local_path = disk_path ,
554
- is_async = True ,
555
- encryption = backup_meta .cloud_storage .encrypted ,
556
- compression = compression ,
557
- )
558
- except Exception as e :
559
- msg = f'Failed to download tarball file "{ remote_path } "'
560
- raise StorageError (msg ) from e
582
+ for remote_path in metadata_remote_paths :
583
+ logging .debug (f'Downloading "{ disk_path } " files from "{ remote_path } "' )
584
+ try :
585
+ self ._storage_loader .download_files (
586
+ remote_path = remote_path ,
587
+ local_path = disk_path ,
588
+ is_async = True ,
589
+ encryption = backup_meta .cloud_storage .encrypted ,
590
+ compression = compression ,
591
+ )
592
+ except Exception as e :
593
+ msg = f'Failed to download tarball file "{ remote_path } "'
594
+ raise StorageError (msg ) from e
561
595
562
596
def delete_backup (self , backup_name : str ) -> None :
563
597
"""
@@ -715,17 +749,39 @@ def _part_path(
715
749
716
750
717
751
def _disk_metadata_path (
718
- backup_path : str , disk_name : str , compressed : bool = False
752
+ backup_path : str ,
753
+ db_name : Optional [str ],
754
+ table_name : Optional [str ],
755
+ disk_name : str ,
756
+ compressed : bool = False ,
719
757
) -> str :
720
758
"""
721
759
Returns path to store tarball with cloud storage shadow metadata.
722
760
"""
723
761
extension = ".tar"
724
762
if compressed :
725
763
extension += COMPRESSED_EXTENSION
764
+ if table_name or db_name :
765
+ assert table_name and db_name
766
+ return os .path .join (
767
+ backup_path ,
768
+ "disks" ,
769
+ disk_name ,
770
+ _quote (db_name ),
771
+ f"{ _quote (table_name )} { extension } " ,
772
+ )
726
773
return os .path .join (backup_path , "disks" , f"{ disk_name } { extension } " )
727
774
728
775
776
+ def _table_shadow_path (
777
+ disk_path : str , backup_name : str , table_path_on_disk : str
778
+ ) -> str :
779
+ """
780
+ Returns path to frozen table data on given disk.
781
+ """
782
+ return os .path .join (disk_path , "shadow" , backup_name , table_path_on_disk )
783
+
784
+
729
785
def _quote (value : str ) -> str :
730
786
return quote (value , safe = "" ).translate (
731
787
{
0 commit comments