Orioledb basic support #1698

Open: wants to merge 2 commits into master
23 changes: 18 additions & 5 deletions cmd/pg/backup_push.go
@@ -28,6 +28,7 @@ const (
deltaFromNameFlag = "delta-from-name"
addUserDataFlag = "add-user-data"
withoutFilesMetadataFlag = "without-files-metadata"
withOrioledb = "with-orioledb"
Collaborator comment on withOrioledb: can we query the database for this somehow? Why ask the user, if we can ask the db?
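A minimal sketch of the reviewer's suggestion, assuming a pgx v4/v5-style connection is already available at backup-push time (how wal-g obtains the connection is an assumption, not this PR's code): the extension's presence can be read from pg_extension instead of being passed as a user flag.

// Hypothetical helper, not part of this PR: detect OrioleDB from the server itself.
// Assumes imports "context" and a pgx connection; wal-g's actual connection plumbing may differ.
func orioledbInstalled(ctx context.Context, conn *pgx.Conn) (bool, error) {
	var installed bool
	err := conn.QueryRow(ctx,
		"SELECT EXISTS (SELECT 1 FROM pg_extension WHERE extname = 'orioledb')").Scan(&installed)
	return installed, err
}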


permanentShorthand = "p"
fullBackupShorthand = "f"
@@ -106,11 +107,20 @@ var (
userData, err := internal.UnmarshalSentinelUserData(userDataRaw)
tracelog.ErrorLogger.FatalfOnError("Failed to unmarshal the provided UserData: %s", err)

arguments := postgres.NewBackupArguments(uploader, dataDirectory, utility.BaseBackupPath,
permanent, verifyPageChecksums || viper.GetBool(conf.VerifyPageChecksumsSetting),
fullBackup, storeAllCorruptBlocks || viper.GetBool(conf.StoreAllCorruptBlocksSetting),
tarBallComposerType, postgres.NewRegularDeltaBackupConfigurator(deltaBaseSelector),
userData, withoutFilesMetadata)
var arguments postgres.BackupArguments
if orioledbEnabled {
arguments = postgres.OrioledbNewBackupArguments(uploader, dataDirectory, utility.BaseBackupPath,
permanent, verifyPageChecksums || viper.GetBool(conf.VerifyPageChecksumsSetting),
fullBackup, storeAllCorruptBlocks || viper.GetBool(conf.StoreAllCorruptBlocksSetting),
tarBallComposerType, postgres.NewRegularDeltaBackupConfigurator(deltaBaseSelector),
userData, withoutFilesMetadata, orioledbEnabled)
} else {
arguments = postgres.NewBackupArguments(uploader, dataDirectory, utility.BaseBackupPath,
permanent, verifyPageChecksums || viper.GetBool(conf.VerifyPageChecksumsSetting),
fullBackup, storeAllCorruptBlocks || viper.GetBool(conf.StoreAllCorruptBlocksSetting),
tarBallComposerType, postgres.NewRegularDeltaBackupConfigurator(deltaBaseSelector),
userData, withoutFilesMetadata)
}

backupHandler, err := postgres.NewBackupHandler(arguments)
tracelog.ErrorLogger.FatalOnError(err)
@@ -128,6 +138,7 @@ var (
deltaFromUserData = ""
userDataRaw = ""
withoutFilesMetadata = false
orioledbEnabled = false
)

func chooseTarBallComposer() postgres.TarBallComposerType {
@@ -177,6 +188,8 @@ func init() {
"", "Write the provided user data to the backup sentinel and metadata files.")
backupPushCmd.Flags().BoolVar(&withoutFilesMetadata, withoutFilesMetadataFlag,
false, "Do not track files metadata, significantly reducing memory usage")
backupPushCmd.Flags().BoolVar(&orioledbEnabled, withOrioledb,
false, "Enable experimental orioledb delta backups support")
backupPushCmd.Flags().StringVar(&targetStorage, "target-storage", "",
targetStorageDescription)
}
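With the flag registered above, OrioleDB-aware backups stay opt-in. A hypothetical invocation, assuming the usual backup-push form, would be `wal-g backup-push $PGDATA --with-orioledb`.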
41 changes: 38 additions & 3 deletions internal/databases/postgres/backup_push_handler.go
@@ -63,6 +63,7 @@ type BackupArguments struct {
isFullBackup bool
deltaConfigurator DeltaBackupConfigurator
withoutFilesMetadata bool
orioledbEnabled bool
composerInitFunc func(handler *BackupHandler) error
preventConcurrentBackups bool
}
@@ -77,6 +78,7 @@ type CurBackupInfo struct {
compressedSize int64
dataCatalogSize int64
incrementCount int
startChkpNum *uint32
}

func NewPrevBackupInfo(name string, sentinel BackupSentinelDto, filesMeta FilesMetadataDto) PrevBackupInfo {
@@ -139,6 +141,29 @@ func NewBackupArguments(uploader internal.Uploader, pgDataDirectory string, backup
}
}

// OrioledbNewBackupArguments creates a BackupArguments object to hold the OrioleDB-aware arguments from the cmd
func OrioledbNewBackupArguments(uploader internal.Uploader, pgDataDirectory string, backupsFolder string, isPermanent bool,
verifyPageChecksums bool, isFullBackup bool, storeAllCorruptBlocks bool, tarBallComposerType TarBallComposerType,
deltaConfigurator DeltaBackupConfigurator, userData interface{}, withoutFilesMetadata bool, orioledbEnabled bool) BackupArguments {
return BackupArguments{
Uploader: uploader,
pgDataDirectory: pgDataDirectory,
backupsFolder: backupsFolder,
isPermanent: isPermanent,
verifyPageChecksums: verifyPageChecksums,
isFullBackup: isFullBackup,
storeAllCorruptBlocks: storeAllCorruptBlocks,
deltaConfigurator: deltaConfigurator,
userData: userData,
withoutFilesMetadata: withoutFilesMetadata,
orioledbEnabled: orioledbEnabled,
composerInitFunc: func(handler *BackupHandler) error {
return configureTarBallComposer(handler, tarBallComposerType)
},
preventConcurrentBackups: false,
}
}

func (ba *BackupArguments) EnablePreventConcurrentBackups() {
ba.preventConcurrentBackups = true
tracelog.InfoLogger.Println("Concurrent backups are disabled")
@@ -154,12 +179,22 @@ func (bh *BackupHandler) createAndPushBackup(ctx context.Context) {

arguments := bh.Arguments
crypter := internal.ConfigureCrypter()
bh.Workers.Bundle = NewBundle(bh.PgInfo.PgDataDirectory, crypter, bh.prevBackupInfo.name,
bh.prevBackupInfo.sentinelDto.BackupStartLSN, bh.prevBackupInfo.filesMetadataDto.Files, arguments.forceIncremental,
viper.GetInt64(conf.TarSizeThresholdSetting))
if arguments.orioledbEnabled && bh.prevBackupInfo.sentinelDto.BackupStartChkpNum != nil {
bh.Workers.Bundle = OrioledbNewBundle(bh.PgInfo.PgDataDirectory, crypter, bh.prevBackupInfo.name,
bh.prevBackupInfo.sentinelDto.BackupStartLSN, bh.prevBackupInfo.filesMetadataDto.Files, arguments.forceIncremental,
viper.GetInt64(conf.TarSizeThresholdSetting), bh.prevBackupInfo.sentinelDto.BackupStartChkpNum)
} else {
bh.Workers.Bundle = NewBundle(bh.PgInfo.PgDataDirectory, crypter, bh.prevBackupInfo.name,
bh.prevBackupInfo.sentinelDto.BackupStartLSN, bh.prevBackupInfo.filesMetadataDto.Files, arguments.forceIncremental,
viper.GetInt64(conf.TarSizeThresholdSetting))
}

err = bh.startBackup()
tracelog.ErrorLogger.FatalOnError(err)
if arguments.orioledbEnabled {
OrioledbSetStartChkpNum(bh)
}

bh.handleDeltaBackup(folder)
tarFileSets := bh.uploadBackup()
sentinelDto, filesMetaDto, err := bh.setupDTO(tarFileSets)
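OrioledbSetStartChkpNum is called above but its definition is not included in this hunk. A rough sketch of the shape such a helper could take, assuming the checkpoint number is read from the server or OrioleDB control data at backup start; the source of the number is a placeholder, not a confirmed OrioleDB or wal-g API.

// Hypothetical sketch, not the PR's implementation: record the OrioleDB checkpoint
// number observed at backup start into CurBackupInfo, mirroring how startLSN is kept.
func OrioledbSetStartChkpNum(bh *BackupHandler) {
	chkpNum, err := readCurrentOrioledbChkpNum(bh) // placeholder; the real source is not visible in this diff
	if err != nil {
		tracelog.WarningLogger.Printf("failed to read orioledb checkpoint number: %v", err)
		return
	}
	bh.CurBackupInfo.startChkpNum = &chkpNum
}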
14 changes: 9 additions & 5 deletions internal/databases/postgres/backup_sentinel_dto.go
@@ -32,15 +32,19 @@ type BackupSentinelDto struct {

UserData interface{} `json:"UserData,omitempty"`

FilesMetadataDisabled bool `json:"FilesMetadataDisabled,omitempty"`
FilesMetadataDisabled bool `json:"FilesMetadataDisabled,omitempty"`
BackupStartChkpNum *uint32 `json:"ChkpNum"`
IncrementFromChkpNum *uint32 `json:"DeltaChkpNum,omitempty"`
}

func NewBackupSentinelDto(bh *BackupHandler, tbsSpec *TablespaceSpec) BackupSentinelDto {
sentinel := BackupSentinelDto{
BackupStartLSN: &bh.CurBackupInfo.startLSN,
IncrementFromLSN: bh.prevBackupInfo.sentinelDto.BackupStartLSN,
PgVersion: bh.PgInfo.PgVersion,
TablespaceSpec: tbsSpec,
BackupStartLSN: &bh.CurBackupInfo.startLSN,
IncrementFromLSN: bh.prevBackupInfo.sentinelDto.BackupStartLSN,
PgVersion: bh.PgInfo.PgVersion,
TablespaceSpec: tbsSpec,
BackupStartChkpNum: bh.CurBackupInfo.startChkpNum,
IncrementFromChkpNum: bh.prevBackupInfo.sentinelDto.BackupStartChkpNum,
}
if bh.prevBackupInfo.sentinelDto.BackupStartLSN != nil {
sentinel.IncrementFrom = &bh.prevBackupInfo.name
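One side effect of the new tags worth noting: `ChkpNum` has no `omitempty`, so sentinels for plain PostgreSQL backups will carry `"ChkpNum": null`, while `DeltaChkpNum` is dropped when unset. A small self-contained sketch (field names copied from the struct above) illustrating the difference:

package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors only the two new sentinel fields to show how they serialize.
type chkpFields struct {
	BackupStartChkpNum   *uint32 `json:"ChkpNum"`
	IncrementFromChkpNum *uint32 `json:"DeltaChkpNum,omitempty"`
}

func main() {
	plain, _ := json.Marshal(chkpFields{}) // non-OrioleDB backup: both nil
	fmt.Println(string(plain))             // {"ChkpNum":null}

	start, from := uint32(42), uint32(40) // OrioleDB delta backup
	delta, _ := json.Marshal(chkpFields{&start, &from})
	fmt.Println(string(delta)) // {"ChkpNum":42,"DeltaChkpNum":40}
}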
32 changes: 31 additions & 1 deletion internal/databases/postgres/bundle.go
@@ -71,6 +71,8 @@
DataCatalogSize *int64

forceIncremental bool

IncrementFromChkpNum *uint32
}

// TODO: use DiskDataFolder
@@ -95,6 +97,30 @@
}
}

// TODO: use DiskDataFolder
func OrioledbNewBundle(
directory string, crypter crypto.Crypter,
incrementFromName string, incrementFromLsn *LSN, incrementFromFiles internal.BackupFileList,
forceIncremental bool, tarSizeThreshold int64,
incrementFromChkpNum *uint32,
) *Bundle {
return &Bundle{
Bundle: internal.Bundle{
Directory: directory,
Crypter: crypter,
TarSizeThreshold: tarSizeThreshold,
ExcludedFilenames: ExcludedFilenames,
},
IncrementFromLsn: incrementFromLsn,
IncrementFromFiles: incrementFromFiles,
IncrementFromName: incrementFromName,
TablespaceSpec: NewTablespaceSpec(directory),
forceIncremental: forceIncremental,
DataCatalogSize: new(int64),
IncrementFromChkpNum: incrementFromChkpNum,
}
}

func (bundle *Bundle) SetupComposer(composerMaker TarBallComposerMaker) (err error) {
tarBallComposer, err := composerMaker.Make(bundle)
if err != nil {
@@ -241,7 +267,7 @@
// Does not follow symlinks (it seems like it does). If file is in ExcludedFilenames, will not be included
// in the final tarball. EXCLUDED directories are created
// but their contents are not written to local disk.
func (bundle *Bundle) addToBundle(path string, info os.FileInfo) error {

GitHub Actions / lint reports a check failure on line 270 in internal/databases/postgres/bundle.go: cyclomatic complexity 18 of func `(*Bundle).addToBundle` is high (> 15) (gocyclo)
fileName := info.Name()
_, excluded := ExcludedFilenames[fileName]
isDir := info.IsDir()
@@ -275,7 +301,11 @@
return nil
}
incrementBaseLsn := bundle.getIncrementBaseLsn()
isIncremented := incrementBaseLsn != nil && (wasInBase || bundle.forceIncremental) && isPagedFile(info, path)
isIncremented := incrementBaseLsn != nil && (wasInBase || bundle.forceIncremental) && (isPagedFile(info, path))

if !isIncremented && bundle.IncrementFromChkpNum != nil {
isIncremented = wasInBase && isOrioledbDataFile(info, path)
}
bundle.TarBallComposer.AddFile(internal.NewComposeFileInfo(path, info, wasInBase, isIncremented, fileInfoHeader))
} else {
err := bundle.TarBallComposer.AddHeader(fileInfoHeader, info)
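isOrioledbDataFile is used in the new increment condition above but is not defined in this diff. A hypothetical sketch of one way such a predicate might look, assuming OrioleDB keeps its relation data under an orioledb_data directory inside PGDATA; the directory name and the simple path check are assumptions, not the PR's implementation (assumes os, strings, and path/filepath imports).

// Hypothetical sketch only: treat regular files under the assumed orioledb_data
// directory as OrioleDB data files eligible for checkpoint-based increments.
func isOrioledbDataFile(info os.FileInfo, filePath string) bool {
	if info.IsDir() {
		return false
	}
	normalized := filepath.ToSlash(filePath)
	return strings.Contains(normalized, "/orioledb_data/")
}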