docdb: expose almost all options to user (#231) (#236)
ref #230
ti-chi-bot authored Feb 28, 2024
1 parent 47595b6 commit 0f63593
Showing 4 changed files with 124 additions and 38 deletions.
74 changes: 57 additions & 17 deletions config/config.go
@@ -23,16 +23,6 @@ import (
"go.uber.org/zap"
)

const (
DefProfilingEnable = false // TODO(mornyx): Enable when tiflash#5285 is fixed.
DefProfilingIntervalSeconds = 60
DefProfileSeconds = 10
DefProfilingTimeoutSeconds = 120
DefProfilingDataRetentionSeconds = 3 * 24 * 60 * 60 // 3 days
DefTSDBRetentionPeriod = "1" // 1 month
DefTSDBSearchMaxUniqueTimeseries = 300000
)

type Config struct {
Address string `toml:"address" json:"address"`
AdvertiseAddress string `toml:"advertise-address" json:"advertise_address"`
@@ -42,6 +32,7 @@ type Config struct {
ContinueProfiling ContinueProfilingConfig `toml:"-" json:"continuous_profiling"`
Security Security `toml:"security" json:"security"`
TSDB TSDB `toml:"tsdb" json:"tsdb"`
DocDB DocDB `toml:"docdb" json:"docdb"`
}

var defaultConfig = Config{
@@ -57,15 +48,39 @@ var defaultConfig = Config{
Path: "data",
},
ContinueProfiling: ContinueProfilingConfig{
Enable: DefProfilingEnable,
ProfileSeconds: DefProfileSeconds,
IntervalSeconds: DefProfilingIntervalSeconds,
TimeoutSeconds: DefProfilingTimeoutSeconds,
DataRetentionSeconds: DefProfilingDataRetentionSeconds,
Enable: false, // TODO(mornyx): Enable when tiflash#5285 is fixed.
ProfileSeconds: 10,
IntervalSeconds: 60,
TimeoutSeconds: 120,
DataRetentionSeconds: 3 * 24 * 60 * 60, // 3 days
},
TSDB: TSDB{
RetentionPeriod: DefTSDBRetentionPeriod,
SearchMaxUniqueTimeseries: DefTSDBSearchMaxUniqueTimeseries,
RetentionPeriod: "1", // 1 month
SearchMaxUniqueTimeseries: 300000,
},
DocDB: DocDB{
LSMOnly: false,
SyncWrites: false,
NumVersionsToKeep: 1,
NumGoroutines: 8,
MemTableSize: 64 << 20,
BaseTableSize: 2 << 20,
BaseLevelSize: 10 << 20,
LevelSizeMultiplier: 10,
MaxLevels: 7,
VLogPercentile: 0.0,
ValueThreshold: 1 << 20,
NumMemtables: 5,
BlockSize: 4 * 1024,
BloomFalsePositive: 0.01,
BlockCacheSize: 256 << 20,
IndexCacheSize: 0,
NumLevelZeroTables: 5,
NumLevelZeroTablesStall: 15,
ValueLogFileSize: 1<<30 - 1,
ValueLogMaxEntries: 1000000,
NumCompactors: 4,
ZSTDCompressionLevel: 1,
},
}
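
The new DocDB defaults appear to track Badger v3's own DefaultOptions, with byte sizes written as bit shifts. A small standalone sketch (not part of this change) showing what those shifts evaluate to:

package main

import "fmt"

func main() {
    // Byte-size defaults from the DocDB block above, written as bit shifts.
    defaults := []struct {
        key   string
        bytes int64
    }{
        {"mem-table-size", 64 << 20},       // 64 MiB
        {"base-table-size", 2 << 20},       // 2 MiB
        {"base-level-size", 10 << 20},      // 10 MiB
        {"value-threshold", 1 << 20},       // 1 MiB
        {"block-cache-size", 256 << 20},    // 256 MiB
        {"value-log-file-size", 1<<30 - 1}, // just under 1 GiB
    }
    for _, d := range defaults {
        fmt.Printf("%-20s = %d bytes (%.2f MiB)\n", d.key, d.bytes, float64(d.bytes)/(1<<20))
    }
}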

@@ -400,6 +415,31 @@ type TSDB struct {
SearchMaxUniqueTimeseries int64 `toml:"search-max-unique-timeseries" json:"search_max_unique_timeseries"`
}

type DocDB struct {
LSMOnly bool `toml:"lsm-only" json:"lsm_only"`
SyncWrites bool `toml:"sync-writes" json:"sync_writes"`
NumVersionsToKeep int `toml:"num-versions-to-keep" json:"num_versions_to_keep"`
NumGoroutines int `toml:"num-goroutines" json:"num_goroutines"`
MemTableSize int64 `toml:"mem-table-size" json:"mem_table_size"`
BaseTableSize int64 `toml:"base-table-size" json:"base_table_size"`
BaseLevelSize int64 `toml:"base-level-size" json:"base_level_size"`
LevelSizeMultiplier int `toml:"level-size-multiplier" json:"level_size_multiplier"`
MaxLevels int `toml:"max-levels" json:"max_levels"`
VLogPercentile float64 `toml:"vlog-percentile" json:"vlog_percentile"`
ValueThreshold int64 `toml:"value-threshold" json:"value_threshold"`
NumMemtables int `toml:"num-memtables" json:"num_memtables"`
BlockSize int `toml:"block-size" json:"block_size"`
BloomFalsePositive float64 `toml:"bloom-false-positive" json:"bloom_false_positive"`
BlockCacheSize int64 `toml:"block-cache-size" json:"block_cache_size"`
IndexCacheSize int64 `toml:"index-cache-size" json:"index_cache_size"`
NumLevelZeroTables int `toml:"num-level-zero-tables" json:"num_level_zero_tables"`
NumLevelZeroTablesStall int `toml:"num-level-zero-tables-stall" json:"num_level_zero_tables_stall"`
ValueLogFileSize int64 `toml:"value-log-file-size" json:"value_log_file_size"`
ValueLogMaxEntries uint32 `toml:"value-log-max-entries" json:"value_log_max_entries"`
NumCompactors int `toml:"num-compactors" json:"num_compactors"`
ZSTDCompressionLevel int `toml:"zstd-compression-level" json:"zstd_compression_level"`
}
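
The toml and json tags above define the user-facing key names for the new section. A rough illustration of how a [docdb] table maps onto this struct, using github.com/BurntSushi/toml purely as an assumption (the decoder ng-monitoring actually uses is not shown in this diff); only a few fields are repeated to keep the sketch short:

package main

import (
    "fmt"

    "github.com/BurntSushi/toml"
)

// docDB repeats a subset of the DocDB struct above.
type docDB struct {
    LSMOnly       bool  `toml:"lsm-only"`
    SyncWrites    bool  `toml:"sync-writes"`
    MemTableSize  int64 `toml:"mem-table-size"`
    NumCompactors int   `toml:"num-compactors"`
}

type conf struct {
    DocDB docDB `toml:"docdb"`
}

func main() {
    // Hypothetical user override; the keys follow the toml tags above.
    src := `
[docdb]
lsm-only = true
sync-writes = true
mem-table-size = 33554432  # 32 MiB
num-compactors = 2
`
    var c conf
    if _, err := toml.Decode(src, &c); err != nil {
        panic(err)
    }
    fmt.Printf("%+v\n", c.DocDB)
}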

type ContinueProfilingConfig struct {
Enable bool `json:"enable"`
ProfileSeconds int `json:"profile_seconds"`
3 changes: 3 additions & 0 deletions config/config.toml.example
@@ -35,3 +35,6 @@ retention-period = "1"
# and spends some CPU time processing the found time series. This means that the maximum memory
# and CPU a single query can consume are proportional to `search-max-unique-timeseries`.
search-max-unique-timeseries = 300000

[docdb]
lsm-only = false
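
Only lsm-only is added to the example file; the remaining [docdb] keys presumably fall back to the defaults in config.go. A hedged sketch of overriding a couple of DocDB options programmatically through the InitConfig callback exercised in config_test.go below (the module import path is an assumption):

package main

import (
    "log"

    "github.com/pingcap/ng-monitoring/config" // import path assumed
)

func main() {
    // InitConfig and its override callback follow the usage in config_test.go;
    // precedence between file values and callback overrides is not shown in this diff.
    cfg, err := config.InitConfig("config.toml", func(c *config.Config) {
        c.DocDB.SyncWrites = true // favor durability over write throughput
        c.DocDB.NumCompactors = 2 // fewer background compactors on small hosts
    })
    if err != nil {
        log.Fatal(err)
    }
    log.Printf("docdb options: %+v", cfg.DocDB)
}
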
46 changes: 33 additions & 13 deletions config/config_test.go
@@ -2,7 +2,6 @@ package config

import (
"context"
"encoding/json"
"io/ioutil"
"os"
"path"
@@ -83,10 +82,17 @@ key-path = "ngm.key"
cfg, err := InitConfig(cfgFileName, func(config *Config) {})
require.NoError(t, err)
require.NotNil(t, cfg)
data, err := json.Marshal(cfg)
require.NoError(t, err)
// TODO(mornyx): Rollback when tiflash#5285 is fixed.
require.Equal(t, `{"address":"0.0.0.0:12020","advertise_address":"10.0.1.8:12020","pd":{"endpoints":["10.0.1.8:2379"]},"log":{"path":"log","level":"INFO"},"storage":{"path":"data"},"continuous_profiling":{"enable":false,"profile_seconds":10,"interval_seconds":60,"timeout_seconds":120,"data_retention_seconds":259200},"security":{"ca_path":"ngm.ca","cert_path":"ngm.cert","key_path":"ngm.key"},"tsdb":{"retention_period":"1","search_max_unique_timeseries":300000}}`, string(data))
require.Equal(t, "0.0.0.0:12020", cfg.Address)
require.Equal(t, "10.0.1.8:12020", cfg.AdvertiseAddress)
require.Equal(t, "log", cfg.Log.Path)
require.Equal(t, "INFO", cfg.Log.Level)
require.Len(t, cfg.PD.Endpoints, 1)
require.Equal(t, "10.0.1.8:2379", cfg.PD.Endpoints[0])
require.Equal(t, "data", cfg.Storage.Path)
require.Equal(t, "ngm.ca", cfg.Security.SSLCA)
require.Equal(t, "ngm.ca", cfg.Security.SSLCA)
require.Equal(t, "ngm.cert", cfg.Security.SSLCert)
require.Equal(t, "ngm.key", cfg.Security.SSLKey)

ctx, cancel := context.WithCancel(context.Background())
defer cancel()
@@ -111,21 +117,35 @@ path = "data1"
getCfg := <-cfgSub
globalCfg := GetGlobalConfig()
require.Equal(t, getCfg(), globalCfg)
data, err = json.Marshal(globalCfg)
require.NoError(t, err)
// TODO(mornyx): Rollback when tiflash#5285 is fixed.
require.Equal(t, `{"address":"0.0.0.0:12020","advertise_address":"10.0.1.8:12020","pd":{"endpoints":["10.0.1.8:2378","10.0.1.9:2379"]},"log":{"path":"log","level":"INFO"},"storage":{"path":"data"},"continuous_profiling":{"enable":false,"profile_seconds":10,"interval_seconds":60,"timeout_seconds":120,"data_retention_seconds":259200},"security":{"ca_path":"ngm.ca","cert_path":"ngm.cert","key_path":"ngm.key"},"tsdb":{"retention_period":"1","search_max_unique_timeseries":300000}}`, string(data))
require.Equal(t, "0.0.0.0:12020", globalCfg.Address)
require.Equal(t, "10.0.1.8:12020", globalCfg.AdvertiseAddress)
require.Equal(t, "log", globalCfg.Log.Path)
require.Equal(t, "INFO", globalCfg.Log.Level)
require.Len(t, globalCfg.PD.Endpoints, 2)
require.Equal(t, "10.0.1.8:2378", globalCfg.PD.Endpoints[0])
require.Equal(t, "10.0.1.9:2379", globalCfg.PD.Endpoints[1])
require.Equal(t, "data", globalCfg.Storage.Path)
require.Equal(t, "ngm.ca", globalCfg.Security.SSLCA)
require.Equal(t, "ngm.cert", globalCfg.Security.SSLCert)
require.Equal(t, "ngm.key", globalCfg.Security.SSLKey)

cfgData = ``
err = ioutil.WriteFile(cfgFileName, []byte(cfgData), 0666)
require.NoError(t, err)
procutil.SelfSIGHUP()
// wait reload
time.Sleep(time.Millisecond * 10)
data, err = json.Marshal(GetGlobalConfig())
require.NoError(t, err)
// TODO(mornyx): Rollback when tiflash#5285 is fixed.
require.Equal(t, `{"address":"0.0.0.0:12020","advertise_address":"10.0.1.8:12020","pd":{"endpoints":["10.0.1.8:2378","10.0.1.9:2379"]},"log":{"path":"log","level":"INFO"},"storage":{"path":"data"},"continuous_profiling":{"enable":false,"profile_seconds":10,"interval_seconds":60,"timeout_seconds":120,"data_retention_seconds":259200},"security":{"ca_path":"ngm.ca","cert_path":"ngm.cert","key_path":"ngm.key"},"tsdb":{"retention_period":"1","search_max_unique_timeseries":300000}}`, string(data))
require.Equal(t, "0.0.0.0:12020", globalCfg.Address)
require.Equal(t, "10.0.1.8:12020", globalCfg.AdvertiseAddress)
require.Equal(t, "log", globalCfg.Log.Path)
require.Equal(t, "INFO", globalCfg.Log.Level)
require.Len(t, globalCfg.PD.Endpoints, 2)
require.Equal(t, "10.0.1.8:2378", globalCfg.PD.Endpoints[0])
require.Equal(t, "10.0.1.9:2379", globalCfg.PD.Endpoints[1])
require.Equal(t, "data", globalCfg.Storage.Path)
require.Equal(t, "ngm.ca", globalCfg.Security.SSLCA)
require.Equal(t, "ngm.cert", globalCfg.Security.SSLCert)
require.Equal(t, "ngm.key", globalCfg.Security.SSLKey)
}
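
The JSON-blob comparison is replaced with field-by-field assertions; the same style could cover the new DocDB defaults. A sketch that is not part of this commit — it reads the unexported defaultConfig directly, which a test in package config can do:

func TestDocDBDefaults(t *testing.T) {
    cfg := defaultConfig
    require.False(t, cfg.DocDB.LSMOnly)
    require.False(t, cfg.DocDB.SyncWrites)
    require.Equal(t, 1, cfg.DocDB.NumVersionsToKeep)
    require.Equal(t, 8, cfg.DocDB.NumGoroutines)
    require.Equal(t, int64(64<<20), cfg.DocDB.MemTableSize)
    require.Equal(t, 0.01, cfg.DocDB.BloomFalsePositive)
    require.Equal(t, uint32(1000000), cfg.DocDB.ValueLogMaxEntries)
    require.Equal(t, 4, cfg.DocDB.NumCompactors)
    require.Equal(t, 1, cfg.DocDB.ZSTDCompressionLevel)
}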

func TestConfigValid(t *testing.T) {
39 changes: 31 additions & 8 deletions database/document/document.go
@@ -22,14 +22,37 @@ func Init(cfg *config.Config) {

dataPath := path.Join(cfg.Storage.Path, "docdb")
l, _ := initLogger(cfg)
opts := badger.DefaultOptions(dataPath).
WithZSTDCompressionLevel(3).
WithBlockSize(8 * 1024).
WithValueThreshold(128 * 1024).
WithValueLogFileSize(64 * 1024 * 1024).
WithBlockCacheSize(16 * 1024 * 1024).
WithMemTableSize(16 * 1024 * 1024).
WithLogger(l)
var opts badger.Options
if cfg.DocDB.LSMOnly {
opts = badger.LSMOnlyOptions(dataPath)
} else {
opts = badger.DefaultOptions(dataPath)
}
if !cfg.DocDB.LSMOnly {
opts = opts.WithValueThreshold(cfg.DocDB.ValueThreshold)
}
opts = opts.
WithLogger(l).
WithSyncWrites(cfg.DocDB.SyncWrites).
WithNumVersionsToKeep(cfg.DocDB.NumVersionsToKeep).
WithNumGoroutines(cfg.DocDB.NumGoroutines).
WithMemTableSize(cfg.DocDB.MemTableSize).
WithBaseTableSize(cfg.DocDB.BaseTableSize).
WithBaseLevelSize(cfg.DocDB.BaseLevelSize).
WithLevelSizeMultiplier(cfg.DocDB.LevelSizeMultiplier).
WithMaxLevels(cfg.DocDB.MaxLevels).
WithVLogPercentile(cfg.DocDB.VLogPercentile).
WithNumMemtables(cfg.DocDB.NumMemtables).
WithBlockSize(cfg.DocDB.BlockSize).
WithBloomFalsePositive(cfg.DocDB.BloomFalsePositive).
WithBlockCacheSize(cfg.DocDB.BlockCacheSize).
WithIndexCacheSize(cfg.DocDB.IndexCacheSize).
WithNumLevelZeroTables(cfg.DocDB.NumLevelZeroTables).
WithNumLevelZeroTablesStall(cfg.DocDB.NumLevelZeroTablesStall).
WithValueLogFileSize(cfg.DocDB.ValueLogFileSize).
WithValueLogMaxEntries(cfg.DocDB.ValueLogMaxEntries).
WithNumCompactors(cfg.DocDB.NumCompactors).
WithZSTDCompressionLevel(cfg.DocDB.ZSTDCompressionLevel)

engine, err := badgerengine.NewEngine(opts)
if err != nil {
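
The LSM-only branch matters because Badger's LSMOnlyOptions is documented to keep values inside the LSM tree by using a high value threshold, so the user's value-threshold is only applied in the non-LSM-only case — overriding it would defeat the point of that preset. A standalone sketch (not ng-monitoring code; assumes Badger v3, which options like WithVLogPercentile and WithBaseTableSize imply) of the same pattern, ending with a write to show the tuned options produce a usable store:

package main

import (
    "log"

    "github.com/dgraph-io/badger/v3"
)

func main() {
    lsmOnly := false
    var opts badger.Options
    if lsmOnly {
        opts = badger.LSMOnlyOptions("/tmp/docdb-demo")
    } else {
        opts = badger.DefaultOptions("/tmp/docdb-demo").
            WithValueThreshold(1 << 20) // only meaningful outside LSM-only mode
    }
    opts = opts.
        WithSyncWrites(false).
        WithNumCompactors(4).
        WithZSTDCompressionLevel(1)

    db, err := badger.Open(opts)
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()

    // One write to confirm the store opened with these options is usable.
    if err := db.Update(func(txn *badger.Txn) error {
        return txn.Set([]byte("k"), []byte("v"))
    }); err != nil {
        log.Fatal(err)
    }
}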
