diff --git a/cmd/e2e-test/clickhouse.go b/cmd/e2e-test/clickhouse.go index 6bbd9a833..ea6ae6fb4 100644 --- a/cmd/e2e-test/clickhouse.go +++ b/cmd/e2e-test/clickhouse.go @@ -4,13 +4,12 @@ import ( "bytes" "errors" "fmt" - "io" + "io/ioutil" "net/http" "os" "os/exec" "strconv" "strings" - "time" "github.com/msaf1980/go-stringutils" ) @@ -20,16 +19,19 @@ var ClickhouseOldImage = "yandex/clickhouse-server" var ClickhouseDefaultImage = "clickhouse/clickhouse-server" type Clickhouse struct { - Version string `toml:"version"` - Dir string `toml:"dir"` + Version string `toml:"version"` + Dir string `toml:"dir"` + TLSEnabled bool `toml:"tls"` DockerImage string `toml:"image"` TZ string `toml:"tz"` // override timezone - httpAddress string `toml:"-"` - url string `toml:"-"` - container string `toml:"-"` + httpAddress string `toml:"-"` + httpsAddress string `toml:"-"` + url string `toml:"-"` + tlsurl string `toml:"-"` + container string `toml:"-"` } func (c *Clickhouse) CheckConfig(rootDir string) error { @@ -71,6 +73,7 @@ func (c *Clickhouse) Start() (string, error) { if err != nil { return "", err } + port := strings.Split(c.httpAddress, ":")[1] c.url = "http://" + c.httpAddress c.container = ClickhouseContainerName @@ -80,7 +83,7 @@ func (c *Clickhouse) Start() (string, error) { chStart := []string{"run", "-d", "--name", c.container, "--ulimit", "nofile=262144:262144", - "-p", c.httpAddress + ":8123", + "-p", port + ":8123", // "-e", "TZ=" + tz, // workaround for TZ=":/etc/localtime" "-v", c.Dir + "/config.xml:/etc/clickhouse-server/config.xml", "-v", c.Dir + "/users.xml:/etc/clickhouse-server/users.xml", @@ -88,6 +91,20 @@ func (c *Clickhouse) Start() (string, error) { "-v", c.Dir + "/init.sql:/docker-entrypoint-initdb.d/init.sql", "--network", DockerNetwork, } + if c.TLSEnabled { + c.httpsAddress, err = getFreeTCPPort("") + if err != nil { + return "", err + } + port = strings.Split(c.httpsAddress, ":")[1] + c.tlsurl = "https://" + c.httpsAddress + chStart = append(chStart, + "-v", c.Dir+"/server.crt:/etc/clickhouse-server/server.crt", + "-v", c.Dir+"/server.key:/etc/clickhouse-server/server.key", + "-v", c.Dir+"/rootCA.crt:/etc/clickhouse-server/rootCA.crt", + "-p", port+":8443", + ) + } if c.TZ != "" { chStart = append(chStart, "-e", "TZ="+c.TZ) } @@ -137,6 +154,10 @@ func (c *Clickhouse) URL() string { return c.url } +func (c *Clickhouse) TLSURL() string { + return c.tlsurl +} + func (c *Clickhouse) Container() string { return c.container } @@ -152,14 +173,11 @@ func (c *Clickhouse) Query(sql string) (string, error) { return "", err } - httpClient := http.Client{ - Timeout: time.Minute, - } - resp, err := httpClient.Do(request) + resp, err := http.DefaultClient.Do(request) if err != nil { return "", err } - msg, err := io.ReadAll(resp.Body) + msg, err := ioutil.ReadAll(resp.Body) if err != nil { return "", err } @@ -173,7 +191,7 @@ func (c *Clickhouse) Alive() bool { if len(c.container) == 0 { return false } - req, err := http.DefaultClient.Get(c.url) + req, err := http.DefaultClient.Get(c.URL()) if err != nil { return false } diff --git a/cmd/e2e-test/e2etesting.go b/cmd/e2e-test/e2etesting.go index 09534f6b7..e51f4b796 100644 --- a/cmd/e2e-test/e2etesting.go +++ b/cmd/e2e-test/e2etesting.go @@ -12,9 +12,10 @@ import ( "strings" "time" + "go.uber.org/zap" + "github.com/lomik/graphite-clickhouse/helper/client" "github.com/lomik/graphite-clickhouse/helper/datetime" - "go.uber.org/zap" "github.com/pelletier/go-toml" ) @@ -152,6 +153,10 @@ type TestSchema struct { // input map[string][]Point 
`toml:"-"` } +func (schema *TestSchema) HasTLSSettings() bool { + return strings.Contains(schema.dir, "tls") +} + func getFreeTCPPort(name string) (string, error) { if len(name) == 0 { name = "127.0.0.1:0" @@ -202,7 +207,7 @@ func sendPlain(network, address string, metrics []InputMetric) error { func verifyGraphiteClickhouse(test *TestSchema, gch *GraphiteClickhouse, clickhouse *Clickhouse, testDir, clickhouseDir string, verbose, breakOnError bool, logger *zap.Logger) (testSuccess bool, verifyCount, verifyFailed int) { testSuccess = true - err := gch.Start(testDir, clickhouse.URL(), test.Proxy.URL()) + err := gch.Start(testDir, clickhouse.URL(), test.Proxy.URL(), clickhouse.TLSURL()) if err != nil { logger.Error("starting graphite-clickhouse", zap.String("config", test.name), diff --git a/cmd/e2e-test/graphite-clickhouse.go b/cmd/e2e-test/graphite-clickhouse.go index 60fbac6e7..9cb8a6a59 100644 --- a/cmd/e2e-test/graphite-clickhouse.go +++ b/cmd/e2e-test/graphite-clickhouse.go @@ -12,13 +12,15 @@ import ( "syscall" "text/template" - "github.com/lomik/graphite-clickhouse/helper/client" "github.com/msaf1980/go-stringutils" + + "github.com/lomik/graphite-clickhouse/helper/client" ) type GraphiteClickhouse struct { Binary string `toml:"binary"` ConfigTpl string `toml:"template"` + TestDir string `toml:"-"` TZ string `toml:"tz"` // override timezone @@ -28,7 +30,7 @@ type GraphiteClickhouse struct { cmd *exec.Cmd `toml:"-"` } -func (c *GraphiteClickhouse) Start(testDir, clickhouseURL, chProxyURL string) error { +func (c *GraphiteClickhouse) Start(testDir, chURL, chProxyURL, chTLSURL string) error { if c.cmd != nil { return errors.New("carbon-clickhouse already started") } @@ -52,6 +54,11 @@ func (c *GraphiteClickhouse) Start(testDir, clickhouseURL, chProxyURL string) er return err } + c.TestDir, err = filepath.Abs(testDir) + if err != nil { + return err + } + name := filepath.Base(c.ConfigTpl) tmpl, err := template.New(name).ParseFiles(path.Join(testDir, c.ConfigTpl)) if err != nil { @@ -59,15 +66,19 @@ func (c *GraphiteClickhouse) Start(testDir, clickhouseURL, chProxyURL string) er return err } param := struct { - CLICKHOUSE_URL string - PROXY_URL string - GCH_ADDR string - GCH_DIR string + CLICKHOUSE_URL string + CLICKHOUSE_TLS_URL string + PROXY_URL string + GCH_ADDR string + GCH_DIR string + TEST_DIR string }{ - CLICKHOUSE_URL: clickhouseURL, - PROXY_URL: chProxyURL, - GCH_ADDR: c.address, - GCH_DIR: c.storeDir, + CLICKHOUSE_URL: chURL, + CLICKHOUSE_TLS_URL: chTLSURL, + PROXY_URL: chProxyURL, + GCH_ADDR: c.address, + GCH_DIR: c.storeDir, + TEST_DIR: c.TestDir, } c.configFile = path.Join(c.storeDir, "graphite-clickhouse.conf") diff --git a/config/config.go b/config/config.go index d836c8ee9..7c13764fa 100644 --- a/config/config.go +++ b/config/config.go @@ -19,6 +19,7 @@ import ( "github.com/msaf1980/go-metrics/graphite" "github.com/msaf1980/go-timeutils/duration" toml "github.com/pelletier/go-toml" + "github.com/pkg/errors" "go.uber.org/zap" "github.com/lomik/zapwriter" @@ -139,8 +140,6 @@ func binarySearchQueryParamLe(a []QueryParam, duration time.Duration, start, end // ClickHouse config type ClickHouse struct { URL string `toml:"url" json:"url" comment:"default url, see https://clickhouse.tech/docs/en/interfaces/http. 
Can be overwritten with query-params"` - TLSParams config.TLS `toml:"tls" json:"tls" comment:"mTLS HTTPS configuration for connecting to clickhouse server" commented:"true"` - TLSConfig *tls.Config `toml:"-" json:"-"` DataTimeout time.Duration `toml:"data-timeout" json:"data-timeout" comment:"default total timeout to fetch data, can be overwritten with query-params"` RenderMaxQueries int `toml:"render-max-queries" json:"render-max-queries" comment:"Max queries to render queiries"` RenderMaxConcurrent int `toml:"render-max-concurrent" json:"render-max-concurrent" comment:"Maximum concurrent queries to render queiries"` @@ -179,6 +178,9 @@ type ClickHouse struct { MaxDataPoints int `toml:"max-data-points" json:"max-data-points" comment:"max points per metric when internal-aggregation=true"` // InternalAggregation controls if ClickHouse itself or graphite-clickhouse aggregates points to proper retention InternalAggregation bool `toml:"internal-aggregation" json:"internal-aggregation" comment:"ClickHouse-side aggregation, see doc/aggregation.md"` + + TLSParams config.TLS `toml:"tls" json:"tls" comment:"mTLS HTTPS configuration for connecting to clickhouse server" commented:"true"` + TLSConfig *tls.Config `toml:"-" json:"-"` } func clickhouseURLValidate(chURL string) (*url.URL, error) { @@ -427,112 +429,69 @@ func PrintDefaultConfig() error { } // ReadConfig reads the content of the file with given name and process it to the *Config -func ReadConfig(filename string, noLog bool) (*Config, error) { +func ReadConfig(filename string) (*Config, []zap.Field, error) { var err error var body []byte if filename != "" { body, err = os.ReadFile(filename) if err != nil { - return nil, err - } - } - - return Unmarshal(body, noLog) -} - -// Parse should be called to correctly set parameters in the config. 
-func (cfg *Config) Parse() error { - err := zapwriter.ApplyConfig(cfg.Logging) - if err != nil { - return err - } - - if cfg.Common.FindCache, err = CreateCache("index", &cfg.Common.FindCacheConfig); err == nil { - if cfg.Common.FindCacheConfig.Type != "null" { - logger := zapwriter.Logger("config") - logger.Info("enable find cache", zap.String("type", cfg.Common.FindCacheConfig.Type)) + return nil, nil, err } - } else { - return err } - chURL, err := clickhouseURLValidate(cfg.ClickHouse.URL) - if err != nil { - return err - } - if !reflect.DeepEqual(cfg.ClickHouse.TLSParams, config.TLS{}) { - tlsConfig, warns, err := config.ParseClientTLSConfig(&cfg.ClickHouse.TLSParams) - if err != nil { - return err - } - if chURL.Scheme == "https" { - cfg.ClickHouse.TLSConfig = tlsConfig - } else { - warns = append(warns, "TLS configurations is ignored because scheme is not HTTPS") - } - if len(warns) > 0 { - logger := zapwriter.Logger("clickhouse tlsconfig") - logger.Warn( - "insecure options detected while parsing HTTP Client TLS config for ClickHouse", - zap.Strings("warnings", warns), - ) - } - } - return nil + return Unmarshal(body) } // Unmarshal process the body to *Config -func Unmarshal(body []byte, noLog bool) (*Config, error) { - var err error +func Unmarshal(body []byte) (cfg *Config, warns []zap.Field, err error) { deprecations := make(map[string]error) - cfg := New() + cfg = New() if len(body) != 0 { // TODO: remove in v0.14 if bytes.Index(body, []byte("\n[logging]\n")) != -1 || bytes.Index(body, []byte("[logging]")) == 0 { - deprecations["logging"] = fmt.Errorf( - "single [logging] value became multivalue [[logging]]; please, adjust your config", - ) + deprecations["logging"] = fmt.Errorf("single [logging] value became multivalue [[logging]]; please, adjust your config") body = bytes.ReplaceAll(body, []byte("\n[logging]\n"), []byte("\n[[logging]]\n")) if bytes.Index(body, []byte("[logging]")) == 0 { body = bytes.Replace(body, []byte("[logging]"), []byte("[[logging]]"), 1) } } if err = toml.Unmarshal(body, cfg); err != nil { - return nil, err + return nil, nil, err } } - // Check if debug directory exists or could be created - if cfg.Debug.Directory != "" { - info, err := os.Stat(cfg.Debug.Directory) - if os.IsNotExist(err) { - err := os.MkdirAll(cfg.Debug.Directory, os.ModeDir|cfg.Debug.DirectoryPerm) - if err != nil { - return nil, err - } - } else if !info.IsDir() { - return nil, fmt.Errorf("the file for external data debug dumps exists and is not a directory: %v", cfg.Debug.Directory) - } - } - if len(cfg.Logging) == 0 { - cfg.Logging = append(cfg.Logging, newLoggingConfig()) - } - if err = zapwriter.CheckConfig(cfg.Logging, nil); err != nil { - return nil, err + if cfg.Logging == nil { + cfg.Logging = make([]zapwriter.Config, 0) } if cfg.ClickHouse.RenderMaxConcurrent > cfg.ClickHouse.RenderMaxQueries && cfg.ClickHouse.RenderMaxQueries > 0 { cfg.ClickHouse.RenderMaxConcurrent = 0 } + chURL, err := clickhouseURLValidate(cfg.ClickHouse.URL) + if err != nil { + return nil, nil, err + } + + if !reflect.DeepEqual(cfg.ClickHouse.TLSParams, config.TLS{}) { + tlsConfig, warnings, err := config.ParseClientTLSConfig(&cfg.ClickHouse.TLSParams) + if err != nil { + return nil, nil, err + } + if chURL.Scheme == "https" { + cfg.ClickHouse.TLSConfig = tlsConfig + } else { + warnings = append(warnings, "TLS configurations is ignored because scheme is not HTTPS") + } + warns = append(warns, zap.Strings("tls-config", warnings)) + } for i := range cfg.ClickHouse.QueryParams { - if 
cfg.ClickHouse.QueryParams[i].MaxConcurrent > cfg.ClickHouse.QueryParams[i].MaxQueries && - cfg.ClickHouse.QueryParams[i].MaxQueries > 0 { + if cfg.ClickHouse.QueryParams[i].MaxConcurrent > cfg.ClickHouse.QueryParams[i].MaxQueries && cfg.ClickHouse.QueryParams[i].MaxQueries > 0 { cfg.ClickHouse.QueryParams[i].MaxConcurrent = 0 } if cfg.ClickHouse.QueryParams[i].Duration == 0 { - return nil, fmt.Errorf("query duration param not set for: %+v", cfg.ClickHouse.QueryParams[i]) + return nil, nil, fmt.Errorf("query duration param not set for: %+v", cfg.ClickHouse.QueryParams[i]) } if cfg.ClickHouse.QueryParams[i].DataTimeout == 0 { cfg.ClickHouse.QueryParams[i].DataTimeout = cfg.ClickHouse.DataTimeout @@ -542,7 +501,7 @@ func Unmarshal(body []byte, noLog bool) (*Config, error) { cfg.ClickHouse.QueryParams[i].URL = cfg.ClickHouse.URL } if _, err = clickhouseURLValidate(cfg.ClickHouse.QueryParams[i].URL); err != nil { - return nil, err + return nil, nil, err } } @@ -558,13 +517,42 @@ func Unmarshal(body []byte, noLog bool) (*Config, error) { return cfg.ClickHouse.QueryParams[i].Duration < cfg.ClickHouse.QueryParams[j].Duration }) + if len(cfg.Logging) == 0 { + cfg.Logging = append(cfg.Logging, newLoggingConfig()) + } + + if err = zapwriter.CheckConfig(cfg.Logging, nil); err != nil { + return nil, nil, err + } + + // Check if debug directory exists or could be created + if cfg.Debug.Directory != "" { + info, err := os.Stat(cfg.Debug.Directory) + if os.IsNotExist(err) { + err := os.MkdirAll(cfg.Debug.Directory, os.ModeDir|cfg.Debug.DirectoryPerm) + if err != nil { + return nil, nil, err + } + } else if !info.IsDir() { + return nil, nil, fmt.Errorf("the file for external data debug dumps exists and is not a directory: %v", cfg.Debug.Directory) + } + } + if _, ok := IndexReverse[cfg.ClickHouse.IndexReverse]; !ok { - return nil, fmt.Errorf("%s is not valid value for index-reverse", cfg.ClickHouse.IndexReverse) + return nil, nil, fmt.Errorf("%s is not valid value for index-reverse", cfg.ClickHouse.IndexReverse) } err = cfg.ClickHouse.IndexReverses.Compile() if err != nil { - return nil, err + return nil, nil, err + } + + if cfg.Common.FindCache, err = CreateCache("index", &cfg.Common.FindCacheConfig); err == nil { + if cfg.Common.FindCacheConfig.Type != "null" { + warns = append(warns, zap.Any("enable find cache", zap.String("type", cfg.Common.FindCacheConfig.Type))) + } + } else { + return nil, nil, err } l := len(cfg.Common.TargetBlacklist) @@ -573,7 +561,7 @@ func Unmarshal(body []byte, noLog bool) (*Config, error) { for i := 0; i < l; i++ { r, err := regexp.Compile(cfg.Common.TargetBlacklist[i]) if err != nil { - return nil, err + return nil, nil, err } cfg.Common.Blacklist[i] = r } @@ -581,7 +569,7 @@ func Unmarshal(body []byte, noLog bool) (*Config, error) { err = cfg.ProcessDataTables() if err != nil { - return nil, err + return nil, nil, err } // compute prometheus external url @@ -589,30 +577,27 @@ func Unmarshal(body []byte, noLog bool) (*Config, error) { if rawURL == "" { hostname, err := os.Hostname() if err != nil { - return nil, err + return nil, nil, err } _, port, err := net.SplitHostPort(cfg.Common.Listen) if err != nil { - return nil, err + return nil, nil, err } rawURL = fmt.Sprintf("http://%s:%s/", hostname, port) } cfg.Prometheus.ExternalURL, err = url.Parse(rawURL) if err != nil { - return nil, err + return nil, nil, err } cfg.Prometheus.ExternalURL.Path = strings.TrimRight(cfg.Prometheus.ExternalURL.Path, "/") checkDeprecations(cfg, deprecations) if len(deprecations) != 0 { + 
deprecationList := make([]error, len(deprecations)) for name, message := range deprecations { - if noLog { - fmt.Fprintf(os.Stderr, "config deprecation %s: %s\n", name, message) - } else { - logger := zapwriter.Logger("config deprecation") - logger.Error(name, zap.Error(message)) - } + deprecationList = append(deprecationList, errors.Wrap(message, name)) } + warns = append(warns, zap.Errors("config deprecations", deprecationList)) } switch strings.ToLower(cfg.ClickHouse.DateFormat) { @@ -622,7 +607,7 @@ func Unmarshal(body []byte, noLog bool) (*Config, error) { date.SetBoth() default: if cfg.ClickHouse.DateFormat != "" && cfg.ClickHouse.DateFormat != "default" { - return nil, fmt.Errorf("unsupported date-format: %s", cfg.ClickHouse.DateFormat) + return nil, nil, fmt.Errorf("unsupported date-format: %s", cfg.ClickHouse.DateFormat) } } @@ -636,37 +621,19 @@ func Unmarshal(body []byte, noLog bool) (*Config, error) { metricsEnabled := cfg.setupGraphiteMetrics() - cfg.ClickHouse.FindLimiter = limiter.NewWLimiter( - cfg.ClickHouse.FindMaxQueries, - cfg.ClickHouse.FindMaxConcurrent, - metricsEnabled, - "find", - "all", - ) + cfg.ClickHouse.FindLimiter = limiter.NewWLimiter(cfg.ClickHouse.FindMaxQueries, cfg.ClickHouse.FindMaxConcurrent, metricsEnabled, "find", "all") - cfg.ClickHouse.TagsLimiter = limiter.NewWLimiter( - cfg.ClickHouse.TagsMaxQueries, - cfg.ClickHouse.TagsMaxConcurrent, - metricsEnabled, - "tags", - "all", - ) + cfg.ClickHouse.TagsLimiter = limiter.NewWLimiter(cfg.ClickHouse.TagsMaxQueries, cfg.ClickHouse.TagsMaxConcurrent, metricsEnabled, "tags", "all") for i := range cfg.ClickHouse.QueryParams { - cfg.ClickHouse.QueryParams[i].Limiter = limiter.NewWLimiter( - cfg.ClickHouse.QueryParams[i].MaxQueries, - cfg.ClickHouse.QueryParams[i].MaxConcurrent, - metricsEnabled, - "render", - duration.String(cfg.ClickHouse.QueryParams[i].Duration), - ) + cfg.ClickHouse.QueryParams[i].Limiter = limiter.NewWLimiter(cfg.ClickHouse.QueryParams[i].MaxQueries, cfg.ClickHouse.QueryParams[i].MaxConcurrent, metricsEnabled, "render", duration.String(cfg.ClickHouse.QueryParams[i].Duration)) } for u, q := range cfg.ClickHouse.UserLimits { q.Limiter = limiter.NewWLimiter(q.MaxQueries, q.MaxConcurrent, metricsEnabled, u, "all") cfg.ClickHouse.UserLimits[u] = q } - return cfg, nil + return cfg, warns, nil } // ProcessDataTables checks if legacy `data`-table config is used, compiles regexps for `target-match-any` and `target-match-all` diff --git a/config/config_test.go b/config/config_test.go index 9d5f900d1..128a14eaa 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -316,7 +316,7 @@ sample-initial = 10 sample-thereafter = 12 `, ) - config, err := Unmarshal(body, false) + config, _, err := Unmarshal(body) expected := New() require.NoError(t, err) @@ -546,7 +546,7 @@ sample-initial = 10 sample-thereafter = 12 `, ) - config, err := Unmarshal(body, false) + config, _, err := Unmarshal(body) expected := New() require.NoError(t, err) assert.NotNil(t, metrics.Graphite) @@ -745,7 +745,7 @@ func TestGetQueryParamBroken(t *testing.T) { }, ]`) - _, err := Unmarshal(config, false) + _, _, err := Unmarshal(config) assert.Error(t, err) config = @@ -760,7 +760,7 @@ func TestGetQueryParamBroken(t *testing.T) { }, ]`) - _, err = Unmarshal(config, false) + _, _, err = Unmarshal(config) assert.Error(t, err) } @@ -923,7 +923,7 @@ func TestGetQueryParam(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - if config, err := Unmarshal(tt.config, false); err == nil { + if config, _, 
err := Unmarshal(tt.config); err == nil { for i := range config.ClickHouse.QueryParams { config.ClickHouse.QueryParams[i].Limiter = nil } diff --git a/doc/config.md b/doc/config.md index 7bf7d8f76..46afcef12 100644 --- a/doc/config.md +++ b/doc/config.md @@ -237,17 +237,6 @@ Only one tag used as filter for index field Tag1, see graphite_tagged table [str [clickhouse] # default url, see https://clickhouse.tech/docs/en/interfaces/http. Can be overwritten with query-params url = "http://localhost:8123?cancel_http_readonly_queries_on_client_close=1" - - # mTLS HTTPS configuration for connecting to clickhouse server - # [clickhouse.tls] - # ca-cert = [] - # client-auth = "" - # server-name = "" - # min-version = "" - # max-version = "" - # insecure-skip-verify = false - # curves = [] - # cipher-suites = [] # default total timeout to fetch data, can be overwritten with query-params data-timeout = "1m0s" # Max queries to render queiries @@ -325,6 +314,17 @@ Only one tag used as filter for index field Tag1, see graphite_tagged table [str # ClickHouse-side aggregation, see doc/aggregation.md internal-aggregation = true + # mTLS HTTPS configuration for connecting to clickhouse server + # [clickhouse.tls] + # ca-cert = [] + # client-auth = "" + # server-name = "" + # min-version = "" + # max-version = "" + # insecure-skip-verify = false + # curves = [] + # cipher-suites = [] + [[data-table]] # data table from carbon-clickhouse table = "graphite_data" diff --git a/graphite-clickhouse.go b/graphite-clickhouse.go index f97e63bd8..dd78cc4ea 100644 --- a/graphite-clickhouse.go +++ b/graphite-clickhouse.go @@ -90,7 +90,11 @@ func main() { printDefaultConfig := flag.Bool("config-print-default", false, "Print default config") checkConfig := flag.Bool("check-config", false, "Check config and exit") buildTags := flag.Bool("tags", false, "Build tags table") - pprof := flag.String("pprof", "", "Additional pprof listen addr for non-server modes (tagger, etc..), overrides pprof-listen from common ") + pprof := flag.String( + "pprof", + "", + "Additional pprof listen addr for non-server modes (tagger, etc..), overrides pprof-listen from common ", + ) printVersion := flag.Bool("version", false, "Print version") verbose := flag.Bool("verbose", false, "Verbose (print config on startup)") @@ -109,7 +113,7 @@ func main() { return } - cfg, err := config.ReadConfig(*configFile, *checkConfig) + cfg, warns, err := config.ReadConfig(*configFile) if err != nil { log.Fatal(err) } @@ -119,11 +123,15 @@ func main() { return } - if err = cfg.Parse(); err != nil { + if err := zapwriter.ApplyConfig(cfg.Logging); err != nil { log.Fatal(err) } logger := zapwriter.Logger("start") + + if len(warns) > 0 { + zapwriter.Logger("config").Warn("warnings", warns...) 
+ } if *verbose { logger.Info("starting graphite-clickhouse", zap.String("build_version", BuildVersion), @@ -179,7 +187,6 @@ func main() { mux.Handle("/tags/autoComplete/values", app.Handler(autocomplete.NewValues(cfg))) mux.Handle("/alive", app.Handler(healthcheck.NewHandler(cfg))) mux.HandleFunc("/debug/config", func(w http.ResponseWriter, r *http.Request) { - status := http.StatusOK start := time.Now() diff --git a/tests/clickhouse/rollup_tls/config.xml b/tests/clickhouse/rollup_tls/config.xml new file mode 100644 index 000000000..d8e6482db --- /dev/null +++ b/tests/clickhouse/rollup_tls/config.xml @@ -0,0 +1,91 @@ + + + + debug + /var/log/clickhouse-server/clickhouse-server.log + /var/log/clickhouse-server/clickhouse-server.err.log + 2000M + 20 + + + 8123 + 9000 + 8443 + 9440 + + + none + false + /etc/clickhouse-server/rootCA.crt + /etc/clickhouse-server/server.crt + /etc/clickhouse-server/server.key + true + + true + + + /etc/clickhouse-server/rootCA.crt + + + + + 9009 + + test-clickhouse-s1 + + + + + + 0.0.0.0 + + + 1073741824 + + + 1073741824 + + + /var/lib/clickhouse/ + + + /var/lib/clickhouse/tmp/ + + + users.xml + + + default + + + default + + + + + system + query_log
+        <flush_interval_milliseconds>7500</flush_interval_milliseconds>
+    </query_log>
+
+    <part_log>
+        <database>system</database>
+        <table>part_log</table>
+        <flush_interval_milliseconds>7500</flush_interval_milliseconds>
+    </part_log>
+
+</yandex>
diff --git a/tests/clickhouse/rollup_tls/init.sql b/tests/clickhouse/rollup_tls/init.sql new file mode 100644 index 000000000..75550225f --- /dev/null +++ b/tests/clickhouse/rollup_tls/init.sql @@ -0,0 +1,38 @@ +CREATE TABLE IF NOT EXISTS default.graphite_reverse ( + Path String, + Value Float64, + Time UInt32, + Date Date, + Timestamp UInt32 +) ENGINE = GraphiteMergeTree('graphite_rollup') +PARTITION BY toYYYYMM(Date) +ORDER BY (Path, Time); + +CREATE TABLE IF NOT EXISTS default.graphite ( + Path String, + Value Float64, + Time UInt32, + Date Date, + Timestamp UInt32 +) ENGINE = GraphiteMergeTree('graphite_rollup') +PARTITION BY toYYYYMM(Date) +ORDER BY (Path, Time); + +CREATE TABLE IF NOT EXISTS default.graphite_index ( + Date Date, + Level UInt32, + Path String, + Version UInt32 +) ENGINE = ReplacingMergeTree(Version) +PARTITION BY toYYYYMM(Date) +ORDER BY (Level, Path, Date); + +CREATE TABLE IF NOT EXISTS default.graphite_tags ( + Date Date, + Tag1 String, + Path String, + Tags Array(String), + Version UInt32 +) ENGINE = ReplacingMergeTree(Version) +PARTITION BY toYYYYMM(Date) +ORDER BY (Tag1, Path, Date); diff --git a/tests/clickhouse/rollup_tls/rollup.xml b/tests/clickhouse/rollup_tls/rollup.xml new file mode 100644 index 000000000..f9bdfda20 --- /dev/null +++ b/tests/clickhouse/rollup_tls/rollup.xml @@ -0,0 +1,36 @@ + + + + + avg + + 0 + 1 + + + + \.sum$ + sum + + 0 + 1 + + + + \.min$ + min + + 0 + 1 + + + + \.max$ + max + + 0 + 1 + + + + diff --git a/tests/clickhouse/rollup_tls/rootCA.crt b/tests/clickhouse/rollup_tls/rootCA.crt new file mode 100644 index 000000000..9fc1d55b3 --- /dev/null +++ b/tests/clickhouse/rollup_tls/rootCA.crt @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDGzCCAgOgAwIBAgIUQceciMiYCCUXeYNKJvK7inIjaBgwDQYJKoZIhvcNAQEL +BQAwHTEbMBkGA1UEAwwSbG9yZHZpZGV4LmxvY2FsIENBMB4XDTIzMDcyMDEwNTkz +N1oXDTI2MDcxOTEwNTkzN1owHTEbMBkGA1UEAwwSbG9yZHZpZGV4LmxvY2FsIENB +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArXRLuhtXXuHYGEX8gD2/ +G31ATjvYM8miwM28ps0VFFBeWsvTlA9aNf0Vrd8rSD6eDZHouyvpVbrjd0KofGhm +rtgvMEuwTG936C8a8R38ac7Wnl+i59R6Wo7Nkv1LrYuJRNJUhfwzO5ii7dPMG6TL +2SpzjKFcT65Ta9EKQxD6NfeB3/+ZkfmyFfynxRp7yMFeTY/DhwOeYqmDQkQcBuDk +jFHvD+Xh1O8UxHSF86bQ00qj+yYjpEEJYJ/nyDlwSRPFaQJ+sWxleMcW7jrl1AhJ +tRw0iOwer+hRgEbLBAKupbar7t92fH2u52GuUMcHloAaJ8x1IqAI8AuZDfaF0s7R +ZQIDAQABo1MwUTAdBgNVHQ4EFgQUXeOxam6c9AjeKuEv/r6e/c2KeDcwHwYDVR0j +BBgwFoAUXeOxam6c9AjeKuEv/r6e/c2KeDcwDwYDVR0TAQH/BAUwAwEB/zANBgkq +hkiG9w0BAQsFAAOCAQEAMfQaOjski0fBk5i/epqHDbH1l4YzmgMI6xsn4IG2gmSI +9568QVgMehW1qkcfhTvoonGuvX6AgqrIdJHUP35FAftXe0sWXZ/duPRN6/OzI7DT +7Tg6KjzV9zsNCQNy8e2TgmVU2MH3Unq5GTCTAkeOLrLw3cRivt6S4i0/O0nmjBZ7 +fhkkiKTesfYCO56z8y4v6aoJOSo60QIO66OkwIMPvjexe4Z5/ovXO0cw/06uGfw/ +dNm3nq/MoSeGBX24XvrH29aMeNK255TTvXjmgA/eX3lGejjpD9AppcRz9wSwY/er +B0g78FWIgNuvrBSRYCe3tLZ8MGtGwrAb8Z/iWp3uNA== +-----END CERTIFICATE----- diff --git a/tests/clickhouse/rollup_tls/server.crt b/tests/clickhouse/rollup_tls/server.crt new file mode 100644 index 000000000..a1d5fc124 --- /dev/null +++ b/tests/clickhouse/rollup_tls/server.crt @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDZDCCAkygAwIBAgIUD6xKwCb87PU0zSOCN6NypCUuK74wDQYJKoZIhvcNAQEL +BQAwHTEbMBkGA1UEAwwSbG9yZHZpZGV4LmxvY2FsIENBMB4XDTIzMDcyMTIwMDk1 +N1oXDTI0MDcyMDIwMDk1N1owVjELMAkGA1UEBhMCUlUxEjAQBgNVBAgMCVRhdGFy +c3RhbjEOMAwGA1UEBwwFS2F6YW4xDzANBgNVBAoMBktvbnR1cjESMBAGA1UEAwwJ +bG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2qyl6NvQ +s6u0At1RwR8Db3cDjb2k6Bl4vMC/CyBJ/Vfa2GbhTQTqW0Myc+ecAu/u6RqalgYb +aiC2Ja6QKZwVwFG4y84lnmyrJq2dTi25fTGbTqlp+yE1t7S/pf5JLVOcX3QZbiRF 
+M85WoRhsLE3buxdn6HMi4R0+cWl3HXcDOaoKbgpd26QiIyz4IxkL2bNUv6fi1DP1 +9qdy1AakyERCQ5yacVrIm2naPSFCiwhn0G9teNy4dXbbXfNkPEv3TWrKQo5sUhT2 +WnPOhV9GYLxpvC+jcUQokfPCJuDbam6whcOCRJ+AEzd2lj7WpzUW7ogjEfJVkB8f +FaSeYBehXRos5QIDAQABo2MwYTAfBgNVHREEGDAWgglsb2NhbGhvc3SCCTEyNy4w +LjAuMTAdBgNVHQ4EFgQUjnT4gvkNbqxoSQyce3WxUOB9az4wHwYDVR0jBBgwFoAU +XeOxam6c9AjeKuEv/r6e/c2KeDcwDQYJKoZIhvcNAQELBQADggEBACEmTUxe/A/B +peDEsLpVeMPw1oIy0mxcfOSr3B5ckn3sDqSJkdFh7bjrFBinDS5VCx7sbc2cnI5b +NUb/4U+vwGmsvvAjMWTPKEYOtl+ytpOZ+q1AdDSfJxTtMNWISrp6eJSGAO9JSgBe +ZHEI6XuEfnY+rJiDXyG59cKQ2FfRFPiqXPXwlBcq7kUvppliZV4/KWpz8WmgEnwe +80MCgdTL+BX70TeVSZI+u4s2OwuWk7/NoFUnLT5fMtUjXOI+NmLDK9zocR3/YZkd +nxFT8Qaq4QN/Vh16Mofnm41VODFS8yIpTXYsaqgusDsYY17kzZKXuN/9km7oiIPl +C/e/9s06iXk= +-----END CERTIFICATE----- diff --git a/tests/clickhouse/rollup_tls/server.key b/tests/clickhouse/rollup_tls/server.key new file mode 100644 index 000000000..835eebad6 --- /dev/null +++ b/tests/clickhouse/rollup_tls/server.key @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDarKXo29Czq7QC +3VHBHwNvdwONvaToGXi8wL8LIEn9V9rYZuFNBOpbQzJz55wC7+7pGpqWBhtqILYl +rpApnBXAUbjLziWebKsmrZ1OLbl9MZtOqWn7ITW3tL+l/kktU5xfdBluJEUzzlah +GGwsTdu7F2focyLhHT5xaXcddwM5qgpuCl3bpCIjLPgjGQvZs1S/p+LUM/X2p3LU +BqTIREJDnJpxWsibado9IUKLCGfQb2143Lh1dttd82Q8S/dNaspCjmxSFPZac86F +X0ZgvGm8L6NxRCiR88Im4NtqbrCFw4JEn4ATN3aWPtanNRbuiCMR8lWQHx8VpJ5g +F6FdGizlAgMBAAECggEAMQHTVun4jmOzPpcaLOVF8FOkXL49OHUI7Yhm6FfurRPk +7G22HXw2B4j6RCaKfNAH437+WRP6TeER4UULcv/TkhvehfJn1Ob2vn7XS7+MnAWj +kFAAe/MX5llgQBPKk5ly6KOM1XXUeoVKPeVKzO0QCxB2TY6MKRZUObeQpe5XrzLF +uSJAcpP4Dq8zPEbtUqcJrzMiJU0h6Zley7+FC4JikX3w2FjmjKHrPA+8SG8RwFdB +kzYm1oppp14+qcc+hX4rKJvdf5UUNW1mMT122vcUY+74rkBwnA8SbCwCyq8aifZv +dpPjMypYwE+tIhC9Y4f7kADr4ggJwqmv8mE+REnRSwKBgQD+gr2bQlQfMvVVcV3k +rXEEotR9P1bfk0ErZU7k8aR5xnEH/lJeHRTNE9ETtLjhVG1kbJY1LDvK+qbSPP6V +ArF7K9PNkRTUp8oie3ybnHHD7dKS6O9IBLUgnKvjTvWY1Ukei7y/SvoHSV0/IeUd +CmbmjK0RoZB/2cwqZRPG2XtnlwKBgQDb9Dlt2XkZjz4B7Bv7goe623Dr06z8eZC2 +s8KnwjTVJ2FxEPf2zX17bCyldhnNsgOWiySVu20i62j69YRepRpLWp4aZtf1qVPG +pIvAc13A1KzvQVRHOwroj99spe/j3FlsIcEyJQSWxqoWcu9WZR8HqAInaKmxsDvc +p2jmc/d+4wKBgExBd/Xw5auDZSZ4lR1TKi2ta+Uq+Sh7wmbyL7VBAXrLDkrK1HsG +U5kuetJgr5Qz66LPNiIwYqnqd9k+FTkoWW0CMK/C7G7a5yF1xlwJf7yooRMLX8ZA +QtTGnGm62rM0FHhmfeMC3+8W+C1WRoNw/mvNcn153wWviBCbwFUwDF15AoGAVTai +4Sn1vq8+CKSemqwRRZUqhFK3nADEzeOKsE+PBF2DZLh8OIOX768TBO9iQ8yJk1NI +0zGPsI2fWEqyA7DXJQZwZi4d0LDPIhAxjFelpMVAUlju+8GmEj9bAc6vx4lgo4HY +RUMOKhPBZX0VuFTo1qCCZR4pUAvExVGeVFntMp0CgYBIAqZyJnACriFAG5xkpr3j +eKigirUPIyCtLkio3YUyjWlRQgIlBvTnzB+cw0NRn4odiNCiMOygG6znz2rmRiWp +jUmnrLCPZtgvh/aIEqE0CL2/4641AjcTOmwKUyYa74k70x7EtDF+MQu0LE0P/3je +n5YaFx7FJvcFADBq9LgG/w== +-----END PRIVATE KEY----- diff --git a/tests/clickhouse/rollup_tls/users.xml b/tests/clickhouse/rollup_tls/users.xml new file mode 100644 index 000000000..d73609e9f --- /dev/null +++ b/tests/clickhouse/rollup_tls/users.xml @@ -0,0 +1,129 @@ + + + + + + + 1 + 0 + + + 40000000000 + + + 1 + + + + + + 1048576000 + 1000000 + 1000000 + 720000 + + 6000000000000 + + 40 + + 16 + + 25571520 + 1073741824 + + random + 1000 + + + + + 1 + + + + + + + + + + + + + ::/0 + + + + default + + + default + + + + + + + ::1 + 127.0.0.1 + + readonly + default + + + + + + + + + + + 3600 + + + 0 + 0 + 0 + 0 + 0 + + + + diff --git a/tests/tls/ca.crt b/tests/tls/ca.crt new file mode 100644 index 000000000..9fc1d55b3 --- /dev/null +++ b/tests/tls/ca.crt @@ -0,0 +1,19 @@ +-----BEGIN CERTIFICATE----- +MIIDGzCCAgOgAwIBAgIUQceciMiYCCUXeYNKJvK7inIjaBgwDQYJKoZIhvcNAQEL 
+BQAwHTEbMBkGA1UEAwwSbG9yZHZpZGV4LmxvY2FsIENBMB4XDTIzMDcyMDEwNTkz +N1oXDTI2MDcxOTEwNTkzN1owHTEbMBkGA1UEAwwSbG9yZHZpZGV4LmxvY2FsIENB +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArXRLuhtXXuHYGEX8gD2/ +G31ATjvYM8miwM28ps0VFFBeWsvTlA9aNf0Vrd8rSD6eDZHouyvpVbrjd0KofGhm +rtgvMEuwTG936C8a8R38ac7Wnl+i59R6Wo7Nkv1LrYuJRNJUhfwzO5ii7dPMG6TL +2SpzjKFcT65Ta9EKQxD6NfeB3/+ZkfmyFfynxRp7yMFeTY/DhwOeYqmDQkQcBuDk +jFHvD+Xh1O8UxHSF86bQ00qj+yYjpEEJYJ/nyDlwSRPFaQJ+sWxleMcW7jrl1AhJ +tRw0iOwer+hRgEbLBAKupbar7t92fH2u52GuUMcHloAaJ8x1IqAI8AuZDfaF0s7R +ZQIDAQABo1MwUTAdBgNVHQ4EFgQUXeOxam6c9AjeKuEv/r6e/c2KeDcwHwYDVR0j +BBgwFoAUXeOxam6c9AjeKuEv/r6e/c2KeDcwDwYDVR0TAQH/BAUwAwEB/zANBgkq +hkiG9w0BAQsFAAOCAQEAMfQaOjski0fBk5i/epqHDbH1l4YzmgMI6xsn4IG2gmSI +9568QVgMehW1qkcfhTvoonGuvX6AgqrIdJHUP35FAftXe0sWXZ/duPRN6/OzI7DT +7Tg6KjzV9zsNCQNy8e2TgmVU2MH3Unq5GTCTAkeOLrLw3cRivt6S4i0/O0nmjBZ7 +fhkkiKTesfYCO56z8y4v6aoJOSo60QIO66OkwIMPvjexe4Z5/ovXO0cw/06uGfw/ +dNm3nq/MoSeGBX24XvrH29aMeNK255TTvXjmgA/eX3lGejjpD9AppcRz9wSwY/er +B0g78FWIgNuvrBSRYCe3tLZ8MGtGwrAb8Z/iWp3uNA== +-----END CERTIFICATE----- diff --git a/tests/tls/carbon-clickhouse.conf.tpl b/tests/tls/carbon-clickhouse.conf.tpl new file mode 100644 index 000000000..41d7ce56d --- /dev/null +++ b/tests/tls/carbon-clickhouse.conf.tpl @@ -0,0 +1,45 @@ +[common] + +[data] +path = "/etc/carbon-clickhouse/data" +chunk-interval = "1s" +chunk-auto-interval = "" + +[upload.graphite_index] +type = "index" +table = "graphite_index" +url = "{{ .CLICKHOUSE_URL }}/" +timeout = "2m30s" +cache-ttl = "1h" + +[upload.graphite_tags] +type = "tagged" +table = "graphite_tags" +threads = 3 +url = "{{ .CLICKHOUSE_URL }}/" +timeout = "2m30s" +cache-ttl = "1h" + +[upload.graphite_reverse] +type = "points-reverse" +table = "graphite_reverse" +url = "{{ .CLICKHOUSE_URL }}/" +timeout = "2m30s" +zero-timestamp = false + +[upload.graphite] +type = "points" +table = "graphite" +url = "{{ .CLICKHOUSE_URL }}/" +timeout = "2m30s" +zero-timestamp = false + +[tcp] +listen = ":2003" +enabled = true +drop-future = "0s" +drop-past = "0s" + +[logging] +file = "/etc/carbon-clickhouse/carbon-clickhouse.log" +level = "debug" diff --git a/tests/tls/client.crt b/tests/tls/client.crt new file mode 100644 index 000000000..029f0a1a0 --- /dev/null +++ b/tests/tls/client.crt @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDajCCAlKgAwIBAgIUD6xKwCb87PU0zSOCN6NypCUuK8AwDQYJKoZIhvcNAQEL +BQAwHTEbMBkGA1UEAwwSbG9yZHZpZGV4LmxvY2FsIENBMB4XDTIzMDgwMzA0NTA1 +MloXDTI0MDgwMjA0NTA1MlowVjELMAkGA1UEBhMCUlUxEjAQBgNVBAgMCVRhdGFy +c3RhbjEOMAwGA1UEBwwFS2F6YW4xDzANBgNVBAoMBktvbnR1cjESMBAGA1UEAwwJ +bG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvYtLjsDc +B2e/vecCsXGrTN7qZevmrwcfeuXeMainFnGgQhddg7heRXBWqjcnklHmUTtgBBgD +bOIzDUpsSq6cEd6aS8Qc1aRZ3Q2nwN28ZrlEgQo5B8K0JI/RfeXy3IiAYi0T2lqq +09z/sZNx4YViWyDmltE8DJAEHaFzjPsfTsJsTpxKvPm/sXFoOqDMiywkicHaj2/4 +W+seRCx9Mdgc47jjrDgzvnd32vIY9rhxoEKhE9FZTDEfJL+nu4s/bn1soLuVxxy/ +6gQ6uOB2Mh/jxped0Th6tHqq+wPRjZrrCPH+dTPzg1SIf0uhpyhLB+xXVyqwvi4m +0MBlC7N/tJJPBwIDAQABo2kwZzAlBgNVHREEHjAcgglsb2NhbGhvc3SCCTEyNy4w +LjAuMYcEfwAAATAdBgNVHQ4EFgQU3MPvMfbd9gfsWbigWjWvOOqXeKkwHwYDVR0j +BBgwFoAUXeOxam6c9AjeKuEv/r6e/c2KeDcwDQYJKoZIhvcNAQELBQADggEBADmB +Z3NszgGXyhzO0VaJOlGRRRDnIZkhoBXYlZ2ezFFbqQlh9iycKyV9z6LG999SGQTn ++PvXeWi6fHVnAAWBlk+po7qedrqSzbthx7LXL4Fg7/dexR8ZOWJs4ZNIeDSo8YWF +UO062wMp4eqTzyy14WIh+hP8SlVQcdr7xCl4e4yezFsxCIR6kV6u3CN3AXdFv64a +ql1/HmBd+Ohs+m5yQnyBcXdgmWUUGcRC6jE2WPMn2TSztnAEQ3CTWNYNi4qZntRR +zBWhcwASsu33pvHjP5ps0VR9cwCChZ7M0DQVqFnr0AKXa/Z8MlQ2XKGJBGmCqLcY +pBgR/BZrdO1vXFR0Q/I= +-----END CERTIFICATE----- 
diff --git a/tests/tls/client.key b/tests/tls/client.key new file mode 100644 index 000000000..2195d9f47 --- /dev/null +++ b/tests/tls/client.key @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQC9i0uOwNwHZ7+9 +5wKxcatM3upl6+avBx965d4xqKcWcaBCF12DuF5FcFaqNyeSUeZRO2AEGANs4jMN +SmxKrpwR3ppLxBzVpFndDafA3bxmuUSBCjkHwrQkj9F95fLciIBiLRPaWqrT3P+x +k3HhhWJbIOaW0TwMkAQdoXOM+x9OwmxOnEq8+b+xcWg6oMyLLCSJwdqPb/hb6x5E +LH0x2BzjuOOsODO+d3fa8hj2uHGgQqET0VlMMR8kv6e7iz9ufWygu5XHHL/qBDq4 +4HYyH+PGl53ROHq0eqr7A9GNmusI8f51M/ODVIh/S6GnKEsH7FdXKrC+LibQwGUL +s3+0kk8HAgMBAAECggEAH/T/0Lo4frEh1VlBDXzONmAxIe0DkEgBqT1fWH0IatLO +g8YfuKkZj9iQbBNvgw526AQ8PIR182Ix3FBLcM3+t06SMisG94hFtGzsdyqLbMBY +ye7YzbOonq920SS3bTJAfwz9l2eI/L56h8yFu1G7UwT5i70MREv1va8uZbsjPKpl +v4a7RNy8bbp0cYDebHs4ZTN1qo2y9KFRbVkGLQ6Ltt2ze2L+DMr5oZ2geTqDC/hj +6RZ97TwIJ6yqqb99oWtD/gVSGQsgJRSDUoPD8+DHJZwU/ppru9vidQYN8JJ3RHb6 +BCSaiw/UxXUujJLIKgD99nNx3Dd36O2txXMnay0VkQKBgQDhCNhsK/QoIwr2wNA5 +6TEu6ppzjuPQvbyj0xhE54I7JtWQ60rJVCmNstClR4xXrW1iHjezlDrCNxXtwTam +MMqAOzdiiFM/SZagwByKA8eqbWC2xOhMcdhDUe7w4y2kbq6QaN2JRmqEoGk7DsoV +BNs8tFJesUN2IEh9D0fFswykkQKBgQDXoEACw9p1CTsCjslbhLcXHRD6dSJj9tFZ +7rkEQ4HZaSuxiKbbik+ei1rxP8dS37hwTwhhdYQiKCH8djVtoOKy6LBkKyt6QCXc +z+fbjejCiw5d69U9shJtwYMu+9rJB/Xftb4c3O4YvR4PNZrQb05omW3dowGN9lG7 +RJSrFuImFwKBgHrYGUzzsZU3jASnvQPgCLlT2Hy8xCBvc0r7MYRr3OvthyhVSb4d +85EmzD9tj29NfSjS1hVyFaFv8MiZcPvIJsNZkCJ5yPgDc/bM6MduyNCAZQtg8m7I +twYI9CcL3R6mTSUIGeeWSQ+dgdR7flV7ETNSPKT0n58cgHXEoamK1JwBAoGALcQP +kgar2WdqJPLVfh0/FyLONbqi4ADFEod+sY76goC2C2prr/E9UPNXDBlzXv2pEf1U +VAszTmSiQgl88ZXSSBLnTXt0MPBomONv3MPUAWauV/T0P7IKA02tYE2IdF7CKwdS +IKIrzwhk1umTwGW/iKf4D7DHZbBATLUTsPScb2UCgYBSXNSXe+avlegP5qfW4Uwj +vJcNRdFUNXNR378YkY7+CVEwIf2IpDzHdjVAVGTCR7Jlv0RXLMWVs+L5CakfbuNN +mFnej787UB3EdV5+vjD8K+LzO88ymUeC7srXe8sky9+rXPfFqD/Q3sbyibkerv/V +XoJ6ElBU9n7ZtIl8C6WIeg== +-----END PRIVATE KEY----- diff --git a/tests/tls/graphite-clickhouse.conf.tpl b/tests/tls/graphite-clickhouse.conf.tpl new file mode 100644 index 000000000..f9ba2dd43 --- /dev/null +++ b/tests/tls/graphite-clickhouse.conf.tpl @@ -0,0 +1,39 @@ +[common] +listen = "{{ .GCH_ADDR }}" +max-cpu = 0 +max-metrics-in-render-answer = 10000 +max-metrics-per-target = 10000 +headers-to-log = [ "X-Ctx-Carbonapi-Uuid" ] +append-empty-series = false + +[clickhouse] +url = "{{ .CLICKHOUSE_TLS_URL }}/?max_rows_to_read=500000000&max_result_bytes=1073741824&readonly=2&log_queries=1" +data-timeout = "30s" +index-table = "graphite_index" +index-use-daily = true +index-timeout = "1m" +internal-aggregation = true + +tagged-table = "graphite_tags" +tagged-autocomplete-days = 1 +[clickhouse.tls] +ca-cert = ["{{- .TEST_DIR -}}/ca.crt"] +server-name = "localhost" +[[clickhouse.tls.certificates]] +key = "{{- .TEST_DIR -}}/client.key" +cert = "{{- .TEST_DIR -}}/client.crt" + +[[data-table]] +# # clickhouse table name +table = "graphite" +# # points in table are stored with reverse path +reverse = false +rollup-conf = "auto" + +[[logging]] +logger = "" +file = "{{ .GCH_DIR }}/graphite-clickhouse.log" +level = "info" +encoding = "json" +encoding-time = "iso8601" +encoding-duration = "seconds" diff --git a/tests/tls/test.toml b/tests/tls/test.toml new file mode 100644 index 000000000..e098573a0 --- /dev/null +++ b/tests/tls/test.toml @@ -0,0 +1,515 @@ +[test] +precision = "10s" + +[[test.clickhouse]] +version = "21.3" +tls = true +dir = "tests/clickhouse/rollup_tls" + +[[test.clickhouse]] +version = "22.8" +tls = true +dir = "tests/clickhouse/rollup_tls" + +[[test.clickhouse]] +version = 
"latest" +tls = true +dir = "tests/clickhouse/rollup_tls" + +[test.carbon_clickhouse] +template = "carbon-clickhouse.conf.tpl" + +[[test.graphite_clickhouse]] +template = "graphite-clickhouse.conf.tpl" + +[[test.input]] +name = "test.plain1" +points = [{value = 3.0, time = "rnow-30"}, {value = 0.0, time = "rnow-20"}, {value = 1.0, time = "rnow-10"}, {value = 2.0, time = "rnow"}] + +[[test.input]] +name = "test.plain2" +points = [{value = 2.0, time = "rnow-30"}, {value = 1.0, time = "rnow-20"}, {value = 1.5, time = "rnow-10"}, {value = 2.5, time = "rnow"}] + +[[test.input]] +name = "test2.plain" +points = [{value = 1.0, time = "rnow-30"}, {value = 2.0, time = "rnow-20"}, {value = 2.5, time = "rnow-10"}, {value = 3.5, time = "rnow"}] + +[[test.input]] +name = "metric1;tag1=value1;tag2=value21;tag3=value3" +points = [{value = 2.0, time = "rnow-30"}, {value = 2.5, time = "rnow-20"}, {value = 2.0, time = "rnow-10"}, {value = 3.0, time = "rnow"}] + +[[test.input]] +name = "metric1;tag2=value22;tag4=value4" +points = [{value = 1.0, time = "rnow-30"}, {value = 2.0, time = "rnow-20"}, {value = 0.0, time = "rnow-10"}, {value = 1.0, time = "rnow"}] + +[[test.input]] +name = "metric1;tag1=value1;tag2=value23;tag3=value3" +points = [{value = 0.5, time = "rnow-30"}, {value = 1.5, time = "rnow-20"}, {value = 4.0, time = "rnow-10"}, {value = 3.0, time = "rnow"}] + +[[test.input]] +name = "metric2;tag2=value21;tag4=value4" +points = [{value = 2.0, time = "rnow-30"}, {value = 1.0, time = "rnow-20"}, {value = 0.0, time = "rnow-10"}, {value = 1.0, time = "rnow"}] + +[[test.input]] +name = "test_metric;minus=-;plus=+;percent=%;underscore=_;colon=:;hash=#;forward=/;host=127.0.0.1" +points = [{value = 2.1, time = "rnow-30"}, {value = 0.1, time = "rnow-20"}, {value = 0.2, time = "rnow-10"}, {value = 1.5, time = "rnow"}] + +###################################### +# Check metrics find + +[[test.find_checks]] +formats = [ "pickle", "protobuf", "carbonapi_v3_pb" ] +query = "test" +result = [ + { path = "test", is_leaf = false } +] + +[[test.find_checks]] +formats = [ "pickle", "protobuf", "carbonapi_v3_pb" ] +query = "test.pl*" +result = [ + { path = "test.plain1", is_leaf = true }, { path = "test.plain2", is_leaf = true } +] + +# End - Check metrics find +###################################### +# Check tags autocomplete + +[[test.tags_checks]] +query = "tag1;tag2=value21" +result = [ + "value1" +] + +[[test.tags_checks]] +query = "name;tag2=value21;tag1=~value" +result = [ + "metric1", +] + +[[test.tags_checks]] +query = "colon;percent=%" +result = [ + ":", +] + +# End - Check tags autocomplete +########################################################################## +# Plain metrics (carbonapi_v3_pb) + +# test.plain1 +# test.plain2 +# test2.plain + +[[test.render_checks]] +from = "rnow-10" +until = "rnow" +targets = [ + "test.plain*", + "test{1,2}.plain" +] + +[[test.render_checks.result]] +name = "test.plain1" +path = "test.plain*" +consolidation = "avg" +start = "rnow-10" +stop = "rnow+10" +step = 10 +req_start = "rnow-10" +req_stop = "rnow+10" +values = [1.0, 2.0] + +[[test.render_checks.result]] +name = "test.plain2" +path = "test.plain*" +consolidation = "avg" +start = "rnow-10" +stop = "rnow+10" +step = 10 +req_start = "rnow-10" +req_stop = "rnow+10" +values = [1.5, 2.5] + +[[test.render_checks.result]] +name = "test2.plain" +path = "test{1,2}.plain" +consolidation = "avg" +start = "rnow-10" +stop = "rnow+10" +step = 10 +req_start = "rnow-10" +req_stop = "rnow+10" +values = [2.5, 3.5] + +# End - Plain 
metrics (carbonapi_v3_pb) +########################################################################## +# Plain metrics (carbonapi_v2_pb) + +[[test.render_checks]] +formats = [ "protobuf", "carbonapi_v2_pb" ] +from = "rnow-10" +until = "rnow+1" +targets = [ + "test.plain*", + "test{1,2}.plain" +] + +[[test.render_checks.result]] +name = "test.plain1" +start = "rnow-10" +stop = "rnow+10" +step = 10 +values = [1.0, 2.0] + +[[test.render_checks.result]] +name = "test.plain2" +start = "rnow-10" +stop = "rnow+10" +step = 10 +values = [1.5, 2.5] + +[[test.render_checks.result]] +name = "test2.plain" +start = "rnow-10" +stop = "rnow+10" +step = 10 +values = [2.5, 3.5] + +# End - Plain metrics (carbonapi_v2_pb) +########################################################################## +# Plain metrics (pickle) + +[[test.render_checks]] +formats = [ "pickle" ] +from = "rnow-10" +until = "rnow+1" +targets = [ + "test.plain*", + "test{1,2}.plain" +] + +[[test.render_checks.result]] +name = "test.plain1" +path = "test.plain*" +start = "rnow-10" +stop = "rnow+10" +step = 10 +values = [1.0, 2.0] + +[[test.render_checks.result]] +name = "test.plain2" +path = "test.plain*" +start = "rnow-10" +stop = "rnow+10" +step = 10 +values = [1.5, 2.5] + +[[test.render_checks.result]] +name = "test2.plain" +path = "test{1,2}.plain" +start = "rnow-10" +stop = "rnow+10" +step = 10 +values = [2.5, 3.5] + +# End - Plain metrics (pickle) +########################################################################## +# Taged metrics (carbonapi_v3_pb) + +# metric1;tag1=value1;tag2=value21;tag3=value3 +# metric1;tag2=value22;tag4=value4 +# metric1;tag1=value1;tag2=value23;tag3=value3 +# metric2;tag2=value21;tag4=value4 + +[[test.render_checks]] +from = "rnow-10" +until = "rnow+1" +targets = [ + "seriesByTag('name=metric1', 'tag2=~value', 'tag3=value*')", + "seriesByTag('name=metric2', 'tag2=~value', 'tag4=value4')" +] + +[[test.render_checks.result]] +name = "metric1;tag1=value1;tag2=value21;tag3=value3" +path = "seriesByTag('name=metric1', 'tag2=~value', 'tag3=value*')" +consolidation = "avg" +start = "rnow-10" +stop = "rnow+10" +step = 10 +req_start = "rnow-10" +req_stop = "rnow+10" +values = [2.0, 3.0] + +[[test.render_checks.result]] +name = "metric1;tag1=value1;tag2=value23;tag3=value3" +path = "seriesByTag('name=metric1', 'tag2=~value', 'tag3=value*')" +consolidation = "avg" +start = "rnow-10" +stop = "rnow+10" +step = 10 +req_start = "rnow-10" +req_stop = "rnow+10" +values = [4.0, 3.0] + +[[test.render_checks.result]] +name = "metric2;tag2=value21;tag4=value4" +path = "seriesByTag('name=metric2', 'tag2=~value', 'tag4=value4')" +consolidation = "avg" +start = "rnow-10" +stop = "rnow+10" +step = 10 +req_start = "rnow-10" +req_stop = "rnow+10" +values = [0.0, 1.0] + +# End - Tagged metrics (carbonapi_v3_pb) +########################################################################## +# Tagged metrics (carbonapi_v2_pb) + +[[test.render_checks]] +formats = [ "protobuf", "carbonapi_v2_pb" ] +from = "rnow-10" +until = "rnow+1" +targets = [ + "seriesByTag('name=metric1', 'tag2=~value', 'tag3=value*')", + "seriesByTag('name=metric2', 'tag2=~value', 'tag4=value4')" +] + +[[test.render_checks.result]] +name = "metric1;tag1=value1;tag2=value21;tag3=value3" +start = "rnow-10" +stop = "rnow+10" +step = 10 +values = [2.0, 3.0] + +[[test.render_checks.result]] +name = "metric1;tag1=value1;tag2=value23;tag3=value3" +start = "rnow-10" +stop = "rnow+10" +step = 10 +values = [4.0, 3.0] + +[[test.render_checks.result]] +name = 
"metric2;tag2=value21;tag4=value4" +start = "rnow-10" +stop = "rnow+10" +step = 10 +values = [0.0, 1.0] + +# End - Tagged metrics (carbonapi_v2_pb) +########################################################################## +# Tagged metrics (pickle) + +[[test.render_checks]] +formats = [ "pickle" ] +from = "rnow-10" +until = "rnow+1" +targets = [ + "seriesByTag('name=metric1', 'tag2=~value', 'tag3=value*')", + "seriesByTag('name=metric2', 'tag2=~value', 'tag4=value4')" +] + +[[test.render_checks.result]] +name = "metric1;tag1=value1;tag2=value21;tag3=value3" +path = "seriesByTag('name=metric1', 'tag2=~value', 'tag3=value*')" +start = "rnow-10" +stop = "rnow+10" +step = 10 +values = [2.0, 3.0] + +[[test.render_checks.result]] +name = "metric1;tag1=value1;tag2=value23;tag3=value3" +path = "seriesByTag('name=metric1', 'tag2=~value', 'tag3=value*')" +start = "rnow-10" +stop = "rnow+10" +step = 10 +values = [4.0, 3.0] + +[[test.render_checks.result]] +name = "metric2;tag2=value21;tag4=value4" +path = "seriesByTag('name=metric2', 'tag2=~value', 'tag4=value4')" +start = "rnow-10" +stop = "rnow+10" +step = 10 +values = [0.0, 1.0] + +# End - Tagged metrics (pickle) +########################################################################## +# Unescape + +[[test.render_checks]] +formats = [ "protobuf", "carbonapi_v2_pb" ] +from = "rnow-10" +until = "rnow+1" +targets = [ + "seriesByTag('percent=%')", +] + +[[test.render_checks.result]] +name = "test_metric;colon=:;forward=/;hash=#;host=127.0.0.1;minus=-;percent=%;plus=+;underscore=_" +start = "rnow-10" +stop = "rnow+10" +step = 10 +values = [0.2, 1.5] + +# End - Tagged metrics (pickle) +########################################################################## +# Midnight + +# points for check https://github.com/go-graphite/graphite-clickhouse/issues/184 +[[test.input]] +name = "test.midnight" +points = [{value = 3.0, time = "midnight+60s"}] + +[[test.input]] +name = "now;scope=midnight" +points = [{value = 4.0, time = "midnight+60s"}] + +[[test.find_checks]] +name = "Midnight (direct)" +query = "test.midnight*" +result = [{ path = "test.midnight", is_leaf = true }] + +[[test.find_checks]] +name = "Midnight" +query = "test.midnight" +from = "midnight+60s" +until = "midnight+70s" +result = [{ path = "test.midnight", is_leaf = true }] + +[[test.find_checks]] +name = "Midnight (reverse)" +query = "*test.midnight" +result = [{ path = "test.midnight", is_leaf = true }] + +[[test.find_checks]] +name = "Midnight" +query = "test.midnight" +from = "midnight+60s" +until = "midnight+70s" +result = [{ path = "test.midnight", is_leaf = true }] + +[[test.tags_checks]] +name = "Midnight" +query = "name;scope=midnight" +result = [ + "now", +] + +[[test.render_checks]] +name = "Midnight (direct)" +formats = [ "protobuf" ] +from = "midnight+60s" +until = "midnight+70s" +targets = [ + "test.midnight*", + ] + +[[test.render_checks.result]] +name = "test.midnight" +start = "midnight+60s" +stop = "midnight+80s" +step = 10 +values = [3.0, nan] + +[[test.render_checks]] +name = "Midnight (reverse)" +formats = [ "protobuf" ] +from = "midnight+60s" +until = "midnight+70s" +targets = [ + "*test.midnight", + ] + +[[test.render_checks.result]] +name = "test.midnight" +start = "midnight+60s" +stop = "midnight+80s" +step = 10 +values = [3.0, nan] + +[[test.render_checks]] +name = "Midnight" +formats = [ "protobuf" ] +from = "midnight+60s" +until = "midnight+70s" +targets = [ + "seriesByTag('name=now', 'scope=midnight')", + ] + +[[test.render_checks.result]] +name = 
"now;scope=midnight" +start = "midnight+60s" +stop = "midnight+80s" +step = 10 +values = [4.0, nan] + +# End - Midnight +########################################################################## +# Day end + +# points for check https://github.com/go-graphite/graphite-clickhouse/issues/184 +[[test.input]] +name = "test.23h" +points = [{value = 3.0, time = "midnight+1380m"}] + +[[test.input]] +name = "now;scope=23h" +points = [{value = 4.0, time = "midnight+1380m"}] + +[[test.find_checks]] +name = "Day end" +query = "test.23h" +from = "midnight+1380m" +until = "midnight+1381m" +result = [{ path = "test.23h", is_leaf = true }] + +[[test.find_checks]] +name = "Day end" +query = "test.23h" +from = "midnight+1380m" +until = "midnight+1381m" +result = [{ path = "test.23h", is_leaf = true }] + +[[test.tags_checks]] +name = "Day end" +query = "name;scope=23h" +result = [ + "now", +] + +[[test.render_checks]] +name = "Day end" +formats = [ "protobuf" ] +from = "midnight+1380m" +until = "midnight+1380m+10s" +targets = [ + "test.23h", + ] + +[[test.render_checks.result]] +name = "test.23h" +start = "midnight+1380m" +stop = "midnight+1380m+20s" +step = 10 +values = [3.0, nan] + +[[test.render_checks]] +name = "Day end" +formats = [ "protobuf" ] +from = "midnight+1380m" +until = "midnight+1380m+10s" +targets = [ + "seriesByTag('name=now', 'scope=23h')", + ] + +[[test.render_checks.result]] +name = "now;scope=23h" +start = "midnight+1380m" +stop = "midnight+1380m+20s" +step = 10 +values = [4.0, nan] + +# End - Day end +##########################################################################