Commit e4b3bb90 authored by craig[bot]

Merge #50654 #50702 #50794

50654: geomfn: apply bounding box checks for DWithin/DFullyWithin r=sumeerbhola a=otan

We are able to optimize DWithin/DFullyWithin by extending the bounding
box of geometry types by the given distance in each direction and
checking whether the boxes intersect / cover each other.

Also implement a cartesian bounding box Covers check and use it for
other relevant operations.
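
As a minimal self-contained sketch of the idea (simplified names; the
actual types and methods are in the diff below):

```go
package main

import "fmt"

// box is a simplified stand-in for CartesianBoundingBox; the field and
// method names here are illustrative, not the ones in the diff below.
type box struct{ loX, hiX, loY, hiY float64 }

// buffer expands the box by d units on each side, mirroring Buffer below.
func (b box) buffer(d float64) box {
	return box{b.loX - d, b.hiX + d, b.loY - d, b.hiY + d}
}

// intersects reports whether two boxes overlap.
func (b box) intersects(o box) bool {
	return b.loX <= o.hiX && o.loX <= b.hiX && b.loY <= o.hiY && o.loY <= b.hiY
}

func main() {
	a := box{loX: 0, hiX: 1, loY: 0, hiY: 1}
	c := box{loX: 3, hiX: 4, loY: 0, hiY: 1} // 2 units to the right of a
	// DWithin(a, c, 1) can short-circuit to false without the exact check:
	// even a's box grown by 1 unit cannot reach c's box.
	fmt.Println(a.buffer(1).intersects(c)) // false
	// Within 2 units the boxes touch, so we fall through to the exact check.
	fmt.Println(a.buffer(2).intersects(c)) // true
}
```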

Release note: None



50702: localmetrics: add local Grafana timeseries tooling r=petermattis a=tbg



Add a docker-compose setup that starts a local Grafana backed by a local
Postgres along with a helper that can import timeseries data into the
Postgres instance which the Grafana instance is configured to display.

Consult scripts/localmetrics/README.md for a quickstart.

This isn't a valuable debugging tool just yet, but with a bit of elbow
grease I believe it will become invaluable for avoiding the many
back-and-forth round trips we currently have with customers to exchange
screenshots of the Admin UI.

To make it truly useful, we need:

1. [timeseries in debug.zip](#50432)
2. auto-generate dashboards from `./pkg/ts/catalog`.

Both are totally doable, and even without 2) there's already some
utility, as it's easy to make ad-hoc panels in Grafana thanks to the
built-in query builder.

Finally, here's a screenshot of the one panel included here right now,
the rate of DistSender batches, taken from `sample.csv`.


![image](https://user-images.githubusercontent.com/5076964/85878083-78756580-b7d8-11ea-88b4-fb515ef26186.png)


Release note: None

50794: sql: disallow creation of schemas with the "pg_" prefix r=arulajmani a=rohany

This is disallowed by Postgres, and will be useful when integrating
user-defined schema resolution with the temporary schema (which isn't
backed by a descriptor).
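
For example (`pg_cool` is a hypothetical name; the error and detail text
match the pgerror added below):

```sql
CREATE SCHEMA pg_cool;
-- ERROR: unacceptable schema name "pg_cool"
-- DETAIL: The prefix "pg_" is reserved for system schemas.
```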

Release note: None
Co-authored-by: Oliver Tan <[email protected]>
Co-authored-by: Tobias Schottdorf <[email protected]>
Co-authored-by: Rohan Yadav <[email protected]>
@@ -16,7 +16,6 @@ import (
"context"
gohex "encoding/hex"
"fmt"
"io"
"math"
"os"
"path/filepath"
@@ -44,7 +43,6 @@ import (
"github.com/cockroachdb/cockroach/pkg/sql/execinfrapb"
"github.com/cockroachdb/cockroach/pkg/storage"
"github.com/cockroachdb/cockroach/pkg/storage/enginepb"
"github.com/cockroachdb/cockroach/pkg/ts/tspb"
"github.com/cockroachdb/cockroach/pkg/util/envutil"
"github.com/cockroachdb/cockroach/pkg/util/flagutil"
"github.com/cockroachdb/cockroach/pkg/util/hlc"
@@ -854,50 +852,6 @@ func parseGossipValues(gossipInfo *gossip.InfoStatus) (string, error) {
return strings.Join(output, "\n"), nil
}
var debugTimeSeriesDumpCmd = &cobra.Command{
Use: "tsdump",
Short: "dump all the raw timeseries values in a cluster",
Long: `
Dumps all of the raw timeseries values in a cluster.
`,
RunE: MaybeDecorateGRPCError(runTimeSeriesDump),
}
func runTimeSeriesDump(cmd *cobra.Command, args []string) error {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
conn, _, finish, err := getClientGRPCConn(ctx, serverCfg)
if err != nil {
return err
}
defer finish()
tsClient := tspb.NewTimeSeriesClient(conn)
stream, err := tsClient.Dump(context.Background(), &tspb.DumpRequest{})
if err != nil {
log.Fatalf(context.Background(), "%v", err)
}
var name, source string
for {
data, err := stream.Recv()
if err == io.EOF {
return nil
}
if err != nil {
return err
}
if name != data.Name || source != data.Source {
name, source = data.Name, data.Source
fmt.Printf("%s %s\n", name, source)
}
for _, d := range data.Datapoints {
fmt.Printf("%d %v\n", d.TimestampNanos, d.Value)
}
}
}
var debugSyncBenchCmd = &cobra.Command{
Use: "syncbench [directory]",
Short: "Run a performance test for WAL sync speed",
......
@@ -689,7 +689,7 @@ func init() {
// Commands that print tables.
tableOutputCommands := append(
-		[]*cobra.Command{sqlShellCmd, genSettingsListCmd, demoCmd},
+		[]*cobra.Command{sqlShellCmd, genSettingsListCmd, demoCmd, debugTimeSeriesDumpCmd},
demoCmd.Commands()...)
tableOutputCommands = append(tableOutputCommands, nodeCmds...)
tableOutputCommands = append(tableOutputCommands, authCmds...)
......
// Copyright 2020 The Cockroach Authors.
//
// Use of this software is governed by the Business Source License
// included in the file licenses/BSL.txt.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0, included in the file
// licenses/APL.txt.
package cli
import (
"context"
"encoding/csv"
"fmt"
"io"
"os"
"time"
"github.com/cockroachdb/cockroach/pkg/ts/tspb"
"github.com/cockroachdb/cockroach/pkg/util/log"
"github.com/cockroachdb/cockroach/pkg/util/timeutil"
"github.com/spf13/cobra"
)
var debugTimeSeriesDumpCmd = &cobra.Command{
Use: "tsdump",
Short: "dump all the raw timeseries values in a cluster",
Long: `
Dumps all of the raw timeseries values in a cluster.
`,
RunE: MaybeDecorateGRPCError(func(cmd *cobra.Command, args []string) error {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
var w tsWriter
switch cliCtx.tableDisplayFormat {
case tableDisplayCSV:
w = csvTSWriter{w: csv.NewWriter(os.Stdout)}
case tableDisplayTSV:
cw := csvTSWriter{w: csv.NewWriter(os.Stdout)}
cw.w.Comma = '\t'
w = cw
default:
w = &rawTSWriter{w: os.Stdout}
}
conn, _, finish, err := getClientGRPCConn(ctx, serverCfg)
if err != nil {
return err
}
defer finish()
tsClient := tspb.NewTimeSeriesClient(conn)
	stream, err := tsClient.Dump(ctx, &tspb.DumpRequest{})
	if err != nil {
		return err
	}
for {
data, err := stream.Recv()
if err == io.EOF {
return w.Flush()
}
if err != nil {
return err
}
if err := w.Emit(data); err != nil {
return err
}
}
}),
}
type tsWriter interface {
Emit(*tspb.TimeSeriesData) error
Flush() error
}
type csvTSWriter struct {
w *csv.Writer
}
func (w csvTSWriter) Emit(data *tspb.TimeSeriesData) error {
for _, d := range data.Datapoints {
if err := w.w.Write(
[]string{data.Name, timeutil.Unix(0, d.TimestampNanos).In(time.UTC).Format(time.RFC3339), data.Source, fmt.Sprint(d.Value)},
); err != nil {
return err
}
}
return nil
}
func (w csvTSWriter) Flush() error {
w.w.Flush()
return w.w.Error()
}
type rawTSWriter struct {
last struct {
name, source string
}
w io.Writer
}
func (w rawTSWriter) Flush() error { return nil }
// Emit uses a pointer receiver so that updates to w.last survive across
// calls; with a value receiver the name/source header would be reprinted
// for every batch of datapoints.
func (w *rawTSWriter) Emit(data *tspb.TimeSeriesData) error {
if w.last.name != data.Name || w.last.source != data.Source {
w.last.name, w.last.source = data.Name, data.Source
fmt.Fprintf(w.w, "%s %s\n", data.Name, data.Source)
}
for _, d := range data.Datapoints {
fmt.Fprintf(w.w, "%v %v\n", d.TimestampNanos, d.Value)
}
return nil
}
@@ -46,6 +46,21 @@ func (b *CartesianBoundingBox) AddPoint(x, y float64) {
b.HiY = math.Max(b.HiY, y)
}
// Buffer adds n units to each side of the bounding box.
func (b *CartesianBoundingBox) Buffer(n float64) *CartesianBoundingBox {
if b == nil {
return nil
}
return &CartesianBoundingBox{
BoundingBox: geopb.BoundingBox{
LoX: b.LoX - n,
HiX: b.HiX + n,
LoY: b.LoY - n,
HiY: b.HiY + n,
},
}
}
// Intersects returns whether the BoundingBoxes intersect.
// Empty bounding boxes never intersect.
func (b *CartesianBoundingBox) Intersects(o *CartesianBoundingBox) bool {
@@ -60,6 +75,18 @@ func (b *CartesianBoundingBox) Intersects(o *CartesianBoundingBox) bool {
return true
}
// Covers returns whether the BoundingBox covers the other bounding box.
// Empty bounding boxes never cover.
func (b *CartesianBoundingBox) Covers(o *CartesianBoundingBox) bool {
if b == nil || o == nil {
return false
}
return b.LoX <= o.LoX && o.LoX <= b.HiX &&
b.LoX <= o.HiX && o.HiX <= b.HiX &&
b.LoY <= o.LoY && o.LoY <= b.HiY &&
b.LoY <= o.HiY && o.HiY <= b.HiY
}
// boundingBoxFromGeomT returns a bounding box from a given geom.T.
// Returns nil if no bounding box was found.
func boundingBoxFromGeomT(g geom.T, soType geopb.SpatialObjectType) (*geopb.BoundingBox, error) {
......
@@ -129,6 +129,89 @@ func TestCartesianBoundingBoxIntersects(t *testing.T) {
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
require.Equal(t, tc.expected, tc.a.Intersects(tc.b))
require.Equal(t, tc.expected, tc.b.Intersects(tc.a))
})
}
}
func TestCartesianBoundingBoxCovers(t *testing.T) {
testCases := []struct {
desc string
a *CartesianBoundingBox
b *CartesianBoundingBox
expected bool
}{
{
desc: "same bounding box covers",
a: &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: 0, HiX: 1, LoY: 0, HiY: 1}},
b: &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: 0, HiX: 1, LoY: 0, HiY: 1}},
expected: true,
},
{
desc: "nested bounding box covers",
a: &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: 0, HiX: 1, LoY: 0, HiY: 1}},
b: &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: 0.1, HiX: 0.9, LoY: 0.1, HiY: 0.9}},
expected: true,
},
		{
			desc:     "side touching bounding box covers",
			a:        &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: 0, HiX: 1, LoY: 0, HiY: 1}},
			b:        &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: 0, HiX: 0.9, LoY: 0.1, HiY: 0.9}},
			expected: true,
		},
{
desc: "top touching bounding box covers",
a: &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: 0, HiX: 1, LoY: 0, HiY: 1}},
b: &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: 0.1, HiX: 0.9, LoY: 0, HiY: 1}},
expected: true,
},
{
desc: "reversed nested bounding box does not cover",
a: &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: 0.1, HiX: 0.9, LoY: 0.1, HiY: 0.9}},
b: &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: 0, HiX: 1, LoY: 0, HiY: 1}},
expected: false,
},
		{
			desc:     "overlapping bounding box from the left does not cover",
			a:        &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: 0, HiX: 1, LoY: 0, HiY: 1}},
			b:        &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: -0.5, HiX: 0.5, LoY: -0.5, HiY: 0.5}},
			expected: false,
		},
		{
			desc:     "overlapping bounding box from the right does not cover",
			a:        &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: 0, HiX: 1, LoY: 0, HiY: 1}},
			b:        &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: 0.5, HiX: 1.5, LoY: 0.5, HiY: 1.5}},
			expected: false,
		},
{
			desc:     "corner touching bounding box does not cover",
a: &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: 0, HiX: 1, LoY: 0, HiY: 1}},
b: &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: 1, HiX: 2, LoY: 1, HiY: 2}},
expected: false,
},
{
desc: "bounding box that is left does not cover",
a: &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: 0, HiX: 1, LoY: 0, HiY: 1}},
b: &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: 1.5, HiX: 2, LoY: 0, HiY: 1}},
expected: false,
},
{
desc: "higher bounding box does not cover",
a: &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: 0, HiX: 1, LoY: 0, HiY: 1}},
b: &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: 0, HiX: 1, LoY: 1.5, HiY: 2}},
expected: false,
},
{
desc: "completely disjoint bounding box does not cover",
a: &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: 0, HiX: 1, LoY: 0, HiY: 1}},
b: &CartesianBoundingBox{BoundingBox: geopb.BoundingBox{LoX: -3, HiX: -2, LoY: 1.5, HiY: 2}},
expected: false,
},
}
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
require.Equal(t, tc.expected, tc.a.Covers(tc.b))
})
}
}
@@ -20,7 +20,7 @@ func Covers(a *geo.Geometry, b *geo.Geometry) (bool, error) {
if a.SRID() != b.SRID() {
return false, geo.NewMismatchingSRIDsError(a, b)
}
-	if !a.CartesianBoundingBox().Intersects(b.CartesianBoundingBox()) {
+	if !a.CartesianBoundingBox().Covers(b.CartesianBoundingBox()) {
return false, nil
}
return geos.Covers(a.EWKB(), b.EWKB())
@@ -31,7 +31,7 @@ func CoveredBy(a *geo.Geometry, b *geo.Geometry) (bool, error) {
if a.SRID() != b.SRID() {
return false, geo.NewMismatchingSRIDsError(a, b)
}
-	if !a.CartesianBoundingBox().Intersects(b.CartesianBoundingBox()) {
+	if !b.CartesianBoundingBox().Covers(a.CartesianBoundingBox()) {
return false, nil
}
return geos.CoveredBy(a.EWKB(), b.EWKB())
@@ -42,7 +42,7 @@ func Contains(a *geo.Geometry, b *geo.Geometry) (bool, error) {
if a.SRID() != b.SRID() {
return false, geo.NewMismatchingSRIDsError(a, b)
}
-	if !a.CartesianBoundingBox().Intersects(b.CartesianBoundingBox()) {
+	if !a.CartesianBoundingBox().Covers(b.CartesianBoundingBox()) {
return false, nil
}
return geos.Contains(a.EWKB(), b.EWKB())
@@ -53,7 +53,7 @@ func ContainsProperly(a *geo.Geometry, b *geo.Geometry) (bool, error) {
if a.SRID() != b.SRID() {
return false, geo.NewMismatchingSRIDsError(a, b)
}
-	if !a.CartesianBoundingBox().Intersects(b.CartesianBoundingBox()) {
+	if !a.CartesianBoundingBox().Covers(b.CartesianBoundingBox()) {
return false, nil
}
return geos.RelatePattern(a.EWKB(), b.EWKB(), "T**FF*FF*")
@@ -81,7 +81,7 @@ func Equals(a *geo.Geometry, b *geo.Geometry) (bool, error) {
if a.Empty() && b.Empty() {
return true, nil
}
-	if !a.CartesianBoundingBox().Intersects(b.CartesianBoundingBox()) {
+	if !a.CartesianBoundingBox().Covers(b.CartesianBoundingBox()) {
return false, nil
}
return geos.Equals(a.EWKB(), b.EWKB())
@@ -125,7 +125,7 @@ func Within(a *geo.Geometry, b *geo.Geometry) (bool, error) {
if a.SRID() != b.SRID() {
return false, geo.NewMismatchingSRIDsError(a, b)
}
-	if !a.CartesianBoundingBox().Intersects(b.CartesianBoundingBox()) {
+	if !b.CartesianBoundingBox().Covers(a.CartesianBoundingBox()) {
return false, nil
}
return geos.Within(a.EWKB(), b.EWKB())
......
@@ -59,6 +59,9 @@ func DWithin(a *geo.Geometry, b *geo.Geometry, d float64) (bool, error) {
if d < 0 {
return false, errors.Newf("dwithin distance cannot be less than zero")
}
if !a.CartesianBoundingBox().Buffer(d).Intersects(b.CartesianBoundingBox()) {
return false, nil
}
dist, err := minDistanceInternal(a, b, d, geo.EmptyBehaviorError)
if err != nil {
// In case of any empty geometries return false.
@@ -79,6 +82,9 @@ func DFullyWithin(a *geo.Geometry, b *geo.Geometry, d float64) (bool, error) {
if d < 0 {
		return false, errors.Newf("dfullywithin distance cannot be less than zero")
}
if !a.CartesianBoundingBox().Buffer(d).Covers(b.CartesianBoundingBox()) {
return false, nil
}
dist, err := maxDistanceInternal(a, b, d, geo.EmptyBehaviorError)
if err != nil {
// In case of any empty geometries return false.
......
@@ -264,6 +264,15 @@ var Projections = map[geopb.SRID]ProjInfo{
IsLatLng: false,
Spheroid: spheroid2,
},
26918: {
SRID: 26918,
AuthName: "EPSG",
AuthSRID: 26918,
SRText: `PROJCS["NAD83 / UTM zone 18N",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4269"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",-75],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","26918"]]`,
Proj4Text: MakeProj4Text(`+proj=utm +zone=18 +datum=NAD83 +units=m +no_defs`),
IsLatLng: false,
Spheroid: spheroid3,
},
32601: {
SRID: 32601,
AuthName: "EPSG",
......
@@ -12,6 +12,7 @@ package sql
import (
"context"
"strings"
"github.com/cockroachdb/cockroach/pkg/clusterversion"
"github.com/cockroachdb/cockroach/pkg/keys"
@@ -19,7 +20,9 @@ import (
"github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgcode"
"github.com/cockroachdb/cockroach/pkg/sql/pgwire/pgerror"
"github.com/cockroachdb/cockroach/pkg/sql/sem/tree"
"github.com/cockroachdb/cockroach/pkg/sql/sessiondata"
"github.com/cockroachdb/cockroach/pkg/sql/sqlbase"
"github.com/cockroachdb/errors"
)
type createSchemaNode struct {
@@ -80,6 +83,13 @@ func (p *planner) createUserDefinedSchema(params runParams, n *tree.CreateSchema
return pgerror.Newf(pgcode.DuplicateSchema, "schema %q already exists", n.Schema)
}
// Schemas starting with "pg_" are not allowed.
if strings.HasPrefix(n.Schema, sessiondata.PgSchemaPrefix) {
err := pgerror.Newf(pgcode.ReservedName, "unacceptable schema name %q", n.Schema)
err = errors.WithDetail(err, `The prefix "pg_" is reserved for system schemas.`)
return err
}
// Ensure that the cluster version is high enough to create the schema.
if !params.p.ExecCfg().Settings.Version.IsActive(params.ctx, clusterversion.VersionUserDefinedSchemas) {
return pgerror.Newf(pgcode.ObjectNotInPrerequisiteState,
......
@@ -33,3 +33,6 @@ CREATE SCHEMA pg_catalog
statement error schema .* already exists
CREATE SCHEMA information_schema
statement error pq: unacceptable schema name \"pg_temp\"
CREATE SCHEMA pg_temp
@@ -29,6 +29,10 @@ const InformationSchemaName = "information_schema"
// CRDBInternalSchemaName is the name of the crdb_internal system schema.
const CRDBInternalSchemaName = "crdb_internal"
// PgSchemaPrefix is a prefix for Postgres system schemas. Users cannot
// create schemas with this prefix.
const PgSchemaPrefix = "pg_"
// PgTempSchemaName is the alias for temporary schemas across sessions.
const PgTempSchemaName = "pg_temp"
......
# Local timeseries tooling
## Quick Start
```
docker-compose up -d
curl -s https://gist.githubusercontent.com/tbg/98d9814f624629833e6cfb7d25cb8258/raw/70a96d50032361f864b240dbd9f1c36c385b7515/sample.csv | ./import-csv.sh
# User/Pass admin/x
open http://127.0.0.1:3000
```
## Usage
### Step 1: procure a metrics dump
The source of this could be a `debug zip` (pending [#50432]), or
`./cockroach debug tsdump --format=csv --host=... > dump.csv`.
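
Per the `csvTSWriter` added in this change, each CSV row is
`name,RFC3339 timestamp,source,value`; for example (made-up datapoint):

```
cr.node.distsender.batches,2020-06-26T12:34:56Z,1,42
```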
### Step 2: `docker-compose up -d`
Not much more to be said. Unsurprisingly, this needs Docker to work. Omit the
`-d` if you want to see what's going on behind the scenes. It may take a moment
for the next step to work.
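
If the import step fails because Postgres isn't accepting connections yet, a
readiness check along these lines (hypothetical invocation; `pg_isready` ships
in the `postgres` image) tells you when it's up:

```
docker-compose exec postgres pg_isready -U postgres
```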
### Step 3: `./import-csv.sh < dump.csv`
This loads the CSV data into your local Postgres instance. Note that it
truncates any previously imported data, so you don't accidentally mix
metrics from various sources.
If you legitimately want to import multiple CSVs at once, use `cat *.csv |
./import-csv.sh` instead.
### Step 4: open [Grafana](http://127.0.0.1:3000)
Log in as user `admin`, password `x`. You should be able to find a reference to
the CockroachDB dashboard on the landing page.
### Step 5: Play
You can edit the provided panel or add panels to plot interesting metrics.
A good starting point for learning how to do that is the [Grafana blog].
Replace `./grafana/dashboards/home.json` if you want the changes to persist.
TODO(tbg): auto-generate a better home.json from `pkg/ts/catalog`.
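
For reference, the bundled panel (see `home.json` below) computes a per-series
rate from the raw counter values with a window function. A simplified version
of that query, omitting the counter-reset handling in the original:

```
SELECT
  "time",
  concat(name, '.s', source) AS metric,
  (value - lag(value) OVER w)
    / extract(epoch FROM "time" - lag("time") OVER w) AS rate
FROM metrics
WHERE name = 'cr.node.distsender.batches'
WINDOW w AS (PARTITION BY concat(name, '.s', source) ORDER BY "time")
ORDER BY 1, 2;
```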
### Step 6: `docker-compose stop`
Stop the containers to avoid hogging resources on your machine. The Postgres
database lives on your local file system, so it will remain. If you want to
nuke everything, use `down` instead of `stop` and then `git clean -f .`.
[#50432]: https://github.com/cockroachdb/cockroach/pull/50432
[Grafana blog]: https://grafana.com/blog/2018/10/15/make-time-series-exploration-easier-with-the-postgresql/timescaledb-query-editor/
version: '2.0'
services:
grafana:
build: ./grafana
container_name: grafana
ports:
- 3000:3000
links:
- postgres
# It's already in the container, but by mounting it we can edit these files
# without rebuilding the container.
volumes:
- ./grafana/dashboards:/var/lib/grafana/dashboards
postgres:
image: postgres:12.3-alpine
environment:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
POSTGRES_DB: postgres
PGDATA: /postgres-data
ports:
- 5432:5432
volumes:
- ./postgres-data:/postgres-data
# NB: want master to pick up GF_DASHBOARDS_DEFAULT_HOME_DASHBOARD_PATH:
# https://github.com/grafana/grafana/pull/25595
# TODO(tbg): pin a release once the above is released to avoid random breakage.
FROM grafana/grafana:master
ENV GF_INSTALL_PLUGINS grafana-clock-panel,briangann-gauge-panel,natel-plotly-panel,grafana-simple-json-datasource
# Set up admin login
ENV GF_SECURITY_ADMIN_PASSWORD x
# Disable anonymous login for now - when we have great auto-generated dashboards
# we can enable it, but as it stands, a bare-bones dashboard you can't edit
# isn't a great place to land by default.
#ENV GF_AUTH_ANONYMOUS_ENABLED true
#ENV GF_AUTH_ANONYMOUS_ORG_NAME Main Org.
ENV GF_USERS_ALLOW_SIGN_UP false
ENV GF_DASHBOARDS_JSON_ENABLED true
ENV GF_DASHBOARDS_JSON_PATH ./docker-compose.d/grafana
ENV GF_DASHBOARDS_DEFAULT_HOME_DASHBOARD_PATH /var/lib/grafana/dashboards/home.json
COPY ./postgres.yml /etc/grafana/provisioning/datasources/postgres.yml
COPY ./dashboards.yml /etc/grafana/provisioning/dashboards/dashboards.yml
# /var/lib/grafana/dashboards/ is mounted in from the outside, to allow editing
# while Grafana is running.
# COPY ./dashboards /var/lib/grafana/dashboards
apiVersion: 1
providers:
- name: 'default'
orgId: 1
folder: ''
type: file
disableDeletion: false
updateIntervalSeconds: 10
options:
path: /var/lib/grafana/dashboards
{
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": "-- Grafana --",
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"gnetId": null,
"graphTooltip": 0,
"links": [],
"panels": [
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": null,
"fieldConfig": {
"defaults": {
"custom": {}
},
"overrides": []
},
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 9,
"w": 12,
"x": 0,
"y": 0
},
"hiddenSeries": false,
"id": 2,
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"nullPointMode": "null",
"options": {
"dataLinks": []
},
"percentage": false,
"pointradius": 2,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"format": "time_series",
"group": [],
"metricColumn": "concat(name,'.s',source)",
"rawQuery": false,
"rawSql": "SELECT\n \"time\" AS \"time\",\n concat(name,'.s',source) AS metric,\n (CASE WHEN value >= lag(value) OVER (PARTITION BY concat(name,'.s',source) ORDER BY \"time\") THEN value - lag(value) OVER (PARTITION BY concat(name,'.s',source) ORDER BY \"time\") WHEN lag(value) OVER (PARTITION BY concat(name,'.s',source) ORDER BY \"time\") IS NULL THEN NULL ELSE value END)/extract(epoch from \"time\" - lag(\"time\") OVER (PARTITION BY concat(name,'.s',source) ORDER BY \"time\")) AS \"value\"\nFROM metrics\nWHERE\n $__timeFilter(\"time\") AND\n name = 'cr.node.distsender.batches'\nORDER BY 1,2",
"refId": "A",
"select": [
[
{
"params": [
"value"
],
"type": "column"
},
{
"params": [
"rate"
],
"type": "window"
},
{
"params": [
"value"
],
"type": "alias"
}
]
],
"table": "metrics",
"timeColumn": "\"time\"",
"timeColumnType": "timestamp",
"where": [
{
"name": "$__timeFilter",
"params": [],
"type": "macro"
},
{
"datatype": "varchar",
"name": "",
"params": [
"name",
"=",
"'cr.node.distsender.batches'"