Merged
41 commits
9f9c439
Adds a New Condition for PVC Resize Errors
andrewlecuyer Feb 7, 2025
a942197
Initial configuration for an OpenTelemetry Collector
cbandy Dec 23, 2024
3ea8f17
Add an OTel Collector with Patroni metrics
dsessler7 Jan 6, 2025
c3a98fb
Add PgBouncer metrics
dsessler7 Jan 14, 2025
9fcef77
Parse Postgres and pgAudit logs using the OTel Collector
benjaminjb Jan 22, 2025
08ab9a4
Parse Patroni logs
dsessler7 Jan 22, 2025
2e59c1b
Parse PgBouncer logs using the OTel Collector
dsessler7 Jan 29, 2025
96e1ffb
Scrape pgAdmin logs using the OTel collector
tony-landreth Jan 29, 2025
ee9bf60
Add pgBackRest repohost log collector
benjaminjb Feb 1, 2025
836572d
Validate and strip/minify Collector SQL files
cbandy Feb 7, 2025
f2a80ac
Change pgbackrest init for running containers
benjaminjb Feb 7, 2025
0dcb1be
Bump controller-gen to v0.17.2
cbandy Feb 10, 2025
fbb4f32
Change PostgresIdentifier to a type alias
cbandy Jan 3, 2025
7089149
Add k8s attributes to patroni logs. Add CompactingProcessor to patron…
dsessler7 Feb 7, 2025
8e37a1f
Create initial API for OTel instrumentation. Allow users to configure…
dsessler7 Feb 9, 2025
38fc33a
Add instrumentation_scope.name and log.record.original attributes to …
dsessler7 Feb 9, 2025
3602c70
Add configurable collector (#4092)
benjaminjb Feb 12, 2025
f7e9625
Add shared functions for quoting shell words
cbandy Nov 4, 2024
d4483cc
Add a function for setting permission on directories
cbandy Feb 10, 2025
e6ea78b
Store pgAdmin log file positions in the logs directory
cbandy Feb 6, 2025
951fa40
Ensure Postgres and Patroni log directories are writable
cbandy Feb 10, 2025
88130ca
Ensure pgBackRest log directories are writable
cbandy Feb 11, 2025
8dbe427
Add a field specifying when to delete log files
cbandy Feb 14, 2025
1797f8f
Rotate PgBouncer logs using specified retention
dsessler7 Feb 11, 2025
8b87822
Document a Kubernetes bug with the duration format
cbandy Feb 18, 2025
85636a8
Add an API struct representing a single Secret value
cbandy Jan 15, 2025
ef1eae0
Allow more control over the arguments to pg_upgrade
cbandy Dec 9, 2024
510ddf4
Validate pg_upgrade versions at the API server
cbandy Feb 19, 2025
e4dfdf2
Add a validated field for Postgres parameters
cbandy Dec 20, 2024
e884806
Otel pgMonitor metrics (#4096)
tony-landreth Feb 21, 2025
00c9068
Add reload logic to collector container start script.
dsessler7 Feb 19, 2025
19a28f7
Add a test helper that unmarshals JSON and YAML
cbandy Feb 26, 2025
9977db2
If the OpenTelemetryLogs feature gate is set, tell patroni to log to …
dsessler7 Feb 26, 2025
bfd4160
Add resources from API to OTEL sidecar (#4104)
benjaminjb Feb 26, 2025
6ba9057
Change PostgresCluster.spec.config to a pointer
cbandy Feb 26, 2025
2a2fe9b
Calculate Postgres parameters in the controller
cbandy Feb 26, 2025
9018342
Rotate postgres logs according to retentionPeriod in spec.
dsessler7 Feb 20, 2025
d04885c
Clone embedded metrics variable to avoid continuous appending.
dsessler7 Feb 28, 2025
00a93f6
Add a script to help with bumping dependencies
cbandy Feb 28, 2025
6dbbf9b
Bump golang.org/x/crypto and golang.org/x/oauth2
cbandy Feb 28, 2025
b50bae9
Rotate pgbackrest (#4108)
benjaminjb Mar 1, 2025
Add a test helper that unmarshals JSON and YAML
The "sigs.k8s.io/yaml" package alone does not produce the same results
as the Kubernetes API. This new function produces the same results and
uses type parameters to accept input as a string or byte slice.
cbandy committed Feb 26, 2025
commit 19a28f79b9334cff347a46bd2182943fc21e10a6
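The helper itself is not part of the hunks shown below, but the commit description and the new direct dependency on sigs.k8s.io/json suggest its shape. What follows is a minimal sketch, not the committed implementation: it assumes the helper lives in the internal/testing/require package, converts YAML to JSON with sigs.k8s.io/yaml, and then decodes with the case-sensitive, integer-preserving unmarshaler from sigs.k8s.io/json, which mirrors how the Kubernetes API server decodes objects.

package require

import (
	"testing"

	"gotest.tools/v3/assert"
	"sigs.k8s.io/json"
	"sigs.k8s.io/yaml"
)

// UnmarshalInto fills output from input, which may be JSON or YAML passed as
// either a string or a byte slice. YAML is converted to JSON first, then
// decoded the way the Kubernetes API server would: case-sensitive field
// matching, with integers preserved rather than widened to float64.
func UnmarshalInto[Data ~string | ~[]byte](t testing.TB, output any, input Data) {
	t.Helper()

	data, err := yaml.YAMLToJSON([]byte(input))
	assert.NilError(t, err)
	assert.NilError(t, json.UnmarshalCaseSensitivePreserveInts(data, output))
}

The call sites in the diff follow that argument order, for example require.UnmarshalInto(t, &decoded, applies[0]) in internal/bridge/installation_test.go.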
2 changes: 1 addition & 1 deletion go.mod
@@ -29,6 +29,7 @@ require (
k8s.io/component-base v0.31.0
k8s.io/kube-openapi v0.0.0-20240521193020-835d969ad83a
sigs.k8s.io/controller-runtime v0.19.3
sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd
sigs.k8s.io/yaml v1.4.0
)

@@ -123,6 +124,5 @@ require (
k8s.io/klog/v2 v2.130.1 // indirect
k8s.io/utils v0.0.0-20240711033017-18e509b52bc8 // indirect
sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.30.3 // indirect
sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd // indirect
sigs.k8s.io/structured-merge-diff/v4 v4.4.1 // indirect
)
12 changes: 6 additions & 6 deletions internal/bridge/installation_test.go
@@ -18,10 +18,10 @@ import (
corev1 "k8s.io/api/core/v1"
corev1apply "k8s.io/client-go/applyconfigurations/core/v1"
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/yaml"

"github.com/crunchydata/postgres-operator/internal/controller/runtime"
"github.com/crunchydata/postgres-operator/internal/testing/cmp"
"github.com/crunchydata/postgres-operator/internal/testing/require"
)

func TestExtractSecretContract(t *testing.T) {
@@ -136,7 +136,7 @@ func TestInstallationReconcile(t *testing.T) {
assert.Assert(t, cmp.Contains(applies[0], `"kind":"Secret"`))

var decoded corev1.Secret
assert.NilError(t, yaml.Unmarshal([]byte(applies[0]), &decoded))
require.UnmarshalInto(t, &decoded, applies[0])
assert.Assert(t, cmp.Contains(string(decoded.Data["bridge-token"]), `"id":"abc"`))
assert.Assert(t, cmp.Contains(string(decoded.Data["bridge-token"]), `"secret":"xyz"`))
})
@@ -230,7 +230,7 @@ func TestInstallationReconcile(t *testing.T) {
assert.Assert(t, cmp.Contains(applies[0], `"kind":"Secret"`))

var decoded corev1.Secret
assert.NilError(t, yaml.Unmarshal([]byte(applies[0]), &decoded))
require.UnmarshalInto(t, &decoded, applies[0])
assert.Assert(t, cmp.Contains(string(decoded.Data["bridge-token"]), `"id":"asdf"`))
})
}
@@ -326,7 +326,7 @@ func TestInstallationReconcile(t *testing.T) {
assert.Assert(t, cmp.Contains(applies[0], `"kind":"Secret"`))

var decoded corev1.Secret
assert.NilError(t, yaml.Unmarshal([]byte(applies[0]), &decoded))
require.UnmarshalInto(t, &decoded, applies[0])
assert.Assert(t, cmp.Contains(string(decoded.Data["bridge-token"]), `"id":"xyz"`))
assert.Assert(t, cmp.Contains(string(decoded.Data["bridge-token"]), `"secret":"def"`))
})
@@ -373,7 +373,7 @@ func TestInstallationReconcile(t *testing.T) {
assert.Assert(t, cmp.Contains(applies[0], `"kind":"Secret"`))

var decoded corev1.Secret
assert.NilError(t, yaml.Unmarshal([]byte(applies[0]), &decoded))
require.UnmarshalInto(t, &decoded, applies[0])
assert.Equal(t, len(decoded.Data["bridge-token"]), 0)

archived := string(decoded.Data["bridge-token--2020-10-28"])
@@ -463,7 +463,7 @@ func TestInstallationReconcile(t *testing.T) {
assert.Assert(t, cmp.Contains(applies[0], `"kind":"Secret"`))

var decoded corev1.Secret
assert.NilError(t, yaml.Unmarshal([]byte(applies[0]), &decoded))
require.UnmarshalInto(t, &decoded, applies[0])
assert.Assert(t, cmp.Contains(string(decoded.Data["bridge-token"]), `"id":"ddd"`))
assert.Assert(t, cmp.Contains(string(decoded.Data["bridge-token"]), `"secret":"fresh"`))
})
6 changes: 3 additions & 3 deletions internal/collector/pgadmin_test.go
@@ -10,13 +10,13 @@ import (

"gotest.tools/v3/assert"
corev1 "k8s.io/api/core/v1"
"sigs.k8s.io/yaml"

"github.com/crunchydata/postgres-operator/internal/collector"
pgadmin "github.com/crunchydata/postgres-operator/internal/controller/standalone_pgadmin"
"github.com/crunchydata/postgres-operator/internal/feature"
"github.com/crunchydata/postgres-operator/internal/initialize"
"github.com/crunchydata/postgres-operator/internal/testing/cmp"
"github.com/crunchydata/postgres-operator/internal/testing/require"
"github.com/crunchydata/postgres-operator/pkg/apis/postgres-operator.crunchydata.com/v1beta1"
)

@@ -125,7 +125,7 @@ collector.yaml: |
ctx := feature.NewContext(context.Background(), gate)

var spec v1beta1.InstrumentationSpec
assert.NilError(t, yaml.Unmarshal([]byte(`{
require.UnmarshalInto(t, &spec, `{
config: {
exporters: {
googlecloud: {
@@ -135,7 +135,7 @@
},
},
logs: { exporters: [googlecloud] },
}`), &spec))
}`)

configmap := new(corev1.ConfigMap)
initialize.Map(&configmap.Data)
46 changes: 23 additions & 23 deletions internal/config/config_test.go
@@ -9,8 +9,8 @@ import (
"testing"

"gotest.tools/v3/assert"
"sigs.k8s.io/yaml"

"github.com/crunchydata/postgres-operator/internal/testing/require"
"github.com/crunchydata/postgres-operator/pkg/apis/postgres-operator.crunchydata.com/v1beta1"
)

@@ -54,7 +54,7 @@ func TestFetchKeyCommand(t *testing.T) {

t.Run("blank", func(t *testing.T) {
var spec1 v1beta1.PostgresClusterSpec
assert.NilError(t, yaml.Unmarshal([]byte(`{
require.UnmarshalInto(t, &spec1, `{
patroni: {
dynamicConfiguration: {
postgresql: {
@@ -64,23 +64,23 @@
},
},
},
}`), &spec1))
}`)
assert.Equal(t, "", FetchKeyCommand(&spec1))

var spec2 v1beta1.PostgresClusterSpec
assert.NilError(t, yaml.Unmarshal([]byte(`{
require.UnmarshalInto(t, &spec2, `{
config: {
parameters: {
encryption_key_command: "",
},
},
}`), &spec2))
}`)
assert.Equal(t, "", FetchKeyCommand(&spec2))
})

t.Run("exists", func(t *testing.T) {
var spec1 v1beta1.PostgresClusterSpec
assert.NilError(t, yaml.Unmarshal([]byte(`{
require.UnmarshalInto(t, &spec1, `{
patroni: {
dynamicConfiguration: {
postgresql: {
Expand All @@ -90,23 +90,23 @@ func TestFetchKeyCommand(t *testing.T) {
},
},
},
}`), &spec1))
}`)
assert.Equal(t, "echo mykey", FetchKeyCommand(&spec1))

var spec2 v1beta1.PostgresClusterSpec
assert.NilError(t, yaml.Unmarshal([]byte(`{
require.UnmarshalInto(t, &spec2, `{
config: {
parameters: {
encryption_key_command: "cat somefile",
},
},
}`), &spec2))
}`)
assert.Equal(t, "cat somefile", FetchKeyCommand(&spec2))
})

t.Run("config.parameters takes precedence", func(t *testing.T) {
var spec v1beta1.PostgresClusterSpec
assert.NilError(t, yaml.Unmarshal([]byte(`{
require.UnmarshalInto(t, &spec, `{
config: {
parameters: {
encryption_key_command: "cat somefile",
@@ -121,7 +121,7 @@
},
},
},
}`), &spec))
}`)
assert.Equal(t, "cat somefile", FetchKeyCommand(&spec))
})
}
@@ -139,9 +139,9 @@ func TestPGAdminContainerImage(t *testing.T) {
t.Setenv("RELATED_IMAGE_PGADMIN", "env-var-pgadmin")
assert.Equal(t, PGAdminContainerImage(cluster), "env-var-pgadmin")

assert.NilError(t, yaml.Unmarshal([]byte(`{
require.UnmarshalInto(t, &cluster.Spec, `{
userInterface: { pgAdmin: { image: spec-image } },
}`), &cluster.Spec))
}`)
assert.Equal(t, PGAdminContainerImage(cluster), "spec-image")
}

@@ -158,9 +158,9 @@ func TestPGBackRestContainerImage(t *testing.T) {
t.Setenv("RELATED_IMAGE_PGBACKREST", "env-var-pgbackrest")
assert.Equal(t, PGBackRestContainerImage(cluster), "env-var-pgbackrest")

assert.NilError(t, yaml.Unmarshal([]byte(`{
backups: { pgBackRest: { image: spec-image } },
}`), &cluster.Spec))
require.UnmarshalInto(t, &cluster.Spec, `{
backups: { pgbackrest: { image: spec-image } },
}`)
assert.Equal(t, PGBackRestContainerImage(cluster), "spec-image")
}

@@ -177,9 +177,9 @@ func TestPGBouncerContainerImage(t *testing.T) {
t.Setenv("RELATED_IMAGE_PGBOUNCER", "env-var-pgbouncer")
assert.Equal(t, PGBouncerContainerImage(cluster), "env-var-pgbouncer")

assert.NilError(t, yaml.Unmarshal([]byte(`{
require.UnmarshalInto(t, &cluster.Spec, `{
proxy: { pgBouncer: { image: spec-image } },
}`), &cluster.Spec))
}`)
assert.Equal(t, PGBouncerContainerImage(cluster), "spec-image")
}

@@ -196,9 +196,9 @@ func TestPGExporterContainerImage(t *testing.T) {
t.Setenv("RELATED_IMAGE_PGEXPORTER", "env-var-pgexporter")
assert.Equal(t, PGExporterContainerImage(cluster), "env-var-pgexporter")

assert.NilError(t, yaml.Unmarshal([]byte(`{
monitoring: { pgMonitor: { exporter: { image: spec-image } } },
}`), &cluster.Spec))
require.UnmarshalInto(t, &cluster.Spec, `{
monitoring: { pgmonitor: { exporter: { image: spec-image } } },
}`)
assert.Equal(t, PGExporterContainerImage(cluster), "spec-image")
}

@@ -215,9 +215,9 @@ func TestStandalonePGAdminContainerImage(t *testing.T) {
t.Setenv("RELATED_IMAGE_STANDALONE_PGADMIN", "env-var-pgadmin")
assert.Equal(t, StandalonePGAdminContainerImage(pgadmin), "env-var-pgadmin")

assert.NilError(t, yaml.Unmarshal([]byte(`{
require.UnmarshalInto(t, &pgadmin.Spec, `{
image: spec-image
}`), &pgadmin.Spec))
}`)
assert.Equal(t, StandalonePGAdminContainerImage(pgadmin), "spec-image")
}

6 changes: 3 additions & 3 deletions internal/controller/pgupgrade/jobs_test.go
@@ -19,6 +19,7 @@ import (
"github.com/crunchydata/postgres-operator/internal/feature"
"github.com/crunchydata/postgres-operator/internal/initialize"
"github.com/crunchydata/postgres-operator/internal/testing/cmp"
"github.com/crunchydata/postgres-operator/internal/testing/require"
"github.com/crunchydata/postgres-operator/pkg/apis/postgres-operator.crunchydata.com/v1beta1"
)

@@ -54,7 +55,7 @@ func TestLargestWholeCPU(t *testing.T) {
} {
t.Run(tt.Name, func(t *testing.T) {
var resources corev1.ResourceRequirements
assert.NilError(t, yaml.Unmarshal([]byte(tt.ResourcesYAML), &resources))
require.UnmarshalInto(t, &resources, tt.ResourcesYAML)
assert.Equal(t, tt.Result, largestWholeCPU(resources))
})
}
@@ -383,8 +384,7 @@ func TestPGUpgradeContainerImage(t *testing.T) {
t.Setenv("RELATED_IMAGE_PGUPGRADE", "env-var-pgbackrest")
assert.Equal(t, pgUpgradeContainerImage(upgrade), "env-var-pgbackrest")

assert.NilError(t, yaml.Unmarshal(
[]byte(`{ image: spec-image }`), &upgrade.Spec))
require.UnmarshalInto(t, &upgrade.Spec, `{ image: spec-image }`)
assert.Equal(t, pgUpgradeContainerImage(upgrade), "spec-image")
}

25 changes: 12 additions & 13 deletions internal/controller/postgrescluster/postgres_test.go
@@ -21,7 +21,6 @@ import (
"k8s.io/apimachinery/pkg/api/resource"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"sigs.k8s.io/controller-runtime/pkg/client"
"sigs.k8s.io/yaml"

"github.com/crunchydata/postgres-operator/internal/controller/runtime"
"github.com/crunchydata/postgres-operator/internal/feature"
@@ -198,9 +197,9 @@ func TestGeneratePostgresUserSecret(t *testing.T) {
})

t.Run("PgBouncer", func(t *testing.T) {
assert.NilError(t, yaml.Unmarshal([]byte(`{
require.UnmarshalInto(t, &cluster.Spec, `{
proxy: { pgBouncer: { port: 10220 } },
}`), &cluster.Spec))
}`)

secret, err := reconciler.generatePostgresUserSecret(cluster, spec, nil)
assert.NilError(t, err)
@@ -250,14 +249,14 @@ func TestReconcilePostgresVolumes(t *testing.T) {
t.Cleanup(func() { assert.Check(t, tClient.Delete(ctx, cluster)) })

spec := &v1beta1.PostgresInstanceSetSpec{}
assert.NilError(t, yaml.Unmarshal([]byte(`{
require.UnmarshalInto(t, spec, `{
name: "some-instance",
dataVolumeClaimSpec: {
accessModes: [ReadWriteOnce],
resources: { requests: { storage: 1Gi } },
storageClassName: "storage-class-for-data",
},
}`), spec))
}`)
instance := &appsv1.StatefulSet{ObjectMeta: naming.GenerateInstance(cluster, spec)}

pvc, err := reconciler.reconcilePostgresDataVolume(ctx, cluster, spec, instance, nil, nil)
@@ -290,14 +289,14 @@ volumeMode: Filesystem
t.Cleanup(func() { assert.Check(t, tClient.Delete(ctx, cluster)) })

spec := &v1beta1.PostgresInstanceSetSpec{}
assert.NilError(t, yaml.Unmarshal([]byte(`{
require.UnmarshalInto(t, spec, `{
name: "some-instance",
dataVolumeClaimSpec: {
accessModes: [ReadWriteOnce],
resources: { requests: { storage: 1Gi } },
storageClassName: "storage-class-for-data",
},
}`), spec))
}`)
instance := &appsv1.StatefulSet{ObjectMeta: naming.GenerateInstance(cluster, spec)}

recorder := events.NewRecorder(t, runtime.Scheme)
@@ -392,14 +391,14 @@ volumeMode: Filesystem
t.Cleanup(func() { assert.Check(t, tClient.Delete(ctx, cluster)) })

spec := &v1beta1.PostgresInstanceSetSpec{}
assert.NilError(t, yaml.Unmarshal([]byte(`{
require.UnmarshalInto(t, spec, `{
name: "some-instance",
dataVolumeClaimSpec: {
accessModes: [ReadWriteOnce],
resources: { requests: { storage: 1Gi } },
storageClassName: "storage-class-for-data",
},
}`), spec))
}`)
instance := &appsv1.StatefulSet{ObjectMeta: naming.GenerateInstance(cluster, spec)}

recorder := events.NewRecorder(t, runtime.Scheme)
@@ -455,14 +454,14 @@ volumeMode: Filesystem
t.Cleanup(func() { assert.Check(t, tClient.Delete(ctx, cluster)) })

spec := &v1beta1.PostgresInstanceSetSpec{}
assert.NilError(t, yaml.Unmarshal([]byte(`{
require.UnmarshalInto(t, spec, `{
name: "some-instance",
dataVolumeClaimSpec: {
accessModes: [ReadWriteOnce],
resources: { requests: { storage: 1Gi } },
storageClassName: "storage-class-for-data",
},
}`), spec))
}`)
instance := &appsv1.StatefulSet{ObjectMeta: naming.GenerateInstance(cluster, spec)}

observed := &Instance{}
@@ -475,13 +474,13 @@

t.Run("Specified", func(t *testing.T) {
spec := spec.DeepCopy()
assert.NilError(t, yaml.Unmarshal([]byte(`{
require.UnmarshalInto(t, spec, `{
walVolumeClaimSpec: {
accessModes: [ReadWriteMany],
resources: { requests: { storage: 2Gi } },
storageClassName: "storage-class-for-wal",
},
}`), spec))
}`)

pvc, err := reconciler.reconcilePostgresWALVolume(ctx, cluster, spec, instance, observed, nil)
assert.NilError(t, err)