Remove ingestion_idle_time setting
mrodm committed Feb 26, 2025
commit b37f7d240801dcc55b3e25c8071e4b4431ecfea6
3 changes: 0 additions & 3 deletions docs/howto/system_testing.md
@@ -489,9 +489,6 @@ There are other 4 options available:
   - This could be used to ensure that a wide range of different documents have been ingested into Elasticsearch.
 - Collect data into the data stream until all the fields defined in the list `assert.fields_present` are present in any of the documents.
   - Each field in that list could be present in different documents.
-- Wait for a period of time (`assert.ingestion_idle_time`) without ingesting new documents into given the data stream.
-  - That period of time is just taken into account if at least there is one document in the data stream.
-  - It could be used when it is not known the exact number of documents that tests are going to be sending to Elasticsearch.
 
 The following example shows how to add an assertion on the number of hits in a given system test using `assert.hit_count`.
 
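The example referenced in that doc line is collapsed in this diff view. A minimal sketch of what an `assert` block can look like in a system test config, using the option names from the docs above and from `test_config.go` below (the concrete values and the field names under `fields_present` are illustrative assumptions, not taken from this PR):

```yaml
# system test config (hypothetical values)
data_stream:
  paths:
    - "/custom/paths/logs.json"
assert:
  # Exact number of documents expected in the data stream.
  hit_count: 50
  # Alternatively, a lower bound on the number of documents:
  # min_count: 10
  # Or fields that must each appear in at least one ingested document:
  # fields_present:
  #   - host.name
  #   - event.original
```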
3 changes: 0 additions & 3 deletions internal/testrunner/runners/system/test_config.go
@@ -53,9 +53,6 @@ type testConfig struct {
 
 		// FieldsPresent list of fields that must be present in any of documents ingested
 		FieldsPresent []string `config:"fields_present"`
-
-		// IngestionIdleTime time elapsed since the last document was ingested
-		IngestionIdleTime time.Duration `config:"ingestion_idle_time"`
 	} `config:"assert"`
 
 	// NumericKeywordFields holds a list of fields that have keyword
22 changes: 1 addition & 21 deletions internal/testrunner/runners/system/tester.go
@@ -1554,7 +1554,6 @@ func (r *tester) waitForDocs(ctx context.Context, config *testConfig, dataStream
 	logger.Debugf("checking for expected data in data stream (%s)...", waitForDataTimeout)
 	var hits *hits
 	oldHits := 0
-	prevTime := time.Now()
 	foundFields := map[string]any{}
 	passed, waitErr := wait.UntilTrue(ctx, func(ctx context.Context) (bool, error) {
 		var err error
@@ -1596,25 +1595,6 @@
 			return ret
 		}()
 
-		assertIngestionIdleTime := func() bool {
-			if config.Assert.IngestionIdleTime.Seconds() == 0 {
-				// not enabled
-				return true
-			}
-			if hits.size() == 0 {
-				return false
-			}
-			if oldHits != hits.size() {
-				prevTime = time.Now()
-				return false
-			}
-			if time.Since(prevTime) > config.Assert.IngestionIdleTime {
-				logger.Debugf("No new documents ingested in %s", config.Assert.IngestionIdleTime)
-				return true
-			}
-			return false
-		}()
-
 		assertFieldsPresent := func() bool {
 			if len(config.Assert.FieldsPresent) == 0 {
 				// not enabled
@@ -1648,7 +1628,7 @@
 		// By default, config.Assert.MinCount is zero
 		assertMinCount := hits.size() > config.Assert.MinCount
 
-		return assertIngestionIdleTime && assertFieldsPresent && assertMinCount && assertHitCount, nil
+		return assertFieldsPresent && assertMinCount && assertHitCount, nil
 	}, 1*time.Second, waitForDataTimeout)
 
 	if waitErr != nil {
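As context for the loop above (a sketch of the general pattern only, not elastic-package's actual `wait` package): `wait.UntilTrue` re-evaluates a condition function on a fixed interval until it returns true, returns an error, or the overall timeout elapses. A self-contained version of that polling pattern in plain Go:

```go
package main

import (
	"context"
	"errors"
	"fmt"
	"time"
)

// untilTrue polls cond every interval until it returns true, returns an
// error, or timeout elapses. It reports false (without error) on timeout.
func untilTrue(ctx context.Context, cond func(context.Context) (bool, error), interval, timeout time.Duration) (bool, error) {
	ctx, cancel := context.WithTimeout(ctx, timeout)
	defer cancel()

	ticker := time.NewTicker(interval)
	defer ticker.Stop()

	for {
		ok, err := cond(ctx)
		if err != nil {
			return false, err
		}
		if ok {
			return true, nil
		}
		select {
		case <-ctx.Done():
			if errors.Is(ctx.Err(), context.DeadlineExceeded) {
				return false, nil // timed out without the condition passing
			}
			return false, ctx.Err()
		case <-ticker.C:
			// interval elapsed; evaluate the condition again
		}
	}
}

func main() {
	docs := 0
	passed, err := untilTrue(context.Background(), func(ctx context.Context) (bool, error) {
		docs += 100 // stand-in for querying the data stream for new hits
		return docs > 500, nil
	}, 10*time.Millisecond, time.Second)
	fmt.Println(passed, err) // true <nil>
}
```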

This file was deleted.

@@ -4,7 +4,7 @@ data_stream:
   paths:
     - "/custom/paths/logs.json"
 assert:
-  min_count: 50
+  min_count: 500
 agent:
   provisioning_script:
     language: bash
@@ -13,7 +13,7 @@
       cd /custom/paths
       touch logs.json
       # elastic-package just retrieves the 500 first documents in the search query
-      for i in $(seq 1 100) ; do
+      for i in $(seq 1 501) ; do
        echo '{ "contents": "Message from file", "file": "logs.json"}'
       done >> logs.json
   pre_start_script: