Skip to content

Commit dffbb6a

Browse files
authored
Always write run_results.json (#7539)
1 parent 272beb2 commit dffbb6a

File tree

6 files changed

+77
-19
lines changed

6 files changed

+77
-19
lines changed
Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
kind: Fixes
2+
body: '`run_results.json` is now written after every node completes.'
3+
time: 2023-05-08T09:37:32.809356-05:00
4+
custom:
5+
Author: iknox-fa
6+
Issue: "7302"

core/dbt/contracts/results.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -393,6 +393,9 @@ def from_node_results(
393393
meta = FreshnessMetadata(generated_at=generated_at)
394394
return cls(metadata=meta, results=results, elapsed_time=elapsed_time)
395395

396+
def write(self, path):
397+
FreshnessExecutionResultArtifact.from_result(self).write(path)
398+
396399

397400
@dataclass
398401
@schema_version("sources", 3)

core/dbt/task/freshness.py

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,6 @@
99
from .runnable import GraphRunnableTask
1010

1111
from dbt.contracts.results import (
12-
FreshnessExecutionResultArtifact,
1312
FreshnessResult,
1413
PartialSourceFreshnessResult,
1514
SourceFreshnessResult,
@@ -178,10 +177,6 @@ def get_node_selector(self):
178177
def get_runner_type(self, _):
179178
return FreshnessRunner
180179

181-
def write_result(self, result):
182-
artifact = FreshnessExecutionResultArtifact.from_result(result)
183-
artifact.write(self.result_path())
184-
185180
def get_result(self, results, elapsed_time, generated_at):
186181
return FreshnessResult.from_node_results(
187182
elapsed_time=elapsed_time, generated_at=generated_at, results=results

core/dbt/task/runnable.py

Lines changed: 21 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -65,15 +65,16 @@ class GraphRunnableTask(ConfiguredTask):
6565

6666
def __init__(self, args, config, manifest):
6767
super().__init__(args, config, manifest)
68-
self.job_queue: Optional[GraphQueue] = None
6968
self._flattened_nodes: Optional[List[ResultNode]] = None
70-
71-
self.run_count: int = 0
72-
self.num_nodes: int = 0
73-
self.node_results = []
74-
self._skipped_children = {}
7569
self._raise_next_tick = None
70+
self._skipped_children = {}
71+
self.job_queue: Optional[GraphQueue] = None
72+
self.node_results = []
73+
self.num_nodes: int = 0
7674
self.previous_state: Optional[PreviousState] = None
75+
self.run_count: int = 0
76+
self.started_at: float = 0
77+
7778
self.set_previous_state()
7879

7980
def set_previous_state(self):
@@ -302,6 +303,15 @@ def _handle_result(self, result):
302303
cause = None
303304
self._mark_dependent_errors(node.unique_id, result, cause)
304305

306+
interim_run_result = self.get_result(
307+
results=self.node_results,
308+
elapsed_time=time.time() - self.started_at,
309+
generated_at=datetime.utcnow(),
310+
)
311+
312+
if self.args.write_json and hasattr(interim_run_result, "write"):
313+
interim_run_result.write(self.result_path())
314+
305315
def _cancel_connections(self, pool):
306316
"""Given a pool, cancel all adapter connections and wait until all
307317
runners gentle terminates.
@@ -393,24 +403,21 @@ def print_results_line(self, node_results, elapsed):
393403

394404
def execute_with_hooks(self, selected_uids: AbstractSet[str]):
395405
adapter = get_adapter(self.config)
396-
started = time.time()
406+
self.started_at = time.time()
397407
try:
398408
self.before_run(adapter, selected_uids)
399409
res = self.execute_nodes()
400410
self.after_run(adapter, res)
401411
finally:
402412
adapter.cleanup_connections()
403-
elapsed = time.time() - started
413+
elapsed = time.time() - self.started_at
404414
self.print_results_line(self.node_results, elapsed)
405415
result = self.get_result(
406416
results=self.node_results, elapsed_time=elapsed, generated_at=datetime.utcnow()
407417
)
408418

409419
return result
410420

411-
def write_result(self, result):
412-
result.write(self.result_path())
413-
414421
def run(self):
415422
"""
416423
Run dbt for the query, based on the graph.
@@ -447,9 +454,10 @@ def run(self):
447454
)
448455
)
449456

450-
if get_flags().WRITE_JSON:
457+
if self.args.write_json:
451458
write_manifest(self.manifest, self.config.target_path)
452-
self.write_result(result)
459+
if hasattr(result, "write"):
460+
result.write(self.result_path())
453461

454462
self.task_end_messages(result.results)
455463
return result

tests/functional/artifacts/test_run_results.py

Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,8 @@
1+
from multiprocessing import Process
2+
from pathlib import Path
3+
import json
14
import pytest
5+
import platform
26
from dbt.tests.util import run_dbt
37

48
good_model_sql = """
@@ -9,6 +13,11 @@
913
something bad
1014
"""
1115

16+
slow_model_sql = """
17+
{{ config(materialized='table') }}
18+
select id from {{ ref('good_model') }}, pg_sleep(5)
19+
"""
20+
1221

1322
class TestRunResultsTimingSuccess:
1423
@pytest.fixture(scope="class")
@@ -30,3 +39,31 @@ def test_timing_exists(self, project):
3039
results = run_dbt(["run"], expect_pass=False)
3140
assert len(results.results) == 1
3241
assert len(results.results[0].timing) > 0
42+
43+
44+
@pytest.mark.skipif(platform.system() != "Darwin", reason="Fails on linux in github actions")
45+
class TestRunResultsWritesFileOnSignal:
46+
@pytest.fixture(scope="class")
47+
def models(self):
48+
return {"good_model.sql": good_model_sql, "slow_model.sql": slow_model_sql}
49+
50+
def test_run_results_are_written_on_signal(self, project):
51+
# Start the runner in a separate process.
52+
external_process_dbt = Process(
53+
target=run_dbt, args=([["run"]]), kwargs={"expect_pass": False}
54+
)
55+
external_process_dbt.start()
56+
assert external_process_dbt.is_alive()
57+
58+
# Wait until the first file write, then kill the process.
59+
run_results_file = Path(project.project_root) / "target/run_results.json"
60+
while run_results_file.is_file() is False:
61+
pass
62+
external_process_dbt.terminate()
63+
64+
# Wait until the process is dead, then check that the file contains only one result.
65+
while external_process_dbt.is_alive() is True:
66+
pass
67+
with run_results_file.open() as run_results_str:
68+
run_results = json.loads(run_results_str.read())
69+
assert len(run_results["results"]) == 1

tests/functional/fail_fast/test_fail_fast_run.py

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,14 @@
11
import pytest
2+
import json
3+
from pathlib import Path
4+
25

36
from dbt.contracts.results import RunResult
47
from dbt.tests.util import run_dbt
58

69

710
models__one_sql = """
8-
select 1 /failed
11+
select 1
912
"""
1013

1114
models__two_sql = """
@@ -28,6 +31,12 @@ def test_fail_fast_run(
2831
res = run_dbt(["run", "--fail-fast", "--threads", "1"], expect_pass=False)
2932
# a RunResult contains only one node so we can be sure only one model was run
3033
assert type(res) == RunResult
34+
run_results_file = Path(project.project_root) / "target/run_results.json"
35+
assert run_results_file.is_file()
36+
with run_results_file.open() as run_results_str:
37+
run_results = json.loads(run_results_str.read())
38+
assert run_results["results"][0]["status"] == "success"
39+
assert run_results["results"][1]["status"] == "error"
3140

3241

3342
class TestFailFastFromConfig(FailFastBase):

0 commit comments

Comments
 (0)