# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import subprocess
import sys
from contextlib import ExitStack
from datetime import datetime, timedelta
import freezegun
import pytest
# We should set these before loading _any_ of the rest of airflow so that the
# unit test mode config is set as early as possible.
assert "airflow" not in sys.modules, "No airflow module can be imported before these lines"
tests_directory = os.path.dirname(os.path.realpath(__file__))
os.environ["AIRFLOW__CORE__DAGS_FOLDER"] = os.path.join(tests_directory, "dags")
os.environ["AIRFLOW__CORE__UNIT_TEST_MODE"] = "True"
os.environ["AWS_DEFAULT_REGION"] = os.environ.get("AWS_DEFAULT_REGION") or "us-east-1"
os.environ["CREDENTIALS_DIR"] = os.environ.get('CREDENTIALS_DIR') or "/files/airflow-breeze-config/keys"
from tests.test_utils.perf.perf_kit.sqlalchemy import ( # noqa isort:skip
count_queries,
trace_queries,
)
@pytest.fixture()
def reset_environment():
"""
    Resets environment variables to their pre-test values after the test.
    """
    init_env = os.environ.copy()
    yield
    # Iterate over a snapshot: deleting keys from os.environ while iterating
    # it directly would raise a RuntimeError.
    changed_env = dict(os.environ)
    for key in changed_env:
        if key not in init_env:
            del os.environ[key]
        else:
            os.environ[key] = init_env[key]
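
# Usage sketch for the fixture above (hypothetical test name, illustrative only):
#
#     def test_modifies_environment(reset_environment):
#         os.environ["MY_TEMP_VAR"] = "1"
#         # the variable is removed again once the test finishes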
@pytest.fixture()
def reset_db():
"""
Resets Airflow db.
"""
from airflow.utils import db
db.resetdb()
yield
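
# Usage sketch (hypothetical test name): requesting the fixture is enough to
# run the test against a freshly reset Airflow database.
#
#     def test_needs_clean_db(reset_db):
#         ...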
ALLOWED_TRACE_SQL_COLUMNS = ['num', 'time', 'trace', 'sql', 'parameters', 'count']
@pytest.fixture(autouse=True)
def trace_sql(request):
"""
    Displays queries from the tests on the console.
"""
trace_sql_option = request.config.getoption("trace_sql")
if not trace_sql_option:
yield
return
terminal_reporter = request.config.pluginmanager.getplugin("terminalreporter")
# if no terminal reporter plugin is present, nothing we can do here;
    # this can happen when this function executes in a worker node
    # when using pytest-xdist, for example
if terminal_reporter is None:
yield
return
columns = [col.strip() for col in trace_sql_option.split(",")]
def pytest_print(text):
return terminal_reporter.write_line(text)
with ExitStack() as exit_stack:
if columns == ['num']:
# It is very unlikely that the user wants to display only numbers, but probably
# the user just wants to count the queries.
exit_stack.enter_context(count_queries(print_fn=pytest_print))
        elif any(c in columns for c in ['time', 'trace', 'sql', 'parameters']):
exit_stack.enter_context(
trace_queries(
display_num='num' in columns,
display_time='time' in columns,
display_trace='trace' in columns,
display_sql='sql' in columns,
display_parameters='parameters' in columns,
print_fn=pytest_print,
)
)
yield
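
# The fixture above is driven entirely by the --trace-sql option registered in
# pytest_addoption below; illustrative invocations:
#
#     pytest --trace-sql=num                # only count queries per test
#     pytest --trace-sql=num,time,sql       # count, timing, and statements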
def pytest_addoption(parser):
"""
    Add options to the parser for custom plugins.
"""
group = parser.getgroup("airflow")
group.addoption(
"--with-db-init",
action="store_true",
dest="db_init",
help="Forces database initialization before tests",
)
group.addoption(
"--integration",
action="append",
metavar="INTEGRATIONS",
help="only run tests matching integration specified: "
"[cassandra,kerberos,mongo,openldap,rabbitmq,redis,statsd,trino]. ",
)
group.addoption(
"--backend",
action="store",
metavar="BACKEND",
help="only run tests matching the backend: [sqlite,postgres,mysql].",
)
group.addoption(
"--system",
action="append",
metavar="SYSTEMS",
help="only run tests matching the system specified [google.cloud, google.marketing_platform]",
)
group.addoption(
"--include-long-running",
action="store_true",
help="Includes long running tests (marked with long_running marker). They are skipped by default.",
)
group.addoption(
"--include-quarantined",
action="store_true",
help="Includes quarantined tests (marked with quarantined marker). They are skipped by default.",
)
allowed_trace_sql_columns_list = ",".join(ALLOWED_TRACE_SQL_COLUMNS)
group.addoption(
"--trace-sql",
action="store",
help=(
"Trace SQL statements. As an argument, you must specify the columns to be "
f"displayed as a comma-separated list. Supported values: [f{allowed_trace_sql_columns_list}]"
),
metavar="COLUMNS",
)
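
# Illustrative invocations combining the options registered above:
#
#     pytest --with-db-init --backend postgres
#     pytest --integration redis --integration rabbitmq
#     pytest --system google.cloud --include-long-running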
def initial_db_init():
if os.environ.get("RUN_AIRFLOW_1_10") == "true":
print("Attempting to reset the db using airflow command")
os.system("airflow resetdb -y")
else:
from airflow.utils import db
db.resetdb()
@pytest.fixture(autouse=True, scope="session")
def breeze_test_helper(request):
"""
    Helper that sets up the Airflow testing environment. It does the same thing
    as the old 'run-tests' script.
"""
    # FIXME: this should use some other env variable, e.g. RUNNING_ON_K8S
if os.environ.get("SKIP_INIT_DB"):
print("Skipping db initialization. Tests do not require database")
return
from airflow import __version__
if __version__.startswith("1.10"):
os.environ['RUN_AIRFLOW_1_10'] = "true"
print(" AIRFLOW ".center(60, "="))
# Setup test environment for breeze
home = os.path.expanduser("~")
airflow_home = os.environ.get("AIRFLOW_HOME") or os.path.join(home, "airflow")
print(f"Home of the user: {home}\nAirflow home {airflow_home}")
# Initialize Airflow db if required
lock_file = os.path.join(airflow_home, ".airflow_db_initialised")
if request.config.option.db_init:
print("Initializing the DB - forced with --with-db-init switch.")
initial_db_init()
elif not os.path.exists(lock_file):
print(
"Initializing the DB - first time after entering the container.\n"
"You can force re-initialization the database by adding --with-db-init switch to run-tests."
)
initial_db_init()
        # Create the lock file
with open(lock_file, "w+"):
pass
else:
print(
"Skipping initializing of the DB as it was initialized already.\n"
"You can re-initialize the database by adding --with-db-init flag when running tests."
)
integration_kerberos = os.environ.get("INTEGRATION_KERBEROS")
if integration_kerberos == "true":
# Initialize kerberos
kerberos = os.environ.get("KRB5_KTNAME")
if kerberos:
subprocess.check_call(["kinit", "-kt", kerberos, '[email protected]'])
else:
print("Kerberos enabled! Please setup KRB5_KTNAME environment variable")
sys.exit(1)
def pytest_configure(config):
config.addinivalue_line("markers", "integration(name): mark test to run with named integration")
config.addinivalue_line("markers", "backend(name): mark test to run with named backend")
config.addinivalue_line("markers", "system(name): mark test to run with named system")
config.addinivalue_line("markers", "long_running: mark test that run for a long time (many minutes)")
config.addinivalue_line(
"markers", "quarantined: mark test that are in quarantine (i.e. flaky, need to be isolated and fixed)"
)
config.addinivalue_line(
"markers", "credential_file(name): mark tests that require credential file in CREDENTIALS_DIR"
)
config.addinivalue_line("markers", "airflow_2: mark tests that works only on Airflow 2.0 / master")
def skip_if_not_marked_with_integration(selected_integrations, item):
for marker in item.iter_markers(name="integration"):
integration_name = marker.args[0]
if integration_name in selected_integrations or "all" in selected_integrations:
return
pytest.skip(
"The test is skipped because it does not have the right integration marker. "
"Only tests marked with pytest.mark.integration(INTEGRATION) are run with INTEGRATION"
" being one of {integration}. {item}".format(integration=selected_integrations, item=item)
)
def skip_if_not_marked_with_backend(selected_backend, item):
for marker in item.iter_markers(name="backend"):
backend_names = marker.args
if selected_backend in backend_names:
return
pytest.skip(
"The test is skipped because it does not have the right backend marker "
"Only tests marked with pytest.mark.backend('{backend}') are run"
": {item}".format(backend=selected_backend, item=item)
)
def skip_if_not_marked_with_system(selected_systems, item):
for marker in item.iter_markers(name="system"):
        system_name = marker.args[0]
        if system_name in selected_systems or "all" in selected_systems:
return
pytest.skip(
"The test is skipped because it does not have the right system marker. "
"Only tests marked with pytest.mark.system(SYSTEM) are run with SYSTEM"
" being one of {systems}. {item}".format(systems=selected_systems, item=item)
)
def skip_system_test(item):
for marker in item.iter_markers(name="system"):
pytest.skip(
"The test is skipped because it has system marker. "
"System tests are only run when --system flag "
"with the right system ({system}) is passed to pytest. {item}".format(
system=marker.args[0], item=item
)
)
def skip_long_running_test(item):
for _ in item.iter_markers(name="long_running"):
pytest.skip(
"The test is skipped because it has long_running marker. "
"And --include-long-running flag is not passed to pytest. {item}".format(item=item)
)
def skip_quarantined_test(item):
for _ in item.iter_markers(name="quarantined"):
pytest.skip(
"The test is skipped because it has quarantined marker. "
"And --include-quarantined flag is passed to pytest. {item}".format(item=item)
)
def skip_if_integration_disabled(marker, item):
integration_name = marker.args[0]
environment_variable_name = "INTEGRATION_" + integration_name.upper()
environment_variable_value = os.environ.get(environment_variable_name)
if not environment_variable_value or environment_variable_value != "true":
pytest.skip(
"The test requires {integration_name} integration started and "
"{name} environment variable to be set to true (it is '{value}')."
" It can be set by specifying '--integration {integration_name}' at breeze startup"
": {item}".format(
name=environment_variable_name,
value=environment_variable_value,
integration_name=integration_name,
item=item,
)
)
def skip_if_wrong_backend(marker, item):
valid_backend_names = marker.args
environment_variable_name = "BACKEND"
environment_variable_value = os.environ.get(environment_variable_name)
if not environment_variable_value or environment_variable_value not in valid_backend_names:
pytest.skip(
"The test requires one of {valid_backend_names} backend started and "
"{name} environment variable to be set to 'true' (it is '{value}')."
" It can be set by specifying backend at breeze startup"
": {item}".format(
name=environment_variable_name,
value=environment_variable_value,
valid_backend_names=valid_backend_names,
item=item,
)
)
def skip_if_credential_file_missing(item):
for marker in item.iter_markers(name="credential_file"):
credential_file = marker.args[0]
credential_path = os.path.join(os.environ.get('CREDENTIALS_DIR'), credential_file)
if not os.path.exists(credential_path):
pytest.skip(f"The test requires credential file {credential_path}: {item}")
def skip_if_airflow_2_test(item):
for _ in item.iter_markers(name="airflow_2"):
if os.environ.get("RUN_AIRFLOW_1_10") == "true":
pytest.skip("The test works only with Airflow 2.0 / main branch")
def pytest_runtest_setup(item):
selected_integrations_list = item.config.getoption("--integration")
selected_systems_list = item.config.getoption("--system")
include_long_running = item.config.getoption("--include-long-running")
include_quarantined = item.config.getoption("--include-quarantined")
for marker in item.iter_markers(name="integration"):
skip_if_integration_disabled(marker, item)
if selected_integrations_list:
skip_if_not_marked_with_integration(selected_integrations_list, item)
if selected_systems_list:
skip_if_not_marked_with_system(selected_systems_list, item)
else:
skip_system_test(item)
for marker in item.iter_markers(name="backend"):
skip_if_wrong_backend(marker, item)
selected_backend = item.config.getoption("--backend")
if selected_backend:
skip_if_not_marked_with_backend(selected_backend, item)
if not include_long_running:
skip_long_running_test(item)
if not include_quarantined:
skip_quarantined_test(item)
skip_if_credential_file_missing(item)
skip_if_airflow_2_test(item)
@pytest.fixture
def frozen_sleep(monkeypatch):
"""
Use freezegun to "stub" sleep, so that it takes no time, but that
``datetime.now()`` appears to move forwards
If your module under test does ``import time`` and then ``time.sleep``::
def test_something(frozen_sleep):
my_mod.fn_under_test()
If your module under test does ``from time import sleep`` then you will
have to mock that sleep function directly::
def test_something(frozen_sleep, monkeypatch):
monkeypatch.setattr('my_mod.sleep', frozen_sleep)
my_mod.fn_under_test()
"""
freezegun_control = None
def fake_sleep(seconds):
nonlocal freezegun_control
utcnow = datetime.utcnow()
if freezegun_control is not None:
freezegun_control.stop()
freezegun_control = freezegun.freeze_time(utcnow + timedelta(seconds=seconds))
freezegun_control.start()
monkeypatch.setattr("time.sleep", fake_sleep)
yield fake_sleep
if freezegun_control is not None:
freezegun_control.stop()
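
# A minimal sketch of what frozen_sleep guarantees (hypothetical test; assumes
# ``import time`` plus the datetime imports used in this file):
#
#     def test_sleep_is_instant(frozen_sleep):
#         start = datetime.utcnow()
#         time.sleep(3600)  # returns immediately ...
#         # ... but the frozen clock has advanced by an hour
#         assert datetime.utcnow() - start >= timedelta(seconds=3600)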
@pytest.fixture(scope="session")
def app():
from airflow.www import app
return app.create_app(testing=True)
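
# Usage sketch (hypothetical test): the returned Flask app can serve requests
# through a standard test client.
#
#     def test_index_page(app):
#         with app.test_client() as client:
#             response = client.get("/")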
@pytest.fixture
def dag_maker(request):
"""
    The dag_maker helps us to create DAG, DagModel, and SerializedDAG automatically.

    You have to use the dag_maker as a context manager, and it takes
    the same arguments as DAG::

        with dag_maker(dag_id="mydag") as dag:
            task1 = DummyOperator(task_id='mytask')
            task2 = DummyOperator(task_id='mytask2')

    If the DagModel you want to use needs different parameters than the one
    automatically created by the dag_maker, you have to update the DagModel as below::

        dag_maker.dag_model.is_active = False
        session.merge(dag_maker.dag_model)
        session.commit()

    For any test that uses the dag_maker, make sure to create a DagRun::

        dag_maker.create_dagrun()

    The dag_maker.create_dagrun takes the same arguments as dag.create_dagrun.

    If you want to operate on serialized DAGs, then either pass ``serialized=True`` to the ``dag_maker()``
    call, or mark your test/class/file with ``@pytest.mark.need_serialized_dag(True)``. In both of
    these cases the ``dag`` returned by the context manager will be a lazily-evaluated proxy object to the
    SerializedDAG.
"""
import lazy_object_proxy
# IMPORTANT: Delay _all_ imports from `airflow.*` to _inside a method_.
# This fixture is "called" early on in the pytest collection process, and
    # if we import airflow.* here, the wrong (non-test) config will be loaded
    # and "baked" into various constants.
want_serialized = False
    # Allow changing default serialized behaviour with `@pytest.mark.need_serialized_dag` or
    # `@pytest.mark.need_serialized_dag(False)`
serialized_marker = request.node.get_closest_marker("need_serialized_dag")
if serialized_marker:
(want_serialized,) = serialized_marker.args or (True,)
class DagFactory:
def __init__(self):
from airflow.models import DagBag
# Keep all the serialized dags we've created in this test
self.dagbag = DagBag(os.devnull, include_examples=False, read_dags_from_db=False)
def __enter__(self):
self.dag.__enter__()
if self.want_serialized:
return lazy_object_proxy.Proxy(self._serialized_dag)
return self.dag
def _serialized_dag(self):
return self.serialized_model.dag
def __exit__(self, type, value, traceback):
from airflow.models import DagModel
from airflow.models.serialized_dag import SerializedDagModel
dag = self.dag
dag.__exit__(type, value, traceback)
if type is not None:
return
dag.clear()
dag.sync_to_db(self.session)
self.dag_model = self.session.query(DagModel).get(dag.dag_id)
if self.want_serialized:
self.serialized_model = SerializedDagModel(dag)
self.session.merge(self.serialized_model)
serialized_dag = self._serialized_dag()
self.dagbag.bag_dag(serialized_dag, root_dag=serialized_dag)
self.session.flush()
else:
                self.dagbag.bag_dag(self.dag, root_dag=self.dag)
def create_dagrun(self, **kwargs):
from airflow.utils.state import State
dag = self.dag
kwargs = {
"state": State.RUNNING,
"execution_date": self.start_date,
"start_date": self.start_date,
"session": self.session,
**kwargs,
}
            # Need to provide run_id if the user neither provides one explicitly
            # nor passes run_type for inference in dag.create_dagrun().
if "run_id" not in kwargs and "run_type" not in kwargs:
kwargs["run_id"] = "test"
self.dag_run = dag.create_dagrun(**kwargs)
return self.dag_run
def __call__(
self, dag_id='test_dag', serialized=want_serialized, fileloc=None, session=None, **kwargs
):
from airflow import settings
from airflow.models import DAG
from airflow.utils import timezone
if session is None:
session = settings.Session()
self.kwargs = kwargs
self.session = session
self.start_date = self.kwargs.get('start_date', None)
default_args = kwargs.get('default_args', None)
if default_args and not self.start_date:
if 'start_date' in default_args:
self.start_date = default_args.get('start_date')
if not self.start_date:
if hasattr(request.module, 'DEFAULT_DATE'):
self.start_date = getattr(request.module, 'DEFAULT_DATE')
else:
DEFAULT_DATE = timezone.datetime(2016, 1, 1)
self.start_date = DEFAULT_DATE
self.kwargs['start_date'] = self.start_date
self.dag = DAG(dag_id, **self.kwargs)
self.dag.fileloc = fileloc or request.module.__file__
self.want_serialized = serialized
return self
def cleanup(self):
from airflow.models import DagModel, DagRun, TaskInstance
from airflow.models.serialized_dag import SerializedDagModel
dag_ids = list(self.dagbag.dag_ids)
if not dag_ids:
return
            # Flush first, to isolate problems here from problems elsewhere on the session object
self.session.flush()
self.session.query(SerializedDagModel).filter(SerializedDagModel.dag_id.in_(dag_ids)).delete(
synchronize_session=False
)
self.session.query(DagRun).filter(DagRun.dag_id.in_(dag_ids)).delete(synchronize_session=False)
self.session.query(TaskInstance).filter(TaskInstance.dag_id.in_(dag_ids)).delete(
synchronize_session=False
)
self.session.query(DagModel).filter(DagModel.dag_id.in_(dag_ids)).delete(
synchronize_session=False
)
self.session.commit()
factory = DagFactory()
try:
yield factory
finally:
factory.cleanup()
del factory.session
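
# End-to-end sketch of the dag_maker fixture (hypothetical test; assumes
# DummyOperator is imported in the test module):
#
#     def test_with_dag_maker(dag_maker):
#         with dag_maker(dag_id="example", serialized=True) as dag:
#             DummyOperator(task_id="noop")
#         dag_run = dag_maker.create_dagrun()
#         assert dag_run.dag_id == dag.dag_id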
@pytest.fixture
def create_dummy_dag(dag_maker):
"""
    This fixture creates a `DAG` with a single `DummyOperator` task.
    A DagRun and a DagModel are also created.

    Apart from the already existing arguments, any other argument in kwargs
    is passed to the DAG and not to the DummyOperator task.

    If there is an argument that you want to pass to the DummyOperator that
    is not listed here, please use `default_args` so that the DAG will pass it to the
    Task::

        dag, task = create_dummy_dag(default_args={'start_date': timezone.datetime(2016, 1, 1)})

    You cannot alter the created DagRun or DagModel; use the `dag_maker` fixture instead.
    """
from airflow.operators.dummy import DummyOperator
from airflow.utils.types import DagRunType
def create_dag(
dag_id='dag',
task_id='op1',
max_active_tis_per_dag=16,
pool='default_pool',
        executor_config=None,
trigger_rule='all_done',
on_success_callback=None,
on_execute_callback=None,
on_failure_callback=None,
on_retry_callback=None,
email=None,
**kwargs,
):
with dag_maker(dag_id, **kwargs) as dag:
op = DummyOperator(
task_id=task_id,
max_active_tis_per_dag=max_active_tis_per_dag,
                executor_config=executor_config or {},
on_success_callback=on_success_callback,
on_execute_callback=on_execute_callback,
on_failure_callback=on_failure_callback,
on_retry_callback=on_retry_callback,
email=email,
pool=pool,
trigger_rule=trigger_rule,
)
dag_maker.create_dagrun(run_type=DagRunType.SCHEDULED)
return dag, op
return create_dag
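
# Usage sketch for create_dummy_dag (hypothetical test; the callback and the
# start_date are placeholders):
#
#     def test_dummy_dag(create_dummy_dag):
#         dag, task = create_dummy_dag(
#             dag_id="dummy_dag",
#             on_failure_callback=lambda context: None,
#             default_args={'start_date': timezone.datetime(2016, 1, 1)},
#         )
#         assert task.dag is dag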