cli_config.py

#!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Explicit configuration and definition of Airflow CLI commands."""
from __future__ import annotations

import argparse
import json
import os
import textwrap
from typing import Callable, Iterable, NamedTuple, Union

import lazy_object_proxy

from airflow import settings
from airflow.cli.commands.legacy_commands import check_legacy_command
from airflow.configuration import conf
from airflow.settings import _ENABLE_AIP_44
from airflow.utils.cli import ColorMode
from airflow.utils.module_loading import import_string
from airflow.utils.state import DagRunState, JobState
from airflow.utils.timezone import parse as parsedate

BUILD_DOCS = "BUILDING_AIRFLOW_DOCS" in os.environ


def lazy_load_command(import_path: str) -> Callable:
    """Create a lazy loader for command."""
    _, _, name = import_path.rpartition(".")

    def command(*args, **kwargs):
        func = import_string(import_path)
        return func(*args, **kwargs)

    command.__name__ = name

    return command
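
# Illustrative usage (a sketch added for this write-up, not part of the upstream
# module): lazy_load_command defers importing the command implementation until the
# command is actually invoked, which keeps building the CLI parser cheap. For example:
#
#     dag_list = lazy_load_command("airflow.cli.commands.dag_command.dag_list_dags")
#     dag_list(parsed_args)  # import happens here; parsed_args is a hypothetical argparse.Namespace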


class DefaultHelpParser(argparse.ArgumentParser):
    """CustomParser to display help message."""

    def _check_value(self, action, value):
        """Override _check_value and check conditionally added command."""
        if action.choices is not None and value not in action.choices:
            check_legacy_command(action, value)

        super()._check_value(action, value)

    def error(self, message):
        """Override error and use print_help instead of print_usage."""
        self.print_help()
        self.exit(2, f"\n{self.prog} command error: {message}, see help above.\n")


# Used in Arg to enable `None' as a distinct value from "not passed"
_UNSET = object()


class Arg:
    """Class to keep information about command line argument."""

    def __init__(
        self,
        flags=_UNSET,
        help=_UNSET,
        action=_UNSET,
        default=_UNSET,
        nargs=_UNSET,
        type=_UNSET,
        choices=_UNSET,
        required=_UNSET,
        metavar=_UNSET,
        dest=_UNSET,
    ):
        self.flags = flags
        self.kwargs = {}
        for k, v in locals().items():
            if k not in ("self", "flags") and v is not _UNSET:
                self.kwargs[k] = v

    def add_to_parser(self, parser: argparse.ArgumentParser):
        """Add this argument to an ArgumentParser."""
        if "metavar" in self.kwargs and "type" not in self.kwargs:
            if self.kwargs["metavar"] == "DIRPATH":

                def type(x):
                    return self._is_valid_directory(parser, x)

                self.kwargs["type"] = type
        parser.add_argument(*self.flags, **self.kwargs)

    def _is_valid_directory(self, parser, arg):
        if not os.path.isdir(arg):
            parser.error(f"The directory '{arg}' does not exist!")
        return arg
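
# Illustrative usage (a sketch, not part of the upstream module): an Arg records the
# flags plus whichever argparse keyword arguments were actually passed, and
# add_to_parser() forwards them to argparse unchanged. For example:
#
#     _parser = argparse.ArgumentParser(prog="example")            # hypothetical parser
#     _example = Arg(("-d", "--dag-id"), help="The id of the dag")  # hypothetical Arg
#     _example.add_to_parser(_parser)
#     _parser.parse_args(["--dag-id", "my_dag"])                    # -> Namespace(dag_id="my_dag")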


def positive_int(*, allow_zero):
    """Define a positive int type for an argument."""

    def _check(value):
        try:
            value = int(value)
            if allow_zero and value == 0:
                return value
            if value > 0:
                return value
        except ValueError:
            pass
        raise argparse.ArgumentTypeError(f"invalid positive int value: '{value}'")

    return _check
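
# Illustrative usage (a sketch, not part of the upstream module): positive_int()
# builds a checker suitable for argparse's `type=`; the value is parsed as an int and
# rejected unless it is > 0 (or == 0 when allow_zero=True). For example:
#
#     check = positive_int(allow_zero=False)
#     check("3")   # -> 3
#     check("0")   # raises argparse.ArgumentTypeError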


def string_list_type(val):
    """Parse a comma-separated list and return a list of strings (whitespace stripped)."""
    return [x.strip() for x in val.split(",")]


def string_lower_type(val):
    """Strip whitespace and lowercase the argument."""
    if not val:
        return
    return val.strip().lower()
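
# Illustrative usage (a sketch, not part of the upstream module):
#
#     string_list_type("dag_run, task_instance")  # -> ["dag_run", "task_instance"]
#     string_lower_type(" CSV ")                   # -> "csv"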


# Shared
ARG_DAG_ID = Arg(("dag_id",), help="The id of the dag")
ARG_TASK_ID = Arg(("task_id",), help="The id of the task")
ARG_EXECUTION_DATE = Arg(("execution_date",), help="The execution date of the DAG", type=parsedate)
ARG_EXECUTION_DATE_OPTIONAL = Arg(
    ("execution_date",), nargs="?", help="The execution date of the DAG (optional)", type=parsedate
)
ARG_EXECUTION_DATE_OR_RUN_ID = Arg(
    ("execution_date_or_run_id",), help="The execution_date of the DAG or run_id of the DAGRun"
)
ARG_EXECUTION_DATE_OR_RUN_ID_OPTIONAL = Arg(
    ("execution_date_or_run_id",),
    nargs="?",
    help="The execution_date of the DAG or run_id of the DAGRun (optional)",
)
ARG_TASK_REGEX = Arg(("-t", "--task-regex"), help="The regex to filter specific task_ids (optional)")
ARG_SUBDIR = Arg(
    ("-S", "--subdir"),
    help=(
        "File location or directory from which to look for the dag. "
        "Defaults to '[AIRFLOW_HOME]/dags', where [AIRFLOW_HOME] is the "
        "value of the 'AIRFLOW_HOME' setting in 'airflow.cfg'"
    ),
    default="[AIRFLOW_HOME]/dags" if BUILD_DOCS else settings.DAGS_FOLDER,
)
ARG_START_DATE = Arg(("-s", "--start-date"), help="Override start_date YYYY-MM-DD", type=parsedate)
ARG_END_DATE = Arg(("-e", "--end-date"), help="Override end_date YYYY-MM-DD", type=parsedate)
ARG_OUTPUT_PATH = Arg(
    (
        "-o",
        "--output-path",
    ),
    help="The output for generated yaml files",
    type=str,
    default="[CWD]" if BUILD_DOCS else os.getcwd(),
)
ARG_DRY_RUN = Arg(
    ("-n", "--dry-run"),
    help="Perform a dry run for each task. Only renders Template Fields for each task, nothing else",
    action="store_true",
)
ARG_PID = Arg(("--pid",), help="PID file location", nargs="?")
ARG_DAEMON = Arg(
    ("-D", "--daemon"), help="Daemonize instead of running in the foreground", action="store_true"
)
ARG_STDERR = Arg(("--stderr",), help="Redirect stderr to this file")
ARG_STDOUT = Arg(("--stdout",), help="Redirect stdout to this file")
ARG_LOG_FILE = Arg(("-l", "--log-file"), help="Location of the log file")
ARG_YES = Arg(
    ("-y", "--yes"),
    help="Do not prompt to confirm. Use with care!",
    action="store_true",
    default=False,
)
ARG_OUTPUT = Arg(
    (
        "-o",
        "--output",
    ),
    help="Output format. Allowed values: json, yaml, plain, table (default: table)",
    metavar="(table, json, yaml, plain)",
    choices=("table", "json", "yaml", "plain"),
    default="table",
)
ARG_COLOR = Arg(
    ("--color",),
    help="Do emit colored output (default: auto)",
    choices={ColorMode.ON, ColorMode.OFF, ColorMode.AUTO},
    default=ColorMode.AUTO,
)

# DB args
ARG_VERSION_RANGE = Arg(
    ("-r", "--range"),
    help="Version range(start:end) for offline sql generation. Example: '2.0.2:2.2.3'",
    default=None,
)
ARG_REVISION_RANGE = Arg(
    ("--revision-range",),
    help=(
        "Migration revision range(start:end) to use for offline sql generation. "
        "Example: ``a13f7613ad25:7b2661a43ba3``"
    ),
    default=None,
)
ARG_SKIP_SERVE_LOGS = Arg(
    ("-s", "--skip-serve-logs"),
    default=False,
    help="Don't start the serve logs process along with the workers",
    action="store_true",
)

# list_dag_runs
ARG_DAG_ID_REQ_FLAG = Arg(
    ("-d", "--dag-id"), required=True, help="The id of the dag"
)  # TODO: convert this to a positional arg in Airflow 3
ARG_NO_BACKFILL = Arg(
    ("--no-backfill",), help="filter all the backfill dagruns given the dag id", action="store_true"
)
dagrun_states = tuple(state.value for state in DagRunState)
ARG_DR_STATE = Arg(
    ("--state",),
    help="Only list the DAG runs corresponding to the state",
    metavar=", ".join(dagrun_states),
    choices=dagrun_states,
)

# list_jobs
ARG_DAG_ID_OPT = Arg(("-d", "--dag-id"), help="The id of the dag")
ARG_LIMIT = Arg(("--limit",), help="Return a limited number of records")
job_states = tuple(state.value for state in JobState)
ARG_JOB_STATE = Arg(
    ("--state",),
    help="Only list the jobs corresponding to the state",
    metavar=", ".join(job_states),
    choices=job_states,
)

# next_execution
ARG_NUM_EXECUTIONS = Arg(
    ("-n", "--num-executions"),
    default=1,
    type=positive_int(allow_zero=False),
    help="The number of next execution datetimes to show",
)

# backfill
ARG_MARK_SUCCESS = Arg(
    ("-m", "--mark-success"), help="Mark jobs as succeeded without running them", action="store_true"
)
ARG_INCLUDE_DESCRIPTIONS = Arg(
    ("-d", "--include-descriptions"),
    help="Show descriptions for the configuration variables",
    action="store_true",
)
ARG_INCLUDE_EXAMPLES = Arg(
    ("-e", "--include-examples"), help="Show examples for the configuration variables", action="store_true"
)
ARG_INCLUDE_SOURCES = Arg(
    ("-s", "--include-sources"), help="Show source of the configuration variable", action="store_true"
)
ARG_INCLUDE_ENV_VARS = Arg(
    ("-V", "--include-env-vars"), help="Show environment variable for each option", action="store_true"
)
ARG_COMMENT_OUT_EVERYTHING = Arg(
    ("-c", "--comment-out-everything"),
    help="Comment out all configuration options. Useful as starting point for new installation",
    action="store_true",
)
ARG_EXCLUDE_PROVIDERS = Arg(
    ("-p", "--exclude-providers"),
    help="Exclude provider configuration (they are included by default)",
    action="store_true",
)
ARG_DEFAULTS = Arg(
    ("-a", "--defaults"),
    help="Show only defaults - do not include local configuration, sources,"
    " includes descriptions, examples, variables. Comment out everything.",
    action="store_true",
)
ARG_VERBOSE = Arg(("-v", "--verbose"), help="Make logging output more verbose", action="store_true")
ARG_LOCAL = Arg(("-l", "--local"), help="Run the task using the LocalExecutor", action="store_true")
ARG_DONOT_PICKLE = Arg(
    ("-x", "--donot-pickle"),
    help=(
        "Do not attempt to pickle the DAG object to send over "
        "to the workers, just tell the workers to run their version "
        "of the code"
    ),
    action="store_true",
)
ARG_BF_IGNORE_DEPENDENCIES = Arg(
    ("-i", "--ignore-dependencies"),
    help=(
        "Skip upstream tasks, run only the tasks "
        "matching the regexp. Only works in conjunction "
        "with task_regex"
    ),
    action="store_true",
)
ARG_BF_IGNORE_FIRST_DEPENDS_ON_PAST = Arg(
    ("-I", "--ignore-first-depends-on-past"),
    help=(
        "Ignores depends_on_past dependencies for the first "
        "set of tasks only (subsequent executions in the backfill "
        "DO respect depends_on_past)"
    ),
    action="store_true",
)
ARG_POOL = Arg(("--pool",), "Resource pool to use")
ARG_DELAY_ON_LIMIT = Arg(
    ("--delay-on-limit",),
    help=(
        "Amount of time in seconds to wait when the limit "
        "on maximum active dag runs (max_active_runs) has "
        "been reached before trying to execute a dag run "
        "again"
    ),
    type=float,
    default=1.0,
)
ARG_RESET_DAG_RUN = Arg(
    ("--reset-dagruns",),
    help=(
        "if set, the backfill will delete existing "
        "backfill-related DAG runs and start "
        "anew with fresh, running DAG runs"
    ),
    action="store_true",
)
ARG_RERUN_FAILED_TASKS = Arg(
    ("--rerun-failed-tasks",),
    help=(
        "if set, the backfill will auto-rerun "
        "all the failed tasks for the backfill date range "
        "instead of throwing exceptions"
    ),
    action="store_true",
)
ARG_CONTINUE_ON_FAILURES = Arg(
    ("--continue-on-failures",),
    help=("if set, the backfill will keep going even if some of the tasks failed"),
    action="store_true",
)
ARG_DISABLE_RETRY = Arg(
    ("--disable-retry",),
    help=("if set, the backfill will set tasks as failed without retrying."),
    action="store_true",
)
ARG_RUN_BACKWARDS = Arg(
    (
        "-B",
        "--run-backwards",
    ),
    help=(
        "if set, the backfill will run tasks from the most "
        "recent day first. if there are tasks that depend_on_past "
        "this option will throw an exception"
    ),
    action="store_true",
)
ARG_TREAT_DAG_AS_REGEX = Arg(
    ("--treat-dag-as-regex",),
    help=("Deprecated -- use `--treat-dag-id-as-regex` instead"),
    action="store_true",
)
ARG_TREAT_DAG_ID_AS_REGEX = Arg(
    ("--treat-dag-id-as-regex",),
    help=("if set, dag_id will be treated as regex instead of an exact string"),
    action="store_true",
)

# test_dag
ARG_SHOW_DAGRUN = Arg(
    ("--show-dagrun",),
    help=(
        "After completing the backfill, shows the diagram for current DAG Run.\n"
        "\n"
        "The diagram is in DOT language\n"
    ),
    action="store_true",
)
ARG_IMGCAT_DAGRUN = Arg(
    ("--imgcat-dagrun",),
    help=(
        "After completing the dag run, prints a diagram on the screen for the "
        "current DAG Run using the imgcat tool.\n"
    ),
    action="store_true",
)
ARG_SAVE_DAGRUN = Arg(
    ("--save-dagrun",),
    help="After completing the backfill, saves the diagram for current DAG Run to the indicated file.\n\n",
)
ARG_USE_EXECUTOR = Arg(
    ("--use-executor",),
    help="Use an executor to test the DAG. By default it runs the DAG without an executor. "
    "If set, it uses the executor configured in the environment.",
    action="store_true",
)
ARG_MARK_SUCCESS_PATTERN = Arg(
    ("--mark-success-pattern",),
    help=(
        "Don't run task_ids matching the regex <MARK_SUCCESS_PATTERN>, mark them as successful instead.\n"
        "Can be used to skip e.g. dependency check sensors or cleanup steps in local testing.\n"
    ),
)

# list_tasks
ARG_TREE = Arg(
    ("-t", "--tree"),
    help="Deprecated - use `dags show` instead. Display tasks in a tree. Note that generating the tree can be slow and the output very large for some DAGs.",
    action="store_true",
)

# tasks_run
# This is a hidden option -- not meant for users to set or know about
ARG_SHUT_DOWN_LOGGING = Arg(
    ("--no-shut-down-logging",),
    help=argparse.SUPPRESS,
    dest="shut_down_logging",
    action="store_false",
    default=True,
)

# clear
ARG_UPSTREAM = Arg(("-u", "--upstream"), help="Include upstream tasks", action="store_true")
ARG_ONLY_FAILED = Arg(("-f", "--only-failed"), help="Only failed jobs", action="store_true")
ARG_ONLY_RUNNING = Arg(("-r", "--only-running"), help="Only running jobs", action="store_true")
ARG_DOWNSTREAM = Arg(("-d", "--downstream"), help="Include downstream tasks", action="store_true")
ARG_EXCLUDE_SUBDAGS = Arg(("-x", "--exclude-subdags"), help="Exclude subdags", action="store_true")
ARG_EXCLUDE_PARENTDAG = Arg(
    ("-X", "--exclude-parentdag"),
    help="Exclude ParentDAGS if the task cleared is a part of a SubDAG",
    action="store_true",
)
ARG_DAG_REGEX = Arg(
    ("-R", "--dag-regex"), help="Search dag_id as regex instead of exact string", action="store_true"
)

# show_dag
ARG_SAVE = Arg(("-s", "--save"), help="Saves the result to the indicated file.")
ARG_IMGCAT = Arg(("--imgcat",), help="Displays graph using the imgcat tool.", action="store_true")

# trigger_dag
ARG_RUN_ID = Arg(("-r", "--run-id"), help="Helps to identify this run")
ARG_CONF = Arg(("-c", "--conf"), help="JSON string that gets pickled into the DagRun's conf attribute")
ARG_EXEC_DATE = Arg(("-e", "--exec-date"), help="The execution date of the DAG", type=parsedate)
ARG_REPLACE_MICRO = Arg(
    ("--no-replace-microseconds",),
    help="whether microseconds should be zeroed",
    dest="replace_microseconds",
    action="store_false",
    default=True,
)

# db
ARG_DB_TABLES = Arg(
    ("-t", "--tables"),
    help=lazy_object_proxy.Proxy(
        lambda: f"Table names to perform maintenance on (use comma-separated list).\n"
        f"Options: {import_string('airflow.cli.commands.db_command.all_tables')}"
    ),
    type=string_list_type,
)
ARG_DB_CLEANUP_TIMESTAMP = Arg(
    ("--clean-before-timestamp",),
    help="The date or timestamp before which data should be purged.\n"
    "If no timezone info is supplied then dates are assumed to be in airflow default timezone.\n"
    "Example: '2022-01-01 00:00:00+01:00'",
    type=parsedate,
    required=True,
)
ARG_DB_DRY_RUN = Arg(
    ("--dry-run",),
    help="Perform a dry run",
    action="store_true",
)
ARG_DB_SKIP_ARCHIVE = Arg(
    ("--skip-archive",),
    help="Don't preserve purged records in an archive table.",
    action="store_true",
)
ARG_DB_EXPORT_FORMAT = Arg(
    ("--export-format",),
    help="The file format to export the cleaned data",
    choices=("csv",),
    default="csv",
)
ARG_DB_OUTPUT_PATH = Arg(
    ("--output-path",),
    metavar="DIRPATH",
    help="The path to the output directory to export the cleaned data. This directory must exist.",
    required=True,
)
ARG_DB_DROP_ARCHIVES = Arg(
    ("--drop-archives",),
    help="Drop the archive tables after exporting. Use with caution.",
    action="store_true",
)
ARG_DB_RETRY = Arg(
    ("--retry",),
    default=0,
    type=positive_int(allow_zero=True),
    help="Retry database check upon failure",
)
ARG_DB_RETRY_DELAY = Arg(
    ("--retry-delay",),
    default=1,
    type=positive_int(allow_zero=False),
    help="Wait time between retries in seconds",
)

# pool
ARG_POOL_NAME = Arg(("pool",), metavar="NAME", help="Pool name")
ARG_POOL_SLOTS = Arg(("slots",), type=int, help="Pool slots")
ARG_POOL_DESCRIPTION = Arg(("description",), help="Pool description")
ARG_POOL_INCLUDE_DEFERRED = Arg(
    ("--include-deferred",), help="Include deferred tasks in calculations for Pool", action="store_true"
)
ARG_POOL_IMPORT = Arg(
    ("file",),
    metavar="FILEPATH",
    help="Import pools from JSON file. Example format::\n"
    + textwrap.indent(
        textwrap.dedent(
            """
            {
                "pool_1": {"slots": 5, "description": "", "include_deferred": true},
                "pool_2": {"slots": 10, "description": "test", "include_deferred": false}
            }"""
        ),
        " " * 4,
    ),
)
ARG_POOL_EXPORT = Arg(("file",), metavar="FILEPATH", help="Export all pools to JSON file")

# variables
ARG_VAR = Arg(("key",), help="Variable key")
ARG_VAR_VALUE = Arg(("value",), metavar="VALUE", help="Variable value")
ARG_DEFAULT = Arg(
    ("-d", "--default"), metavar="VAL", default=None, help="Default value returned if variable does not exist"
)
ARG_VAR_DESCRIPTION = Arg(
    ("--description",),
    default=None,
    required=False,
    help="Variable description, optional when setting a variable",
)
ARG_DESERIALIZE_JSON = Arg(("-j", "--json"), help="Deserialize JSON variable", action="store_true")
ARG_SERIALIZE_JSON = Arg(("-j", "--json"), help="Serialize JSON variable", action="store_true")
ARG_VAR_IMPORT = Arg(("file",), help="Import variables from JSON file")
ARG_VAR_EXPORT = Arg(
    ("file",),
    help="Export all variables to JSON file",
    type=argparse.FileType("w", encoding="UTF-8"),
)
ARG_VAR_ACTION_ON_EXISTING_KEY = Arg(
    ("-a", "--action-on-existing-key"),
    help="Action to take if we encounter a variable key that already exists.",
    default="overwrite",
    choices=("overwrite", "fail", "skip"),
)

# kerberos
ARG_PRINCIPAL = Arg(("principal",), help="kerberos principal", nargs="?")
ARG_KEYTAB = Arg(("-k", "--keytab"), help="keytab", nargs="?", default=conf.get("kerberos", "keytab"))
ARG_KERBEROS_ONE_TIME_MODE = Arg(
    ("-o", "--one-time"), help="Run airflow kerberos one time instead of forever", action="store_true"
)

# run
ARG_INTERACTIVE = Arg(
    ("-N", "--interactive"),
    help="Do not capture standard output and error streams (useful for interactive debugging)",
    action="store_true",
)
# TODO(aoen): "force" is a poor choice of name here since it implies it overrides
# all dependencies (not just past success), e.g. the ignore_depends_on_past
# dependency. This flag should be deprecated and renamed to 'ignore_ti_state' and
# the "ignore_all_dependencies" command should be called the "force" command
# instead.
ARG_FORCE = Arg(
    ("-f", "--force"),
    help="Ignore previous task instance state, rerun regardless if task already succeeded/failed",
    action="store_true",
)
ARG_RAW = Arg(("-r", "--raw"), argparse.SUPPRESS, "store_true")
ARG_IGNORE_ALL_DEPENDENCIES = Arg(
    ("-A", "--ignore-all-dependencies"),
    help="Ignores all non-critical dependencies, including ignore_ti_state and ignore_task_deps",
    action="store_true",
)
# TODO(aoen): ignore_dependencies is a poor choice of name here because it is too
# vague (e.g. a task being in the appropriate state to be run is also a dependency
# but is not ignored by this flag), the name 'ignore_task_dependencies' is
# slightly better (as it ignores all dependencies that are specific to the task),
# so deprecate the old command name and use this instead.
ARG_IGNORE_DEPENDENCIES = Arg(
    ("-i", "--ignore-dependencies"),
    help="Ignore task-specific dependencies, e.g. upstream, depends_on_past, and retry delay dependencies",
    action="store_true",
)
ARG_IGNORE_DEPENDS_ON_PAST = Arg(
    ("-I", "--ignore-depends-on-past"),
    help="Deprecated -- use `--depends-on-past ignore` instead. "
    "Ignore depends_on_past dependencies (but respect upstream dependencies)",
    action="store_true",
)
ARG_DEPENDS_ON_PAST = Arg(
    ("-d", "--depends-on-past"),
    help="Determine how Airflow should deal with past dependencies. The default action is `check`, Airflow "
    "will check if the past dependencies are met for the tasks having `depends_on_past=True` before run "
    "them, if `ignore` is provided, the past dependencies will be ignored, if `wait` is provided and "
    "`depends_on_past=True`, Airflow will wait the past dependencies until they are met before running or "
    "skipping the task",
    choices={"check", "ignore", "wait"},
    default="check",
)
ARG_SHIP_DAG = Arg(
    ("--ship-dag",), help="Pickles (serializes) the DAG and ships it to the worker", action="store_true"
)
ARG_PICKLE = Arg(("-p", "--pickle"), help="Serialized pickle object of the entire dag (used internally)")
ARG_JOB_ID = Arg(("-j", "--job-id"), help=argparse.SUPPRESS)
ARG_CFG_PATH = Arg(("--cfg-path",), help="Path to config file to use instead of airflow.cfg")
ARG_MAP_INDEX = Arg(("--map-index",), type=int, default=-1, help="Mapped task index")
ARG_READ_FROM_DB = Arg(("--read-from-db",), help="Read dag from DB instead of dag file", action="store_true")

# database
ARG_MIGRATION_TIMEOUT = Arg(
    ("-t", "--migration-wait-timeout"),
    help="Timeout to wait for the db to migrate",
    type=int,
    default=60,
)
ARG_DB_RESERIALIZE_DAGS = Arg(
    ("--no-reserialize-dags",),
    # Not intended for users, so don't show in help
    help=argparse.SUPPRESS,
    action="store_false",
    default=True,
    dest="reserialize_dags",
)
ARG_DB_VERSION__UPGRADE = Arg(
    ("-n", "--to-version"),
    help=(
        "(Optional) The airflow version to upgrade to. Note: must provide either "
        "`--to-revision` or `--to-version`."
    ),
)
ARG_DB_REVISION__UPGRADE = Arg(
    ("-r", "--to-revision"),
    help="(Optional) If provided, only run migrations up to and including this Alembic revision.",
)
ARG_DB_VERSION__DOWNGRADE = Arg(
    ("-n", "--to-version"),
    help="(Optional) If provided, only run migrations up to this version.",
)
ARG_DB_FROM_VERSION = Arg(
    ("--from-version",),
    help="(Optional) If generating sql, may supply a *from* version",
)
ARG_DB_REVISION__DOWNGRADE = Arg(
    ("-r", "--to-revision"),
    help="The Alembic revision to downgrade to. Note: must provide either `--to-revision` or `--to-version`.",
)
ARG_DB_FROM_REVISION = Arg(
    ("--from-revision",),
    help="(Optional) If generating sql, may supply a *from* Alembic revision",
)
ARG_DB_SQL_ONLY = Arg(
    ("-s", "--show-sql-only"),
    help="Don't actually run migrations; just print out sql scripts for offline migration. "
    "Required if using either `--from-revision` or `--from-version`.",
    action="store_true",
    default=False,
)
ARG_DB_SKIP_INIT = Arg(
    ("-s", "--skip-init"),
    help="Only remove tables; do not perform db init.",
    action="store_true",
    default=False,
)
ARG_DB_USE_MIGRATION_FILES = Arg(
    ("-m", "--use-migration-files"),
    help="Use migration files to perform migration",
    action="store_true",
    default=False,
)

# webserver
ARG_PORT = Arg(
    ("-p", "--port"),
    default=conf.get("webserver", "WEB_SERVER_PORT"),
    type=int,
    help="The port on which to run the server",
)
ARG_SSL_CERT = Arg(
    ("--ssl-cert",),
    default=conf.get("webserver", "WEB_SERVER_SSL_CERT"),
    help="Path to the SSL certificate for the webserver",
)
ARG_SSL_KEY = Arg(
    ("--ssl-key",),
    default=conf.get("webserver", "WEB_SERVER_SSL_KEY"),
    help="Path to the key to use with the SSL certificate",
)
ARG_WORKERS = Arg(
    ("-w", "--workers"),
    default=conf.get("webserver", "WORKERS"),
    type=int,
    help="Number of workers to run the webserver on",
)
ARG_WORKERCLASS = Arg(
    ("-k", "--workerclass"),
    default=conf.get("webserver", "WORKER_CLASS"),
    choices=["sync", "eventlet", "gevent", "tornado"],
    help="The worker class to use for Gunicorn",
)
ARG_WORKER_TIMEOUT = Arg(
    ("-t", "--worker-timeout"),
    default=conf.get("webserver", "WEB_SERVER_WORKER_TIMEOUT"),
    type=int,
    help="The timeout for waiting on webserver workers",
)
ARG_HOSTNAME = Arg(
    ("-H", "--hostname"),
    default=conf.get("webserver", "WEB_SERVER_HOST"),
    help="Set the hostname on which to run the web server",
)
ARG_DEBUG = Arg(
    ("-d", "--debug"), help="Use the server that ships with Flask in debug mode", action="store_true"
)
ARG_ACCESS_LOGFILE = Arg(
    ("-A", "--access-logfile"),
    default=conf.get("webserver", "ACCESS_LOGFILE"),
    help="The logfile to store the webserver access log. Use '-' to print to stdout",
)
ARG_ERROR_LOGFILE = Arg(
    ("-E", "--error-logfile"),
    default=conf.get("webserver", "ERROR_LOGFILE"),
    help="The logfile to store the webserver error log. Use '-' to print to stderr",
)
ARG_ACCESS_LOGFORMAT = Arg(
    ("-L", "--access-logformat"),
    default=conf.get("webserver", "ACCESS_LOGFORMAT"),
    help="The access log format for gunicorn logs",
)

# internal-api
ARG_INTERNAL_API_PORT = Arg(
    ("-p", "--port"),
    default=9080,
    type=int,
    help="The port on which to run the server",
)
ARG_INTERNAL_API_WORKERS = Arg(
    ("-w", "--workers"),
    default=4,
    type=int,
    help="Number of workers to run the Internal API on",
)
ARG_INTERNAL_API_WORKERCLASS = Arg(
    ("-k", "--workerclass"),
    default="sync",
    choices=["sync", "eventlet", "gevent", "tornado"],
    help="The worker class to use for Gunicorn",
)
ARG_INTERNAL_API_WORKER_TIMEOUT = Arg(
    ("-t", "--worker-timeout"),
    default=120,
    type=int,
    help="The timeout for waiting on Internal API workers",
)
ARG_INTERNAL_API_HOSTNAME = Arg(
    ("-H", "--hostname"),
    default="0.0.0.0",  # nosec
    help="Set the hostname on which to run the web server",
)
ARG_INTERNAL_API_ACCESS_LOGFILE = Arg(
    ("-A", "--access-logfile"),
    help="The logfile to store the access log. Use '-' to print to stdout",
)
ARG_INTERNAL_API_ERROR_LOGFILE = Arg(
    ("-E", "--error-logfile"),
    help="The logfile to store the error log. Use '-' to print to stderr",
)
ARG_INTERNAL_API_ACCESS_LOGFORMAT = Arg(
    ("-L", "--access-logformat"),
    help="The access log format for gunicorn logs",
)

# scheduler
ARG_NUM_RUNS = Arg(
    ("-n", "--num-runs"),
    default=conf.getint("scheduler", "num_runs"),
    type=int,
    help="Set the number of runs to execute before exiting",
)
ARG_DO_PICKLE = Arg(
    ("-p", "--do-pickle"),
    default=False,
    help=(
        "Attempt to pickle the DAG object to send over "
        "to the workers, instead of letting workers run their version "
        "of the code"
    ),
    action="store_true",
)
ARG_WITHOUT_MINGLE = Arg(
    ("--without-mingle",),
    default=False,
    help="Don't synchronize with other workers at start-up",
    action="store_true",
)
ARG_WITHOUT_GOSSIP = Arg(
    ("--without-gossip",),
    default=False,
    help="Don't subscribe to other workers events",
    action="store_true",
)
ARG_TASK_PARAMS = Arg(("-t", "--task-params"), help="Sends a JSON params dict to the task")
ARG_POST_MORTEM = Arg(
    ("-m", "--post-mortem"), action="store_true", help="Open debugger on uncaught exception"
)
ARG_ENV_VARS = Arg(
    ("--env-vars",),
    help="Set env var in both parsing time and runtime for each of entry supplied in a JSON dict",
    type=json.loads,
)

# connections
ARG_CONN_ID = Arg(("conn_id",), help="Connection id, required to get/add/delete/test a connection", type=str)
ARG_CONN_ID_FILTER = Arg(
    ("--conn-id",), help="If passed, only items with the specified connection ID will be displayed", type=str
)
ARG_CONN_URI = Arg(
    ("--conn-uri",), help="Connection URI, required to add a connection without conn_type", type=str
)
ARG_CONN_JSON = Arg(
    ("--conn-json",), help="Connection JSON, required to add a connection using JSON representation", type=str
)
ARG_CONN_TYPE = Arg(
    ("--conn-type",), help="Connection type, required to add a connection without conn_uri", type=str
)
ARG_CONN_DESCRIPTION = Arg(
    ("--conn-description",), help="Connection description, optional when adding a connection", type=str
)
ARG_CONN_HOST = Arg(("--conn-host",), help="Connection host, optional when adding a connection", type=str)
ARG_CONN_LOGIN = Arg(("--conn-login",), help="Connection login, optional when adding a connection", type=str)
ARG_CONN_PASSWORD = Arg(
    ("--conn-password",), help="Connection password, optional when adding a connection", type=str
)
ARG_CONN_SCHEMA = Arg(
    ("--conn-schema",), help="Connection schema, optional when adding a connection", type=str
)
ARG_CONN_PORT = Arg(("--conn-port",), help="Connection port, optional when adding a connection", type=str)
ARG_CONN_EXTRA = Arg(
    ("--conn-extra",), help="Connection `Extra` field, optional when adding a connection", type=str
)
ARG_CONN_EXPORT = Arg(
    ("file",),
    help="Output file path for exporting the connections",
    type=argparse.FileType("w", encoding="UTF-8"),
)
ARG_CONN_EXPORT_FORMAT = Arg(
    ("--format",),
    help="Deprecated -- use `--file-format` instead. File format to use for the export.",
    type=str,
    choices=["json", "yaml", "env"],
)
ARG_CONN_EXPORT_FILE_FORMAT = Arg(
    ("--file-format",), help="File format for the export", type=str, choices=["json", "yaml", "env"]
)
ARG_CONN_SERIALIZATION_FORMAT = Arg(
    ("--serialization-format",),
    help="When exporting as `.env` format, defines how connections should be serialized. Default is `uri`.",
    type=string_lower_type,
    choices=["json", "uri"],
)
ARG_CONN_IMPORT = Arg(("file",), help="Import connections from a file")
ARG_CONN_OVERWRITE = Arg(
    ("--overwrite",),
    help="Overwrite existing entries if a conflict occurs",
    required=False,
    action="store_true",
)

# providers
ARG_PROVIDER_NAME = Arg(
    ("provider_name",), help="Provider name, required to get provider information", type=str
)
ARG_FULL = Arg(
    ("-f", "--full"),
    help="Full information about the provider, including documentation information.",
    required=False,
    action="store_true",
)

# info
ARG_ANONYMIZE = Arg(
    ("--anonymize",),
    help="Minimize any personal identifiable information. Use it when sharing output with others.",
    action="store_true",
)
ARG_FILE_IO = Arg(
    ("--file-io",), help="Send output to file.io service and returns link.", action="store_true"
)

# config
ARG_SECTION = Arg(
    ("section",),
    help="The section name",
)
ARG_OPTION = Arg(
    ("option",),
    help="The option name",
)
ARG_OPTIONAL_SECTION = Arg(
    ("--section",),
    help="The section name",
)

# kubernetes cleanup-pods
ARG_NAMESPACE = Arg(
    ("--namespace",),
    default=conf.get("kubernetes_executor", "namespace"),
    help="Kubernetes Namespace. Default value is `[kubernetes] namespace` in configuration.",
)
ARG_MIN_PENDING_MINUTES = Arg(
    ("--min-pending-minutes",),
    default=30,
    type=positive_int(allow_zero=False),
    help=(
        "Pending pods created before the time interval are to be cleaned up, "
        "measured in minutes. Default value is 30(m). The minimum value is 5(m)."
    ),
)

# jobs check
ARG_JOB_TYPE_FILTER = Arg(
    ("--job-type",),
    choices=("BackfillJob", "LocalTaskJob", "SchedulerJob", "TriggererJob", "DagProcessorJob"),
    action="store",
    help="The type of job(s) that will be checked.",
)
ARG_JOB_HOSTNAME_FILTER = Arg(
    ("--hostname",),
    default=None,
    type=str,
    help="The hostname of job(s) that will be checked.",
)
ARG_JOB_HOSTNAME_CALLABLE_FILTER = Arg(
    ("--local",),
    action="store_true",
    help="If passed, this command will only show jobs from the local host "
    "(those with a hostname matching what `hostname_callable` returns).",
)
ARG_JOB_LIMIT = Arg(
    ("--limit",),
    default=1,
    type=positive_int(allow_zero=True),
    help="The number of recent jobs that will be checked. To disable limit, set 0. ",
)
ARG_ALLOW_MULTIPLE = Arg(
    ("--allow-multiple",),
    action="store_true",
    help="If passed, this command will be successful even if multiple matching alive jobs are found.",
)

# triggerer
ARG_CAPACITY = Arg(
    ("--capacity",),
    type=positive_int(allow_zero=False),
    help="The maximum number of triggers that a Triggerer will run at one time.",
)

# reserialize
ARG_CLEAR_ONLY = Arg(
    ("--clear-only",),
    action="store_true",
    help="If passed, serialized DAGs will be cleared but not reserialized.",
)
ARG_DAG_LIST_COLUMNS = Arg(
    ("--columns",),
    type=string_list_type,
    help="List of columns to render. (default: ['dag_id', 'fileloc', 'owner', 'is_paused'])",
    default=("dag_id", "fileloc", "owners", "is_paused"),
)

ALTERNATIVE_CONN_SPECS_ARGS = [
    ARG_CONN_TYPE,
    ARG_CONN_DESCRIPTION,
    ARG_CONN_HOST,
    ARG_CONN_LOGIN,
    ARG_CONN_PASSWORD,
    ARG_CONN_SCHEMA,
    ARG_CONN_PORT,
]


class ActionCommand(NamedTuple):
    """Single CLI command."""

    name: str
    help: str
    func: Callable
    args: Iterable[Arg]
    description: str | None = None
    epilog: str | None = None
    hide: bool = False


class GroupCommand(NamedTuple):
    """CLI command with subcommands."""

    name: str
    help: str
    subcommands: Iterable
    description: str | None = None
    epilog: str | None = None


CLICommand = Union[ActionCommand, GroupCommand]
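
# Illustrative usage (a sketch, not part of the upstream module): an ActionCommand
# ties a subcommand name to a lazily imported callable plus the Arg objects it
# accepts, while a GroupCommand nests further CLICommand entries. For example:
#
#     EXAMPLE_COMMAND = ActionCommand(            # hypothetical command
#         name="example",
#         help="Example command",
#         func=lazy_load_command("airflow.cli.commands.dag_command.dag_list_dags"),
#         args=(ARG_SUBDIR, ARG_OUTPUT, ARG_VERBOSE),
#     )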

DAGS_COMMANDS = (
    ActionCommand(
        name="details",
        help="Get DAG details given a DAG id",
        func=lazy_load_command("airflow.cli.commands.dag_command.dag_details"),
        args=(ARG_DAG_ID, ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="list",
        help="List all the DAGs",
        func=lazy_load_command("airflow.cli.commands.dag_command.dag_list_dags"),
        args=(ARG_SUBDIR, ARG_OUTPUT, ARG_VERBOSE, ARG_DAG_LIST_COLUMNS),
    ),
    ActionCommand(
        name="list-import-errors",
        help="List all the DAGs that have import errors",
        func=lazy_load_command("airflow.cli.commands.dag_command.dag_list_import_errors"),
        args=(ARG_SUBDIR, ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="report",
        help="Show DagBag loading report",
        func=lazy_load_command("airflow.cli.commands.dag_command.dag_report"),
        args=(ARG_SUBDIR, ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="list-runs",
        help="List DAG runs given a DAG id",
        description=(
            "List DAG runs given a DAG id. If state option is given, it will only search for all the "
            "dagruns with the given state. If no_backfill option is given, it will filter out all "
            "backfill dagruns for given dag id. If start_date is given, it will filter out all the "
            "dagruns that were executed before this date. If end_date is given, it will filter out "
            "all the dagruns that were executed after this date. "
        ),
        func=lazy_load_command("airflow.cli.commands.dag_command.dag_list_dag_runs"),
        args=(
            ARG_DAG_ID_REQ_FLAG,
            ARG_NO_BACKFILL,
            ARG_DR_STATE,
            ARG_OUTPUT,
            ARG_VERBOSE,
            ARG_START_DATE,
            ARG_END_DATE,
        ),
    ),
    ActionCommand(
        name="list-jobs",
        help="List the jobs",
        func=lazy_load_command("airflow.cli.commands.dag_command.dag_list_jobs"),
        args=(ARG_DAG_ID_OPT, ARG_JOB_STATE, ARG_LIMIT, ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="state",
        help="Get the status of a dag run",
        func=lazy_load_command("airflow.cli.commands.dag_command.dag_state"),
        args=(ARG_DAG_ID, ARG_EXECUTION_DATE, ARG_SUBDIR, ARG_VERBOSE),
    ),
    ActionCommand(
        name="next-execution",
        help="Get the next execution datetimes of a DAG",
        description=(
            "Get the next execution datetimes of a DAG. It returns one execution unless the "
            "num-executions option is given"
        ),
        func=lazy_load_command("airflow.cli.commands.dag_command.dag_next_execution"),
        args=(ARG_DAG_ID, ARG_SUBDIR, ARG_NUM_EXECUTIONS, ARG_VERBOSE),
    ),
    ActionCommand(
        name="pause",
        help="Pause DAG(s)",
        description=(
            "Pause one or more DAGs. This command allows you to halt the execution of specified DAGs, "
            "disabling further task scheduling. Use `--treat-dag-id-as-regex` to target multiple DAGs by "
            "treating the `--dag-id` as a regex pattern."
        ),
        func=lazy_load_command("airflow.cli.commands.dag_command.dag_pause"),
        args=(ARG_DAG_ID, ARG_SUBDIR, ARG_TREAT_DAG_ID_AS_REGEX, ARG_YES, ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="unpause",
        help="Resume paused DAG(s)",
        description=(
            "Resume one or more DAGs. This command allows you to restore the execution of specified "
            "DAGs, enabling further task scheduling. Use `--treat-dag-id-as-regex` to target multiple DAGs "
            "by treating the `--dag-id` as a regex pattern."
        ),
        func=lazy_load_command("airflow.cli.commands.dag_command.dag_unpause"),
        args=(ARG_DAG_ID, ARG_SUBDIR, ARG_TREAT_DAG_ID_AS_REGEX, ARG_YES, ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="trigger",
        help=(
            "Trigger a new DAG run. If DAG is paused then dagrun state will remain queued, "
            "and the task won't run."
        ),
        func=lazy_load_command("airflow.cli.commands.dag_command.dag_trigger"),
        args=(
            ARG_DAG_ID,
            ARG_SUBDIR,
            ARG_RUN_ID,
            ARG_CONF,
            ARG_EXEC_DATE,
            ARG_VERBOSE,
            ARG_REPLACE_MICRO,
            ARG_OUTPUT,
        ),
    ),
    ActionCommand(
        name="delete",
        help="Delete all DB records related to the specified DAG",
        func=lazy_load_command("airflow.cli.commands.dag_command.dag_delete"),
        args=(ARG_DAG_ID, ARG_YES, ARG_VERBOSE),
    ),
    ActionCommand(
        name="show",
        help="Displays DAG's tasks with their dependencies",
        description=(
            "The --imgcat option only works in iTerm.\n"
            "\n"
            "For more information, see: https://www.iterm2.com/documentation-images.html\n"
            "\n"
            "The --save option saves the result to the indicated file.\n"
            "\n"
            "The file format is determined by the file extension. "
            "For more information about supported "
            "format, see: https://www.graphviz.org/doc/info/output.html\n"
            "\n"
            "If you want to create a PNG file then you should execute the following command:\n"
            "airflow dags show <DAG_ID> --save output.png\n"
            "\n"
            "If you want to create a DOT file then you should execute the following command:\n"
            "airflow dags show <DAG_ID> --save output.dot\n"
        ),
        func=lazy_load_command("airflow.cli.commands.dag_command.dag_show"),
        args=(
            ARG_DAG_ID,
            ARG_SUBDIR,
            ARG_SAVE,
            ARG_IMGCAT,
            ARG_VERBOSE,
        ),
    ),
    ActionCommand(
        name="show-dependencies",
        help="Displays DAGs with their dependencies",
        description=(
            "The --imgcat option only works in iTerm.\n"
            "\n"
            "For more information, see: https://www.iterm2.com/documentation-images.html\n"
            "\n"
            "The --save option saves the result to the indicated file.\n"
            "\n"
            "The file format is determined by the file extension. "
            "For more information about supported "
            "format, see: https://www.graphviz.org/doc/info/output.html\n"
            "\n"
            "If you want to create a PNG file then you should execute the following command:\n"
            "airflow dags show-dependencies --save output.png\n"
            "\n"
            "If you want to create a DOT file then you should execute the following command:\n"
            "airflow dags show-dependencies --save output.dot\n"
        ),
        func=lazy_load_command("airflow.cli.commands.dag_command.dag_dependencies_show"),
        args=(
            ARG_SUBDIR,
            ARG_SAVE,
            ARG_IMGCAT,
            ARG_VERBOSE,
        ),
    ),
    ActionCommand(
        name="backfill",
        help="Run subsections of a DAG for a specified date range",
        description=(
            "Run subsections of a DAG for a specified date range. If reset_dag_run option is used, "
            "backfill will first prompt users whether airflow should clear all the previous dag_run and "
            "task_instances within the backfill date range. If rerun_failed_tasks is used, backfill "
            "will auto re-run the previous failed task instances within the backfill date range"
        ),
        func=lazy_load_command("airflow.cli.commands.dag_command.dag_backfill"),
        args=(
            ARG_DAG_ID,
            ARG_TASK_REGEX,
            ARG_START_DATE,
            ARG_END_DATE,
            ARG_MARK_SUCCESS,
            ARG_LOCAL,
            ARG_DONOT_PICKLE,
            ARG_YES,
            ARG_CONTINUE_ON_FAILURES,
            ARG_DISABLE_RETRY,
            ARG_BF_IGNORE_DEPENDENCIES,
            ARG_BF_IGNORE_FIRST_DEPENDS_ON_PAST,
            ARG_SUBDIR,
            ARG_POOL,
            ARG_DELAY_ON_LIMIT,
            ARG_DRY_RUN,
            ARG_VERBOSE,
            ARG_CONF,
            ARG_RESET_DAG_RUN,
            ARG_RERUN_FAILED_TASKS,
            ARG_RUN_BACKWARDS,
            ARG_TREAT_DAG_AS_REGEX,
            ARG_TREAT_DAG_ID_AS_REGEX,
        ),
    ),
    ActionCommand(
        name="test",
        help="Execute one single DagRun",
        description=(
            "Execute one single DagRun for a given DAG and execution date.\n"
            "\n"
            "The --imgcat-dagrun option only works in iTerm.\n"
            "\n"
            "For more information, see: https://www.iterm2.com/documentation-images.html\n"
            "\n"
            "If --save-dagrun is used, then, after completing the backfill, saves the diagram "
            "for current DAG Run to the indicated file.\n"
            "The file format is determined by the file extension. "
            "For more information about supported format, "
            "see: https://www.graphviz.org/doc/info/output.html\n"
            "\n"
            "If you want to create a PNG file then you should execute the following command:\n"
            "airflow dags test <DAG_ID> <EXECUTION_DATE> --save-dagrun output.png\n"
            "\n"
            "If you want to create a DOT file then you should execute the following command:\n"
            "airflow dags test <DAG_ID> <EXECUTION_DATE> --save-dagrun output.dot\n"
        ),
        func=lazy_load_command("airflow.cli.commands.dag_command.dag_test"),
        args=(
            ARG_DAG_ID,
            ARG_EXECUTION_DATE_OPTIONAL,
            ARG_CONF,
            ARG_SUBDIR,
            ARG_SHOW_DAGRUN,
            ARG_IMGCAT_DAGRUN,
            ARG_SAVE_DAGRUN,
            ARG_USE_EXECUTOR,
            ARG_VERBOSE,
            ARG_MARK_SUCCESS_PATTERN,
        ),
    ),
    ActionCommand(
        name="reserialize",
        help="Reserialize all DAGs by parsing the DagBag files",
        description=(
            "Drop all serialized dags from the metadata DB. This will cause all DAGs to be reserialized "
            "from the DagBag folder. This can be helpful if your serialized DAGs get out of sync with the "
            "version of Airflow that you are running."
        ),
        func=lazy_load_command("airflow.cli.commands.dag_command.dag_reserialize"),
        args=(
            ARG_CLEAR_ONLY,
            ARG_SUBDIR,
            ARG_VERBOSE,
        ),
    ),
)
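
# Illustrative sketch (not part of the upstream module, and not Airflow's actual
# parser factory): command tuples such as DAGS_COMMANDS can be turned into an
# argparse parser by adding one subparser per ActionCommand and wiring in each Arg:
#
#     parser = DefaultHelpParser(prog="airflow dags")
#     subparsers = parser.add_subparsers(dest="subcommand")
#     for cmd in DAGS_COMMANDS:
#         sub = subparsers.add_parser(cmd.name, help=cmd.help, description=cmd.description)
#         for arg in cmd.args:
#             arg.add_to_parser(sub)
#         sub.set_defaults(func=cmd.func)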

TASKS_COMMANDS = (
    ActionCommand(
        name="list",
        help="List the tasks within a DAG",
        func=lazy_load_command("airflow.cli.commands.task_command.task_list"),
        args=(ARG_DAG_ID, ARG_TREE, ARG_SUBDIR, ARG_VERBOSE),
    ),
    ActionCommand(
        name="clear",
        help="Clear a set of task instances, as if they never ran",
        func=lazy_load_command("airflow.cli.commands.task_command.task_clear"),
        args=(
            ARG_DAG_ID,
            ARG_TASK_REGEX,
            ARG_START_DATE,
            ARG_END_DATE,
            ARG_SUBDIR,
            ARG_UPSTREAM,
            ARG_DOWNSTREAM,
            ARG_YES,
            ARG_ONLY_FAILED,
            ARG_ONLY_RUNNING,
            ARG_EXCLUDE_SUBDAGS,
            ARG_EXCLUDE_PARENTDAG,
            ARG_DAG_REGEX,
            ARG_VERBOSE,
        ),
    ),
    ActionCommand(
        name="state",
        help="Get the status of a task instance",
        func=lazy_load_command("airflow.cli.commands.task_command.task_state"),
        args=(
            ARG_DAG_ID,
            ARG_TASK_ID,
            ARG_EXECUTION_DATE_OR_RUN_ID,
            ARG_SUBDIR,
            ARG_VERBOSE,
            ARG_MAP_INDEX,
        ),
    ),
    ActionCommand(
        name="failed-deps",
        help="Returns the unmet dependencies for a task instance",
        description=(
            "Returns the unmet dependencies for a task instance from the perspective of the scheduler. "
            "In other words, why a task instance doesn't get scheduled and then queued by the scheduler, "
            "and then run by an executor."
        ),
        func=lazy_load_command("airflow.cli.commands.task_command.task_failed_deps"),
        args=(ARG_DAG_ID, ARG_TASK_ID, ARG_EXECUTION_DATE_OR_RUN_ID, ARG_SUBDIR, ARG_MAP_INDEX, ARG_VERBOSE),
    ),
    ActionCommand(
        name="render",
        help="Render a task instance's template(s)",
        func=lazy_load_command("airflow.cli.commands.task_command.task_render"),
        args=(
            ARG_DAG_ID,
            ARG_TASK_ID,
            ARG_EXECUTION_DATE_OR_RUN_ID,
            ARG_SUBDIR,
            ARG_VERBOSE,
            ARG_MAP_INDEX,
        ),
    ),
    ActionCommand(
        name="run",
        help="Run a single task instance",
        func=lazy_load_command("airflow.cli.commands.task_command.task_run"),
        args=(
            ARG_DAG_ID,
            ARG_TASK_ID,
            ARG_EXECUTION_DATE_OR_RUN_ID,
            ARG_SUBDIR,
            ARG_MARK_SUCCESS,
            ARG_FORCE,
            ARG_POOL,
            ARG_CFG_PATH,
            ARG_LOCAL,
            ARG_RAW,
            ARG_IGNORE_ALL_DEPENDENCIES,
            ARG_IGNORE_DEPENDENCIES,
            ARG_IGNORE_DEPENDS_ON_PAST,
            ARG_DEPENDS_ON_PAST,
            ARG_SHIP_DAG,
            ARG_PICKLE,
            ARG_JOB_ID,
            ARG_INTERACTIVE,
            ARG_SHUT_DOWN_LOGGING,
            ARG_MAP_INDEX,
            ARG_VERBOSE,
            ARG_READ_FROM_DB,
        ),
    ),
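    # NOTE: "tasks run" (above) is the low-level entrypoint for a single task instance; it is also
    # what executors typically invoke internally when launching tasks, which is why it exposes
    # flags such as --raw and --local.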
    ActionCommand(
        name="test",
        help="Test a task instance",
        description=(
            "Test a task instance. This will run a task without checking for dependencies or recording "
            "its state in the database"
        ),
        func=lazy_load_command("airflow.cli.commands.task_command.task_test"),
        args=(
            ARG_DAG_ID,
            ARG_TASK_ID,
            ARG_EXECUTION_DATE_OR_RUN_ID_OPTIONAL,
            ARG_SUBDIR,
            ARG_DRY_RUN,
            ARG_TASK_PARAMS,
            ARG_POST_MORTEM,
            ARG_ENV_VARS,
            ARG_MAP_INDEX,
            ARG_VERBOSE,
        ),
    ),
    ActionCommand(
        name="states-for-dag-run",
        help="Get the status of all task instances in a dag run",
        func=lazy_load_command("airflow.cli.commands.task_command.task_states_for_dag_run"),
        args=(ARG_DAG_ID, ARG_EXECUTION_DATE_OR_RUN_ID, ARG_OUTPUT, ARG_VERBOSE),
    ),
)
POOLS_COMMANDS = (
    ActionCommand(
        name="list",
        help="List pools",
        func=lazy_load_command("airflow.cli.commands.pool_command.pool_list"),
        args=(ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="get",
        help="Get pool size",
        func=lazy_load_command("airflow.cli.commands.pool_command.pool_get"),
        args=(ARG_POOL_NAME, ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="set",
        help="Configure pool",
        func=lazy_load_command("airflow.cli.commands.pool_command.pool_set"),
        args=(
            ARG_POOL_NAME,
            ARG_POOL_SLOTS,
            ARG_POOL_DESCRIPTION,
            ARG_POOL_INCLUDE_DEFERRED,
            ARG_OUTPUT,
            ARG_VERBOSE,
        ),
    ),
    ActionCommand(
        name="delete",
        help="Delete pool",
        func=lazy_load_command("airflow.cli.commands.pool_command.pool_delete"),
        args=(ARG_POOL_NAME, ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="import",
        help="Import pools",
        func=lazy_load_command("airflow.cli.commands.pool_command.pool_import"),
        args=(ARG_POOL_IMPORT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="export",
        help="Export all pools",
        func=lazy_load_command("airflow.cli.commands.pool_command.pool_export"),
        args=(ARG_POOL_EXPORT, ARG_VERBOSE),
    ),
)
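# Illustrative round-trip for the pool import/export commands above; the JSON layout shown is a
# sketch of the exported structure and may differ between Airflow versions:
#
#   airflow pools export /tmp/pools.json
#   # /tmp/pools.json roughly contains:
#   #   {"default_pool": {"slots": 128, "description": "Default pool", "include_deferred": false}}
#   airflow pools import /tmp/pools.json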
VARIABLES_COMMANDS = (
    ActionCommand(
        name="list",
        help="List variables",
        func=lazy_load_command("airflow.cli.commands.variable_command.variables_list"),
        args=(ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="get",
        help="Get variable",
        func=lazy_load_command("airflow.cli.commands.variable_command.variables_get"),
        args=(ARG_VAR, ARG_DESERIALIZE_JSON, ARG_DEFAULT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="set",
        help="Set variable",
        func=lazy_load_command("airflow.cli.commands.variable_command.variables_set"),
        args=(ARG_VAR, ARG_VAR_VALUE, ARG_VAR_DESCRIPTION, ARG_SERIALIZE_JSON, ARG_VERBOSE),
    ),
    ActionCommand(
        name="delete",
        help="Delete variable",
        func=lazy_load_command("airflow.cli.commands.variable_command.variables_delete"),
        args=(ARG_VAR, ARG_VERBOSE),
    ),
    ActionCommand(
        name="import",
        help="Import variables",
        func=lazy_load_command("airflow.cli.commands.variable_command.variables_import"),
        args=(ARG_VAR_IMPORT, ARG_VAR_ACTION_ON_EXISTING_KEY, ARG_VERBOSE),
    ),
    ActionCommand(
        name="export",
        help="Export all variables",
        description=(
            "All variables can be exported to STDOUT using the following command:\n"
            "airflow variables export -\n"
        ),
        func=lazy_load_command("airflow.cli.commands.variable_command.variables_export"),
        args=(ARG_VAR_EXPORT, ARG_VERBOSE),
    ),
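    # Illustrative round-trip using the commands above: dump all variables to a JSON file and load
    # them back (the file path is a placeholder):
    #
    #   airflow variables export /tmp/variables.json
    #   airflow variables import /tmp/variables.json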
)
DB_COMMANDS = (
    ActionCommand(
        name="init",
        help=(
            "Deprecated -- use `migrate` instead. "
            "To create default connections use `airflow connections create-default-connections`. "
            "Initialize the metadata database"
        ),
        func=lazy_load_command("airflow.cli.commands.db_command.initdb"),
        args=(ARG_VERBOSE,),
        hide=True,
    ),
    ActionCommand(
        name="check-migrations",
        help="Check if migrations have finished",
        description="Check if migrations have finished (or continually check until timeout)",
        func=lazy_load_command("airflow.cli.commands.db_command.check_migrations"),
        args=(ARG_MIGRATION_TIMEOUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="reset",
        help="Burn down and rebuild the metadata database",
        func=lazy_load_command("airflow.cli.commands.db_command.resetdb"),
        args=(ARG_YES, ARG_DB_SKIP_INIT, ARG_DB_USE_MIGRATION_FILES, ARG_VERBOSE),
    ),
    ActionCommand(
        name="upgrade",
        help="Deprecated -- use `migrate` instead. Upgrade the metadata database to the latest version",
        description=(
            "Upgrade the schema of the metadata database. "
            "To print but not execute commands, use option ``--show-sql-only``. "
            "If using options ``--from-revision`` or ``--from-version``, you must also use "
            "``--show-sql-only``, because if actually *running* migrations, we should only "
            "migrate from the *current* Alembic revision."
        ),
        func=lazy_load_command("airflow.cli.commands.db_command.upgradedb"),
        args=(
            ARG_DB_REVISION__UPGRADE,
            ARG_DB_VERSION__UPGRADE,
            ARG_DB_SQL_ONLY,
            ARG_DB_FROM_REVISION,
            ARG_DB_FROM_VERSION,
            ARG_DB_RESERIALIZE_DAGS,
            ARG_DB_USE_MIGRATION_FILES,
            ARG_VERBOSE,
        ),
        hide=True,
    ),
    ActionCommand(
        name="migrate",
        help="Migrates the metadata database to the latest version",
        description=(
            "Migrate the schema of the metadata database. "
            "Create the database if it does not exist. "
            "To print but not execute commands, use option ``--show-sql-only``. "
            "If using options ``--from-revision`` or ``--from-version``, you must also use "
            "``--show-sql-only``, because if actually *running* migrations, we should only "
            "migrate from the *current* Alembic revision."
        ),
        func=lazy_load_command("airflow.cli.commands.db_command.migratedb"),
        args=(
            ARG_DB_REVISION__UPGRADE,
            ARG_DB_VERSION__UPGRADE,
            ARG_DB_SQL_ONLY,
            ARG_DB_FROM_REVISION,
            ARG_DB_FROM_VERSION,
            ARG_DB_RESERIALIZE_DAGS,
            ARG_DB_USE_MIGRATION_FILES,
            ARG_VERBOSE,
        ),
    ),
    ActionCommand(
        name="downgrade",
        help="Downgrade the schema of the metadata database.",
        description=(
            "Downgrade the schema of the metadata database. "
            "You must provide either `--to-revision` or `--to-version`. "
            "To print but not execute commands, use option `--show-sql-only`. "
            "If using options `--from-revision` or `--from-version`, you must also use `--show-sql-only`, "
            "because if actually *running* migrations, we should only migrate from the *current* Alembic "
            "revision."
        ),
        func=lazy_load_command("airflow.cli.commands.db_command.downgrade"),
        args=(
            ARG_DB_REVISION__DOWNGRADE,
            ARG_DB_VERSION__DOWNGRADE,
            ARG_DB_SQL_ONLY,
            ARG_YES,
            ARG_DB_FROM_REVISION,
            ARG_DB_FROM_VERSION,
            ARG_VERBOSE,
        ),
    ),
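    # Example based on the description above: print, but do not execute, the SQL required to
    # downgrade to a given Airflow version (the version number is a placeholder):
    #
    #   airflow db downgrade --to-version 2.7.0 --show-sql-only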
    ActionCommand(
        name="shell",
        help="Runs a shell to access the database",
        func=lazy_load_command("airflow.cli.commands.db_command.shell"),
        args=(ARG_VERBOSE,),
    ),
    ActionCommand(
        name="check",
        help="Check if the database can be reached",
        func=lazy_load_command("airflow.cli.commands.db_command.check"),
        args=(ARG_VERBOSE, ARG_DB_RETRY, ARG_DB_RETRY_DELAY),
    ),
    ActionCommand(
        name="clean",
        help="Purge old records in metastore tables",
        func=lazy_load_command("airflow.cli.commands.db_command.cleanup_tables"),
        args=(
            ARG_DB_TABLES,
            ARG_DB_DRY_RUN,
            ARG_DB_CLEANUP_TIMESTAMP,
            ARG_VERBOSE,
            ARG_YES,
            ARG_DB_SKIP_ARCHIVE,
        ),
    ),
    ActionCommand(
        name="export-archived",
        help="Export archived data from the archive tables",
        func=lazy_load_command("airflow.cli.commands.db_command.export_archived"),
        args=(
            ARG_DB_EXPORT_FORMAT,
            ARG_DB_OUTPUT_PATH,
            ARG_DB_DROP_ARCHIVES,
            ARG_DB_TABLES,
            ARG_YES,
        ),
    ),
    ActionCommand(
        name="drop-archived",
        help="Drop archived tables created through the db clean command",
        func=lazy_load_command("airflow.cli.commands.db_command.drop_archived"),
        args=(ARG_DB_TABLES, ARG_YES),
    ),
)
CONNECTIONS_COMMANDS = (
    ActionCommand(
        name="get",
        help="Get a connection",
        func=lazy_load_command("airflow.cli.commands.connection_command.connections_get"),
        args=(ARG_CONN_ID, ARG_COLOR, ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="list",
        help="List connections",
        func=lazy_load_command("airflow.cli.commands.connection_command.connections_list"),
        args=(ARG_OUTPUT, ARG_VERBOSE, ARG_CONN_ID_FILTER),
    ),
    ActionCommand(
        name="add",
        help="Add a connection",
        func=lazy_load_command("airflow.cli.commands.connection_command.connections_add"),
        args=(ARG_CONN_ID, ARG_CONN_URI, ARG_CONN_JSON, ARG_CONN_EXTRA, *ALTERNATIVE_CONN_SPECS_ARGS),
    ),
    ActionCommand(
        name="delete",
        help="Delete a connection",
        func=lazy_load_command("airflow.cli.commands.connection_command.connections_delete"),
        args=(ARG_CONN_ID, ARG_COLOR, ARG_VERBOSE),
    ),
    ActionCommand(
        name="export",
        help="Export all connections",
        description=(
            "All connections can be exported to STDOUT using the following command:\n"
            "airflow connections export -\n"
            "The file format can be determined by the provided file extension. E.g., the following "
            "command will export the connections in JSON format:\n"
            "airflow connections export /tmp/connections.json\n"
            "The --file-format parameter can be used to control the file format. E.g., "
            "the default format is JSON in STDOUT mode, which can be overridden using:\n"
            "airflow connections export - --file-format yaml\n"
            "The --file-format parameter can also be used for the files, for example:\n"
            "airflow connections export /tmp/connections --file-format json.\n"
            "When exporting in `env` file format, you control whether URI format or JSON format "
            "is used to serialize the connection by passing `uri` or `json` with option "
            "`--serialization-format`.\n"
        ),
        func=lazy_load_command("airflow.cli.commands.connection_command.connections_export"),
        args=(
            ARG_CONN_EXPORT,
            ARG_CONN_EXPORT_FORMAT,
            ARG_CONN_EXPORT_FILE_FORMAT,
            ARG_CONN_SERIALIZATION_FORMAT,
            ARG_VERBOSE,
        ),
    ),
    ActionCommand(
        name="import",
        help="Import connections from a file",
        description=(
            "Connections can be imported from the output of the export command.\n"
            "The filetype must be json, yaml or env and will be automatically inferred."
        ),
        func=lazy_load_command("airflow.cli.commands.connection_command.connections_import"),
        args=(
            ARG_CONN_IMPORT,
            ARG_CONN_OVERWRITE,
            ARG_VERBOSE,
        ),
    ),
    ActionCommand(
        name="test",
        help="Test a connection",
        func=lazy_load_command("airflow.cli.commands.connection_command.connections_test"),
        args=(ARG_CONN_ID, ARG_VERBOSE),
    ),
    ActionCommand(
        name="create-default-connections",
        help="Creates all the default connections from all the providers",
        func=lazy_load_command("airflow.cli.commands.connection_command.create_default_connections"),
        # func=lazy_load_command("airflow.utils.db.create_default_connections"),
        args=(ARG_VERBOSE,),
    ),
)
PROVIDERS_COMMANDS = (
    ActionCommand(
        name="list",
        help="List installed providers",
        func=lazy_load_command("airflow.cli.commands.provider_command.providers_list"),
        args=(ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="get",
        help="Get detailed information about a provider",
        func=lazy_load_command("airflow.cli.commands.provider_command.provider_get"),
        args=(ARG_OUTPUT, ARG_VERBOSE, ARG_FULL, ARG_COLOR, ARG_PROVIDER_NAME),
    ),
    ActionCommand(
        name="links",
        help="List extra links registered by the providers",
        func=lazy_load_command("airflow.cli.commands.provider_command.extra_links_list"),
        args=(ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="widgets",
        help="Get information about registered connection form widgets",
        func=lazy_load_command("airflow.cli.commands.provider_command.connection_form_widget_list"),
        args=(
            ARG_OUTPUT,
            ARG_VERBOSE,
        ),
    ),
    ActionCommand(
        name="hooks",
        help="List registered provider hooks",
        func=lazy_load_command("airflow.cli.commands.provider_command.hooks_list"),
        args=(ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="triggers",
        help="List registered provider triggers",
        func=lazy_load_command("airflow.cli.commands.provider_command.triggers_list"),
        args=(ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="behaviours",
        help="Get information about registered connection types with custom behaviours",
        func=lazy_load_command("airflow.cli.commands.provider_command.connection_field_behaviours"),
        args=(ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="logging",
        help="Get information about task logging handlers provided",
        func=lazy_load_command("airflow.cli.commands.provider_command.logging_list"),
        args=(ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="secrets",
        help="Get information about secrets backends provided",
        func=lazy_load_command("airflow.cli.commands.provider_command.secrets_backends_list"),
        args=(ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="auth",
        help="Get information about API auth backends provided",
        func=lazy_load_command("airflow.cli.commands.provider_command.auth_backend_list"),
        args=(ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="executors",
        help="Get information about executors provided",
        func=lazy_load_command("airflow.cli.commands.provider_command.executors_list"),
        args=(ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="notifications",
        help="Get information about notifications provided",
        func=lazy_load_command("airflow.cli.commands.provider_command.notifications_list"),
        args=(ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="configs",
        help="Get information about provider configuration",
        func=lazy_load_command("airflow.cli.commands.provider_command.config_list"),
        args=(ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="lazy-loaded",
        help="Checks that provider configuration is lazy loaded",
        func=lazy_load_command("airflow.cli.commands.provider_command.lazy_loaded"),
        args=(ARG_VERBOSE,),
    ),
    ActionCommand(
        name="auth-managers",
        help="Get information about auth managers provided",
        func=lazy_load_command("airflow.cli.commands.provider_command.auth_managers_list"),
        args=(ARG_OUTPUT, ARG_VERBOSE),
    ),
)
CONFIG_COMMANDS = (
    ActionCommand(
        name="get-value",
        help="Print the value of the configuration",
        func=lazy_load_command("airflow.cli.commands.config_command.get_value"),
        args=(
            ARG_SECTION,
            ARG_OPTION,
            ARG_VERBOSE,
        ),
    ),
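    # Illustrative use of the get-value command above, e.g. to print the configured executor:
    #
    #   airflow config get-value core executor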
    ActionCommand(
        name="list",
        help="List options for the configuration",
        func=lazy_load_command("airflow.cli.commands.config_command.show_config"),
        args=(
            ARG_OPTIONAL_SECTION,
            ARG_COLOR,
            ARG_INCLUDE_DESCRIPTIONS,
            ARG_INCLUDE_EXAMPLES,
            ARG_INCLUDE_SOURCES,
            ARG_INCLUDE_ENV_VARS,
            ARG_COMMENT_OUT_EVERYTHING,
            ARG_EXCLUDE_PROVIDERS,
            ARG_DEFAULTS,
            ARG_VERBOSE,
        ),
    ),
)
KUBERNETES_COMMANDS = (
    ActionCommand(
        name="cleanup-pods",
        help=(
            "Clean up Kubernetes pods "
            "(created by KubernetesExecutor/KubernetesPodOperator) "
            "in evicted/failed/succeeded/pending states"
        ),
        func=lazy_load_command("airflow.providers.cncf.kubernetes.cli.kubernetes_command.cleanup_pods"),
        args=(ARG_NAMESPACE, ARG_MIN_PENDING_MINUTES, ARG_VERBOSE),
    ),
    ActionCommand(
        name="generate-dag-yaml",
        help="Generate YAML files for all tasks in a DAG. Useful for debugging tasks without "
        "launching into a cluster",
        func=lazy_load_command("airflow.providers.cncf.kubernetes.cli.kubernetes_command.generate_pod_yaml"),
        args=(ARG_DAG_ID, ARG_EXECUTION_DATE, ARG_SUBDIR, ARG_OUTPUT_PATH, ARG_VERBOSE),
    ),
)
JOBS_COMMANDS = (
    ActionCommand(
        name="check",
        help="Checks if job(s) are still alive",
        func=lazy_load_command("airflow.cli.commands.jobs_command.check"),
        args=(
            ARG_JOB_TYPE_FILTER,
            ARG_JOB_HOSTNAME_FILTER,
            ARG_JOB_HOSTNAME_CALLABLE_FILTER,
            ARG_JOB_LIMIT,
            ARG_ALLOW_MULTIPLE,
            ARG_VERBOSE,
        ),
        epilog=(
            "examples:\n"
            "To check if the local scheduler is still working properly, run:\n"
            "\n"
            "    $ airflow jobs check --job-type SchedulerJob --local\n"
            "\n"
            "To check if any scheduler is running when you are using high availability, run:\n"
            "\n"
            "    $ airflow jobs check --job-type SchedulerJob --allow-multiple --limit 100"
        ),
    ),
)
core_commands: list[CLICommand] = [
    GroupCommand(
        name="dags",
        help="Manage DAGs",
        subcommands=DAGS_COMMANDS,
    ),
    GroupCommand(
        name="tasks",
        help="Manage tasks",
        subcommands=TASKS_COMMANDS,
    ),
    GroupCommand(
        name="pools",
        help="Manage pools",
        subcommands=POOLS_COMMANDS,
    ),
    GroupCommand(
        name="variables",
        help="Manage variables",
        subcommands=VARIABLES_COMMANDS,
    ),
    GroupCommand(
        name="jobs",
        help="Manage jobs",
        subcommands=JOBS_COMMANDS,
    ),
    GroupCommand(
        name="db",
        help="Database operations",
        subcommands=DB_COMMANDS,
    ),
    ActionCommand(
        name="kerberos",
        help="Start a Kerberos ticket renewer",
        func=lazy_load_command("airflow.cli.commands.kerberos_command.kerberos"),
        args=(
            ARG_PRINCIPAL,
            ARG_KEYTAB,
            ARG_PID,
            ARG_DAEMON,
            ARG_KERBEROS_ONE_TIME_MODE,
            ARG_STDOUT,
            ARG_STDERR,
            ARG_LOG_FILE,
            ARG_VERBOSE,
        ),
    ),
    ActionCommand(
        name="webserver",
        help="Start an Airflow webserver instance",
        func=lazy_load_command("airflow.cli.commands.webserver_command.webserver"),
        args=(
            ARG_PORT,
            ARG_WORKERS,
            ARG_WORKERCLASS,
            ARG_WORKER_TIMEOUT,
            ARG_HOSTNAME,
            ARG_PID,
            ARG_DAEMON,
            ARG_STDOUT,
            ARG_STDERR,
            ARG_ACCESS_LOGFILE,
            ARG_ERROR_LOGFILE,
            ARG_ACCESS_LOGFORMAT,
            ARG_LOG_FILE,
            ARG_SSL_CERT,
            ARG_SSL_KEY,
            ARG_DEBUG,
        ),
    ),
    ActionCommand(
        name="scheduler",
        help="Start a scheduler instance",
        func=lazy_load_command("airflow.cli.commands.scheduler_command.scheduler"),
        args=(
            ARG_SUBDIR,
            ARG_NUM_RUNS,
            ARG_DO_PICKLE,
            ARG_PID,
            ARG_DAEMON,
            ARG_STDOUT,
            ARG_STDERR,
            ARG_LOG_FILE,
            ARG_SKIP_SERVE_LOGS,
            ARG_VERBOSE,
        ),
        epilog=(
            "Signals:\n"
            "\n"
            "  - SIGUSR2: Dump a snapshot of task state being tracked by the executor.\n"
            "\n"
            "    Example:\n"
            '        pkill -f -USR2 "airflow scheduler"'
        ),
    ),
    ActionCommand(
        name="triggerer",
        help="Start a triggerer instance",
        func=lazy_load_command("airflow.cli.commands.triggerer_command.triggerer"),
        args=(
            ARG_PID,
            ARG_DAEMON,
            ARG_STDOUT,
            ARG_STDERR,
            ARG_LOG_FILE,
            ARG_CAPACITY,
            ARG_VERBOSE,
            ARG_SKIP_SERVE_LOGS,
        ),
    ),
    ActionCommand(
        name="dag-processor",
        help="Start a standalone Dag Processor instance",
        func=lazy_load_command("airflow.cli.commands.dag_processor_command.dag_processor"),
        args=(
            ARG_PID,
            ARG_DAEMON,
            ARG_SUBDIR,
            ARG_NUM_RUNS,
            ARG_DO_PICKLE,
            ARG_STDOUT,
            ARG_STDERR,
            ARG_LOG_FILE,
            ARG_VERBOSE,
        ),
    ),
    ActionCommand(
        name="version",
        help="Show the version",
        func=lazy_load_command("airflow.cli.commands.version_command.version"),
        args=(),
    ),
    ActionCommand(
        name="cheat-sheet",
        help="Display cheat sheet",
        func=lazy_load_command("airflow.cli.commands.cheat_sheet_command.cheat_sheet"),
        args=(ARG_VERBOSE,),
    ),
    GroupCommand(
        name="connections",
        help="Manage connections",
        subcommands=CONNECTIONS_COMMANDS,
    ),
    GroupCommand(
        name="providers",
        help="Display providers",
        subcommands=PROVIDERS_COMMANDS,
    ),
    ActionCommand(
        name="rotate-fernet-key",
        func=lazy_load_command("airflow.cli.commands.rotate_fernet_key_command.rotate_fernet_key"),
        help="Rotate encrypted connection credentials and variables",
        description=(
            "Rotate all encrypted connection credentials and variables; see "
            "https://airflow.apache.org/docs/apache-airflow/stable/howto/secure-connections.html"
            "#rotating-encryption-keys"
        ),
        args=(),
    ),
    GroupCommand(name="config", help="View configuration", subcommands=CONFIG_COMMANDS),
    ActionCommand(
        name="info",
        help="Show information about current Airflow and environment",
        func=lazy_load_command("airflow.cli.commands.info_command.show_info"),
        args=(
            ARG_ANONYMIZE,
            ARG_FILE_IO,
            ARG_VERBOSE,
            ARG_OUTPUT,
        ),
    ),
    ActionCommand(
        name="plugins",
        help="Dump information about loaded plugins",
        func=lazy_load_command("airflow.cli.commands.plugins_command.dump_plugins"),
        args=(ARG_OUTPUT, ARG_VERBOSE),
    ),
    ActionCommand(
        name="standalone",
        help="Run an all-in-one copy of Airflow",
        func=lazy_load_command("airflow.cli.commands.standalone_command.standalone"),
        args=(),
    ),
]
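# A minimal sketch (not part of this module) of how these definitions are consumed: the CLI builds
# its argparse parser from ``core_commands`` elsewhere in the package, and a command's
# implementation module is only imported when the command is actually invoked, because each
# ``func`` is wrapped with ``lazy_load_command``.
#
#   from airflow.cli import cli_parser
#
#   parser = cli_parser.get_parser()
#   args = parser.parse_args(["dags", "list"])
#   args.func(args)  # imports airflow.cli.commands.dag_command only at this point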
if _ENABLE_AIP_44:
    core_commands.append(
        ActionCommand(
            name="internal-api",
            help="Start an Airflow Internal API instance",
            func=lazy_load_command("airflow.cli.commands.internal_api_command.internal_api"),
            args=(
                ARG_INTERNAL_API_PORT,
                ARG_INTERNAL_API_WORKERS,
                ARG_INTERNAL_API_WORKERCLASS,
                ARG_INTERNAL_API_WORKER_TIMEOUT,
                ARG_INTERNAL_API_HOSTNAME,
                ARG_PID,
                ARG_DAEMON,
                ARG_STDOUT,
                ARG_STDERR,
                ARG_INTERNAL_API_ACCESS_LOGFILE,
                ARG_INTERNAL_API_ERROR_LOGFILE,
                ARG_INTERNAL_API_ACCESS_LOGFORMAT,
                ARG_LOG_FILE,
                ARG_SSL_CERT,
                ARG_SSL_KEY,
                ARG_DEBUG,
            ),
        ),
    )
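# The "internal-api" command above is only registered when AIP-44 (the experimental Airflow
# Internal API used for database isolation) is enabled via _ENABLE_AIP_44.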


def _remove_dag_id_opt(command: ActionCommand):
    cmd = command._asdict()
    cmd["args"] = (arg for arg in command.args if arg is not ARG_DAG_ID)
    return ActionCommand(**cmd)
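

# The per-DAG CLI below (used when a DAG file invokes its own CLI, e.g. via DAG.cli()) reuses a
# subset of the commands above with the positional DAG id argument stripped by
# _remove_dag_id_opt(), since the DAG id is implied by the file being run.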
dag_cli_commands: list[CLICommand] = [
    GroupCommand(
        name="dags",
        help="Manage DAGs",
        subcommands=[
            _remove_dag_id_opt(sp)
            for sp in DAGS_COMMANDS
            if sp.name in ["backfill", "list-runs", "pause", "unpause", "test"]
        ],
    ),
    GroupCommand(
        name="tasks",
        help="Manage tasks",
        subcommands=[_remove_dag_id_opt(sp) for sp in TASKS_COMMANDS if sp.name in ["list", "test", "run"]],
    ),
]
DAG_CLI_DICT: dict[str, CLICommand] = {sp.name: sp for sp in dag_cli_commands}
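# DAG_CLI_DICT maps each per-DAG group name ("dags", "tasks") to its GroupCommand so callers
# building the per-DAG parser can look a group up by name.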