From 05d8cc2db1082af4d1f437709c3d6dd881691806 Mon Sep 17 00:00:00 2001 From: eladkal <45845474+eladkal@users.noreply.github.com> Date: Mon, 19 Dec 2022 21:49:59 +0200 Subject: [PATCH] Add documentation for packages - 2022-12-14 (#711) * Add documentation for packages - 2022-12-14 * fixup! Add documentation for packages - 2022-12-14 * fixup! fixup! Add documentation for packages - 2022-12-14 * fixup! fixup! fixup! Add documentation for packages - 2022-12-14 * Revert submodule update Co-authored-by: Jarek Potiuk Co-authored-by: Ash Berlin-Taylor --- .../5.0.0/.buildinfo | 4 + .../apache/hive/hooks/hive/index.html | 1558 +++ .../providers/apache/hive/hooks/index.html | 853 ++ .../airflow/providers/apache/hive/index.html | 874 ++ .../apache/hive/operators/hive/index.html | 1004 ++ .../hive/operators/hive_stats/index.html | 947 ++ .../apache/hive/operators/index.html | 854 ++ .../hive/sensors/hive_partition/index.html | 923 ++ .../providers/apache/hive/sensors/index.html | 855 ++ .../sensors/metastore_partition/index.html | 923 ++ .../sensors/named_hive_partition/index.html | 941 ++ .../hive/transfers/hive_to_mysql/index.html | 949 ++ .../hive/transfers/hive_to_samba/index.html | 928 ++ .../apache/hive/transfers/index.html | 858 ++ .../hive/transfers/mssql_to_hive/index.html | 963 ++ .../hive/transfers/mysql_to_hive/index.html | 968 ++ .../hive/transfers/s3_to_hive/index.html | 974 ++ .../hive/transfers/vertica_to_hive/index.html | 961 ++ .../hive/example_twitter_dag/index.html | 966 ++ .../system/providers/apache/hive/index.html | 851 ++ .../providers/apache/hive/hooks/hive.html | 1843 +++ .../providers/apache/hive/operators/hive.html | 988 ++ .../apache/hive/operators/hive_stats.html | 997 ++ .../apache/hive/sensors/hive_partition.html | 889 ++ .../hive/sensors/metastore_partition.html | 898 ++ .../hive/sensors/named_hive_partition.html | 918 ++ .../apache/hive/transfers/hive_to_mysql.html | 938 ++ .../apache/hive/transfers/hive_to_samba.html | 885 ++ .../apache/hive/transfers/mssql_to_hive.html | 944 ++ .../apache/hive/transfers/mysql_to_hive.html | 973 ++ .../apache/hive/transfers/s3_to_hive.html | 1078 ++ .../hive/transfers/vertica_to_hive.html | 947 ++ .../5.0.0/_modules/index.html | 822 ++ .../apache/hive/example_twitter_dag.html | 977 ++ .../apache/hive/hooks/hive/index.rst.txt | 446 + .../providers/apache/hive/hooks/index.rst.txt | 15 + .../providers/apache/hive/index.rst.txt | 18 + .../apache/hive/operators/hive/index.rst.txt | 107 + .../hive/operators/hive_stats/index.rst.txt | 72 + .../apache/hive/operators/index.rst.txt | 16 + .../hive/sensors/hive_partition/index.rst.txt | 54 + .../apache/hive/sensors/index.rst.txt | 17 + .../sensors/metastore_partition/index.rst.txt | 53 + .../named_hive_partition/index.rst.txt | 62 + .../transfers/hive_to_mysql/index.rst.txt | 80 + .../transfers/hive_to_samba/index.rst.txt | 61 + .../apache/hive/transfers/index.rst.txt | 20 + .../transfers/mssql_to_hive/index.rst.txt | 90 + .../transfers/mysql_to_hive/index.rst.txt | 95 + .../hive/transfers/s3_to_hive/index.rst.txt | 100 + .../transfers/vertica_to_hive/index.rst.txt | 90 + .../hive/example_twitter_dag/index.rst.txt | 83 + .../providers/apache/hive/index.rst.txt | 15 + .../5.0.0/_sources/commits.rst.txt | 413 + .../_sources/connections/hive_cli.rst.txt | 91 + .../connections/hive_metastore.rst.txt | 69 + .../_sources/connections/hiveserver2.rst.txt | 78 + .../5.0.0/_sources/connections/index.rst.txt | 25 + .../5.0.0/_sources/index.rst.txt | 132 + .../installing-providers-from-sources.rst.txt | 18 
+ .../5.0.0/_sources/operators.rst.txt | 40 + .../_static/_gen/css/main-custom.min.css | 1 + .../5.0.0/_static/_gen/css/main.min.css | 7 + .../5.0.0/_static/_gen/js/docs.js | 1 + .../_sphinx_javascript_frameworks_compat.js | 134 + .../5.0.0/_static/basic.css | 899 ++ .../5.0.0/_static/check-solid.svg | 4 + .../5.0.0/_static/clipboard.min.js | 7 + .../5.0.0/_static/copy-button.svg | 5 + .../5.0.0/_static/copybutton.css | 94 + .../5.0.0/_static/copybutton.js | 248 + .../5.0.0/_static/copybutton_funcs.js | 73 + .../5.0.0/_static/doctools.js | 156 + .../5.0.0/_static/documentation_options.js | 14 + .../5.0.0/_static/file.png | Bin 0 -> 286 bytes .../5.0.0/_static/graphviz.css | 19 + .../5.0.0/_static/jquery-3.6.0.js | 10881 ++++++++++++++++ .../5.0.0/_static/jquery.js | 2 + .../5.0.0/_static/js/globaltoc.js | 24 + .../5.0.0/_static/language_data.js | 199 + .../5.0.0/_static/minus.png | Bin 0 -> 90 bytes .../5.0.0/_static/pin_32.png | Bin 0 -> 1201 bytes .../5.0.0/_static/plus.png | Bin 0 -> 90 bytes .../5.0.0/_static/pygments.css | 74 + .../5.0.0/_static/searchtools.js | 566 + .../5.0.0/_static/sphinx_highlight.js | 144 + .../5.0.0/_static/underscore-1.13.1.js | 2042 +++ .../5.0.0/_static/underscore.js | 6 + .../5.0.0/commits.html | 1959 +++ .../5.0.0/connections/hive_cli.html | 903 ++ .../5.0.0/connections/hive_metastore.html | 882 ++ .../5.0.0/connections/hiveserver2.html | 892 ++ .../5.0.0/connections/index.html | 849 ++ .../5.0.0/genindex.html | 1423 ++ .../5.0.0/index.html | 1339 ++ .../installing-providers-from-sources.html | 948 ++ .../5.0.0/objects.inv | Bin 0 -> 1934 bytes .../5.0.0/operators.html | 872 ++ .../5.0.0/py-modindex.html | 934 ++ .../5.0.0/search.html | 861 ++ .../5.0.0/searchindex.js | 1 + .../stable.txt | 2 +- .../5.0.1/.buildinfo | 4 + .../microsoft/azure/hooks/adx/index.html | 997 ++ .../microsoft/azure/hooks/asb/index.html | 1127 ++ .../azure/hooks/base_azure/index.html | 960 ++ .../microsoft/azure/hooks/batch/index.html | 1103 ++ .../azure/hooks/container_instance/index.html | 1072 ++ .../azure/hooks/container_registry/index.html | 941 ++ .../azure/hooks/container_volume/index.html | 961 ++ .../microsoft/azure/hooks/cosmos/index.html | 1126 ++ .../azure/hooks/data_factory/index.html | 1917 +++ .../azure/hooks/data_lake/index.html | 1067 ++ .../azure/hooks/fileshare/index.html | 1227 ++ .../microsoft/azure/hooks/index.html | 869 ++ .../microsoft/azure/hooks/synapse/index.html | 1111 ++ .../microsoft/azure/hooks/wasb/index.html | 1218 ++ .../providers/microsoft/azure/index.html | 913 ++ .../providers/microsoft/azure/log/index.html | 857 ++ .../azure/log/wasb_task_handler/index.html | 977 ++ .../microsoft/azure/operators/adls/index.html | 996 ++ .../microsoft/azure/operators/adx/index.html | 942 ++ .../microsoft/azure/operators/asb/index.html | 1499 +++ .../azure/operators/batch/index.html | 1014 ++ .../operators/container_instances/index.html | 1058 ++ .../azure/operators/cosmos/index.html | 924 ++ .../azure/operators/data_factory/index.html | 1023 ++ .../microsoft/azure/operators/index.html | 865 ++ .../azure/operators/synapse/index.html | 946 ++ .../operators/wasb_delete_blob/index.html | 919 ++ .../microsoft/azure/secrets/index.html | 857 ++ .../azure/secrets/key_vault/index.html | 1019 ++ .../microsoft/azure/sensors/cosmos/index.html | 924 ++ .../azure/sensors/data_factory/index.html | 920 ++ .../microsoft/azure/sensors/index.html | 859 ++ .../microsoft/azure/sensors/wasb/index.html | 958 ++ .../transfers/azure_blob_to_gcs/index.html | 936 ++ 
.../microsoft/azure/transfers/index.html | 861 ++ .../azure/transfers/local_to_adls/index.html | 954 ++ .../azure/transfers/local_to_wasb/index.html | 917 ++ .../oracle_to_azure_data_lake/index.html | 936 ++ .../azure/transfers/sftp_to_wasb/index.html | 1058 ++ .../microsoft/azure/utils/index.html | 881 ++ .../azure/example_adf_run_pipeline/index.html | 882 ++ .../azure/example_adls_delete/index.html | 896 ++ .../example_azure_blob_to_gcs/index.html | 917 ++ .../index.html | 883 ++ .../azure/example_azure_cosmosdb/index.html | 887 ++ .../example_azure_service_bus/index.html | 917 ++ .../azure/example_azure_synapse/index.html | 896 ++ .../azure/example_fileshare/index.html | 952 ++ .../azure/example_local_to_adls/index.html | 896 ++ .../azure/example_local_to_wasb/index.html | 889 ++ .../azure/example_sftp_to_wasb/index.html | 1001 ++ .../providers/microsoft/azure/index.html | 865 ++ .../providers/microsoft/azure/hooks/adx.html | 1021 ++ .../providers/microsoft/azure/hooks/asb.html | 1065 ++ .../microsoft/azure/hooks/base_azure.html | 935 ++ .../microsoft/azure/hooks/batch.html | 1206 ++ .../azure/hooks/container_instance.html | 962 ++ .../azure/hooks/container_registry.html | 881 ++ .../azure/hooks/container_volume.html | 923 ++ .../microsoft/azure/hooks/cosmos.html | 1186 ++ .../microsoft/azure/hooks/data_factory.html | 1856 +++ .../microsoft/azure/hooks/data_lake.html | 1047 ++ .../microsoft/azure/hooks/fileshare.html | 1142 ++ .../microsoft/azure/hooks/synapse.html | 1015 ++ .../providers/microsoft/azure/hooks/wasb.html | 1316 ++ .../azure/log/wasb_task_handler.html | 1000 ++ .../microsoft/azure/operators/adls.html | 915 ++ .../microsoft/azure/operators/adx.html | 894 ++ .../microsoft/azure/operators/asb.html | 1449 ++ .../microsoft/azure/operators/batch.html | 1145 ++ .../azure/operators/container_instances.html | 1196 ++ .../microsoft/azure/operators/cosmos.html | 887 ++ .../azure/operators/data_factory.html | 1022 ++ .../microsoft/azure/operators/synapse.html | 922 ++ .../azure/operators/wasb_delete_blob.html | 887 ++ .../microsoft/azure/secrets/key_vault.html | 1006 ++ .../microsoft/azure/sensors/cosmos.html | 886 ++ .../microsoft/azure/sensors/data_factory.html | 895 ++ .../microsoft/azure/sensors/wasb.html | 913 ++ .../azure/transfers/azure_blob_to_gcs.html | 943 ++ .../azure/transfers/local_to_adls.html | 931 ++ .../azure/transfers/local_to_wasb.html | 896 ++ .../transfers/oracle_to_azure_data_lake.html | 929 ++ .../azure/transfers/sftp_to_wasb.html | 1017 ++ .../providers/microsoft/azure/utils.html | 889 ++ .../5.0.1/_modules/index.html | 857 ++ .../azure/example_adf_run_pipeline.html | 907 ++ .../microsoft/azure/example_adls_delete.html | 874 ++ .../azure/example_azure_blob_to_gcs.html | 889 ++ .../example_azure_container_instances.html | 874 ++ .../azure/example_azure_cosmosdb.html | 890 ++ .../azure/example_azure_service_bus.html | 993 ++ .../azure/example_azure_synapse.html | 888 ++ .../microsoft/azure/example_fileshare.html | 882 ++ .../azure/example_local_to_adls.html | 874 ++ .../azure/example_local_to_wasb.html | 870 ++ .../microsoft/azure/example_sftp_to_wasb.html | 902 ++ .../microsoft/azure/hooks/adx/index.rst.txt | 115 + .../microsoft/azure/hooks/asb/index.rst.txt | 164 + .../azure/hooks/base_azure/index.rst.txt | 70 + .../microsoft/azure/hooks/batch/index.rst.txt | 169 + .../hooks/container_instance/index.rst.txt | 123 + .../hooks/container_registry/index.rst.txt | 66 + .../hooks/container_volume/index.rst.txt | 72 + .../azure/hooks/cosmos/index.rst.txt | 166 + 
.../azure/hooks/data_factory/index.rst.txt | 570 + .../azure/hooks/data_lake/index.rst.txt | 148 + .../azure/hooks/fileshare/index.rst.txt | 210 + .../microsoft/azure/hooks/index.rst.txt | 27 + .../azure/hooks/synapse/index.rst.txt | 168 + .../microsoft/azure/hooks/wasb/index.rst.txt | 227 + .../providers/microsoft/azure/index.rst.txt | 29 + .../microsoft/azure/log/index.rst.txt | 15 + .../azure/log/wasb_task_handler/index.rst.txt | 74 + .../azure/operators/adls/index.rst.txt | 96 + .../azure/operators/adx/index.rst.txt | 65 + .../azure/operators/asb/index.rst.txt | 417 + .../azure/operators/batch/index.rst.txt | 124 + .../container_instances/index.rst.txt | 151 + .../azure/operators/cosmos/index.rst.txt | 51 + .../operators/data_factory/index.rst.txt | 118 + .../microsoft/azure/operators/index.rst.txt | 23 + .../azure/operators/synapse/index.rst.txt | 70 + .../operators/wasb_delete_blob/index.rst.txt | 48 + .../microsoft/azure/secrets/index.rst.txt | 15 + .../azure/secrets/key_vault/index.rst.txt | 108 + .../azure/sensors/cosmos/index.rst.txt | 53 + .../azure/sensors/data_factory/index.rst.txt | 46 + .../microsoft/azure/sensors/index.rst.txt | 17 + .../azure/sensors/wasb/index.rst.txt | 66 + .../transfers/azure_blob_to_gcs/index.rst.txt | 64 + .../microsoft/azure/transfers/index.rst.txt | 19 + .../transfers/local_to_adls/index.rst.txt | 76 + .../transfers/local_to_wasb/index.rst.txt | 45 + .../oracle_to_azure_data_lake/index.rst.txt | 62 + .../transfers/sftp_to_wasb/index.rst.txt | 132 + .../microsoft/azure/utils/index.rst.txt | 24 + .../example_adf_run_pipeline/index.rst.txt | 29 + .../azure/example_adls_delete/index.rst.txt | 39 + .../example_azure_blob_to_gcs/index.rst.txt | 54 + .../index.rst.txt | 34 + .../example_azure_cosmosdb/index.rst.txt | 40 + .../example_azure_service_bus/index.rst.txt | 54 + .../azure/example_azure_synapse/index.rst.txt | 39 + .../azure/example_fileshare/index.rst.txt | 67 + .../azure/example_local_to_adls/index.rst.txt | 39 + .../azure/example_local_to_wasb/index.rst.txt | 34 + .../azure/example_sftp_to_wasb/index.rst.txt | 97 + .../providers/microsoft/azure/index.rst.txt | 25 + .../5.0.1/_sources/commits.rst.txt | 518 + .../5.0.1/_sources/connections/acr.rst.txt | 62 + .../5.0.1/_sources/connections/adf.rst.txt | 93 + .../5.0.1/_sources/connections/adl.rst.txt | 70 + .../5.0.1/_sources/connections/adx.rst.txt | 91 + .../5.0.1/_sources/connections/asb.rst.txt | 50 + .../5.0.1/_sources/connections/azure.rst.txt | 83 + .../_sources/connections/azure_batch.rst.txt | 61 + .../azure_container_volume.rst.txt | 75 + .../_sources/connections/azure_cosmos.rst.txt | 66 + .../connections/azure_fileshare.rst.txt | 80 + .../connections/azure_synapse.rst.txt | 69 + .../5.0.1/_sources/connections/index.rst.txt | 25 + .../5.0.1/_sources/connections/wasb.rst.txt | 84 + .../5.0.1/_sources/index.rst.txt | 140 + .../installing-providers-from-sources.rst.txt | 18 + .../5.0.1/_sources/logging/index.rst.txt | 55 + .../operators/adf_run_pipeline.rst.txt | 53 + .../5.0.1/_sources/operators/adls.rst.txt | 50 + .../5.0.1/_sources/operators/asb.rst.txt | 214 + .../operators/azure_blob_to_gcs.rst.txt | 60 + .../_sources/operators/azure_synapse.rst.txt | 49 + .../5.0.1/_sources/operators/index.rst.txt | 28 + .../_sources/operators/local_to_adls.rst.txt | 53 + .../_sources/operators/sftp_to_wasb.rst.txt | 61 + .../secrets-backends/azure-key-vault.rst.txt | 73 + .../_static/_gen/css/main-custom.min.css | 1 + .../5.0.1/_static/_gen/css/main.min.css | 7 + .../5.0.1/_static/_gen/js/docs.js | 
1 + .../_sphinx_javascript_frameworks_compat.js | 134 + .../5.0.1/_static/basic.css | 899 ++ .../5.0.1/_static/check-solid.svg | 4 + .../5.0.1/_static/clipboard.min.js | 7 + .../5.0.1/_static/copy-button.svg | 5 + .../5.0.1/_static/copybutton.css | 94 + .../5.0.1/_static/copybutton.js | 248 + .../5.0.1/_static/copybutton_funcs.js | 73 + .../5.0.1/_static/doctools.js | 156 + .../5.0.1/_static/documentation_options.js | 14 + .../5.0.1/_static/file.png | Bin 0 -> 286 bytes .../5.0.1/_static/graphviz.css | 19 + .../5.0.1/_static/jquery-3.6.0.js | 10881 ++++++++++++++++ .../5.0.1/_static/jquery.js | 2 + .../5.0.1/_static/js/globaltoc.js | 24 + .../5.0.1/_static/language_data.js | 199 + .../5.0.1/_static/minus.png | Bin 0 -> 90 bytes .../5.0.1/_static/pin_32.png | Bin 0 -> 1201 bytes .../5.0.1/_static/plus.png | Bin 0 -> 90 bytes .../5.0.1/_static/pygments.css | 74 + .../5.0.1/_static/searchtools.js | 566 + .../5.0.1/_static/sphinx_highlight.js | 144 + .../5.0.1/_static/underscore-1.13.1.js | 2042 +++ .../5.0.1/_static/underscore.js | 6 + .../5.0.1/commits.html | 2323 ++++ .../5.0.1/connections/acr.html | 882 ++ .../5.0.1/connections/adf.html | 909 ++ .../5.0.1/connections/adl.html | 891 ++ .../5.0.1/connections/adx.html | 904 ++ .../5.0.1/connections/asb.html | 875 ++ .../5.0.1/connections/azure.html | 903 ++ .../5.0.1/connections/azure_batch.html | 881 ++ .../connections/azure_container_volume.html | 895 ++ .../5.0.1/connections/azure_cosmos.html | 887 ++ .../5.0.1/connections/azure_fileshare.html | 899 ++ .../5.0.1/connections/azure_synapse.html | 890 ++ .../5.0.1/connections/index.html | 862 ++ .../5.0.1/connections/wasb.html | 902 ++ .../5.0.1/genindex.html | 2508 ++++ .../5.0.1/index.html | 1579 +++ .../installing-providers-from-sources.html | 952 ++ .../5.0.1/logging.html | 1 + .../5.0.1/logging/index.html | 879 ++ .../5.0.1/objects.inv | Bin 0 -> 6386 bytes .../5.0.1/operators/adf_run_pipeline.html | 900 ++ .../5.0.1/operators/adls.html | 878 ++ .../5.0.1/operators/asb.html | 1064 ++ .../5.0.1/operators/azure_blob_to_gcs.html | 906 ++ .../5.0.1/operators/azure_synapse.html | 884 ++ .../5.0.1/operators/index.html | 867 ++ .../5.0.1/operators/local_to_adls.html | 886 ++ .../5.0.1/operators/sftp_to_wasb.html | 889 ++ .../5.0.1/py-modindex.html | 1103 ++ .../5.0.1/search.html | 865 ++ .../5.0.1/searchindex.js | 1 + .../secrets-backends/azure-key-vault.html | 892 ++ .../5.0.1/secrets-backends/index.html | 1 + .../stable.txt | 2 +- .../core-extensions/connections.html | 26 +- .../core-extensions/extra-links.html | 2 +- .../core-extensions/logging.html | 4 +- .../core-extensions/secrets-backends.html | 2 +- .../operators-and-hooks-ref/azure.html | 66 +- .../operators-and-hooks-ref/google.html | 4 +- .../operators-and-hooks-ref/protocol.html | 19 +- .../operators-and-hooks-ref/software.html | 2 +- .../packages-ref.html | 8 +- .../apache-airflow-providers/searchindex.js | 2 +- sphinx_airflow_theme/demo/docs.sh | 5 +- 350 files changed, 211310 insertions(+), 72 deletions(-) create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/.buildinfo create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/hooks/hive/index.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/hooks/index.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/index.html create mode 100644 
docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/operators/hive/index.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/operators/hive_stats/index.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/operators/index.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/sensors/hive_partition/index.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/sensors/index.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/sensors/metastore_partition/index.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/sensors/named_hive_partition/index.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/hive_to_mysql/index.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/hive_to_samba/index.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/index.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/mssql_to_hive/index.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/mysql_to_hive/index.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/s3_to_hive/index.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/vertica_to_hive/index.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/tests/system/providers/apache/hive/example_twitter_dag/index.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/tests/system/providers/apache/hive/index.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/hooks/hive.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/operators/hive.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/operators/hive_stats.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/sensors/hive_partition.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/sensors/metastore_partition.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/sensors/named_hive_partition.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/hive_to_mysql.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/hive_to_samba.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/mssql_to_hive.html create mode 100644 
docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/mysql_to_hive.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/s3_to_hive.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/vertica_to_hive.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/index.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/tests/system/providers/apache/hive/example_twitter_dag.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/hooks/hive/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/hooks/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/operators/hive/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/operators/hive_stats/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/operators/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/sensors/hive_partition/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/sensors/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/sensors/metastore_partition/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/sensors/named_hive_partition/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/hive_to_mysql/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/hive_to_samba/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/mssql_to_hive/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/mysql_to_hive/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/s3_to_hive/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/vertica_to_hive/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/tests/system/providers/apache/hive/example_twitter_dag/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/tests/system/providers/apache/hive/index.rst.txt create mode 100644 
docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/commits.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/connections/hive_cli.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/connections/hive_metastore.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/connections/hiveserver2.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/connections/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/installing-providers-from-sources.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/operators.rst.txt create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/_gen/css/main-custom.min.css create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/_gen/css/main.min.css create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/_gen/js/docs.js create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/_sphinx_javascript_frameworks_compat.js create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/basic.css create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/check-solid.svg create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/clipboard.min.js create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/copy-button.svg create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/copybutton.css create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/copybutton.js create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/copybutton_funcs.js create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/doctools.js create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/documentation_options.js create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/file.png create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/graphviz.css create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/jquery-3.6.0.js create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/jquery.js create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/js/globaltoc.js create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/language_data.js create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/minus.png create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/pin_32.png create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/plus.png create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/pygments.css create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/searchtools.js create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/sphinx_highlight.js create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/underscore-1.13.1.js create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/underscore.js 
create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/commits.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/connections/hive_cli.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/connections/hive_metastore.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/connections/hiveserver2.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/connections/index.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/genindex.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/index.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/installing-providers-from-sources.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/objects.inv create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/operators.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/py-modindex.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/search.html create mode 100644 docs-archive/apache-airflow-providers-apache-hive/5.0.0/searchindex.js create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/.buildinfo create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/adx/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/asb/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/base_azure/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/batch/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/container_instance/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/container_registry/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/container_volume/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/cosmos/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/data_factory/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/data_lake/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/fileshare/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/synapse/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/wasb/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/index.html create mode 100644 
docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/log/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/log/wasb_task_handler/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/adls/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/adx/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/asb/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/batch/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/container_instances/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/cosmos/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/data_factory/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/synapse/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/wasb_delete_blob/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/secrets/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/secrets/key_vault/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/sensors/cosmos/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/sensors/data_factory/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/sensors/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/sensors/wasb/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/transfers/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/transfers/local_to_adls/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/transfers/local_to_wasb/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/transfers/sftp_to_wasb/index.html create mode 100644 
docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/utils/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_adf_run_pipeline/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_adls_delete/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_azure_blob_to_gcs/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_azure_container_instances/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_azure_cosmosdb/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_azure_service_bus/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_azure_synapse/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_fileshare/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_local_to_adls/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_local_to_wasb/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_sftp_to_wasb/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/adx.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/asb.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/base_azure.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/batch.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/container_instance.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/container_registry.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/container_volume.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/cosmos.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/data_factory.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/data_lake.html create mode 100644 
docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/fileshare.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/synapse.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/wasb.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/log/wasb_task_handler.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/adls.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/adx.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/asb.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/batch.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/container_instances.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/cosmos.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/data_factory.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/synapse.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/wasb_delete_blob.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/secrets/key_vault.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/sensors/cosmos.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/sensors/data_factory.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/sensors/wasb.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/transfers/local_to_adls.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/transfers/local_to_wasb.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/utils.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/index.html create mode 100644 
docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_adf_run_pipeline.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_adls_delete.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_azure_blob_to_gcs.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_azure_container_instances.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_azure_cosmosdb.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_azure_service_bus.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_azure_synapse.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_fileshare.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_local_to_adls.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_local_to_wasb.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_sftp_to_wasb.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/adx/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/asb/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/base_azure/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/batch/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/container_instance/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/container_registry/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/container_volume/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/cosmos/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/data_factory/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/data_lake/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/fileshare/index.rst.txt create mode 100644 
docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/synapse/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/wasb/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/log/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/log/wasb_task_handler/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/adls/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/adx/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/asb/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/batch/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/container_instances/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/cosmos/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/data_factory/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/synapse/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/wasb_delete_blob/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/secrets/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/secrets/key_vault/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/sensors/cosmos/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/sensors/data_factory/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/sensors/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/sensors/wasb/index.rst.txt create mode 100644 
docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/local_to_adls/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/local_to_wasb/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/sftp_to_wasb/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/utils/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_adf_run_pipeline/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_adls_delete/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_azure_blob_to_gcs/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_azure_container_instances/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_azure_cosmosdb/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_azure_service_bus/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_azure_synapse/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_fileshare/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_local_to_adls/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_local_to_wasb/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_sftp_to_wasb/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/commits.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/acr.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/adf.rst.txt create mode 100644 
docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/adl.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/adx.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/asb.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure_batch.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure_container_volume.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure_cosmos.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure_fileshare.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure_synapse.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/wasb.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/installing-providers-from-sources.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/logging/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/adf_run_pipeline.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/adls.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/asb.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/azure_blob_to_gcs.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/azure_synapse.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/index.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/local_to_adls.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/sftp_to_wasb.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/secrets-backends/azure-key-vault.rst.txt create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/_gen/css/main-custom.min.css create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/_gen/css/main.min.css create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/_gen/js/docs.js create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/_sphinx_javascript_frameworks_compat.js create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/basic.css create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/check-solid.svg create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/clipboard.min.js create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/copy-button.svg create 
mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/copybutton.css create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/copybutton.js create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/copybutton_funcs.js create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/doctools.js create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/documentation_options.js create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/file.png create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/graphviz.css create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/jquery-3.6.0.js create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/jquery.js create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/js/globaltoc.js create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/language_data.js create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/minus.png create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/pin_32.png create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/plus.png create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/pygments.css create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/searchtools.js create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/sphinx_highlight.js create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/underscore-1.13.1.js create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/underscore.js create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/commits.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/connections/acr.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/connections/adf.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/connections/adl.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/connections/adx.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/connections/asb.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/connections/azure.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/connections/azure_batch.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/connections/azure_container_volume.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/connections/azure_cosmos.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/connections/azure_fileshare.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/connections/azure_synapse.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/connections/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/connections/wasb.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/genindex.html create mode 100644 
docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/installing-providers-from-sources.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/logging.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/logging/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/objects.inv create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/adf_run_pipeline.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/adls.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/asb.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/azure_blob_to_gcs.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/azure_synapse.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/index.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/local_to_adls.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/sftp_to_wasb.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/py-modindex.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/search.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/searchindex.js create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/secrets-backends/azure-key-vault.html create mode 100644 docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/secrets-backends/index.html diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/.buildinfo b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/.buildinfo new file mode 100644 index 00000000000..36a5f580356 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/.buildinfo @@ -0,0 +1,4 @@ +# Sphinx build info version 1 +# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. +config: c51524a7dae7fc57120b8a88c632eb04 +tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/hooks/hive/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/hooks/hive/index.html new file mode 100644 index 00000000000..1670282be55 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/hooks/hive/index.html @@ -0,0 +1,1558 @@ + + + + + + + + + + + + airflow.providers.apache.hive.hooks.hive — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +
+

airflow.providers.apache.hive.hooks.hive

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + + + + + + + +

HiveCliHook

Simple wrapper around the hive CLI.

HiveMetastoreHook

Wrapper to interact with the Hive Metastore

HiveServer2Hook

Wrapper around the pyhive library

+
+
+

Functions

+ ++++ + + + + + +

get_context_from_env_var()

Extract context from env variables, e.g. dag_id, task_id and execution_date.

+
+
+

Attributes

+ ++++ + + + + + +

HIVE_QUEUE_PRIORITIES

+
+
+airflow.providers.apache.hive.hooks.hive.HIVE_QUEUE_PRIORITIES = ['VERY_HIGH', 'HIGH', 'NORMAL', 'LOW', 'VERY_LOW'][source]
+
+ +
+
+airflow.providers.apache.hive.hooks.hive.get_context_from_env_var()[source]
+

Extract context from env variables, e.g. dag_id, task_id and execution_date, so that they can be used inside BashOperator and PythonOperator.

+
+
Returns
+

The context of interest.

+
+
Return type
+

dict[Any, Any]

+
+
+
+ +
+
+class airflow.providers.apache.hive.hooks.hive.HiveCliHook(hive_cli_conn_id=default_conn_name, run_as=None, mapred_queue=None, mapred_queue_priority=None, mapred_job_name=None, hive_cli_params='')[source]
+

Bases: airflow.hooks.base.BaseHook

+

Simple wrapper around the hive CLI.

+

It also supports the beeline, a lighter CLI that runs JDBC and is replacing the heavier traditional CLI. To enable beeline, set the use_beeline param in the extra field of your connection as in { "use_beeline": true }.

+

Note that you can also set default hive CLI parameters by passing hive_cli_params, a space-separated list of parameters to add to the hive command.

+

The extra connection parameter auth gets passed into the jdbc connection string as is.

+
+
Parameters
+
    +
  • hive_cli_conn_id (str) – Reference to the +Hive CLI connection id.

  • +
  • mapred_queue (str | None) – queue used by the Hadoop Scheduler (Capacity or Fair)

  • +
  • mapred_queue_priority (str | None) – priority within the job queue. +Possible settings include: VERY_HIGH, HIGH, NORMAL, LOW, VERY_LOW

  • +
  • mapred_job_name (str | None) – This name will appear in the jobtracker. +This can make monitoring easier.

  • +
  • hive_cli_params (str) – Space separated list of hive command parameters to add to the +hive command.

  • +
+
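A minimal usage sketch; the connection id, queue and query below are illustrative assumptions, and beeline is only used if the connection's extra field contains { "use_beeline": true }:

from airflow.providers.apache.hive.hooks.hive import HiveCliHook

# Assumes a configured "hive_cli_default" connection.
hook = HiveCliHook(
    hive_cli_conn_id="hive_cli_default",
    mapred_queue="default",            # hypothetical Hadoop scheduler queue
    mapred_queue_priority="NORMAL",
    hive_cli_params="-S",              # extra flags appended to every hive command
)
print(hook.run_cli("SHOW TABLES", schema="airflow"))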
+
+
+
+conn_name_attr = hive_cli_conn_id[source]
+
+ +
+
+default_conn_name = hive_cli_default[source]
+
+ +
+
+conn_type = hive_cli[source]
+
+ +
+
+hook_name = Hive Client Wrapper[source]
+
+ +
+
+run_cli(hql, schema=None, verbose=True, hive_conf=None)[source]
+

Run an hql statement using the hive cli. If hive_conf is specified +it should be a dict and the entries will be set as key/value pairs +in HiveConf.

+
+
Parameters
+
    +
  • hql (str) – an hql (hive query language) statement to run with hive cli

  • +
  • schema (str | None) – Name of hive schema (database) to use

  • +
  • verbose (bool) – Provides additional logging. Defaults to True.

  • +
  • hive_conf (dict[Any, Any] | None) – if specified these key value pairs will be passed +to hive as -hiveconf "key"="value". Note that they will be +passed after the hive_cli_params and thus will override +whatever values are specified in the database.

  • +
+
+
+
>>> hh = HiveCliHook()
+>>> result = hh.run_cli("USE airflow;")
+>>> ("OK" in result)
+True
+
+
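A hedged sketch of how hive_conf values reach the command line; the configuration keys are illustrative assumptions:

from airflow.providers.apache.hive.hooks.hive import HiveCliHook

hook = HiveCliHook(hive_cli_conn_id="hive_cli_default")
# Each entry is passed to hive as -hiveconf "key"="value" after hive_cli_params,
# so it overrides whatever is stored on the connection.
hook.run_cli(
    "SELECT COUNT(*) FROM airflow.static_babynames",
    schema="airflow",
    hive_conf={"mapreduce.job.queuename": "default"},
)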
+
+ +
+
+test_hql(hql)[source]
+

Test an hql statement using the hive cli and EXPLAIN

+
+
+
+ +
+
+load_df(df, table, field_dict=None, delimiter=',', encoding='utf8', pandas_kwargs=None, **kwargs)[source]
+

Loads a pandas DataFrame into hive.

+

Hive data types will be inferred if not passed but column names will +not be sanitized.

+
+
Parameters
+
    +
  • df (pandas.DataFrame) – DataFrame to load into a Hive table

  • +
  • table (str) – target Hive table, use dot notation to target a +specific database

  • +
  • field_dict (dict[Any, Any] | None) – mapping from column name to hive data type. +Note that it must be OrderedDict so as to keep columns’ order.

  • +
  • delimiter (str) – field delimiter in the file

  • +
  • encoding (str) – str encoding to use when writing DataFrame to file

  • +
  • pandas_kwargs (Any) – passed to DataFrame.to_csv

  • +
  • kwargs (Any) – passed to self.load_file

  • +
+
+
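A hedged sketch of load_df; the DataFrame and target table are made up, and an OrderedDict is used to preserve column order as noted above:

from collections import OrderedDict

import pandas as pd

from airflow.providers.apache.hive.hooks.hive import HiveCliHook

df = pd.DataFrame({"state": ["CA", "NY"], "num": [100, 200]})
hook = HiveCliHook(hive_cli_conn_id="hive_cli_default")
hook.load_df(
    df,
    table="airflow.static_babynames_stage",  # dot notation: database.table
    field_dict=OrderedDict([("state", "STRING"), ("num", "INT")]),
    delimiter=",",
    recreate=True,  # forwarded to load_file via **kwargs
)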
+
+ +
+
+load_file(filepath, table, delimiter=',', field_dict=None, create=True, overwrite=True, partition=None, recreate=False, tblproperties=None)[source]
+

Loads a local file into Hive

+

Note that the table generated in Hive uses STORED AS textfile, which isn’t the most efficient serialization format. If a large amount of data is loaded and/or if the table gets queried considerably, you may want to use this operator only to stage the data into a temporary table before loading it into its final destination using a HiveOperator.

+
+
Parameters
+
    +
  • filepath (str) – local filepath of the file to load

  • +
  • table (str) – target Hive table, use dot notation to target a +specific database

  • +
  • delimiter (str) – field delimiter in the file

  • +
  • field_dict (dict[Any, Any] | None) – A dictionary of the fields name in the file +as keys and their Hive types as values. +Note that it must be OrderedDict so as to keep columns’ order.

  • +
  • create (bool) – whether to create the table if it doesn’t exist

  • +
  • overwrite (bool) – whether to overwrite the data in table or partition

  • +
  • partition (dict[str, Any] | None) – target partition as a dict of partition columns +and values

  • +
  • recreate (bool) – whether to drop and recreate the table at every +execution

  • +
  • tblproperties (dict[str, Any] | None) – TBLPROPERTIES of the hive table being created

  • +
+
+
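A minimal sketch of staging a local CSV into a partitioned table with load_file; the file path, table and partition are illustrative assumptions:

from collections import OrderedDict

from airflow.providers.apache.hive.hooks.hive import HiveCliHook

hook = HiveCliHook(hive_cli_conn_id="hive_cli_default")
hook.load_file(
    filepath="/tmp/babynames.csv",                 # hypothetical local file
    table="airflow.static_babynames_partitioned",  # database.table
    delimiter=",",
    field_dict=OrderedDict([("state", "STRING"), ("name", "STRING"), ("num", "INT")]),
    partition={"ds": "2015-01-01"},                # partition column -> value
    create=True,
    recreate=False,
)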
+
+ +
+
+kill()[source]
+

Kill Hive cli command

+
+
+
+ +
+ +
+
+class airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook(metastore_conn_id=default_conn_name)[source]
+

Bases: airflow.hooks.base.BaseHook

+

Wrapper to interact with the Hive Metastore

+
+
Parameters
+

metastore_conn_id (str) – reference to the +:ref: metastore thrift service connection id <howto/connection:hive_metastore>.

+
+
+
+
+MAX_PART_COUNT = 32767[source]
+
+ +
+
+conn_name_attr = metastore_conn_id[source]
+
+ +
+
+default_conn_name = metastore_default[source]
+
+ +
+
+conn_type = hive_metastore[source]
+
+ +
+
+hook_name = Hive Metastore Thrift[source]
+
+ +
+
+__getstate__()[source]
+
+
+
+ +
+
+__setstate__(d)[source]
+
+
+
+ +
+
+get_metastore_client()[source]
+

Returns a Hive thrift client.

+
+
+
+ +
+
+get_conn()[source]
+

Returns connection for the hook.

+
+
+
+ +
+
+check_for_partition(schema, table, partition)[source]
+

Checks whether a partition exists

+
+
Parameters
+
    +
  • schema (str) – Name of hive schema (database) @table belongs to

  • +
  • table (str) – Name of hive table @partition belongs to

  • +
  • partition (str) – Expression that matches the partitions to check for +(eg a = ‘b’ AND c = ‘d’)

  • +
+
+
+
>>> hh = HiveMetastoreHook()
+>>> t = 'static_babynames_partitioned'
+>>> hh.check_for_partition('airflow', t, "ds='2015-01-01'")
+True
+
+
+
+ +
+
+check_for_named_partition(schema, table, partition_name)[source]
+

Checks whether a partition with a given name exists

+
+
Parameters
+
    +
  • schema (str) – Name of hive schema (database) @table belongs to

  • +
  • table (str) – Name of hive table @partition belongs to

  • +
  • partition_name (str) – Name of the partitions to check for (eg a=b/c=d)

  • +
+
+
+
>>> hh = HiveMetastoreHook()
+>>> t = 'static_babynames_partitioned'
+>>> hh.check_for_named_partition('airflow', t, "ds=2015-01-01")
+True
+>>> hh.check_for_named_partition('airflow', t, "ds=xxx")
+False
+
+
+
+ +
+
+get_table(table_name, db='default')[source]
+

Get a metastore table object

+
>>> hh = HiveMetastoreHook()
+>>> t = hh.get_table(db='airflow', table_name='static_babynames')
+>>> t.tableName
+'static_babynames'
+>>> [col.name for col in t.sd.cols]
+['state', 'year', 'name', 'gender', 'num']
+
+
+
+
+
+ +
+
+get_tables(db, pattern='*')[source]
+

Get metastore table objects for tables in the given database that match the pattern.

+
+
+
+ +
+
+get_databases(pattern='*')[source]
+

Get metastore databases that match the pattern.

+
+
+
+ +
+
+get_partitions(schema, table_name, partition_filter=None)[source]
+

Returns a list of all partitions in a table. Works only for tables with fewer than 32767 partitions (the Java short max value); for subpartitioned tables, the number might easily exceed this.

+
>>> hh = HiveMetastoreHook()
+>>> t = 'static_babynames_partitioned'
+>>> parts = hh.get_partitions(schema='airflow', table_name=t)
+>>> len(parts)
+1
+>>> parts
+[{'ds': '2015-01-01'}]
+
+
+
+
+
+ +
+
+max_partition(schema, table_name, field=None, filter_map=None)[source]
+

Returns the maximum value for all partitions with given field in a table. If only one partition key exists in the table, the key will be used as field. filter_map should be a partition_key:partition_value map and will be used to filter out partitions.

+
+
Parameters
+
    +
  • schema (str) – schema name.

  • +
  • table_name (str) – table name.

  • +
  • field (str | None) – partition key to get max partition from.

  • +
  • filter_map (dict[Any, Any] | None) – partition_key:partition_value map used for partition filtering.

  • +
+
+
+
>>> hh = HiveMetastoreHook()
+>>> filter_map = {'ds': '2015-01-01'}
+>>> t = 'static_babynames_partitioned'
+>>> hh.max_partition(schema='airflow',        ... table_name=t, field='ds', filter_map=filter_map)
+'2015-01-01'
+
+
+
+ +
+
+table_exists(table_name, db='default')[source]
+

Check if table exists

+
>>> hh = HiveMetastoreHook()
+>>> hh.table_exists(db='airflow', table_name='static_babynames')
+True
+>>> hh.table_exists(db='airflow', table_name='does_not_exist')
+False
+
+
+
+
+
+ +
+
+drop_partitions(table_name, part_vals, delete_data=False, db='default')[source]
+

Drop partitions from the given table matching the part_vals input

+
+
Parameters
+
    +
  • table_name – table name.

  • +
  • part_vals – list of partition specs.

  • +
  • delete_data – Setting to control whether the underlying data has to be deleted in addition to dropping partitions.

  • +
  • db – Name of hive schema (database) @table belongs to

  • +
+
+
+
>>> hh = HiveMetastoreHook()
+>>> hh.drop_partitions(db='airflow', table_name='static_babynames',
+part_vals="['2020-05-01']")
+True
+
+
+
+ +
+ +
+
+class airflow.providers.apache.hive.hooks.hive.HiveServer2Hook(*args, schema=None, log_sql=True, **kwargs)[source]
+

Bases: airflow.providers.common.sql.hooks.sql.DbApiHook

+

Wrapper around the pyhive library

+

Notes: the default auth_mechanism is PLAIN; to override it, specify it in the extra of your connection in the UI. The default for run_set_variable_statements is true; if you are using Impala you may need to set it to false in the extra of your connection in the UI.

+
+
Parameters
+
    +
  • hiveserver2_conn_id – Reference to the +:ref: Hive Server2 thrift service connection id <howto/connection:hiveserver2>.

  • +
  • schema (str | None) – Hive database name.

  • +
+
+
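A hedged sketch of querying through HiveServer2Hook; the connection id and table are assumptions, and the auth_mechanism / run_set_variable_statements tweaks mentioned above would live in the connection's extra field rather than in code:

from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook

# Assumes a configured "hiveserver2_default" connection.
hook = HiveServer2Hook(hiveserver2_conn_id="hiveserver2_default", schema="airflow")
records = hook.get_records("SELECT * FROM static_babynames LIMIT 10")   # list of rows
df = hook.get_pandas_df("SELECT state, num FROM static_babynames LIMIT 10")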
+
+
+conn_name_attr = hiveserver2_conn_id[source]
+
+ +
+
+default_conn_name = hiveserver2_default[source]
+
+ +
+
+conn_type = hiveserver2[source]
+
+ +
+
+hook_name = Hive Server 2 Thrift[source]
+
+ +
+
+supports_autocommit = False[source]
+
+ +
+
+get_conn(schema=None)[source]
+

Returns a Hive connection object.

+
+
+
+ +
+
+get_results(sql, schema='default', fetch_size=None, hive_conf=None)[source]
+

Get results of the provided hql in target schema.

+
+
Parameters
+
    +
  • sql (str | list[str]) – hql to be executed.

  • +
  • schema (str) – target schema, default to ‘default’.

  • +
  • fetch_size (int | None) – max size of result to fetch.

  • +
  • hive_conf (Iterable | Mapping | None) – hive_conf to execute along with the hql.

  • +
+
+
Returns
+

results of hql execution, dict with data (list of results) and header

+
+
Return type
+

dict[str, Any]

+
+
+
+ +
+
+to_csv(sql, csv_filepath, schema='default', delimiter=',', lineterminator='\r\n', output_header=True, fetch_size=1000, hive_conf=None)[source]
+

Execute hql in target schema and write results to a csv file.

+
+
Parameters
+
    +
  • sql (str) – hql to be executed.

  • +
  • csv_filepath (str) – filepath of csv to write results into.

  • +
  • schema (str) – target schema, default to ‘default’.

  • +
  • delimiter (str) – delimiter of the csv file, default to ‘,’.

  • +
  • lineterminator (str) – lineterminator of the csv file.

  • +
  • output_header (bool) – header of the csv file, default to True.

  • +
  • fetch_size (int) – number of result rows to write into the csv file, default to 1000.

  • +
  • hive_conf (dict[Any, Any] | None) – hive_conf to execute along with the hql.

  • +
+
+
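A short sketch of to_csv; the query and output path are illustrative assumptions:

from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook

hook = HiveServer2Hook(hiveserver2_conn_id="hiveserver2_default")
hook.to_csv(
    "SELECT state, num FROM static_babynames",
    csv_filepath="/tmp/babynames_export.csv",  # hypothetical output path
    schema="airflow",
    delimiter=",",
    output_header=True,
    fetch_size=1000,  # rows written per fetch batch
)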
+
+ +
+
+get_records(sql, parameters=None, **kwargs)[source]
+

Get a set of records from a Hive query. You can optionally pass a ‘schema’ kwarg which specifies the target schema and defaults to ‘default’.

+
+
Parameters
+
    +
  • sql (str | list[str]) – hql to be executed.

  • +
  • parameters (Iterable | Mapping | None) – optional configuration passed to get_results

  • +
+
+
Returns
+

result of hive execution

+
+
Return type
+

Any

+
+
+
>>> hh = HiveServer2Hook()
+>>> sql = "SELECT * FROM airflow.static_babynames LIMIT 100"
+>>> len(hh.get_records(sql))
+100
+
+
+
+ +
+
+get_pandas_df(sql, schema='default', hive_conf=None, **kwargs)[source]
+

Get a pandas dataframe from a Hive query

+
+
Parameters
+
    +
  • sql (str) – hql to be executed.

  • +
  • schema (str) – target schema, default to ‘default’.

  • +
  • hive_conf (dict[Any, Any] | None) – hive_conf to execute along with the hql.

  • +
  • kwargs – (optional) passed into pandas.DataFrame constructor

  • +
+
+
Returns
+

result of hive execution

+
+
Return type
+

pandas.DataFrame

+
+
+
>>> hh = HiveServer2Hook()
+>>> sql = "SELECT * FROM airflow.static_babynames LIMIT 100"
+>>> df = hh.get_pandas_df(sql)
+>>> len(df.index)
+100
+
+
+
+
Returns
+

pandas.DataFrame

+
+
Return type
+

pandas.DataFrame

+
+
+
+ +
+ +
+
+
+ + + +
+ +
+
+
+
+
+

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/hooks/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/hooks/index.html new file mode 100644 index 00000000000..95ef7a86067 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/hooks/index.html @@ -0,0 +1,853 @@ + + + + + + + + + + + + airflow.providers.apache.hive.hooks — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + + +
+

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/index.html new file mode 100644 index 00000000000..b27994251bf --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/index.html @@ -0,0 +1,874 @@ + + + + + + + + + + + + airflow.providers.apache.hive — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + + +
+

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/operators/hive/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/operators/hive/index.html new file mode 100644 index 00000000000..582a1006117 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/operators/hive/index.html @@ -0,0 +1,1004 @@ + + + + + + + + + + + + airflow.providers.apache.hive.operators.hive — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +
+

airflow.providers.apache.hive.operators.hive

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

HiveOperator

Executes hql code or hive script in a specific Hive database.

+
+
+class airflow.providers.apache.hive.operators.hive.HiveOperator(*, hql, hive_cli_conn_id='hive_cli_default', schema='default', hiveconfs=None, hiveconf_jinja_translate=False, script_begin_tag=None, run_as_owner=False, mapred_queue=None, mapred_queue_priority=None, mapred_job_name=None, hive_cli_params='', **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Executes hql code or hive script in a specific Hive database.

+
+
Parameters
+
    +
  • hql (str) – the hql to be executed. Note that you may also use +a relative path from the dag file of a (template) hive +script. (templated)

  • +
  • hive_cli_conn_id (str) – Reference to the +Hive CLI connection id. (templated)

  • +
  • hiveconfs (dict[Any, Any] | None) – if defined, these key value pairs will be passed +to hive as -hiveconf "key"="value"

  • +
  • hiveconf_jinja_translate (bool) – when True, hiveconf-type templating +${var} gets translated into jinja-type templating {{ var }} and +${hiveconf:var} gets translated into jinja-type templating {{ var }}. +Note that you may want to use this along with the +DAG(user_defined_macros=myargs) parameter. View the DAG +object documentation for more details.

  • +
  • script_begin_tag (str | None) – If defined, the operator will get rid of the +part of the script before the first occurrence of script_begin_tag

  • +
  • run_as_owner (bool) – Run HQL code as a DAG’s owner.

  • +
  • mapred_queue (str | None) – queue used by the Hadoop CapacityScheduler. (templated)

  • +
  • mapred_queue_priority (str | None) – priority within CapacityScheduler queue. +Possible settings include: VERY_HIGH, HIGH, NORMAL, LOW, VERY_LOW

  • +
  • mapred_job_name (str | None) – This name will appear in the jobtracker. +This can make monitoring easier.

  • +
+
+
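A hedged DAG snippet; the dag id, schedule, HQL script and queue are illustrative assumptions, and a configured hive_cli_default connection is presumed:

from datetime import datetime

from airflow import DAG
from airflow.providers.apache.hive.operators.hive import HiveOperator

with DAG(dag_id="example_hive", start_date=datetime(2022, 1, 1), schedule=None) as dag:
    load_partition = HiveOperator(
        task_id="load_partition",
        hql="hql/load_babynames.hql",       # relative path to a templated script
        schema="airflow",
        hiveconfs={"DAY": "{{ ds }}"},      # passed to hive as -hiveconf "DAY"="..."
        mapred_queue="default",
        mapred_job_name="airflow_load_{{ ds }}",
    )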
+
+
+template_fields :Sequence[str] = ['hql', 'schema', 'hive_cli_conn_id', 'mapred_queue', 'hiveconfs', 'mapred_job_name',...[source]
+
+ +
+
+template_ext :Sequence[str] = ['.hql', '.sql'][source]
+
+ +
+
+template_fields_renderers[source]
+
+ +
+
+ui_color = #f0e4ec[source]
+
+ +
+
+get_hook()[source]
+

Get Hive cli hook

+
+
+
+ +
+
+prepare_template()[source]
+

Hook triggered after the templated fields get replaced by their content.

+

If you need your operator to alter the content of the file before the +template is rendered, it should override this method to do so.

+
+
+
+ +
+
+execute(context)[source]
+

This is the main method to derive when creating an operator. +Context is the same dictionary used as when rendering jinja templates.

+

Refer to get_template_context for more context.

+
+
+
+ +
+
+dry_run()[source]
+

Performs dry run for the operator - just render template fields.

+
+
+
+ +
+
+on_kill()[source]
+

Override this method to clean up subprocesses when a task instance gets killed. Any use of the threading, subprocess or multiprocessing module within an operator needs to be cleaned up, or it will leave ghost processes behind.

+
+
+
+ +
+
+clear_airflow_vars()[source]
+

Reset airflow environment variables to prevent existing ones from impacting behavior.

+
+
+
+ +
+ +
+
+
+ + + +
+ +
+
+
+
+
+

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/operators/hive_stats/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/operators/hive_stats/index.html new file mode 100644 index 00000000000..f7d2f4bc489 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/operators/hive_stats/index.html @@ -0,0 +1,947 @@ + + + + + + + + + + + + airflow.providers.apache.hive.operators.hive_stats — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +
+

airflow.providers.apache.hive.operators.hive_stats

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

HiveStatsCollectionOperator

Gathers partition statistics using a dynamically generated Presto query.

+
+
+class airflow.providers.apache.hive.operators.hive_stats.HiveStatsCollectionOperator(*, table, partition, extra_exprs=None, excluded_columns=None, assignment_func=None, metastore_conn_id='metastore_default', presto_conn_id='presto_default', mysql_conn_id='airflow_db', **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Gathers partition statistics using a dynamically generated Presto query and inserts the stats into a MySQL table with this format. Stats overwrite themselves if you rerun the same date/partition.

+
CREATE TABLE hive_stats (
+    ds VARCHAR(16),
+    table_name VARCHAR(500),
+    metric VARCHAR(200),
+    value BIGINT
+);
+
+
+
+
Parameters
+
    +
  • metastore_conn_id (str) – Reference to the +Hive Metastore connection id.

  • +
  • table (str) – the source table, in the format database.table_name. (templated)

  • +
  • partition (Any) – the source partition. (templated)

  • +
  • extra_exprs (dict[str, Any] | None) – dict of expression to run against the table where +keys are metric names and values are Presto compatible expressions

  • +
  • excluded_columns (list[str] | None) – list of columns to exclude, consider +excluding blobs, large json columns, …

  • +
  • assignment_func (Callable[[str, str], dict[Any, Any] | None] | None) – a function that receives a column name and a type, and returns a dict of metric names and Presto expressions. If None is returned, the global defaults are applied. If an empty dictionary is returned, no stats are computed for that column.

  • +
+
+
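A short sketch; the table, partition and connection ids are assumptions, and the MySQL hive_stats table from the snippet above is presumed to exist:

from airflow.providers.apache.hive.operators.hive_stats import HiveStatsCollectionOperator

# Assumes this is declared inside a DAG definition.
collect_stats = HiveStatsCollectionOperator(
    task_id="collect_stats",
    table="airflow.static_babynames_partitioned",
    partition={"ds": "{{ ds }}"},
    excluded_columns=["raw_json"],   # hypothetical blob-like column to skip
    metastore_conn_id="metastore_default",
    presto_conn_id="presto_default",
    mysql_conn_id="airflow_db",
)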
+
+
+template_fields :Sequence[str] = ['table', 'partition', 'ds', 'dttm'][source]
+
+ +
+
+ui_color = #aff7a6[source]
+
+ +
+
+get_default_exprs(col, col_type)[source]
+

Get default expressions

+
+
+
+ +
+
+execute(context)[source]
+

This is the main method to derive when creating an operator. +Context is the same dictionary used as when rendering jinja templates.

+

Refer to get_template_context for more context.

+
+
+
+ +
+ +
+
+
+ + + +
+ +
+
+
+
+
+

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/operators/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/operators/index.html new file mode 100644 index 00000000000..e905da4bd91 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/operators/index.html @@ -0,0 +1,854 @@ + + + + + + + + + + + + airflow.providers.apache.hive.operators — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + + +
+

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/sensors/hive_partition/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/sensors/hive_partition/index.html new file mode 100644 index 00000000000..fd65889f99d --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/sensors/hive_partition/index.html @@ -0,0 +1,923 @@ + + + + + + + + + + + + airflow.providers.apache.hive.sensors.hive_partition — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +
+

airflow.providers.apache.hive.sensors.hive_partition

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

HivePartitionSensor

Waits for a partition to show up in Hive.

+
+
+class airflow.providers.apache.hive.sensors.hive_partition.HivePartitionSensor(*, table, partition="ds='{{ ds }}'", metastore_conn_id='metastore_default', schema='default', poke_interval=60 * 3, **kwargs)[source]
+

Bases: airflow.sensors.base.BaseSensorOperator

+

Waits for a partition to show up in Hive.

+

Note: Because partition supports general logical operators, it +can be inefficient. Consider using NamedHivePartitionSensor instead if +you don’t need the full flexibility of HivePartitionSensor.

+
+
Parameters
+
    +
  • table (str) – The name of the table to wait for, supports the dot +notation (my_database.my_table)

  • +
  • partition (str | None) – The partition clause to wait for. This is passed as +is to the metastore Thrift client get_partitions_by_filter method, +and apparently supports SQL like notation as in ds='2015-01-01' +AND type='value' and comparison operators as in "ds>=2015-01-01"

  • +
  • metastore_conn_id (str) – reference to the +:ref: metastore thrift service connection id <howto/connection:hive_metastore>

  • +
+
+
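A hedged example; the table name and partition clause are assumptions:

from airflow.providers.apache.hive.sensors.hive_partition import HivePartitionSensor

# Assumes this is declared inside a DAG definition.
wait_for_partition = HivePartitionSensor(
    task_id="wait_for_partition",
    table="airflow.static_babynames_partitioned",
    partition="ds='{{ ds }}' AND state='CA'",  # general logical operators are allowed
    metastore_conn_id="metastore_default",
    poke_interval=180,
)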
+
+
+template_fields :Sequence[str] = ['schema', 'table', 'partition'][source]
+
+ +
+
+ui_color = #C5CAE9[source]
+
+ +
+
+poke(context)[source]
+

Function that sensors deriving this class should override.

+
+
+
+ +
+ +
+
+
+ + + +
+ +
+
+
+
+
+

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/sensors/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/sensors/index.html new file mode 100644 index 00000000000..63a2a86d0d7 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/sensors/index.html @@ -0,0 +1,855 @@ + + + + + + + + + + + + airflow.providers.apache.hive.sensors — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + + +
+

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/sensors/metastore_partition/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/sensors/metastore_partition/index.html new file mode 100644 index 00000000000..3e9e8ece40a --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/sensors/metastore_partition/index.html @@ -0,0 +1,923 @@ + + + + + + + + + + + + airflow.providers.apache.hive.sensors.metastore_partition — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +
+

airflow.providers.apache.hive.sensors.metastore_partition

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

MetastorePartitionSensor

An alternative to the HivePartitionSensor that talks directly to the MySQL db.

+
+
+class airflow.providers.apache.hive.sensors.metastore_partition.MetastorePartitionSensor(*, table, partition_name, schema='default', mysql_conn_id='metastore_mysql', **kwargs)[source]
+

Bases: airflow.providers.common.sql.sensors.sql.SqlSensor

+

An alternative to the HivePartitionSensor that talks directly to the MySQL db. This was created as a result of observing suboptimal queries generated by the Metastore thrift service when hitting subpartitioned tables. The Thrift service’s queries were written in a way that wouldn’t leverage the indexes.

+
+
Parameters
+
    +
  • schema (str) – the schema

  • +
  • table (str) – the table

  • +
  • partition_name (str) – the partition name, as defined in the PARTITIONS +table of the Metastore. Order of the fields does matter. +Examples: ds=2016-01-01 or +ds=2016-01-01/sub=foo for a sub partitioned table

  • +
  • mysql_conn_id (str) – a reference to the MySQL conn_id for the metastore

  • +
+
+
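A minimal sketch, assuming a metastore_mysql connection that points at the Metastore's backing MySQL database:

from airflow.providers.apache.hive.sensors.metastore_partition import MetastorePartitionSensor

# Assumes this is declared inside a DAG definition.
wait_for_partition = MetastorePartitionSensor(
    task_id="wait_for_partition_fast",
    schema="airflow",
    table="static_babynames_partitioned",
    partition_name="ds={{ ds }}",   # as stored in the Metastore PARTITIONS table
    mysql_conn_id="metastore_mysql",
)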
+
+
+template_fields :Sequence[str] = ['partition_name', 'table', 'schema'][source]
+
+ +
+
+ui_color = #8da7be[source]
+
+ +
+
+poke(context)[source]
+

Function that sensors deriving this class should override.

+
+
+
+ +
+ +
+
+
+ + + +
+ +
+
+
+
+
+

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/sensors/named_hive_partition/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/sensors/named_hive_partition/index.html new file mode 100644 index 00000000000..e59f1e3edd4 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/sensors/named_hive_partition/index.html @@ -0,0 +1,941 @@ + + + + + + + + + + + + airflow.providers.apache.hive.sensors.named_hive_partition — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +
+

airflow.providers.apache.hive.sensors.named_hive_partition

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

NamedHivePartitionSensor

Waits for a set of partitions to show up in Hive.

+
+
+class airflow.providers.apache.hive.sensors.named_hive_partition.NamedHivePartitionSensor(*, partition_names, metastore_conn_id='metastore_default', poke_interval=60 * 3, hook=None, **kwargs)[source]
+

Bases: airflow.sensors.base.BaseSensorOperator

+

Waits for a set of partitions to show up in Hive.

+
+
Parameters
+
    +
  • partition_names (list[str]) – List of fully qualified names of the +partitions to wait for. A fully qualified name is of the +form schema.table/pk1=pv1/pk2=pv2, for example, +default.users/ds=2016-01-01. This is passed as is to the metastore +Thrift client get_partitions_by_name method. Note that +you cannot use logical or comparison operators as in +HivePartitionSensor.

  • +
  • metastore_conn_id (str) – Reference to the +metastore thrift service connection id.

  • +
+
+
+
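A short sketch; the fully qualified partition names are illustrative assumptions:

from airflow.providers.apache.hive.sensors.named_hive_partition import NamedHivePartitionSensor

# Assumes this is declared inside a DAG definition.
wait_for_partitions = NamedHivePartitionSensor(
    task_id="wait_for_named_partitions",
    partition_names=[
        "airflow.static_babynames_partitioned/ds={{ ds }}",  # schema.table/pk=pv
        "default.users/ds={{ ds }}",
    ],
    metastore_conn_id="metastore_default",
)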
+
+template_fields :Sequence[str] = ['partition_names'][source]
+
+ +
+
+ui_color = #8d99ae[source]
+
+ +
+
+static parse_partition_name(partition)[source]
+

Get schema, table, and partition info.

+
+
+
+ +
+
+poke_partition(partition)[source]
+

Check for a named partition.

+
+
+
+ +
+
+poke(context)[source]
+

Function that sensors deriving this class should override.

+
+
+
+ +
+ +
+
+
+ + + +
+ +
+
+
+
+
+

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/hive_to_mysql/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/hive_to_mysql/index.html new file mode 100644 index 00000000000..b2860deade4 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/hive_to_mysql/index.html @@ -0,0 +1,949 @@ + + + + + + + + + + + + airflow.providers.apache.hive.transfers.hive_to_mysql — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +
+

airflow.providers.apache.hive.transfers.hive_to_mysql

+

This module contains an operator to move data from Hive to MySQL.

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

HiveToMySqlOperator

Moves data from Hive to MySQL; for now the data is loaded into memory before being pushed to MySQL.

+
+
+class airflow.providers.apache.hive.transfers.hive_to_mysql.HiveToMySqlOperator(*, sql, mysql_table, hiveserver2_conn_id='hiveserver2_default', mysql_conn_id='mysql_default', mysql_preoperator=None, mysql_postoperator=None, bulk_load=False, hive_conf=None, **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Moves data from Hive to MySQL; note that for now the data is loaded into memory before being pushed to MySQL, so this operator should be used for smallish amounts of data.

+
+
Parameters
+
    +
  • sql (str) – SQL query to execute against Hive server. (templated)

  • +
  • mysql_table (str) – target MySQL table, use dot notation to target a +specific database. (templated)

  • +
  • mysql_conn_id (str) – source mysql connection

  • +
  • hiveserver2_conn_id (str) – Reference to the +Hive Server2 thrift service connection id.

  • +
  • mysql_preoperator (str | None) – sql statement to run against mysql prior to the import, typically used to truncate or delete in place of the data coming in, allowing the task to be idempotent (running the task twice won’t double load data). (templated)

  • +
  • mysql_postoperator (str | None) – sql statement to run against mysql after the +import, typically used to move data from staging to +production and issue cleanup commands. (templated)

  • +
  • bulk_load (bool) – flag to use bulk_load option. This loads mysql directly +from a tab-delimited text file using the LOAD DATA LOCAL INFILE command. +This option requires an extra connection parameter for the +destination MySQL connection: {‘local_infile’: true}.

  • +
  • hive_conf (dict | None) –

  • +
+
+
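A hedged sketch; the query, target table and connection ids are assumptions, and mysql_preoperator keeps the task idempotent as described above:

from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator

# Assumes this is declared inside a DAG definition.
hive_to_mysql = HiveToMySqlOperator(
    task_id="hive_to_mysql",
    sql="SELECT ds, state, num FROM airflow.static_babynames_partitioned WHERE ds = '{{ ds }}'",
    mysql_table="reporting.babynames",
    hiveserver2_conn_id="hiveserver2_default",
    mysql_conn_id="mysql_default",
    mysql_preoperator="DELETE FROM reporting.babynames WHERE ds = '{{ ds }}'",
    bulk_load=False,
)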
+
+
+template_fields :Sequence[str] = ['sql', 'mysql_table', 'mysql_preoperator', 'mysql_postoperator'][source]
+
+ +
+
+template_ext :Sequence[str] = ['.sql'][source]
+
+ +
+
+template_fields_renderers[source]
+
+ +
+
+ui_color = #a0e08c[source]
+
+ +
+
+execute(context)[source]
+

This is the main method to derive when creating an operator. +Context is the same dictionary used as when rendering jinja templates.

+

Refer to get_template_context for more context.

+
+
+
+ +
+ +
+
+
+ + + +
+ +
+
+
+
+
+

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/hive_to_samba/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/hive_to_samba/index.html new file mode 100644 index 00000000000..0014444cee9 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/hive_to_samba/index.html @@ -0,0 +1,928 @@ + + + + + + + + + + + + airflow.providers.apache.hive.transfers.hive_to_samba — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +
+

airflow.providers.apache.hive.transfers.hive_to_samba

+

This module contains an operator to move data from Hive to Samba.

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

HiveToSambaOperator

Executes hql code in a specific Hive database and loads the results to a Samba location.

+
+
+class airflow.providers.apache.hive.transfers.hive_to_samba.HiveToSambaOperator(*, hql, destination_filepath, samba_conn_id='samba_default', hiveserver2_conn_id='hiveserver2_default', **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Executes hql code in a specific Hive database and loads the +results of the query as a csv to a Samba location.

+
+
Parameters
+
    +
  • hql (str) – the hql to be exported. (templated)

  • +
  • destination_filepath (str) – the file path to where the file will be pushed onto samba

  • +
  • samba_conn_id (str) – reference to the samba destination

  • +
  • hiveserver2_conn_id (str) – Reference to the +:ref: Hive Server2 thrift service connection id <howto/connection:hiveserver2>.

  • +
+
+
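A minimal sketch; the HQL and the path on the Samba share are illustrative assumptions:

from airflow.providers.apache.hive.transfers.hive_to_samba import HiveToSambaOperator

# Assumes this is declared inside a DAG definition.
hive_to_samba = HiveToSambaOperator(
    task_id="hive_to_samba",
    hql="SELECT * FROM airflow.static_babynames LIMIT 1000",
    destination_filepath="reports/babynames_{{ ds }}.csv",  # hypothetical share path
    samba_conn_id="samba_default",
    hiveserver2_conn_id="hiveserver2_default",
)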
+
+
+template_fields :Sequence[str] = ['hql', 'destination_filepath'][source]
+
+ +
+
+template_ext :Sequence[str] = ['.hql', '.sql'][source]
+
+ +
+
+template_fields_renderers[source]
+
+ +
+
+execute(context)[source]
+

This is the main method to derive when creating an operator. +Context is the same dictionary used as when rendering jinja templates.

+

Refer to get_template_context for more context.

+
+
+
+ +
+ +
+
+
+ + + +
+ +
+
+
+
+
+

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/index.html new file mode 100644 index 00000000000..7db5e842334 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/index.html @@ -0,0 +1,858 @@ + + + + + + + + + + + + airflow.providers.apache.hive.transfers — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + + +
+

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/mssql_to_hive/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/mssql_to_hive/index.html new file mode 100644 index 00000000000..13f9f93e0da --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/mssql_to_hive/index.html @@ -0,0 +1,963 @@ + + + + + + + + + + + + airflow.providers.apache.hive.transfers.mssql_to_hive — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +
+

airflow.providers.apache.hive.transfers.mssql_to_hive

+

This module contains an operator to move data from MSSQL to Hive.

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

MsSqlToHiveOperator

Moves data from Microsoft SQL Server to Hive.

+
+
+class airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlToHiveOperator(*, sql, hive_table, create=True, recreate=False, partition=None, delimiter=chr(1), mssql_conn_id='mssql_default', hive_cli_conn_id='hive_cli_default', tblproperties=None, **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Moves data from Microsoft SQL Server to Hive. The operator runs your query against Microsoft SQL Server and stores the file locally before loading it into a Hive table. If the create or recreate arguments are set to True, CREATE TABLE and DROP TABLE statements are generated. Hive data types are inferred from the cursor’s metadata. Note that the table generated in Hive uses STORED AS textfile, which isn’t the most efficient serialization format. If a large amount of data is loaded and/or if the table gets queried considerably, you may want to use this operator only to stage the data into a temporary table before loading it into its final destination using a HiveOperator.

+
+
Parameters
+
    +
  • sql (str) – SQL query to execute against the Microsoft SQL Server +database. (templated)

  • +
  • hive_table (str) – target Hive table, use dot notation to target a specific +database. (templated)

  • +
  • create (bool) – whether to create the table if it doesn’t exist

  • +
  • recreate (bool) – whether to drop and recreate the table at every execution

  • +
  • partition (dict | None) – target partition as a dict of partition columns and +values. (templated)

  • +
  • delimiter (str) – field delimiter in the file

  • +
  • mssql_conn_id (str) – source Microsoft SQL Server connection

  • +
  • hive_cli_conn_id (str) – Reference to the +Hive CLI connection id.

  • +
  • tblproperties (dict | None) – TBLPROPERTIES of the hive table being created

  • +
+
+
+
+
+template_fields :Sequence[str] = ['sql', 'partition', 'hive_table'][source]
+
+ +
+
+template_ext :Sequence[str] = ['.sql'][source]
+
+ +
+
+template_fields_renderers[source]
+
+ +
+
+ui_color = #a0e08c[source]
+
+ +
+
+classmethod type_map(mssql_type)[source]
+

Maps MsSQL type to Hive type.

+
+
+
+ +
+
+execute(context)[source]
+

This is the main method to derive when creating an operator. +Context is the same dictionary used as when rendering jinja templates.

+

Refer to get_template_context for more context.

+
+
+
+ +
+ +
+
+
+ + + +
+ +
+
+
+
+
+

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/mysql_to_hive/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/mysql_to_hive/index.html new file mode 100644 index 00000000000..914ff6496e3 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/mysql_to_hive/index.html @@ -0,0 +1,968 @@ + + + + + + + + + + + + airflow.providers.apache.hive.transfers.mysql_to_hive — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +
+

airflow.providers.apache.hive.transfers.mysql_to_hive

+

This module contains an operator to move data from MySQL to Hive.

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

MySqlToHiveOperator

Moves data from MySql to Hive.

+
+
+class airflow.providers.apache.hive.transfers.mysql_to_hive.MySqlToHiveOperator(*, sql, hive_table, create=True, recreate=False, partition=None, delimiter=chr(1), quoting=None, quotechar='"', escapechar=None, mysql_conn_id='mysql_default', hive_cli_conn_id='hive_cli_default', tblproperties=None, **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Moves data from MySql to Hive. The operator runs your query against MySQL and stores the file locally before loading it into a Hive table. If the create or recreate arguments are set to True, CREATE TABLE and DROP TABLE statements are generated. Hive data types are inferred from the cursor’s metadata. Note that the table generated in Hive uses STORED AS textfile, which isn’t the most efficient serialization format. If a large amount of data is loaded and/or if the table gets queried considerably, you may want to use this operator only to stage the data into a temporary table before loading it into its final destination using a HiveOperator.

+
+
Parameters
+
    +
  • sql (str) – SQL query to execute against the MySQL database. (templated)

  • +
  • hive_table (str) – target Hive table, use dot notation to target a +specific database. (templated)

  • +
  • create (bool) – whether to create the table if it doesn’t exist

  • +
  • recreate (bool) – whether to drop and recreate the table at every +execution

  • +
  • partition (dict | None) – target partition as a dict of partition columns +and values. (templated)

  • +
  • delimiter (str) – field delimiter in the file

  • +
  • quoting (str | None) – controls when quotes should be generated by csv writer, +It can take on any of the csv.QUOTE_* constants.

  • +
  • quotechar (str) – one-character string used to quote fields +containing special characters.

  • +
  • escapechar (str | None) – one-character string used by csv writer to escape +the delimiter or quotechar.

  • +
  • mysql_conn_id (str) – source mysql connection

  • +
  • hive_cli_conn_id (str) – Reference to the +Hive CLI connection id.

  • +
  • tblproperties (dict | None) – TBLPROPERTIES of the hive table being created

  • +
+
+
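A hedged sketch (the same pattern applies to the MsSQL and Vertica variants); the query, tables and connections are assumptions:

from airflow.providers.apache.hive.transfers.mysql_to_hive import MySqlToHiveOperator

# Assumes this is declared inside a DAG definition.
mysql_to_hive = MySqlToHiveOperator(
    task_id="mysql_to_hive",
    sql="SELECT id, name, created_at FROM users WHERE created_at >= '{{ ds }}'",
    hive_table="staging.users",        # database.table in Hive
    partition={"ds": "{{ ds }}"},
    create=True,
    recreate=False,
    mysql_conn_id="mysql_default",
    hive_cli_conn_id="hive_cli_default",
)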
+
+
+template_fields :Sequence[str] = ['sql', 'partition', 'hive_table'][source]
+
+ +
+
+template_ext :Sequence[str] = ['.sql'][source]
+
+ +
+
+template_fields_renderers[source]
+
+ +
+
+ui_color = #a0e08c[source]
+
+ +
+
+classmethod type_map(mysql_type)[source]
+

Maps MySQL type to Hive type.

+
+
+
+ +
+
+execute(context)[source]
+

This is the main method to derive when creating an operator. +Context is the same dictionary used as when rendering jinja templates.

+

Refer to get_template_context for more context.

+
+
+
+ +
+ +
+
+
+ + + +
+ +
+
+
+
+
+

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/s3_to_hive/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/s3_to_hive/index.html new file mode 100644 index 00000000000..df8da5cc72e --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/s3_to_hive/index.html @@ -0,0 +1,974 @@ + + + + + + + + + + + + airflow.providers.apache.hive.transfers.s3_to_hive — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +
+

airflow.providers.apache.hive.transfers.s3_to_hive

+

This module contains an operator to move data from an S3 bucket to Hive.

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

S3ToHiveOperator

Moves data from S3 to Hive.

+
+
+class airflow.providers.apache.hive.transfers.s3_to_hive.S3ToHiveOperator(*, s3_key, field_dict, hive_table, delimiter=',', create=True, recreate=False, partition=None, headers=False, check_headers=False, wildcard_match=False, aws_conn_id='aws_default', verify=None, hive_cli_conn_id='hive_cli_default', input_compressed=False, tblproperties=None, select_expression=None, **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Moves data from S3 to Hive. The operator downloads a file from S3 and stores it locally before loading it into a Hive table. If the create or recreate arguments are set to True, CREATE TABLE and DROP TABLE statements are generated. Hive data types are inferred from the cursor’s metadata.

+

Note that the table generated in Hive uses STORED AS textfile +which isn’t the most efficient serialization format. If a +large amount of data is loaded and/or if the tables gets +queried considerably, you may want to use this operator only to +stage the data into a temporary table before loading it into its +final destination using a HiveOperator.

+
+
Parameters
+
    +
  • s3_key (str) – The key to be retrieved from S3. (templated)

  • +
  • field_dict (dict) – A dictionary of the fields name in the file +as keys and their Hive types as values

  • +
  • hive_table (str) – target Hive table, use dot notation to target a +specific database. (templated)

  • +
  • delimiter (str) – field delimiter in the file

  • +
  • create (bool) – whether to create the table if it doesn’t exist

  • +
  • recreate (bool) – whether to drop and recreate the table at every +execution

  • +
  • partition (dict | None) – target partition as a dict of partition columns +and values. (templated)

  • +
  • headers (bool) – whether the file contains column names on the first +line

  • +
  • check_headers (bool) – whether the column names on the first line should be +checked against the keys of field_dict

  • +
  • wildcard_match (bool) – whether the s3_key should be interpreted as a Unix +wildcard pattern

  • +
  • aws_conn_id (str) – source s3 connection

  • +
  • verify (bool | str | None) –

    Whether or not to verify SSL certificates for S3 connection. +By default SSL certificates are verified. +You can provide the following values:

    • False: do not validate SSL certificates. SSL will still be used (unless use_ssl is False), but SSL certificates will not be verified.

    • path/to/cert/bundle.pem: A filename of the CA cert bundle to use. You can specify this argument if you want to use a different CA cert bundle than the one used by botocore.

  • +
  • hive_cli_conn_id (str) – Reference to the +Hive CLI connection id.

  • +
  • input_compressed (bool) – Boolean to determine if file decompression is +required to process headers

  • +
  • tblproperties (dict | None) – TBLPROPERTIES of the hive table being created

  • +
  • select_expression (str | None) – S3 Select expression

  • +
+
+
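A hedged sketch; the S3 key, field mapping and connections are illustrative assumptions:

from airflow.providers.apache.hive.transfers.s3_to_hive import S3ToHiveOperator

# Assumes this is declared inside a DAG definition.
s3_to_hive = S3ToHiveOperator(
    task_id="s3_to_hive",
    s3_key="s3://my-bucket/babynames/{{ ds }}/data.csv.gz",  # hypothetical key
    field_dict={"state": "STRING", "name": "STRING", "num": "INT"},
    hive_table="staging.babynames",
    delimiter=",",
    headers=True,
    check_headers=True,
    input_compressed=True,        # gzipped input must be decompressed to check headers
    partition={"ds": "{{ ds }}"},
    aws_conn_id="aws_default",
    hive_cli_conn_id="hive_cli_default",
)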
+
+
+template_fields :Sequence[str] = ['s3_key', 'partition', 'hive_table'][source]
+
+ +
+
+template_ext :Sequence[str] = [][source]
+
+ +
+
+ui_color = #a0e08c[source]
+
+ +
+
+execute(context)[source]
+

This is the main method to derive when creating an operator. +Context is the same dictionary used as when rendering jinja templates.

+

Refer to get_template_context for more context.

+
+
+
+ +
+ +
+
+
+ + + +
+ +
+
+
+
+
+

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/vertica_to_hive/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/vertica_to_hive/index.html new file mode 100644 index 00000000000..7b26b723557 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/airflow/providers/apache/hive/transfers/vertica_to_hive/index.html @@ -0,0 +1,961 @@ + + + + + + + + + + + + airflow.providers.apache.hive.transfers.vertica_to_hive — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +
+

airflow.providers.apache.hive.transfers.vertica_to_hive

+

This module contains an operator to move data from Vertica to Hive.

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

VerticaToHiveOperator

Moves data from Vertica to Hive. The operator runs

+
+
+class airflow.providers.apache.hive.transfers.vertica_to_hive.VerticaToHiveOperator(*, sql, hive_table, create=True, recreate=False, partition=None, delimiter=chr(1), vertica_conn_id='vertica_default', hive_cli_conn_id='hive_cli_default', **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Moves data from Vertica to Hive. The operator runs your query against Vertica, stores the result in a local file, and then loads it into a Hive table. If the create or recreate arguments are set to True, CREATE TABLE and DROP TABLE statements are generated. Hive data types are inferred from the cursor’s metadata. Note that the table generated in Hive uses STORED AS textfile, which isn’t the most efficient serialization format. If a large amount of data is loaded and/or if the table gets queried considerably, you may want to use this operator only to stage the data into a temporary table before loading it into its final destination using a HiveOperator.

+
+
Parameters
+
    +
  • sql (str) – SQL query to execute against the Vertica database. (templated)
  • hive_table (str) – target Hive table, use dot notation to target a specific database. (templated)
  • create (bool) – whether to create the table if it doesn’t exist
  • recreate (bool) – whether to drop and recreate the table at every execution
  • partition (dict | None) – target partition as a dict of partition columns and values. (templated)
  • delimiter (str) – field delimiter in the file
  • vertica_conn_id (str) – source Vertica connection
  • hive_cli_conn_id (str) – Reference to the Hive CLI connection id.
+
+
+
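For orientation, a minimal, hypothetical instantiation might look like the following; the SQL, table name and partition are placeholders rather than values taken from this page:

from airflow.providers.apache.hive.transfers.vertica_to_hive import VerticaToHiveOperator

# Hypothetical example: stage a Vertica query result into a Hive table.
vertica_to_hive_staging = VerticaToHiveOperator(
    task_id="vertica_to_hive_staging",
    sql="SELECT id, name, created_at FROM public.users WHERE created_at::date = '{{ ds }}'",
    hive_table="staging.users",  # hypothetical target table
    create=True,
    partition={"ds": "{{ ds }}"},
    vertica_conn_id="vertica_default",
    hive_cli_conn_id="hive_cli_default",
)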
+template_fields :Sequence[str] = ['sql', 'partition', 'hive_table'][source]
+
+ +
+
+template_ext :Sequence[str] = ['.sql'][source]
+
+ +
+
+template_fields_renderers[source]
+
+ +
+
+ui_color = #b4e0ff[source]
+
+ +
+
+classmethod type_map(vertica_type)[source]
+

The vertica-python datatype.py module does not expose the full type mapping, so this method provides a manual mapping. Reference: https://github.com/uber/vertica-python/blob/master/vertica_python/vertica/column.py

+
+ +
+
+execute(context)[source]
+

This is the main method to derive when creating an operator. Context is the same dictionary used as when rendering jinja templates.

+

Refer to get_template_context for more context.

+
+
+
+ +
+ +
+
+
+ + + +
+ +
+
+
+
+
+

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/tests/system/providers/apache/hive/example_twitter_dag/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/tests/system/providers/apache/hive/example_twitter_dag/index.html new file mode 100644 index 00000000000..225e5e2331d --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/tests/system/providers/apache/hive/example_twitter_dag/index.html @@ -0,0 +1,966 @@ + + + + + + + + + + + + tests.system.providers.apache.hive.example_twitter_dag — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +
+

tests.system.providers.apache.hive.example_twitter_dag

+

This is an example DAG for managing Twitter data.

+
+

Module Contents

+
+

Functions

+ ++++ + + + + + + + + + + + + + + +

fetch_tweets()

This task should call Twitter API and retrieve tweets from yesterday from and to for the four twitter

clean_tweets()

This is a placeholder to clean the eight files. In this step you can get rid of or cherry pick columns

analyze_tweets()

This is a placeholder to analyze the twitter data. Could simply be a sentiment analysis through algorithms

transfer_to_db()

This is a placeholder to extract summary from Hive data and store it to MySQL.

+
+
+

Attributes

+ ++++ + + + + + + + + + + + + + + +

ENV_ID

DAG_ID

fetch

test_run

+
+
+tests.system.providers.apache.hive.example_twitter_dag.ENV_ID[source]
+
+ +
+
+tests.system.providers.apache.hive.example_twitter_dag.DAG_ID = example_twitter_dag[source]
+
+ +
+
+tests.system.providers.apache.hive.example_twitter_dag.fetch_tweets()[source]
+

This task should call the Twitter API and retrieve yesterday’s tweets sent from and to each of the four Twitter users (Twitter_A,..,Twitter_D). There should be eight CSV output files generated by this task, and the naming convention is direction(from or to)_twitterHandle_date.csv

+
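To make the naming convention above concrete, here is a small sketch; the handle and date are examples only:

direction = "from"            # or "to"
twitter_handle = "Twitter_A"  # one of the four handles
date = "2015-01-01"           # example date
filename = f"{direction}_{twitter_handle}_{date}.csv"
# -> "from_Twitter_A_2015-01-01.csv"; eight such files in total (2 directions x 4 handles)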
+ +
+
+tests.system.providers.apache.hive.example_twitter_dag.clean_tweets()[source]
+

This is a placeholder to clean the eight files. In this step you can get rid of, or cherry-pick, columns and different parts of the text.

+
+ +
+
+tests.system.providers.apache.hive.example_twitter_dag.analyze_tweets()[source]
+

This is a placeholder to analyze the Twitter data. It could simply be a sentiment analysis using algorithms like bag of words, or something more complicated. You can also take a look at web services to do such tasks.

+
+ +
+
+tests.system.providers.apache.hive.example_twitter_dag.transfer_to_db()[source]
+

This is a placeholder to extract summary from Hive data and store it to MySQL.

+
+ +
+
+tests.system.providers.apache.hive.example_twitter_dag.fetch[source]
+
+ +
+
+tests.system.providers.apache.hive.example_twitter_dag.test_run[source]
+
+ +
+
+
+ + + +
+ +
+
+
+
+
+

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/tests/system/providers/apache/hive/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/tests/system/providers/apache/hive/index.html new file mode 100644 index 00000000000..4bd5ced2b4f --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_api/tests/system/providers/apache/hive/index.html @@ -0,0 +1,851 @@ + + + + + + + + + + + + tests.system.providers.apache.hive — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + + +
+

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/hooks/hive.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/hooks/hive.html new file mode 100644 index 00000000000..1d693ff207d --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/hooks/hive.html @@ -0,0 +1,1843 @@ + + + + + + + + + + + + airflow.providers.apache.hive.hooks.hive — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +

Source code for airflow.providers.apache.hive.hooks.hive

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import contextlib
+import os
+import re
+import socket
+import subprocess
+import time
+import warnings
+from collections import OrderedDict
+from tempfile import NamedTemporaryFile, TemporaryDirectory
+from typing import Any, Iterable, Mapping
+
+import pandas
+import unicodecsv as csv
+
+from airflow.configuration import conf
+from airflow.exceptions import AirflowException
+from airflow.hooks.base import BaseHook
+from airflow.providers.common.sql.hooks.sql import DbApiHook
+from airflow.security import utils
+from airflow.utils.helpers import as_flattened_list
+from airflow.utils.operator_helpers import AIRFLOW_VAR_NAME_FORMAT_MAPPING
+
+
[docs]HIVE_QUEUE_PRIORITIES = ["VERY_HIGH", "HIGH", "NORMAL", "LOW", "VERY_LOW"]
+ + +
[docs]def get_context_from_env_var() -> dict[Any, Any]: + """ + Extract context from env variable, e.g. dag_id, task_id and execution_date, + so that they can be used inside BashOperator and PythonOperator. + + :return: The context of interest. + """ + return { + format_map["default"]: os.environ.get(format_map["env_var_format"], "") + for format_map in AIRFLOW_VAR_NAME_FORMAT_MAPPING.values()
+ } + + +
[docs]class HiveCliHook(BaseHook): + """Simple wrapper around the hive CLI. + + It also supports the ``beeline`` + a lighter CLI that runs JDBC and is replacing the heavier + traditional CLI. To enable ``beeline``, set the use_beeline param in the + extra field of your connection as in ``{ "use_beeline": true }`` + + Note that you can also set default hive CLI parameters by passing ``hive_cli_params`` + space separated list of parameters to add to the hive command. + + The extra connection parameter ``auth`` gets passed as in the ``jdbc`` + connection string as is. + + :param hive_cli_conn_id: Reference to the + :ref:`Hive CLI connection id <howto/connection:hive_cli>`. + :param mapred_queue: queue used by the Hadoop Scheduler (Capacity or Fair) + :param mapred_queue_priority: priority within the job queue. + Possible settings include: VERY_HIGH, HIGH, NORMAL, LOW, VERY_LOW + :param mapred_job_name: This name will appear in the jobtracker. + This can make monitoring easier. + :param hive_cli_params: Space separated list of hive command parameters to add to the + hive command. + """ + +
[docs] conn_name_attr = "hive_cli_conn_id"
+
[docs] default_conn_name = "hive_cli_default"
+
[docs] conn_type = "hive_cli"
+
[docs] hook_name = "Hive Client Wrapper"
+ + def __init__( + self, + hive_cli_conn_id: str = default_conn_name, + run_as: str | None = None, + mapred_queue: str | None = None, + mapred_queue_priority: str | None = None, + mapred_job_name: str | None = None, + hive_cli_params: str = "", + ) -> None: + super().__init__() + conn = self.get_connection(hive_cli_conn_id) + self.hive_cli_params: str = hive_cli_params + self.use_beeline: bool = conn.extra_dejson.get("use_beeline", False) + self.auth = conn.extra_dejson.get("auth", "noSasl") + self.conn = conn + self.run_as = run_as + self.sub_process: Any = None + + if mapred_queue_priority: + mapred_queue_priority = mapred_queue_priority.upper() + if mapred_queue_priority not in HIVE_QUEUE_PRIORITIES: + raise AirflowException( + f"Invalid Mapred Queue Priority. Valid values are: {', '.join(HIVE_QUEUE_PRIORITIES)}" + ) + + self.mapred_queue = mapred_queue or conf.get("hive", "default_hive_mapred_queue") + self.mapred_queue_priority = mapred_queue_priority + self.mapred_job_name = mapred_job_name + + def _get_proxy_user(self) -> str: + """This function set the proper proxy_user value in case the user overwrite the default.""" + conn = self.conn + + proxy_user_value: str = conn.extra_dejson.get("proxy_user", "") + if proxy_user_value == "login" and conn.login: + return f"hive.server2.proxy.user={conn.login}" + if proxy_user_value == "owner" and self.run_as: + return f"hive.server2.proxy.user={self.run_as}" + if proxy_user_value != "": # There is a custom proxy user + return f"hive.server2.proxy.user={proxy_user_value}" + return proxy_user_value # The default proxy user (undefined) + + def _prepare_cli_cmd(self) -> list[Any]: + """This function creates the command list from available information""" + conn = self.conn + hive_bin = "hive" + cmd_extra = [] + + if self.use_beeline: + hive_bin = "beeline" + jdbc_url = f"jdbc:hive2://{conn.host}:{conn.port}/{conn.schema}" + if conf.get("core", "security") == "kerberos": + template = conn.extra_dejson.get("principal", "hive/_HOST@EXAMPLE.COM") + if "_HOST" in template: + template = utils.replace_hostname_pattern(utils.get_components(template)) + + proxy_user = self._get_proxy_user() + + jdbc_url += f";principal={template};{proxy_user}" + elif self.auth: + jdbc_url += ";auth=" + self.auth + + jdbc_url = f'"{jdbc_url}"' + + cmd_extra += ["-u", jdbc_url] + if conn.login: + cmd_extra += ["-n", conn.login] + if conn.password: + cmd_extra += ["-p", conn.password] + + hive_params_list = self.hive_cli_params.split() + + return [hive_bin] + cmd_extra + hive_params_list + + @staticmethod + def _prepare_hiveconf(d: dict[Any, Any]) -> list[Any]: + """ + This function prepares a list of hiveconf params + from a dictionary of key value pairs. + + :param d: + + >>> hh = HiveCliHook() + >>> hive_conf = {"hive.exec.dynamic.partition": "true", + ... "hive.exec.dynamic.partition.mode": "nonstrict"} + >>> hh._prepare_hiveconf(hive_conf) + ["-hiveconf", "hive.exec.dynamic.partition=true",\ + "-hiveconf", "hive.exec.dynamic.partition.mode=nonstrict"] + """ + if not d: + return [] + return as_flattened_list(zip(["-hiveconf"] * len(d), [f"{k}={v}" for k, v in d.items()])) + +
[docs] def run_cli( + self, + hql: str, + schema: str | None = None, + verbose: bool = True, + hive_conf: dict[Any, Any] | None = None, + ) -> Any: + """ + Run an hql statement using the hive cli. If hive_conf is specified + it should be a dict and the entries will be set as key/value pairs + in HiveConf. + + :param hql: an hql (hive query language) statement to run with hive cli + :param schema: Name of hive schema (database) to use + :param verbose: Provides additional logging. Defaults to True. + :param hive_conf: if specified these key value pairs will be passed + to hive as ``-hiveconf "key"="value"``. Note that they will be + passed after the ``hive_cli_params`` and thus will override + whatever values are specified in the database. + + >>> hh = HiveCliHook() + >>> result = hh.run_cli("USE airflow;") + >>> ("OK" in result) + True + """ + conn = self.conn + schema = schema or conn.schema + + invalid_chars_list = re.findall(r"[^a-z0-9_]", schema) + if invalid_chars_list: + invalid_chars = "".join(char for char in invalid_chars_list) + raise RuntimeError(f"The schema `{schema}` contains invalid characters: {invalid_chars}") + + if schema: + hql = f"USE {schema};\n{hql}" + + with TemporaryDirectory(prefix="airflow_hiveop_") as tmp_dir: + with NamedTemporaryFile(dir=tmp_dir) as f: + hql += "\n" + f.write(hql.encode("UTF-8")) + f.flush() + hive_cmd = self._prepare_cli_cmd() + env_context = get_context_from_env_var() + # Only extend the hive_conf if it is defined. + if hive_conf: + env_context.update(hive_conf) + hive_conf_params = self._prepare_hiveconf(env_context) + if self.mapred_queue: + hive_conf_params.extend( + [ + "-hiveconf", + f"mapreduce.job.queuename={self.mapred_queue}", + "-hiveconf", + f"mapred.job.queue.name={self.mapred_queue}", + "-hiveconf", + f"tez.queue.name={self.mapred_queue}", + ] + ) + + if self.mapred_queue_priority: + hive_conf_params.extend( + ["-hiveconf", f"mapreduce.job.priority={self.mapred_queue_priority}"] + ) + + if self.mapred_job_name: + hive_conf_params.extend(["-hiveconf", f"mapred.job.name={self.mapred_job_name}"]) + + hive_cmd.extend(hive_conf_params) + hive_cmd.extend(["-f", f.name]) + + if verbose: + self.log.info("%s", " ".join(hive_cmd)) + sub_process: Any = subprocess.Popen( + hive_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=tmp_dir, close_fds=True + ) + self.sub_process = sub_process + stdout = "" + while True: + line = sub_process.stdout.readline() + if not line: + break + stdout += line.decode("UTF-8") + if verbose: + self.log.info(line.decode("UTF-8").strip()) + sub_process.wait() + + if sub_process.returncode: + raise AirflowException(stdout) + + return stdout
+ +
[docs] def test_hql(self, hql: str) -> None: + """Test an hql statement using the hive cli and EXPLAIN""" + create, insert, other = [], [], [] + for query in hql.split(";"): # naive + query_original = query + query = query.lower().strip() + + if query.startswith("create table"): + create.append(query_original) + elif query.startswith(("set ", "add jar ", "create temporary function")): + other.append(query_original) + elif query.startswith("insert"): + insert.append(query_original) + other_ = ";".join(other) + for query_set in [create, insert]: + for query in query_set: + + query_preview = " ".join(query.split())[:50] + self.log.info("Testing HQL [%s (...)]", query_preview) + if query_set == insert: + query = other_ + "; explain " + query + else: + query = "explain " + query + try: + self.run_cli(query, verbose=False) + except AirflowException as e: + message = e.args[0].split("\n")[-2] + self.log.info(message) + error_loc = re.search(r"(\d+):(\d+)", message) + if error_loc and error_loc.group(1).isdigit(): + lst = int(error_loc.group(1)) + begin = max(lst - 2, 0) + end = min(lst + 3, len(query.split("\n"))) + context = "\n".join(query.split("\n")[begin:end]) + self.log.info("Context :\n %s", context) + else: + self.log.info("SUCCESS")
+ +
[docs] def load_df( + self, + df: pandas.DataFrame, + table: str, + field_dict: dict[Any, Any] | None = None, + delimiter: str = ",", + encoding: str = "utf8", + pandas_kwargs: Any = None, + **kwargs: Any, + ) -> None: + """ + Loads a pandas DataFrame into hive. + + Hive data types will be inferred if not passed but column names will + not be sanitized. + + :param df: DataFrame to load into a Hive table + :param table: target Hive table, use dot notation to target a + specific database + :param field_dict: mapping from column name to hive data type. + Note that it must be OrderedDict so as to keep columns' order. + :param delimiter: field delimiter in the file + :param encoding: str encoding to use when writing DataFrame to file + :param pandas_kwargs: passed to DataFrame.to_csv + :param kwargs: passed to self.load_file + """ + + def _infer_field_types_from_df(df: pandas.DataFrame) -> dict[Any, Any]: + dtype_kind_hive_type = { + "b": "BOOLEAN", # boolean + "i": "BIGINT", # signed integer + "u": "BIGINT", # unsigned integer + "f": "DOUBLE", # floating-point + "c": "STRING", # complex floating-point + "M": "TIMESTAMP", # datetime + "O": "STRING", # object + "S": "STRING", # (byte-)string + "U": "STRING", # Unicode + "V": "STRING", # void + } + + order_type = OrderedDict() + for col, dtype in df.dtypes.iteritems(): + order_type[col] = dtype_kind_hive_type[dtype.kind] + return order_type + + if pandas_kwargs is None: + pandas_kwargs = {} + + with TemporaryDirectory(prefix="airflow_hiveop_") as tmp_dir: + with NamedTemporaryFile(dir=tmp_dir, mode="w") as f: + if field_dict is None: + field_dict = _infer_field_types_from_df(df) + + df.to_csv( + path_or_buf=f, + sep=delimiter, + header=False, + index=False, + encoding=encoding, + date_format="%Y-%m-%d %H:%M:%S", + **pandas_kwargs, + ) + f.flush() + + return self.load_file( + filepath=f.name, table=table, delimiter=delimiter, field_dict=field_dict, **kwargs
+ ) + +
[docs] def load_file( + self, + filepath: str, + table: str, + delimiter: str = ",", + field_dict: dict[Any, Any] | None = None, + create: bool = True, + overwrite: bool = True, + partition: dict[str, Any] | None = None, + recreate: bool = False, + tblproperties: dict[str, Any] | None = None, + ) -> None: + """ + Loads a local file into Hive + + Note that the table generated in Hive uses ``STORED AS textfile`` + which isn't the most efficient serialization format. If a + large amount of data is loaded and/or if the tables gets + queried considerably, you may want to use this operator only to + stage the data into a temporary table before loading it into its + final destination using a ``HiveOperator``. + + :param filepath: local filepath of the file to load + :param table: target Hive table, use dot notation to target a + specific database + :param delimiter: field delimiter in the file + :param field_dict: A dictionary of the fields name in the file + as keys and their Hive types as values. + Note that it must be OrderedDict so as to keep columns' order. + :param create: whether to create the table if it doesn't exist + :param overwrite: whether to overwrite the data in table or partition + :param partition: target partition as a dict of partition columns + and values + :param recreate: whether to drop and recreate the table at every + execution + :param tblproperties: TBLPROPERTIES of the hive table being created + """ + hql = "" + if recreate: + hql += f"DROP TABLE IF EXISTS {table};\n" + if create or recreate: + if field_dict is None: + raise ValueError("Must provide a field dict when creating a table") + fields = ",\n ".join(f"`{k.strip('`')}` {v}" for k, v in field_dict.items()) + hql += f"CREATE TABLE IF NOT EXISTS {table} (\n{fields})\n" + if partition: + pfields = ",\n ".join(p + " STRING" for p in partition) + hql += f"PARTITIONED BY ({pfields})\n" + hql += "ROW FORMAT DELIMITED\n" + hql += f"FIELDS TERMINATED BY '{delimiter}'\n" + hql += "STORED AS textfile\n" + if tblproperties is not None: + tprops = ", ".join(f"'{k}'='{v}'" for k, v in tblproperties.items()) + hql += f"TBLPROPERTIES({tprops})\n" + hql += ";" + self.log.info(hql) + self.run_cli(hql) + hql = f"LOAD DATA LOCAL INPATH '{filepath}' " + if overwrite: + hql += "OVERWRITE " + hql += f"INTO TABLE {table} " + if partition: + pvals = ", ".join(f"{k}='{v}'" for k, v in partition.items()) + hql += f"PARTITION ({pvals})" + + # As a workaround for HIVE-10541, add a newline character + # at the end of hql (AIRFLOW-2412). + hql += ";\n" + + self.log.info(hql) + self.run_cli(hql)
+ +
[docs] def kill(self) -> None: + """Kill Hive cli command""" + if hasattr(self, "sub_process"): + if self.sub_process.poll() is None: + print("Killing the Hive job") + self.sub_process.terminate() + time.sleep(60) + self.sub_process.kill()
+ + +
[docs]class HiveMetastoreHook(BaseHook): + """ + Wrapper to interact with the Hive Metastore + + :param metastore_conn_id: reference to the + :ref: `metastore thrift service connection id <howto/connection:hive_metastore>`. + """ + + # java short max val +
[docs] MAX_PART_COUNT = 32767
+ +
[docs] conn_name_attr = "metastore_conn_id"
+
[docs] default_conn_name = "metastore_default"
+
[docs] conn_type = "hive_metastore"
+
[docs] hook_name = "Hive Metastore Thrift"
+ + def __init__(self, metastore_conn_id: str = default_conn_name) -> None: + super().__init__() + self.conn = self.get_connection(metastore_conn_id) + self.metastore = self.get_metastore_client() + +
[docs] def __getstate__(self) -> dict[str, Any]: + # This is for pickling to work despite the thrift hive client not + # being picklable + state = dict(self.__dict__) + del state["metastore"] + return state
+ +
[docs] def __setstate__(self, d: dict[str, Any]) -> None: + self.__dict__.update(d) + self.__dict__["metastore"] = self.get_metastore_client()
+ +
[docs] def get_metastore_client(self) -> Any: + """Returns a Hive thrift client.""" + import hmsclient + from thrift.protocol import TBinaryProtocol + from thrift.transport import TSocket, TTransport + + host = self._find_valid_host() + conn = self.conn + + if not host: + raise AirflowException("Failed to locate the valid server.") + + if "authMechanism" in conn.extra_dejson: + warnings.warn( + "The 'authMechanism' option is deprecated. Please use 'auth_mechanism'.", + DeprecationWarning, + stacklevel=2, + ) + conn.extra_dejson["auth_mechanism"] = conn.extra_dejson["authMechanism"] + del conn.extra_dejson["authMechanism"] + + auth_mechanism = conn.extra_dejson.get("auth_mechanism", "NOSASL") + + if conf.get("core", "security") == "kerberos": + auth_mechanism = conn.extra_dejson.get("auth_mechanism", "GSSAPI") + kerberos_service_name = conn.extra_dejson.get("kerberos_service_name", "hive") + + conn_socket = TSocket.TSocket(host, conn.port) + + if conf.get("core", "security") == "kerberos" and auth_mechanism == "GSSAPI": + try: + import saslwrapper as sasl + except ImportError: + import sasl + + def sasl_factory() -> sasl.Client: + sasl_client = sasl.Client() + sasl_client.setAttr("host", host) + sasl_client.setAttr("service", kerberos_service_name) + sasl_client.init() + return sasl_client + + from thrift_sasl import TSaslClientTransport + + transport = TSaslClientTransport(sasl_factory, "GSSAPI", conn_socket) + else: + transport = TTransport.TBufferedTransport(conn_socket) + + protocol = TBinaryProtocol.TBinaryProtocol(transport) + + return hmsclient.HMSClient(iprot=protocol)
+ + def _find_valid_host(self) -> Any: + conn = self.conn + hosts = conn.host.split(",") + for host in hosts: + host_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self.log.info("Trying to connect to %s:%s", host, conn.port) + if host_socket.connect_ex((host, conn.port)) == 0: + self.log.info("Connected to %s:%s", host, conn.port) + host_socket.close() + return host + else: + self.log.error("Could not connect to %s:%s", host, conn.port) + return None + +
[docs] def get_conn(self) -> Any: + return self.metastore
+ +
[docs] def check_for_partition(self, schema: str, table: str, partition: str) -> bool: + """ + Checks whether a partition exists + + :param schema: Name of hive schema (database) @table belongs to + :param table: Name of hive table @partition belongs to + :param partition: Expression that matches the partitions to check for + (eg `a = 'b' AND c = 'd'`) + + >>> hh = HiveMetastoreHook() + >>> t = 'static_babynames_partitioned' + >>> hh.check_for_partition('airflow', t, "ds='2015-01-01'") + True + """ + with self.metastore as client: + partitions = client.get_partitions_by_filter( + schema, table, partition, HiveMetastoreHook.MAX_PART_COUNT + ) + + return bool(partitions)
+ +
[docs] def check_for_named_partition(self, schema: str, table: str, partition_name: str) -> Any: + """ + Checks whether a partition with a given name exists + + :param schema: Name of hive schema (database) @table belongs to + :param table: Name of hive table @partition belongs to + :param partition_name: Name of the partitions to check for (eg `a=b/c=d`) + + >>> hh = HiveMetastoreHook() + >>> t = 'static_babynames_partitioned' + >>> hh.check_for_named_partition('airflow', t, "ds=2015-01-01") + True + >>> hh.check_for_named_partition('airflow', t, "ds=xxx") + False + """ + with self.metastore as client: + return client.check_for_named_partition(schema, table, partition_name)
+ +
[docs] def get_table(self, table_name: str, db: str = "default") -> Any: + """Get a metastore table object + + >>> hh = HiveMetastoreHook() + >>> t = hh.get_table(db='airflow', table_name='static_babynames') + >>> t.tableName + 'static_babynames' + >>> [col.name for col in t.sd.cols] + ['state', 'year', 'name', 'gender', 'num'] + """ + if db == "default" and "." in table_name: + db, table_name = table_name.split(".")[:2] + with self.metastore as client: + return client.get_table(dbname=db, tbl_name=table_name)
+ +
[docs] def get_tables(self, db: str, pattern: str = "*") -> Any: + """Get a metastore table object""" + with self.metastore as client: + tables = client.get_tables(db_name=db, pattern=pattern) + return client.get_table_objects_by_name(db, tables)
+ +
[docs] def get_databases(self, pattern: str = "*") -> Any: + """Get a metastore table object""" + with self.metastore as client: + return client.get_databases(pattern)
+ +
[docs] def get_partitions(self, schema: str, table_name: str, partition_filter: str | None = None) -> list[Any]: + """ + Returns a list of all partitions in a table. Works only + for tables with less than 32767 (java short max val). + For subpartitioned table, the number might easily exceed this. + + >>> hh = HiveMetastoreHook() + >>> t = 'static_babynames_partitioned' + >>> parts = hh.get_partitions(schema='airflow', table_name=t) + >>> len(parts) + 1 + >>> parts + [{'ds': '2015-01-01'}] + """ + with self.metastore as client: + table = client.get_table(dbname=schema, tbl_name=table_name) + if len(table.partitionKeys) == 0: + raise AirflowException("The table isn't partitioned") + else: + if partition_filter: + parts = client.get_partitions_by_filter( + db_name=schema, + tbl_name=table_name, + filter=partition_filter, + max_parts=HiveMetastoreHook.MAX_PART_COUNT, + ) + else: + parts = client.get_partitions( + db_name=schema, tbl_name=table_name, max_parts=HiveMetastoreHook.MAX_PART_COUNT + ) + + pnames = [p.name for p in table.partitionKeys] + return [dict(zip(pnames, p.values)) for p in parts]
+ + @staticmethod + def _get_max_partition_from_part_specs( + part_specs: list[Any], partition_key: str | None, filter_map: dict[str, Any] | None + ) -> Any: + """ + Helper method to get max partition of partitions with partition_key + from part specs. key:value pair in filter_map will be used to + filter out partitions. + + :param part_specs: list of partition specs. + :param partition_key: partition key name. + :param filter_map: partition_key:partition_value map used for partition filtering, + e.g. {'key1': 'value1', 'key2': 'value2'}. + Only partitions matching all partition_key:partition_value + pairs will be considered as candidates of max partition. + :return: Max partition or None if part_specs is empty. + """ + if not part_specs: + return None + + # Assuming all specs have the same keys. + if partition_key not in part_specs[0].keys(): + raise AirflowException(f"Provided partition_key {partition_key} is not in part_specs.") + is_subset = None + if filter_map: + is_subset = set(filter_map.keys()).issubset(set(part_specs[0].keys())) + if filter_map and not is_subset: + raise AirflowException( + f"Keys in provided filter_map {', '.join(filter_map.keys())} " + f"are not subset of part_spec keys: {', '.join(part_specs[0].keys())}" + ) + + candidates = [ + p_dict[partition_key] + for p_dict in part_specs + if filter_map is None or all(item in p_dict.items() for item in filter_map.items()) + ] + + if not candidates: + return None + else: + return max(candidates) + +
[docs] def max_partition( + self, + schema: str, + table_name: str, + field: str | None = None, + filter_map: dict[Any, Any] | None = None, + ) -> Any: + """ + Returns the maximum value for all partitions with given field in a table. + If only one partition key exist in the table, the key will be used as field. + filter_map should be a partition_key:partition_value map and will be used to + filter out partitions. + + :param schema: schema name. + :param table_name: table name. + :param field: partition key to get max partition from. + :param filter_map: partition_key:partition_value map used for partition filtering. + + >>> hh = HiveMetastoreHook() + >>> filter_map = {'ds': '2015-01-01'} + >>> t = 'static_babynames_partitioned' + >>> hh.max_partition(schema='airflow',\ + ... table_name=t, field='ds', filter_map=filter_map) + '2015-01-01' + """ + with self.metastore as client: + table = client.get_table(dbname=schema, tbl_name=table_name) + key_name_set = {key.name for key in table.partitionKeys} + if len(table.partitionKeys) == 1: + field = table.partitionKeys[0].name + elif not field: + raise AirflowException("Please specify the field you want the max value for.") + elif field not in key_name_set: + raise AirflowException("Provided field is not a partition key.") + + if filter_map and not set(filter_map.keys()).issubset(key_name_set): + raise AirflowException("Provided filter_map contains keys that are not partition key.") + + part_names = client.get_partition_names( + schema, table_name, max_parts=HiveMetastoreHook.MAX_PART_COUNT + ) + part_specs = [client.partition_name_to_spec(part_name) for part_name in part_names] + + return HiveMetastoreHook._get_max_partition_from_part_specs(part_specs, field, filter_map)
+ +
[docs] def table_exists(self, table_name: str, db: str = "default") -> bool: + """ + Check if table exists + + >>> hh = HiveMetastoreHook() + >>> hh.table_exists(db='airflow', table_name='static_babynames') + True + >>> hh.table_exists(db='airflow', table_name='does_not_exist') + False + """ + try: + self.get_table(table_name, db) + return True + except Exception: + return False
+ +
[docs] def drop_partitions(self, table_name, part_vals, delete_data=False, db="default"): + """ + Drop partitions from the given table matching the part_vals input + + :param table_name: table name. + :param part_vals: list of partition specs. + :param delete_data: Setting to control if underlying data have to deleted + in addition to dropping partitions. + :param db: Name of hive schema (database) @table belongs to + + >>> hh = HiveMetastoreHook() + >>> hh.drop_partitions(db='airflow', table_name='static_babynames', + part_vals="['2020-05-01']") + True + """ + if self.table_exists(table_name, db): + with self.metastore as client: + self.log.info( + "Dropping partition of table %s.%s matching the spec: %s", db, table_name, part_vals + ) + return client.drop_partition(db, table_name, part_vals, delete_data) + else: + self.log.info("Table %s.%s does not exist!", db, table_name) + return False
+ + +
[docs]class HiveServer2Hook(DbApiHook): + """ + Wrapper around the pyhive library + + Notes: + * the default auth_mechanism is PLAIN, to override it you + can specify it in the ``extra`` of your connection in the UI + * the default for run_set_variable_statements is true, if you + are using impala you may need to set it to false in the + ``extra`` of your connection in the UI + + :param hiveserver2_conn_id: Reference to the + :ref: `Hive Server2 thrift service connection id <howto/connection:hiveserver2>`. + :param schema: Hive database name. + """ + +
[docs] conn_name_attr = "hiveserver2_conn_id"
+
[docs] default_conn_name = "hiveserver2_default"
+
[docs] conn_type = "hiveserver2"
+
[docs] hook_name = "Hive Server 2 Thrift"
+
[docs] supports_autocommit = False
+ +
[docs] def get_conn(self, schema: str | None = None) -> Any: + """Returns a Hive connection object.""" + username: str | None = None + password: str | None = None + + db = self.get_connection(self.hiveserver2_conn_id) # type: ignore + + if "authMechanism" in db.extra_dejson: + warnings.warn( + "The 'authMechanism' option is deprecated. Please use 'auth_mechanism'.", + DeprecationWarning, + stacklevel=2, + ) + db.extra_dejson["auth_mechanism"] = db.extra_dejson["authMechanism"] + del db.extra_dejson["authMechanism"] + + auth_mechanism = db.extra_dejson.get("auth_mechanism", "NONE") + if auth_mechanism == "NONE" and db.login is None: + # we need to give a username + username = "airflow" + kerberos_service_name = None + if conf.get("core", "security") == "kerberos": + auth_mechanism = db.extra_dejson.get("auth_mechanism", "KERBEROS") + kerberos_service_name = db.extra_dejson.get("kerberos_service_name", "hive") + + # pyhive uses GSSAPI instead of KERBEROS as a auth_mechanism identifier + if auth_mechanism == "GSSAPI": + self.log.warning( + "Detected deprecated 'GSSAPI' for auth_mechanism for %s. Please use 'KERBEROS' instead", + self.hiveserver2_conn_id, # type: ignore + ) + auth_mechanism = "KERBEROS" + + # Password should be set if and only if in LDAP or CUSTOM mode + if auth_mechanism in ("LDAP", "CUSTOM"): + password = db.password + + from pyhive.hive import connect + + return connect( + host=db.host, + port=db.port, + auth=auth_mechanism, + kerberos_service_name=kerberos_service_name, + username=db.login or username, + password=password, + database=schema or db.schema or "default",
+ ) + + def _get_results( + self, + sql: str | list[str], + schema: str = "default", + fetch_size: int | None = None, + hive_conf: Iterable | Mapping | None = None, + ) -> Any: + from pyhive.exc import ProgrammingError + + if isinstance(sql, str): + sql = [sql] + previous_description = None + with contextlib.closing(self.get_conn(schema)) as conn, contextlib.closing(conn.cursor()) as cur: + + cur.arraysize = fetch_size or 1000 + + # not all query services (e.g. impala AIRFLOW-4434) support the set command + + db = self.get_connection(self.hiveserver2_conn_id) # type: ignore + + if db.extra_dejson.get("run_set_variable_statements", True): + env_context = get_context_from_env_var() + if hive_conf: + env_context.update(hive_conf) + for k, v in env_context.items(): + cur.execute(f"set {k}={v}") + + for statement in sql: + cur.execute(statement) + # we only get results of statements that returns + lowered_statement = statement.lower().strip() + if ( + lowered_statement.startswith("select") + or lowered_statement.startswith("with") + or lowered_statement.startswith("show") + or (lowered_statement.startswith("set") and "=" not in lowered_statement) + ): + description = cur.description + if previous_description and previous_description != description: + message = f"""The statements are producing different descriptions: + Current: {repr(description)} + Previous: {repr(previous_description)}""" + raise ValueError(message) + elif not previous_description: + previous_description = description + yield description + try: + # DB API 2 raises when no results are returned + # we're silencing here as some statements in the list + # may be `SET` or DDL + yield from cur + except ProgrammingError: + self.log.debug("get_results returned no records") + +
[docs] def get_results( + self, + sql: str | list[str], + schema: str = "default", + fetch_size: int | None = None, + hive_conf: Iterable | Mapping | None = None, + ) -> dict[str, Any]: + """ + Get results of the provided hql in target schema. + + :param sql: hql to be executed. + :param schema: target schema, default to 'default'. + :param fetch_size: max size of result to fetch. + :param hive_conf: hive_conf to execute alone with the hql. + :return: results of hql execution, dict with data (list of results) and header + """ + results_iter = self._get_results(sql, schema, fetch_size=fetch_size, hive_conf=hive_conf) + header = next(results_iter) + results = {"data": list(results_iter), "header": header} + return results
+ +
[docs] def to_csv( + self, + sql: str, + csv_filepath: str, + schema: str = "default", + delimiter: str = ",", + lineterminator: str = "\r\n", + output_header: bool = True, + fetch_size: int = 1000, + hive_conf: dict[Any, Any] | None = None, + ) -> None: + """ + Execute hql in target schema and write results to a csv file. + + :param sql: hql to be executed. + :param csv_filepath: filepath of csv to write results into. + :param schema: target schema, default to 'default'. + :param delimiter: delimiter of the csv file, default to ','. + :param lineterminator: lineterminator of the csv file. + :param output_header: header of the csv file, default to True. + :param fetch_size: number of result rows to write into the csv file, default to 1000. + :param hive_conf: hive_conf to execute alone with the hql. + + """ + results_iter = self._get_results(sql, schema, fetch_size=fetch_size, hive_conf=hive_conf) + header = next(results_iter) + message = None + + i = 0 + with open(csv_filepath, "wb") as file: + writer = csv.writer(file, delimiter=delimiter, lineterminator=lineterminator, encoding="utf-8") + try: + if output_header: + self.log.debug("Cursor description is %s", header) + writer.writerow([c[0] for c in header]) + + for i, row in enumerate(results_iter, 1): + writer.writerow(row) + if i % fetch_size == 0: + self.log.info("Written %s rows so far.", i) + except ValueError as exception: + message = str(exception) + + if message: + # need to clean up the file first + os.remove(csv_filepath) + raise ValueError(message) + + self.log.info("Done. Loaded a total of %s rows.", i)
+ +
[docs] def get_records( + self, sql: str | list[str], parameters: Iterable | Mapping | None = None, **kwargs + ) -> Any: + """ + Get a set of records from a Hive query. You can optionally pass 'schema' kwarg + which specifies target schema and default to 'default'. + + :param sql: hql to be executed. + :param parameters: optional configuration passed to get_results + :return: result of hive execution + + >>> hh = HiveServer2Hook() + >>> sql = "SELECT * FROM airflow.static_babynames LIMIT 100" + >>> len(hh.get_records(sql)) + 100 + """ + schema = kwargs["schema"] if "schema" in kwargs else "default" + return self.get_results(sql, schema=schema, hive_conf=parameters)["data"]
+ +
[docs] def get_pandas_df( # type: ignore + self, + sql: str, + schema: str = "default", + hive_conf: dict[Any, Any] | None = None, + **kwargs, + ) -> pandas.DataFrame: + """ + Get a pandas dataframe from a Hive query + + :param sql: hql to be executed. + :param schema: target schema, default to 'default'. + :param hive_conf: hive_conf to execute alone with the hql. + :param kwargs: (optional) passed into pandas.DataFrame constructor + :return: result of hive execution + + >>> hh = HiveServer2Hook() + >>> sql = "SELECT * FROM airflow.static_babynames LIMIT 100" + >>> df = hh.get_pandas_df(sql) + >>> len(df.index) + 100 + + :return: pandas.DateFrame + """ + res = self.get_results(sql, schema=schema, hive_conf=hive_conf) + df = pandas.DataFrame(res["data"], columns=[c[0] for c in res["header"]], **kwargs) + return df
+
+ + +
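A minimal sketch of HiveCliHook.load_file as defined above, assuming a local CSV file and hypothetical table, partition and column names; field_dict is an OrderedDict so the column order is preserved:

from collections import OrderedDict

from airflow.providers.apache.hive.hooks.hive import HiveCliHook

hook = HiveCliHook(hive_cli_conn_id="hive_cli_default")
hook.load_file(
    filepath="/tmp/events.csv",  # hypothetical local file
    table="staging.events",      # hypothetical target table
    delimiter=",",
    field_dict=OrderedDict([("id", "BIGINT"), ("name", "STRING")]),
    create=True,
    overwrite=True,
    partition={"ds": "2015-01-01"},
)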
+ +
+
+
+
+
+

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/operators/hive.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/operators/hive.html new file mode 100644 index 00000000000..d141847b63a --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/operators/hive.html @@ -0,0 +1,988 @@ + + + + + + + + + + + + airflow.providers.apache.hive.operators.hive — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +

Source code for airflow.providers.apache.hive.operators.hive

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import os
+import re
+from typing import TYPE_CHECKING, Any, Sequence
+
+from airflow.configuration import conf
+from airflow.models import BaseOperator
+from airflow.providers.apache.hive.hooks.hive import HiveCliHook
+from airflow.utils import operator_helpers
+from airflow.utils.operator_helpers import context_to_airflow_vars
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class HiveOperator(BaseOperator): + """ + Executes hql code or hive script in a specific Hive database. + + :param hql: the hql to be executed. Note that you may also use + a relative path from the dag file of a (template) hive + script. (templated) + :param hive_cli_conn_id: Reference to the + :ref:`Hive CLI connection id <howto/connection:hive_cli>`. (templated) + :param hiveconfs: if defined, these key value pairs will be passed + to hive as ``-hiveconf "key"="value"`` + :param hiveconf_jinja_translate: when True, hiveconf-type templating + ${var} gets translated into jinja-type templating {{ var }} and + ${hiveconf:var} gets translated into jinja-type templating {{ var }}. + Note that you may want to use this along with the + ``DAG(user_defined_macros=myargs)`` parameter. View the DAG + object documentation for more details. + :param script_begin_tag: If defined, the operator will get rid of the + part of the script before the first occurrence of `script_begin_tag` + :param run_as_owner: Run HQL code as a DAG's owner. + :param mapred_queue: queue used by the Hadoop CapacityScheduler. (templated) + :param mapred_queue_priority: priority within CapacityScheduler queue. + Possible settings include: VERY_HIGH, HIGH, NORMAL, LOW, VERY_LOW + :param mapred_job_name: This name will appear in the jobtracker. + This can make monitoring easier. + """ + +
[docs] template_fields: Sequence[str] = ( + "hql", + "schema", + "hive_cli_conn_id", + "mapred_queue", + "hiveconfs", + "mapred_job_name", + "mapred_queue_priority",
+ ) +
[docs] template_ext: Sequence[str] = ( + ".hql", + ".sql",
+ ) +
[docs] template_fields_renderers = {"hql": "hql"}
+
[docs] ui_color = "#f0e4ec"
+ + def __init__( + self, + *, + hql: str, + hive_cli_conn_id: str = "hive_cli_default", + schema: str = "default", + hiveconfs: dict[Any, Any] | None = None, + hiveconf_jinja_translate: bool = False, + script_begin_tag: str | None = None, + run_as_owner: bool = False, + mapred_queue: str | None = None, + mapred_queue_priority: str | None = None, + mapred_job_name: str | None = None, + hive_cli_params: str = "", + **kwargs: Any, + ) -> None: + super().__init__(**kwargs) + self.hql = hql + self.hive_cli_conn_id = hive_cli_conn_id + self.schema = schema + self.hiveconfs = hiveconfs or {} + self.hiveconf_jinja_translate = hiveconf_jinja_translate + self.script_begin_tag = script_begin_tag + self.run_as = None + if run_as_owner: + self.run_as = self.dag.owner + self.mapred_queue = mapred_queue + self.mapred_queue_priority = mapred_queue_priority + self.mapred_job_name = mapred_job_name + self.hive_cli_params = hive_cli_params + + job_name_template = conf.get_mandatory_value( + "hive", + "mapred_job_name_template", + fallback="Airflow HiveOperator task for {hostname}.{dag_id}.{task_id}.{execution_date}", + ) + self.mapred_job_name_template: str = job_name_template + + # assigned lazily - just for consistency we can create the attribute with a + # `None` initial value, later it will be populated by the execute method. + # This also makes `on_kill` implementation consistent since it assumes `self.hook` + # is defined. + self.hook: HiveCliHook | None = None + +
[docs] def get_hook(self) -> HiveCliHook: + """Get Hive cli hook""" + return HiveCliHook( + hive_cli_conn_id=self.hive_cli_conn_id, + run_as=self.run_as, + mapred_queue=self.mapred_queue, + mapred_queue_priority=self.mapred_queue_priority, + mapred_job_name=self.mapred_job_name, + hive_cli_params=self.hive_cli_params,
+ ) + +
[docs] def prepare_template(self) -> None: + if self.hiveconf_jinja_translate: + self.hql = re.sub(r"(\$\{(hiveconf:)?([ a-zA-Z0-9_]*)\})", r"{{ \g<3> }}", self.hql) + if self.script_begin_tag and self.script_begin_tag in self.hql: + self.hql = "\n".join(self.hql.split(self.script_begin_tag)[1:])
+ +
[docs] def execute(self, context: Context) -> None: + self.log.info("Executing: %s", self.hql) + self.hook = self.get_hook() + + # set the mapred_job_name if it's not set with dag, task, execution time info + if not self.mapred_job_name: + ti = context["ti"] + self.hook.mapred_job_name = self.mapred_job_name_template.format( + dag_id=ti.dag_id, + task_id=ti.task_id, + execution_date=ti.execution_date.isoformat(), + hostname=ti.hostname.split(".")[0], + ) + + if self.hiveconf_jinja_translate: + self.hiveconfs = context_to_airflow_vars(context) + else: + self.hiveconfs.update(context_to_airflow_vars(context)) + + self.log.info("Passing HiveConf: %s", self.hiveconfs) + self.hook.run_cli(hql=self.hql, schema=self.schema, hive_conf=self.hiveconfs)
+ +
[docs] def dry_run(self) -> None: + # Reset airflow environment variables to prevent + # existing env vars from impacting behavior. + self.clear_airflow_vars() + + self.hook = self.get_hook() + self.hook.test_hql(hql=self.hql)
+ +
[docs] def on_kill(self) -> None: + if self.hook: + self.hook.kill()
+ +
[docs] def clear_airflow_vars(self) -> None: + """Reset airflow environment variables to prevent existing ones from impacting behavior.""" + blank_env_vars = { + value["env_var_format"]: "" for value in operator_helpers.AIRFLOW_VAR_NAME_FORMAT_MAPPING.values() + } + os.environ.update(blank_env_vars)
+
+ + +
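For reference, a minimal, hypothetical HiveOperator task using the parameters documented above; the HQL, schema, queue and hiveconf values are placeholders:

from airflow.providers.apache.hive.operators.hive import HiveOperator

run_hql = HiveOperator(
    task_id="run_hql",
    hql="INSERT OVERWRITE TABLE reporting.daily SELECT * FROM staging.daily WHERE ds='{{ ds }}'",
    hive_cli_conn_id="hive_cli_default",
    schema="default",
    hiveconfs={"hive.exec.dynamic.partition.mode": "nonstrict"},
    mapred_queue="default",  # optional Hadoop scheduler queue
)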
+ +
+
+
+
+
+

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/operators/hive_stats.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/operators/hive_stats.html new file mode 100644 index 00000000000..272601219c5 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/operators/hive_stats.html @@ -0,0 +1,997 @@ + + + + + + + + + + + + airflow.providers.apache.hive.operators.hive_stats — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +

Source code for airflow.providers.apache.hive.operators.hive_stats

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import json
+import warnings
+from collections import OrderedDict
+from typing import TYPE_CHECKING, Any, Callable, Sequence
+
+from airflow.exceptions import AirflowException
+from airflow.models import BaseOperator
+from airflow.providers.apache.hive.hooks.hive import HiveMetastoreHook
+from airflow.providers.mysql.hooks.mysql import MySqlHook
+from airflow.providers.presto.hooks.presto import PrestoHook
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class HiveStatsCollectionOperator(BaseOperator): + """ + Gathers partition statistics using a dynamically generated Presto + query, inserts the stats into a MySql table with this format. Stats + overwrite themselves if you rerun the same date/partition. :: + + CREATE TABLE hive_stats ( + ds VARCHAR(16), + table_name VARCHAR(500), + metric VARCHAR(200), + value BIGINT + ); + + :param metastore_conn_id: Reference to the + :ref:`Hive Metastore connection id <howto/connection:hive_metastore>`. + :param table: the source table, in the format ``database.table_name``. (templated) + :param partition: the source partition. (templated) + :param extra_exprs: dict of expression to run against the table where + keys are metric names and values are Presto compatible expressions + :param excluded_columns: list of columns to exclude, consider + excluding blobs, large json columns, ... + :param assignment_func: a function that receives a column name and + a type, and returns a dict of metric names and an Presto expressions. + If None is returned, the global defaults are applied. If an + empty dictionary is returned, no stats are computed for that + column. + """ + +
[docs] template_fields: Sequence[str] = ("table", "partition", "ds", "dttm")
+
[docs] ui_color = "#aff7a6"
+ + def __init__( + self, + *, + table: str, + partition: Any, + extra_exprs: dict[str, Any] | None = None, + excluded_columns: list[str] | None = None, + assignment_func: Callable[[str, str], dict[Any, Any] | None] | None = None, + metastore_conn_id: str = "metastore_default", + presto_conn_id: str = "presto_default", + mysql_conn_id: str = "airflow_db", + **kwargs: Any, + ) -> None: + if "col_blacklist" in kwargs: + warnings.warn( + f"col_blacklist kwarg passed to {self.__class__.__name__} " + f"(task_id: {kwargs.get('task_id')}) is deprecated, " + f"please rename it to excluded_columns instead", + category=FutureWarning, + stacklevel=2, + ) + excluded_columns = kwargs.pop("col_blacklist") + super().__init__(**kwargs) + self.table = table + self.partition = partition + self.extra_exprs = extra_exprs or {} + self.excluded_columns: list[str] = excluded_columns or [] + self.metastore_conn_id = metastore_conn_id + self.presto_conn_id = presto_conn_id + self.mysql_conn_id = mysql_conn_id + self.assignment_func = assignment_func + self.ds = "{{ ds }}" + self.dttm = "{{ execution_date.isoformat() }}" + +
[docs] def get_default_exprs(self, col: str, col_type: str) -> dict[Any, Any]: + """Get default expressions""" + if col in self.excluded_columns: + return {} + exp = {(col, "non_null"): f"COUNT({col})"} + if col_type in {"double", "int", "bigint", "float"}: + exp[(col, "sum")] = f"SUM({col})" + exp[(col, "min")] = f"MIN({col})" + exp[(col, "max")] = f"MAX({col})" + exp[(col, "avg")] = f"AVG({col})" + elif col_type == "boolean": + exp[(col, "true")] = f"SUM(CASE WHEN {col} THEN 1 ELSE 0 END)" + exp[(col, "false")] = f"SUM(CASE WHEN NOT {col} THEN 1 ELSE 0 END)" + elif col_type == "string": + exp[(col, "len")] = f"SUM(CAST(LENGTH({col}) AS BIGINT))" + exp[(col, "approx_distinct")] = f"APPROX_DISTINCT({col})" + + return exp
+ +
[docs]    def execute(self, context: Context) -> None:
+        metastore = HiveMetastoreHook(metastore_conn_id=self.metastore_conn_id)
+        table = metastore.get_table(table_name=self.table)
+        field_types = {col.name: col.type for col in table.sd.cols}
+
+        exprs: Any = {("", "count"): "COUNT(*)"}
+        for col, col_type in list(field_types.items()):
+            if self.assignment_func:
+                assign_exprs = self.assignment_func(col, col_type)
+                if assign_exprs is None:
+                    assign_exprs = self.get_default_exprs(col, col_type)
+            else:
+                assign_exprs = self.get_default_exprs(col, col_type)
+            exprs.update(assign_exprs)
+        exprs.update(self.extra_exprs)
+        exprs = OrderedDict(exprs)
+        exprs_str = ",\n ".join(f"{v} AS {k[0]}__{k[1]}" for k, v in exprs.items())
+
+        where_clause_ = [f"{k} = '{v}'" for k, v in self.partition.items()]
+        where_clause = " AND\n ".join(where_clause_)
+        sql = f"SELECT {exprs_str} FROM {self.table} WHERE {where_clause};"
+
+        presto = PrestoHook(presto_conn_id=self.presto_conn_id)
+        self.log.info("Executing SQL check: %s", sql)
+        row = presto.get_first(sql)
+        self.log.info("Record: %s", row)
+        if not row:
+            raise AirflowException("The query returned None")
+
+        part_json = json.dumps(self.partition, sort_keys=True)
+
+        self.log.info("Deleting rows from previous runs if they exist")
+        mysql = MySqlHook(self.mysql_conn_id)
+        sql = f"""
+        SELECT 1 FROM hive_stats
+        WHERE
+            table_name='{self.table}' AND
+            partition_repr='{part_json}' AND
+            dttm='{self.dttm}'
+        LIMIT 1;
+        """
+        if mysql.get_records(sql):
+            sql = f"""
+            DELETE FROM hive_stats
+            WHERE
+                table_name='{self.table}' AND
+                partition_repr='{part_json}' AND
+                dttm='{self.dttm}';
+            """
+            mysql.run(sql)
+
+        self.log.info("Pivoting and loading cells into the Airflow db")
+        rows = [
+            (self.ds, self.dttm, self.table, part_json) + (r[0][0], r[0][1], r[1]) for r in zip(exprs, row)
+        ]
+        mysql.insert_rows(
+            table="hive_stats",
+            rows=rows,
+            target_fields=[
+                "ds",
+                "dttm",
+                "table_name",
+                "partition_repr",
+                "col",
+                "metric",
+                "value",
+            ],
+        )
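A minimal usage sketch of how the pieces above fit together, with a custom ``assignment_func`` (the DAG id, table name, partition value and column names are placeholders; it assumes the default ``metastore_default``, ``presto_default`` and ``airflow_db`` connections exist, and that the class shown above is ``HiveStatsCollectionOperator`` from ``airflow.providers.apache.hive.operators.hive_stats``):

from __future__ import annotations

from datetime import datetime

from airflow import DAG
from airflow.providers.apache.hive.operators.hive_stats import HiveStatsCollectionOperator


def only_approx_distinct(col: str, col_type: str) -> dict | None:
    # Returning None makes the operator fall back to get_default_exprs() for that column.
    if col_type == "string":
        return {(col, "approx_distinct"): f"APPROX_DISTINCT({col})"}
    return None


with DAG(
    dag_id="example_hive_stats",
    start_date=datetime(2022, 1, 1),
    schedule="@daily",
    catchup=False,
) as dag:
    collect_stats = HiveStatsCollectionOperator(
        task_id="collect_stats",
        table="my_db.my_table",
        partition={"ds": "{{ ds }}"},
        excluded_columns=["raw_payload"],
        assignment_func=only_approx_distinct,
    )

The computed metrics are written to the ``hive_stats`` table reachable through ``mysql_conn_id``, one row per ``(col, metric)`` pair.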
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/sensors/hive_partition.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/sensors/hive_partition.html
new file mode 100644
index 00000000000..e23185bb83b
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/sensors/hive_partition.html
@@ -0,0 +1,889 @@

Source code for airflow.providers.apache.hive.sensors.hive_partition

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Sequence
+
+from airflow.providers.apache.hive.hooks.hive import HiveMetastoreHook
+from airflow.sensors.base import BaseSensorOperator
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class HivePartitionSensor(BaseSensorOperator):
+    """
+    Waits for a partition to show up in Hive.
+
+    Note: Because ``partition`` supports general logical operators, it
+    can be inefficient. Consider using NamedHivePartitionSensor instead if
+    you don't need the full flexibility of HivePartitionSensor.
+
+    :param table: The name of the table to wait for, supports the dot
+        notation (my_database.my_table)
+    :param partition: The partition clause to wait for. This is passed as
+        is to the metastore Thrift client ``get_partitions_by_filter`` method,
+        and apparently supports SQL like notation as in ``ds='2015-01-01'
+        AND type='value'`` and comparison operators as in ``"ds>=2015-01-01"``
+    :param metastore_conn_id: reference to the
+        :ref:`metastore thrift service connection id <howto/connection:hive_metastore>`
+    """
+
[docs]    template_fields: Sequence[str] = (
+        "schema",
+        "table",
+        "partition",
+    )
[docs]    ui_color = "#C5CAE9"
+
+    def __init__(
+        self,
+        *,
+        table: str,
+        partition: str | None = "ds='{{ ds }}'",
+        metastore_conn_id: str = "metastore_default",
+        schema: str = "default",
+        poke_interval: int = 60 * 3,
+        **kwargs: Any,
+    ):
+        super().__init__(poke_interval=poke_interval, **kwargs)
+        if not partition:
+            partition = "ds='{{ ds }}'"
+        self.metastore_conn_id = metastore_conn_id
+        self.table = table
+        self.partition = partition
+        self.schema = schema
+
[docs]    def poke(self, context: Context) -> bool:
+        if "." in self.table:
+            self.schema, self.table = self.table.split(".")
+        self.log.info("Poking for table %s.%s, partition %s", self.schema, self.table, self.partition)
+        if not hasattr(self, "hook"):
+            hook = HiveMetastoreHook(metastore_conn_id=self.metastore_conn_id)
+        return hook.check_for_partition(self.schema, self.table, self.partition)
+
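A minimal usage sketch (the DAG id, table and partition clause are placeholders; it assumes a ``metastore_default`` connection pointing at the Hive metastore):

from datetime import datetime

from airflow import DAG
from airflow.providers.apache.hive.sensors.hive_partition import HivePartitionSensor

with DAG(
    dag_id="example_hive_partition_sensor",
    start_date=datetime(2022, 1, 1),
    schedule="@daily",
    catchup=False,
) as dag:
    # Succeeds once at least one partition matches the filter for the run's ds.
    wait_for_partition = HivePartitionSensor(
        task_id="wait_for_partition",
        table="my_db.my_table",
        partition="ds='{{ ds }}' AND type='value'",
        metastore_conn_id="metastore_default",
        poke_interval=300,
    )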
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/sensors/metastore_partition.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/sensors/metastore_partition.html
new file mode 100644
index 00000000000..c7d535aea7f
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/sensors/metastore_partition.html
@@ -0,0 +1,898 @@

Source code for airflow.providers.apache.hive.sensors.metastore_partition

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Sequence
+
+from airflow.providers.common.sql.sensors.sql import SqlSensor
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class MetastorePartitionSensor(SqlSensor):
+    """
+    An alternative to the HivePartitionSensor that talk directly to the
+    MySQL db. This was created as a result of observing sub optimal
+    queries generated by the Metastore thrift service when hitting
+    subpartitioned tables. The Thrift service's queries were written in a
+    way that wouldn't leverage the indexes.
+
+    :param schema: the schema
+    :param table: the table
+    :param partition_name: the partition name, as defined in the PARTITIONS
+        table of the Metastore. Order of the fields does matter.
+        Examples: ``ds=2016-01-01`` or
+        ``ds=2016-01-01/sub=foo`` for a sub partitioned table
+    :param mysql_conn_id: a reference to the MySQL conn_id for the metastore
+    """
+
[docs]    template_fields: Sequence[str] = ("partition_name", "table", "schema")
+
[docs]    ui_color = "#8da7be"
+
+    def __init__(
+        self,
+        *,
+        table: str,
+        partition_name: str,
+        schema: str = "default",
+        mysql_conn_id: str = "metastore_mysql",
+        **kwargs: Any,
+    ):
+
+        self.partition_name = partition_name
+        self.table = table
+        self.schema = schema
+        self.first_poke = True
+        self.conn_id = mysql_conn_id
+        # TODO(aoen): We shouldn't be using SqlSensor here but MetastorePartitionSensor.
+        # The problem is the way apply_defaults works isn't compatible with inheritance.
+        # The inheritance model needs to be reworked in order to support overriding args/
+        # kwargs with arguments here, then 'conn_id' and 'sql' can be passed into the
+        # constructor below and apply_defaults will no longer throw an exception.
+        super().__init__(**kwargs)
+
[docs]    def poke(self, context: Context) -> Any:
+        if self.first_poke:
+            self.first_poke = False
+            if "." in self.table:
+                self.schema, self.table = self.table.split(".")
+            self.sql = """
+            SELECT 'X'
+            FROM PARTITIONS A0
+            LEFT OUTER JOIN TBLS B0 ON A0.TBL_ID = B0.TBL_ID
+            LEFT OUTER JOIN DBS C0 ON B0.DB_ID = C0.DB_ID
+            WHERE
+                B0.TBL_NAME = '{self.table}' AND
+                C0.NAME = '{self.schema}' AND
+                A0.PART_NAME = '{self.partition_name}';
+            """.format(
+                self=self
+            )
+        return super().poke(context)
+
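A minimal usage sketch (schema, table and partition name are placeholders; it assumes a ``metastore_mysql`` connection pointing at the metastore backend database):

from datetime import datetime

from airflow import DAG
from airflow.providers.apache.hive.sensors.metastore_partition import MetastorePartitionSensor

with DAG(
    dag_id="example_metastore_partition_sensor",
    start_date=datetime(2022, 1, 1),
    schedule="@daily",
    catchup=False,
) as dag:
    # Polls the PARTITIONS table of the metastore database directly.
    wait_for_partition = MetastorePartitionSensor(
        task_id="wait_for_partition",
        schema="my_db",
        table="my_table",
        partition_name="ds={{ ds }}",
        mysql_conn_id="metastore_mysql",
    )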
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/sensors/named_hive_partition.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/sensors/named_hive_partition.html
new file mode 100644
index 00000000000..e5a41575fd4
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/sensors/named_hive_partition.html
@@ -0,0 +1,918 @@

Source code for airflow.providers.apache.hive.sensors.named_hive_partition

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Sequence
+
+from airflow.sensors.base import BaseSensorOperator
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class NamedHivePartitionSensor(BaseSensorOperator):
+    """
+    Waits for a set of partitions to show up in Hive.
+
+    :param partition_names: List of fully qualified names of the
+        partitions to wait for. A fully qualified name is of the
+        form ``schema.table/pk1=pv1/pk2=pv2``, for example,
+        default.users/ds=2016-01-01. This is passed as is to the metastore
+        Thrift client ``get_partitions_by_name`` method. Note that
+        you cannot use logical or comparison operators as in
+        HivePartitionSensor.
+    :param metastore_conn_id: Reference to the
+        :ref:`metastore thrift service connection id <howto/connection:hive_metastore>`.
+    """
+
[docs]    template_fields: Sequence[str] = ("partition_names",)
+
[docs]    ui_color = "#8d99ae"
+
+    def __init__(
+        self,
+        *,
+        partition_names: list[str],
+        metastore_conn_id: str = "metastore_default",
+        poke_interval: int = 60 * 3,
+        hook: Any = None,
+        **kwargs: Any,
+    ):
+        super().__init__(poke_interval=poke_interval, **kwargs)
+
+        self.next_index_to_poke = 0
+        if isinstance(partition_names, str):
+            raise TypeError("partition_names must be an array of strings")
+
+        self.metastore_conn_id = metastore_conn_id
+        self.partition_names = partition_names
+        self.hook = hook
+        if self.hook and metastore_conn_id != "metastore_default":
+            self.log.warning(
+                "A hook was passed but a non default metastore_conn_id=%s was used", metastore_conn_id
+            )
+
+    @staticmethod
[docs]    def parse_partition_name(partition: str) -> tuple[Any, ...]:
+        """Get schema, table, and partition info."""
+        first_split = partition.split(".", 1)
+        if len(first_split) == 1:
+            schema = "default"
+            table_partition = max(first_split)  # poor man first
+        else:
+            schema, table_partition = first_split
+        second_split = table_partition.split("/", 1)
+        if len(second_split) == 1:
+            raise ValueError(f"Could not parse {partition}into table, partition")
+        else:
+            table, partition = second_split
+            return schema, table, partition
+
[docs]    def poke_partition(self, partition: str) -> Any:
+        """Check for a named partition."""
+        if not self.hook:
+            from airflow.providers.apache.hive.hooks.hive import HiveMetastoreHook
+
+            self.hook = HiveMetastoreHook(metastore_conn_id=self.metastore_conn_id)
+
+        schema, table, partition = self.parse_partition_name(partition)
+
+        self.log.info("Poking for %s.%s/%s", schema, table, partition)
+        return self.hook.check_for_named_partition(schema, table, partition)
+
[docs]    def poke(self, context: Context) -> bool:
+
+        number_of_partitions = len(self.partition_names)
+        poke_index_start = self.next_index_to_poke
+        for i in range(number_of_partitions):
+            self.next_index_to_poke = (poke_index_start + i) % number_of_partitions
+            if not self.poke_partition(self.partition_names[self.next_index_to_poke]):
+                return False
+
+        self.next_index_to_poke = 0
+        return True
+
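A minimal usage sketch (the partition names are placeholders; it assumes a ``metastore_default`` connection):

from datetime import datetime

from airflow import DAG
from airflow.providers.apache.hive.sensors.named_hive_partition import NamedHivePartitionSensor

with DAG(
    dag_id="example_named_hive_partition_sensor",
    start_date=datetime(2022, 1, 1),
    schedule="@daily",
    catchup=False,
) as dag:
    # Succeeds only once every named partition exists.
    wait_for_partitions = NamedHivePartitionSensor(
        task_id="wait_for_partitions",
        partition_names=[
            "my_db.my_table/ds={{ ds }}",
            "my_db.other_table/ds={{ ds }}/sub=foo",
        ],
        metastore_conn_id="metastore_default",
    )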
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/hive_to_mysql.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/hive_to_mysql.html
new file mode 100644
index 00000000000..ec684857309
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/hive_to_mysql.html
@@ -0,0 +1,938 @@

Source code for airflow.providers.apache.hive.transfers.hive_to_mysql

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""This module contains an operator to move data from Hive to MySQL."""
+from __future__ import annotations
+
+from tempfile import NamedTemporaryFile
+from typing import TYPE_CHECKING, Sequence
+
+from airflow.models import BaseOperator
+from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
+from airflow.providers.mysql.hooks.mysql import MySqlHook
+from airflow.utils.operator_helpers import context_to_airflow_vars
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class HiveToMySqlOperator(BaseOperator): + """ + Moves data from Hive to MySQL, note that for now the data is loaded + into memory before being pushed to MySQL, so this operator should + be used for smallish amount of data. + + :param sql: SQL query to execute against Hive server. (templated) + :param mysql_table: target MySQL table, use dot notation to target a + specific database. (templated) + :param mysql_conn_id: source mysql connection + :param hiveserver2_conn_id: Reference to the + :ref:`Hive Server2 thrift service connection id <howto/connection:hiveserver2>`. + :param mysql_preoperator: sql statement to run against mysql prior to + import, typically use to truncate of delete in place + of the data coming in, allowing the task to be idempotent (running + the task twice won't double load data). (templated) + :param mysql_postoperator: sql statement to run against mysql after the + import, typically used to move data from staging to + production and issue cleanup commands. (templated) + :param bulk_load: flag to use bulk_load option. This loads mysql directly + from a tab-delimited text file using the LOAD DATA LOCAL INFILE command. + This option requires an extra connection parameter for the + destination MySQL connection: {'local_infile': true}. + :param hive_conf: + """ + +
[docs] template_fields: Sequence[str] = ("sql", "mysql_table", "mysql_preoperator", "mysql_postoperator")
+
[docs] template_ext: Sequence[str] = (".sql",)
+
[docs] template_fields_renderers = { + "sql": "hql", + "mysql_preoperator": "mysql", + "mysql_postoperator": "mysql",
+ } +
[docs] ui_color = "#a0e08c"
+ + def __init__( + self, + *, + sql: str, + mysql_table: str, + hiveserver2_conn_id: str = "hiveserver2_default", + mysql_conn_id: str = "mysql_default", + mysql_preoperator: str | None = None, + mysql_postoperator: str | None = None, + bulk_load: bool = False, + hive_conf: dict | None = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.sql = sql + self.mysql_table = mysql_table + self.mysql_conn_id = mysql_conn_id + self.mysql_preoperator = mysql_preoperator + self.mysql_postoperator = mysql_postoperator + self.hiveserver2_conn_id = hiveserver2_conn_id + self.bulk_load = bulk_load + self.hive_conf = hive_conf + +
[docs] def execute(self, context: Context): + hive = HiveServer2Hook(hiveserver2_conn_id=self.hiveserver2_conn_id) + + self.log.info("Extracting data from Hive: %s", self.sql) + hive_conf = context_to_airflow_vars(context) + if self.hive_conf: + hive_conf.update(self.hive_conf) + if self.bulk_load: + with NamedTemporaryFile() as tmp_file: + hive.to_csv( + self.sql, + tmp_file.name, + delimiter="\t", + lineterminator="\n", + output_header=False, + hive_conf=hive_conf, + ) + mysql = self._call_preoperator() + mysql.bulk_load(table=self.mysql_table, tmp_file=tmp_file.name) + else: + hive_results = hive.get_records(self.sql, parameters=hive_conf) + mysql = self._call_preoperator() + mysql.insert_rows(table=self.mysql_table, rows=hive_results) + + if self.mysql_postoperator: + self.log.info("Running MySQL postoperator") + mysql.run(self.mysql_postoperator) + + self.log.info("Done.")
+ + def _call_preoperator(self): + mysql = MySqlHook(mysql_conn_id=self.mysql_conn_id) + if self.mysql_preoperator: + self.log.info("Running MySQL preoperator") + mysql.run(self.mysql_preoperator) + self.log.info("Inserting rows into MySQL") + return mysql
+
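A minimal usage sketch (the query, target table and pre-statement are placeholders; it assumes ``hiveserver2_default`` and ``mysql_default`` connections and a result set small enough to fit in memory):

from datetime import datetime

from airflow import DAG
from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator

with DAG(
    dag_id="example_hive_to_mysql",
    start_date=datetime(2022, 1, 1),
    schedule="@daily",
    catchup=False,
) as dag:
    hive_to_mysql = HiveToMySqlOperator(
        task_id="hive_to_mysql",
        sql="SELECT name, SUM(amount) FROM my_db.sales WHERE ds = '{{ ds }}' GROUP BY name",
        mysql_table="reporting.daily_sales",
        # Truncate first so re-running the task does not double-load the data.
        mysql_preoperator="TRUNCATE TABLE reporting.daily_sales",
        hiveserver2_conn_id="hiveserver2_default",
        mysql_conn_id="mysql_default",
    )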
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/hive_to_samba.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/hive_to_samba.html
new file mode 100644
index 00000000000..3c40cf1ee33
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/hive_to_samba.html
@@ -0,0 +1,885 @@

Source code for airflow.providers.apache.hive.transfers.hive_to_samba

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""This module contains an operator to move data from Hive to Samba."""
+from __future__ import annotations
+
+from tempfile import NamedTemporaryFile
+from typing import TYPE_CHECKING, Sequence
+
+from airflow.models import BaseOperator
+from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
+from airflow.providers.samba.hooks.samba import SambaHook
+from airflow.utils.operator_helpers import context_to_airflow_vars
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class HiveToSambaOperator(BaseOperator): + """ + Executes hql code in a specific Hive database and loads the + results of the query as a csv to a Samba location. + + :param hql: the hql to be exported. (templated) + :param destination_filepath: the file path to where the file will be pushed onto samba + :param samba_conn_id: reference to the samba destination + :param hiveserver2_conn_id: Reference to the + :ref: `Hive Server2 thrift service connection id <howto/connection:hiveserver2>`. + """ + +
[docs] template_fields: Sequence[str] = ("hql", "destination_filepath")
+
[docs] template_ext: Sequence[str] = ( + ".hql", + ".sql",
+ ) +
[docs] template_fields_renderers = {"hql": "hql"}
+ + def __init__( + self, + *, + hql: str, + destination_filepath: str, + samba_conn_id: str = "samba_default", + hiveserver2_conn_id: str = "hiveserver2_default", + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.hiveserver2_conn_id = hiveserver2_conn_id + self.samba_conn_id = samba_conn_id + self.destination_filepath = destination_filepath + self.hql = hql.strip().rstrip(";") + +
[docs] def execute(self, context: Context): + with NamedTemporaryFile() as tmp_file: + self.log.info("Fetching file from Hive") + hive = HiveServer2Hook(hiveserver2_conn_id=self.hiveserver2_conn_id) + hive.to_csv(self.hql, csv_filepath=tmp_file.name, hive_conf=context_to_airflow_vars(context)) + self.log.info("Pushing to samba") + samba = SambaHook(samba_conn_id=self.samba_conn_id) + samba.push_from_local(self.destination_filepath, tmp_file.name)
+
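A minimal usage sketch (the hql and destination path are placeholders; it assumes ``hiveserver2_default`` and ``samba_default`` connections):

from datetime import datetime

from airflow import DAG
from airflow.providers.apache.hive.transfers.hive_to_samba import HiveToSambaOperator

with DAG(
    dag_id="example_hive_to_samba",
    start_date=datetime(2022, 1, 1),
    schedule="@daily",
    catchup=False,
) as dag:
    # The query result is written to a temporary csv and pushed to the Samba share.
    export_report = HiveToSambaOperator(
        task_id="export_report",
        hql="SELECT * FROM my_db.daily_report WHERE ds = '{{ ds }}'",
        destination_filepath="reports/daily_report_{{ ds }}.csv",
        samba_conn_id="samba_default",
        hiveserver2_conn_id="hiveserver2_default",
    )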
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/mssql_to_hive.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/mssql_to_hive.html
new file mode 100644
index 00000000000..35f58d80589
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/mssql_to_hive.html
@@ -0,0 +1,944 @@

Source code for airflow.providers.apache.hive.transfers.mssql_to_hive

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""This module contains an operator to move data from MSSQL to Hive."""
+from __future__ import annotations
+
+from collections import OrderedDict
+from tempfile import NamedTemporaryFile
+from typing import TYPE_CHECKING, Sequence
+
+import pymssql
+import unicodecsv as csv
+
+from airflow.models import BaseOperator
+from airflow.providers.apache.hive.hooks.hive import HiveCliHook
+from airflow.providers.microsoft.mssql.hooks.mssql import MsSqlHook
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class MsSqlToHiveOperator(BaseOperator): + """ + Moves data from Microsoft SQL Server to Hive. The operator runs + your query against Microsoft SQL Server, stores the file locally + before loading it into a Hive table. If the ``create`` or + ``recreate`` arguments are set to ``True``, + a ``CREATE TABLE`` and ``DROP TABLE`` statements are generated. + Hive data types are inferred from the cursor's metadata. + Note that the table generated in Hive uses ``STORED AS textfile`` + which isn't the most efficient serialization format. If a + large amount of data is loaded and/or if the table gets + queried considerably, you may want to use this operator only to + stage the data into a temporary table before loading it into its + final destination using a ``HiveOperator``. + + :param sql: SQL query to execute against the Microsoft SQL Server + database. (templated) + :param hive_table: target Hive table, use dot notation to target a specific + database. (templated) + :param create: whether to create the table if it doesn't exist + :param recreate: whether to drop and recreate the table at every execution + :param partition: target partition as a dict of partition columns and + values. (templated) + :param delimiter: field delimiter in the file + :param mssql_conn_id: source Microsoft SQL Server connection + :param hive_cli_conn_id: Reference to the + :ref:`Hive CLI connection id <howto/connection:hive_cli>`. + :param tblproperties: TBLPROPERTIES of the hive table being created + """ + +
[docs] template_fields: Sequence[str] = ("sql", "partition", "hive_table")
+
[docs] template_ext: Sequence[str] = (".sql",)
+
[docs] template_fields_renderers = {"sql": "tsql"}
+
[docs] ui_color = "#a0e08c"
+ + def __init__( + self, + *, + sql: str, + hive_table: str, + create: bool = True, + recreate: bool = False, + partition: dict | None = None, + delimiter: str = chr(1), + mssql_conn_id: str = "mssql_default", + hive_cli_conn_id: str = "hive_cli_default", + tblproperties: dict | None = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.sql = sql + self.hive_table = hive_table + self.partition = partition + self.create = create + self.recreate = recreate + self.delimiter = delimiter + self.mssql_conn_id = mssql_conn_id + self.hive_cli_conn_id = hive_cli_conn_id + self.partition = partition or {} + self.tblproperties = tblproperties + + @classmethod +
[docs] def type_map(cls, mssql_type: int) -> str: + """Maps MsSQL type to Hive type.""" + map_dict = { + pymssql.BINARY.value: "INT", + pymssql.DECIMAL.value: "FLOAT", + pymssql.NUMBER.value: "INT", + } + return map_dict.get(mssql_type, "STRING")
+ +
[docs] def execute(self, context: Context): + mssql = MsSqlHook(mssql_conn_id=self.mssql_conn_id) + self.log.info("Dumping Microsoft SQL Server query results to local file") + with mssql.get_conn() as conn: + with conn.cursor() as cursor: + cursor.execute(self.sql) + with NamedTemporaryFile("w") as tmp_file: + csv_writer = csv.writer(tmp_file, delimiter=self.delimiter, encoding="utf-8") + field_dict = OrderedDict() + for col_count, field in enumerate(cursor.description, start=1): + col_position = f"Column{col_count}" + field_dict[col_position if field[0] == "" else field[0]] = self.type_map(field[1]) + csv_writer.writerows(cursor) + tmp_file.flush() + + hive = HiveCliHook(hive_cli_conn_id=self.hive_cli_conn_id) + self.log.info("Loading file into Hive") + hive.load_file( + tmp_file.name, + self.hive_table, + field_dict=field_dict, + create=self.create, + partition=self.partition, + delimiter=self.delimiter, + recreate=self.recreate, + tblproperties=self.tblproperties,
+ ) +
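A minimal usage sketch (the query, target table and partition are placeholders; it assumes ``mssql_default`` and ``hive_cli_default`` connections):

from datetime import datetime

from airflow import DAG
from airflow.providers.apache.hive.transfers.mssql_to_hive import MsSqlToHiveOperator

with DAG(
    dag_id="example_mssql_to_hive",
    start_date=datetime(2022, 1, 1),
    schedule="@daily",
    catchup=False,
) as dag:
    # Dumps the query result to a local delimited file and loads it into a Hive staging table.
    mssql_to_hive = MsSqlToHiveOperator(
        task_id="mssql_to_hive",
        sql="SELECT order_id, customer_id, amount FROM dbo.orders",
        hive_table="staging.orders",
        partition={"ds": "{{ ds }}"},
        mssql_conn_id="mssql_default",
        hive_cli_conn_id="hive_cli_default",
    )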
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/mysql_to_hive.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/mysql_to_hive.html
new file mode 100644
index 00000000000..9b20ff5dad3
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/mysql_to_hive.html
@@ -0,0 +1,973 @@

Source code for airflow.providers.apache.hive.transfers.mysql_to_hive

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""This module contains an operator to move data from MySQL to Hive."""
+from __future__ import annotations
+
+from collections import OrderedDict
+from tempfile import NamedTemporaryFile
+from typing import TYPE_CHECKING, Sequence
+
+import MySQLdb
+import unicodecsv as csv
+
+from airflow.models import BaseOperator
+from airflow.providers.apache.hive.hooks.hive import HiveCliHook
+from airflow.providers.mysql.hooks.mysql import MySqlHook
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class MySqlToHiveOperator(BaseOperator): + """ + Moves data from MySql to Hive. The operator runs your query against + MySQL, stores the file locally before loading it into a Hive table. + If the ``create`` or ``recreate`` arguments are set to ``True``, + a ``CREATE TABLE`` and ``DROP TABLE`` statements are generated. + Hive data types are inferred from the cursor's metadata. Note that the + table generated in Hive uses ``STORED AS textfile`` + which isn't the most efficient serialization format. If a + large amount of data is loaded and/or if the table gets + queried considerably, you may want to use this operator only to + stage the data into a temporary table before loading it into its + final destination using a ``HiveOperator``. + + :param sql: SQL query to execute against the MySQL database. (templated) + :param hive_table: target Hive table, use dot notation to target a + specific database. (templated) + :param create: whether to create the table if it doesn't exist + :param recreate: whether to drop and recreate the table at every + execution + :param partition: target partition as a dict of partition columns + and values. (templated) + :param delimiter: field delimiter in the file + :param quoting: controls when quotes should be generated by csv writer, + It can take on any of the csv.QUOTE_* constants. + :param quotechar: one-character string used to quote fields + containing special characters. + :param escapechar: one-character string used by csv writer to escape + the delimiter or quotechar. + :param mysql_conn_id: source mysql connection + :param hive_cli_conn_id: Reference to the + :ref:`Hive CLI connection id <howto/connection:hive_cli>`. + :param tblproperties: TBLPROPERTIES of the hive table being created + """ + +
[docs] template_fields: Sequence[str] = ("sql", "partition", "hive_table")
+
[docs] template_ext: Sequence[str] = (".sql",)
+
[docs] template_fields_renderers = {"sql": "mysql"}
+
[docs] ui_color = "#a0e08c"
+ + def __init__( + self, + *, + sql: str, + hive_table: str, + create: bool = True, + recreate: bool = False, + partition: dict | None = None, + delimiter: str = chr(1), + quoting: str | None = None, + quotechar: str = '"', + escapechar: str | None = None, + mysql_conn_id: str = "mysql_default", + hive_cli_conn_id: str = "hive_cli_default", + tblproperties: dict | None = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.sql = sql + self.hive_table = hive_table + self.partition = partition + self.create = create + self.recreate = recreate + self.delimiter = str(delimiter) + self.quoting = quoting or csv.QUOTE_MINIMAL + self.quotechar = quotechar + self.escapechar = escapechar + self.mysql_conn_id = mysql_conn_id + self.hive_cli_conn_id = hive_cli_conn_id + self.partition = partition or {} + self.tblproperties = tblproperties + + @classmethod +
[docs] def type_map(cls, mysql_type: int) -> str: + """Maps MySQL type to Hive type.""" + types = MySQLdb.constants.FIELD_TYPE + type_map = { + types.BIT: "INT", + types.DECIMAL: "DOUBLE", + types.NEWDECIMAL: "DOUBLE", + types.DOUBLE: "DOUBLE", + types.FLOAT: "DOUBLE", + types.INT24: "INT", + types.LONG: "BIGINT", + types.LONGLONG: "DECIMAL(38,0)", + types.SHORT: "INT", + types.TINY: "SMALLINT", + types.YEAR: "INT", + types.TIMESTAMP: "TIMESTAMP", + } + return type_map.get(mysql_type, "STRING")
+ +
[docs] def execute(self, context: Context): + hive = HiveCliHook(hive_cli_conn_id=self.hive_cli_conn_id) + mysql = MySqlHook(mysql_conn_id=self.mysql_conn_id) + + self.log.info("Dumping MySQL query results to local file") + conn = mysql.get_conn() + cursor = conn.cursor() + cursor.execute(self.sql) + with NamedTemporaryFile("wb") as f: + csv_writer = csv.writer( + f, + delimiter=self.delimiter, + quoting=self.quoting, + quotechar=self.quotechar, + escapechar=self.escapechar, + encoding="utf-8", + ) + field_dict = OrderedDict() + for field in cursor.description: + field_dict[field[0]] = self.type_map(field[1]) + csv_writer.writerows(cursor) + f.flush() + cursor.close() + conn.close() + self.log.info("Loading file into Hive") + hive.load_file( + f.name, + self.hive_table, + field_dict=field_dict, + create=self.create, + partition=self.partition, + delimiter=self.delimiter, + recreate=self.recreate, + tblproperties=self.tblproperties,
+ ) +
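A minimal usage sketch (the query, target table and partition are placeholders; it assumes ``mysql_default`` and ``hive_cli_default`` connections):

from datetime import datetime

from airflow import DAG
from airflow.providers.apache.hive.transfers.mysql_to_hive import MySqlToHiveOperator

with DAG(
    dag_id="example_mysql_to_hive",
    start_date=datetime(2022, 1, 1),
    schedule="@daily",
    catchup=False,
) as dag:
    # Stages a MySQL extract into a partitioned Hive table.
    mysql_to_hive = MySqlToHiveOperator(
        task_id="mysql_to_hive",
        sql="SELECT id, email, created_at FROM users",
        hive_table="staging.users",
        partition={"ds": "{{ ds }}"},
        mysql_conn_id="mysql_default",
        hive_cli_conn_id="hive_cli_default",
    )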
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/s3_to_hive.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/s3_to_hive.html
new file mode 100644
index 00000000000..8bb08c9dc5e
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/s3_to_hive.html
@@ -0,0 +1,1078 @@

Source code for airflow.providers.apache.hive.transfers.s3_to_hive

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""This module contains an operator to move data from an S3 bucket to Hive."""
+from __future__ import annotations
+
+import bz2
+import gzip
+import os
+import tempfile
+from tempfile import NamedTemporaryFile, TemporaryDirectory
+from typing import TYPE_CHECKING, Any, Sequence
+
+from airflow.exceptions import AirflowException
+from airflow.models import BaseOperator
+from airflow.providers.amazon.aws.hooks.s3 import S3Hook
+from airflow.providers.apache.hive.hooks.hive import HiveCliHook
+from airflow.utils.compression import uncompress_file
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class S3ToHiveOperator(BaseOperator): + """ + Moves data from S3 to Hive. The operator downloads a file from S3, + stores the file locally before loading it into a Hive table. + If the ``create`` or ``recreate`` arguments are set to ``True``, + a ``CREATE TABLE`` and ``DROP TABLE`` statements are generated. + Hive data types are inferred from the cursor's metadata from. + + Note that the table generated in Hive uses ``STORED AS textfile`` + which isn't the most efficient serialization format. If a + large amount of data is loaded and/or if the tables gets + queried considerably, you may want to use this operator only to + stage the data into a temporary table before loading it into its + final destination using a ``HiveOperator``. + + :param s3_key: The key to be retrieved from S3. (templated) + :param field_dict: A dictionary of the fields name in the file + as keys and their Hive types as values + :param hive_table: target Hive table, use dot notation to target a + specific database. (templated) + :param delimiter: field delimiter in the file + :param create: whether to create the table if it doesn't exist + :param recreate: whether to drop and recreate the table at every + execution + :param partition: target partition as a dict of partition columns + and values. (templated) + :param headers: whether the file contains column names on the first + line + :param check_headers: whether the column names on the first line should be + checked against the keys of field_dict + :param wildcard_match: whether the s3_key should be interpreted as a Unix + wildcard pattern + :param aws_conn_id: source s3 connection + :param verify: Whether or not to verify SSL certificates for S3 connection. + By default SSL certificates are verified. + You can provide the following values: + + - ``False``: do not validate SSL certificates. SSL will still be used + (unless use_ssl is False), but SSL certificates will not be + verified. + - ``path/to/cert/bundle.pem``: A filename of the CA cert bundle to uses. + You can specify this argument if you want to use a different + CA cert bundle than the one used by botocore. + :param hive_cli_conn_id: Reference to the + :ref:`Hive CLI connection id <howto/connection:hive_cli>`. + :param input_compressed: Boolean to determine if file decompression is + required to process headers + :param tblproperties: TBLPROPERTIES of the hive table being created + :param select_expression: S3 Select expression + """ + +
[docs] template_fields: Sequence[str] = ("s3_key", "partition", "hive_table")
+
[docs] template_ext: Sequence[str] = ()
+
[docs] ui_color = "#a0e08c"
+ + def __init__( + self, + *, + s3_key: str, + field_dict: dict, + hive_table: str, + delimiter: str = ",", + create: bool = True, + recreate: bool = False, + partition: dict | None = None, + headers: bool = False, + check_headers: bool = False, + wildcard_match: bool = False, + aws_conn_id: str = "aws_default", + verify: bool | str | None = None, + hive_cli_conn_id: str = "hive_cli_default", + input_compressed: bool = False, + tblproperties: dict | None = None, + select_expression: str | None = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.s3_key = s3_key + self.field_dict = field_dict + self.hive_table = hive_table + self.delimiter = delimiter + self.create = create + self.recreate = recreate + self.partition = partition + self.headers = headers + self.check_headers = check_headers + self.wildcard_match = wildcard_match + self.hive_cli_conn_id = hive_cli_conn_id + self.aws_conn_id = aws_conn_id + self.verify = verify + self.input_compressed = input_compressed + self.tblproperties = tblproperties + self.select_expression = select_expression + + if self.check_headers and not (self.field_dict is not None and self.headers): + raise AirflowException("To check_headers provide field_dict and headers") + +
[docs] def execute(self, context: Context): + # Downloading file from S3 + s3_hook = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify) + hive_hook = HiveCliHook(hive_cli_conn_id=self.hive_cli_conn_id) + self.log.info("Downloading S3 file") + + if self.wildcard_match: + if not s3_hook.check_for_wildcard_key(self.s3_key): + raise AirflowException(f"No key matches {self.s3_key}") + s3_key_object = s3_hook.get_wildcard_key(self.s3_key) + elif s3_hook.check_for_key(self.s3_key): + s3_key_object = s3_hook.get_key(self.s3_key) + + else: + raise AirflowException(f"The key {self.s3_key} does not exists") + _, file_ext = os.path.splitext(s3_key_object.key) + if self.select_expression and self.input_compressed and file_ext.lower() != ".gz": + raise AirflowException("GZIP is the only compression format Amazon S3 Select supports") + + with TemporaryDirectory(prefix="tmps32hive_") as tmp_dir, NamedTemporaryFile( + mode="wb", dir=tmp_dir, suffix=file_ext + ) as f: + self.log.info("Dumping S3 key %s contents to local file %s", s3_key_object.key, f.name) + if self.select_expression: + option = {} + if self.headers: + option["FileHeaderInfo"] = "USE" + if self.delimiter: + option["FieldDelimiter"] = self.delimiter + + input_serialization: dict[str, Any] = {"CSV": option} + if self.input_compressed: + input_serialization["CompressionType"] = "GZIP" + + content = s3_hook.select_key( + bucket_name=s3_key_object.bucket_name, + key=s3_key_object.key, + expression=self.select_expression, + input_serialization=input_serialization, + ) + f.write(content.encode("utf-8")) + else: + s3_key_object.download_fileobj(f) + f.flush() + + if self.select_expression or not self.headers: + self.log.info("Loading file %s into Hive", f.name) + hive_hook.load_file( + f.name, + self.hive_table, + field_dict=self.field_dict, + create=self.create, + partition=self.partition, + delimiter=self.delimiter, + recreate=self.recreate, + tblproperties=self.tblproperties, + ) + else: + # Decompressing file + if self.input_compressed: + self.log.info("Uncompressing file %s", f.name) + fn_uncompressed = uncompress_file(f.name, file_ext, tmp_dir) + self.log.info("Uncompressed to %s", fn_uncompressed) + # uncompressed file available now so deleting + # compressed file to save disk space + f.close() + else: + fn_uncompressed = f.name + + # Testing if header matches field_dict + if self.check_headers: + self.log.info("Matching file header against field_dict") + header_list = self._get_top_row_as_list(fn_uncompressed) + if not self._match_headers(header_list): + raise AirflowException("Header check failed") + + # Deleting top header row + self.log.info("Removing header from file %s", fn_uncompressed) + headless_file = self._delete_top_row_and_compress(fn_uncompressed, file_ext, tmp_dir) + self.log.info("Headless file %s", headless_file) + self.log.info("Loading file %s into Hive", headless_file) + hive_hook.load_file( + headless_file, + self.hive_table, + field_dict=self.field_dict, + create=self.create, + partition=self.partition, + delimiter=self.delimiter, + recreate=self.recreate, + tblproperties=self.tblproperties,
+ ) + + def _get_top_row_as_list(self, file_name): + with open(file_name) as file: + header_line = file.readline().strip() + return header_line.split(self.delimiter) + + def _match_headers(self, header_list): + if not header_list: + raise AirflowException("Unable to retrieve header row from file") + field_names = self.field_dict.keys() + if len(field_names) != len(header_list): + self.log.warning( + "Headers count mismatch File headers:\n %s\nField names: \n %s\n", header_list, field_names + ) + return False + test_field_match = [h1.lower() == h2.lower() for h1, h2 in zip(header_list, field_names)] + if not all(test_field_match): + self.log.warning( + "Headers do not match field names File headers:\n %s\nField names: \n %s\n", + header_list, + field_names, + ) + return False + else: + return True + + @staticmethod + def _delete_top_row_and_compress(input_file_name, output_file_ext, dest_dir): + # When output_file_ext is not defined, file is not compressed + open_fn = open + if output_file_ext.lower() == ".gz": + open_fn = gzip.GzipFile + elif output_file_ext.lower() == ".bz2": + open_fn = bz2.BZ2File + + _, fn_output = tempfile.mkstemp(suffix=output_file_ext, dir=dest_dir) + with open(input_file_name, "rb") as f_in, open_fn(fn_output, "wb") as f_out: + f_in.seek(0) + next(f_in) + for line in f_in: + f_out.write(line) + return fn_output
+
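A minimal usage sketch (the key, field mapping and table are placeholders; it assumes ``aws_default`` and ``hive_cli_default`` connections and a gzip-compressed csv object with a header row):

from datetime import datetime

from airflow import DAG
from airflow.providers.apache.hive.transfers.s3_to_hive import S3ToHiveOperator

with DAG(
    dag_id="example_s3_to_hive",
    start_date=datetime(2022, 1, 1),
    schedule="@daily",
    catchup=False,
) as dag:
    s3_to_hive = S3ToHiveOperator(
        task_id="s3_to_hive",
        s3_key="s3://my-bucket/exports/{{ ds }}/events.csv.gz",
        # Keys must match the file's header row (case-insensitively) because check_headers is enabled.
        field_dict={"event_id": "BIGINT", "event_type": "STRING", "payload": "STRING"},
        hive_table="staging.events",
        partition={"ds": "{{ ds }}"},
        headers=True,
        check_headers=True,
        input_compressed=True,
        aws_conn_id="aws_default",
        hive_cli_conn_id="hive_cli_default",
    )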
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/vertica_to_hive.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/vertica_to_hive.html
new file mode 100644
index 00000000000..22c3ac8f117
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/airflow/providers/apache/hive/transfers/vertica_to_hive.html
@@ -0,0 +1,947 @@

Source code for airflow.providers.apache.hive.transfers.vertica_to_hive

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""This module contains an operator to move data from Vertica to Hive."""
+from __future__ import annotations
+
+from collections import OrderedDict
+from tempfile import NamedTemporaryFile
+from typing import TYPE_CHECKING, Any, Sequence
+
+import unicodecsv as csv
+
+from airflow.models import BaseOperator
+from airflow.providers.apache.hive.hooks.hive import HiveCliHook
+from airflow.providers.vertica.hooks.vertica import VerticaHook
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class VerticaToHiveOperator(BaseOperator): + """ + Moves data from Vertica to Hive. The operator runs + your query against Vertica, stores the file locally + before loading it into a Hive table. If the ``create`` or + ``recreate`` arguments are set to ``True``, + a ``CREATE TABLE`` and ``DROP TABLE`` statements are generated. + Hive data types are inferred from the cursor's metadata. + Note that the table generated in Hive uses ``STORED AS textfile`` + which isn't the most efficient serialization format. If a + large amount of data is loaded and/or if the table gets + queried considerably, you may want to use this operator only to + stage the data into a temporary table before loading it into its + final destination using a ``HiveOperator``. + + :param sql: SQL query to execute against the Vertica database. (templated) + :param hive_table: target Hive table, use dot notation to target a + specific database. (templated) + :param create: whether to create the table if it doesn't exist + :param recreate: whether to drop and recreate the table at every execution + :param partition: target partition as a dict of partition columns + and values. (templated) + :param delimiter: field delimiter in the file + :param vertica_conn_id: source Vertica connection + :param hive_cli_conn_id: Reference to the + :ref:`Hive CLI connection id <howto/connection:hive_cli>`. + """ + +
[docs] template_fields: Sequence[str] = ("sql", "partition", "hive_table")
+
[docs] template_ext: Sequence[str] = (".sql",)
+
[docs] template_fields_renderers = {"sql": "sql"}
+
[docs] ui_color = "#b4e0ff"
+ + def __init__( + self, + *, + sql: str, + hive_table: str, + create: bool = True, + recreate: bool = False, + partition: dict | None = None, + delimiter: str = chr(1), + vertica_conn_id: str = "vertica_default", + hive_cli_conn_id: str = "hive_cli_default", + **kwargs: Any, + ) -> None: + super().__init__(**kwargs) + self.sql = sql + self.hive_table = hive_table + self.partition = partition + self.create = create + self.recreate = recreate + self.delimiter = str(delimiter) + self.vertica_conn_id = vertica_conn_id + self.hive_cli_conn_id = hive_cli_conn_id + self.partition = partition or {} + + @classmethod +
[docs] def type_map(cls, vertica_type): + """ + Vertica-python datatype.py does not provide the full type mapping access. + Manual hack. Reference: + https://github.com/uber/vertica-python/blob/master/vertica_python/vertica/column.py + """ + type_map = { + 5: "BOOLEAN", + 6: "INT", + 7: "FLOAT", + 8: "STRING", + 9: "STRING", + 16: "FLOAT", + } + return type_map.get(vertica_type, "STRING")
+ +
[docs] def execute(self, context: Context): + hive = HiveCliHook(hive_cli_conn_id=self.hive_cli_conn_id) + vertica = VerticaHook(vertica_conn_id=self.vertica_conn_id) + + self.log.info("Dumping Vertica query results to local file") + conn = vertica.get_conn() + cursor = conn.cursor() + cursor.execute(self.sql) + with NamedTemporaryFile("w") as f: + csv_writer = csv.writer(f, delimiter=self.delimiter, encoding="utf-8") + field_dict = OrderedDict() + for col_count, field in enumerate(cursor.description, start=1): + col_position = f"Column{col_count}" + field_dict[col_position if field[0] == "" else field[0]] = self.type_map(field[1]) + csv_writer.writerows(cursor.iterate()) + f.flush() + cursor.close() + conn.close() + self.log.info("Loading file into Hive") + hive.load_file( + f.name, + self.hive_table, + field_dict=field_dict, + create=self.create, + partition=self.partition, + delimiter=self.delimiter, + recreate=self.recreate,
+ ) +
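A minimal usage sketch (the query, target table and partition are placeholders; it assumes ``vertica_default`` and ``hive_cli_default`` connections):

from datetime import datetime

from airflow import DAG
from airflow.providers.apache.hive.transfers.vertica_to_hive import VerticaToHiveOperator

with DAG(
    dag_id="example_vertica_to_hive",
    start_date=datetime(2022, 1, 1),
    schedule="@daily",
    catchup=False,
) as dag:
    # Stages a Vertica extract into a partitioned Hive table.
    vertica_to_hive = VerticaToHiveOperator(
        task_id="vertica_to_hive",
        sql="SELECT user_id, plan, signup_date FROM public.subscriptions",
        hive_table="staging.subscriptions",
        partition={"ds": "{{ ds }}"},
        vertica_conn_id="vertica_default",
        hive_cli_conn_id="hive_cli_default",
    )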
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/index.html
new file mode 100644
index 00000000000..8bae9477c4b
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/index.html
@@ -0,0 +1,822 @@
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/tests/system/providers/apache/hive/example_twitter_dag.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/tests/system/providers/apache/hive/example_twitter_dag.html
new file mode 100644
index 00000000000..648f3471048
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_modules/tests/system/providers/apache/hive/example_twitter_dag.html
@@ -0,0 +1,977 @@

Source code for tests.system.providers.apache.hive.example_twitter_dag

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+This is an example dag for managing twitter data.
+"""
+from __future__ import annotations
+
+import os
+from datetime import date, datetime, timedelta
+
+from airflow import DAG
+from airflow.decorators import task
+from airflow.operators.bash import BashOperator
+from airflow.providers.apache.hive.operators.hive import HiveOperator
+
+# --------------------------------------------------------------------------------
+# Caveat: This Dag will not run because of missing scripts.
+# The purpose of this is to give you a sample of a real world example DAG!
+# --------------------------------------------------------------------------------
+
+# --------------------------------------------------------------------------------
+# Load The Dependencies
+# --------------------------------------------------------------------------------
+
+
+
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+
+DAG_ID = "example_twitter_dag"
+
+
+@task
+def fetch_tweets():
+    """
+    This task should call Twitter API and retrieve tweets from yesterday from and to for the four twitter
+    users (Twitter_A,..,Twitter_D) There should be eight csv output files generated by this task and naming
+    convention is direction(from or to)_twitterHandle_date.csv
+    """
+
+
+@task
+def clean_tweets():
+    """
+    This is a placeholder to clean the eight files. In this step you can get rid of or cherry pick columns
+    and different parts of the text.
+    """
+
+
+@task
+def analyze_tweets():
+    """
+    This is a placeholder to analyze the twitter data. Could simply be a sentiment analysis through algorithms
+    like bag of words or something more complicated. You can also take a look at Web Services to do such
+    tasks.
+    """
+
+
+@task
+def transfer_to_db():
+    """
+    This is a placeholder to extract summary from Hive data and store it to MySQL.
+    """
+
+
+with DAG(
+    dag_id=DAG_ID,
+    default_args={
+        "owner": "Ekhtiar",
+        "retries": 1,
+    },
+    schedule="@daily",
+    start_date=datetime(2021, 1, 1),
+    tags=["example"],
+    catchup=False,
+) as dag:
+    fetch = fetch_tweets()
+    clean = clean_tweets()
+    analyze = analyze_tweets()
+    hive_to_mysql = transfer_to_db()
+
+    fetch >> clean >> analyze
+
+    # --------------------------------------------------------------------------------
+    # The following tasks are generated using for loop. The first task puts the eight
+    # csv files to HDFS. The second task loads these files from HDFS to respected Hive
+    # tables. These two for loops could be combined into one loop. However, in most cases,
+    # you will be running different analysis on your incoming and outgoing tweets,
+    # and hence they are kept separated in this example.
+    # --------------------------------------------------------------------------------
+
+    from_channels = ["fromTwitter_A", "fromTwitter_B", "fromTwitter_C", "fromTwitter_D"]
+    to_channels = ["toTwitter_A", "toTwitter_B", "toTwitter_C", "toTwitter_D"]
+    yesterday = date.today() - timedelta(days=1)
+    dt = yesterday.strftime("%Y-%m-%d")
+    # define where you want to store the tweets csv file in your local directory
+    local_dir = "/tmp/"
+    # define the location where you want to store in HDFS
+    hdfs_dir = " /tmp/"
+
+    for channel in to_channels:
+
+        file_name = f"to_{channel}_{dt}.csv"
+
+        load_to_hdfs = BashOperator(
+            task_id=f"put_{channel}_to_hdfs",
+            bash_command=(
+                f"HADOOP_USER_NAME=hdfs hadoop fs -put -f {local_dir}{file_name}{hdfs_dir}{channel}/"
+            ),
+        )
+
+        # [START create_hive]
+        load_to_hive = HiveOperator(
+            task_id=f"load_{channel}_to_hive",
+            hql=(
+                f"LOAD DATA INPATH '{hdfs_dir}{channel}/{file_name}' "
+                f"INTO TABLE {channel} "
+                f"PARTITION(dt='{dt}')"
+            ),
+        )
+        # [END create_hive]
+
+        analyze >> load_to_hdfs >> load_to_hive >> hive_to_mysql
+
+    for channel in from_channels:
+        file_name = f"from_{channel}_{dt}.csv"
+        load_to_hdfs = BashOperator(
+            task_id=f"put_{channel}_to_hdfs",
+            bash_command=(
+                f"HADOOP_USER_NAME=hdfs hadoop fs -put -f {local_dir}{file_name}{hdfs_dir}{channel}/"
+            ),
+        )
+
+        load_to_hive = HiveOperator(
+            task_id=f"load_{channel}_to_hive",
+            hql=(
+                f"LOAD DATA INPATH '{hdfs_dir}{channel}/{file_name}' "
+                f"INTO TABLE {channel} "
+                f"PARTITION(dt='{dt}')"
+            ),
+        )
+
+        analyze >> load_to_hdfs >> load_to_hive >> hive_to_mysql
+
+    from tests.system.utils.watcher import watcher
+
+    # This test needs watcher in order to properly mark success/failure
+    # when "tearDown" task with trigger rule is part of the DAG
+    list(dag.tasks) >> watcher()
+
+from tests.system.utils import get_test_run  # noqa: E402
+
+# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)
+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/hooks/hive/index.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/hooks/hive/index.rst.txt new file mode 100644 index 00000000000..542fb893707 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/hooks/hive/index.rst.txt @@ -0,0 +1,446 @@ +:py:mod:`airflow.providers.apache.hive.hooks.hive` +================================================== + +.. py:module:: airflow.providers.apache.hive.hooks.hive + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.apache.hive.hooks.hive.HiveCliHook + airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook + airflow.providers.apache.hive.hooks.hive.HiveServer2Hook + + + +Functions +~~~~~~~~~ + +.. autoapisummary:: + + airflow.providers.apache.hive.hooks.hive.get_context_from_env_var + + + +Attributes +~~~~~~~~~~ + +.. autoapisummary:: + + airflow.providers.apache.hive.hooks.hive.HIVE_QUEUE_PRIORITIES + + +.. py:data:: HIVE_QUEUE_PRIORITIES + :annotation: = ['VERY_HIGH', 'HIGH', 'NORMAL', 'LOW', 'VERY_LOW'] + + + +.. py:function:: get_context_from_env_var() + + Extract context from env variable, e.g. dag_id, task_id and execution_date, + so that they can be used inside BashOperator and PythonOperator. + + :return: The context of interest. + + +.. py:class:: HiveCliHook(hive_cli_conn_id = default_conn_name, run_as = None, mapred_queue = None, mapred_queue_priority = None, mapred_job_name = None, hive_cli_params = '') + + Bases: :py:obj:`airflow.hooks.base.BaseHook` + + Simple wrapper around the hive CLI. + + It also supports the ``beeline`` + a lighter CLI that runs JDBC and is replacing the heavier + traditional CLI. To enable ``beeline``, set the use_beeline param in the + extra field of your connection as in ``{ "use_beeline": true }`` + + Note that you can also set default hive CLI parameters by passing ``hive_cli_params`` + space separated list of parameters to add to the hive command. + + The extra connection parameter ``auth`` gets passed as in the ``jdbc`` + connection string as is. + + :param hive_cli_conn_id: Reference to the + :ref:`Hive CLI connection id `. + :param mapred_queue: queue used by the Hadoop Scheduler (Capacity or Fair) + :param mapred_queue_priority: priority within the job queue. + Possible settings include: VERY_HIGH, HIGH, NORMAL, LOW, VERY_LOW + :param mapred_job_name: This name will appear in the jobtracker. + This can make monitoring easier. + :param hive_cli_params: Space separated list of hive command parameters to add to the + hive command. + + .. py:attribute:: conn_name_attr + :annotation: = hive_cli_conn_id + + + + .. py:attribute:: default_conn_name + :annotation: = hive_cli_default + + + + .. py:attribute:: conn_type + :annotation: = hive_cli + + + + .. py:attribute:: hook_name + :annotation: = Hive Client Wrapper + + + + .. py:method:: run_cli(hql, schema = None, verbose = True, hive_conf = None) + + Run an hql statement using the hive cli. If hive_conf is specified + it should be a dict and the entries will be set as key/value pairs + in HiveConf. + + :param hql: an hql (hive query language) statement to run with hive cli + :param schema: Name of hive schema (database) to use + :param verbose: Provides additional logging. Defaults to True. 
+ :param hive_conf: if specified these key value pairs will be passed + to hive as ``-hiveconf "key"="value"``. Note that they will be + passed after the ``hive_cli_params`` and thus will override + whatever values are specified in the database. + + >>> hh = HiveCliHook() + >>> result = hh.run_cli("USE airflow;") + >>> ("OK" in result) + True + + + .. py:method:: test_hql(hql) + + Test an hql statement using the hive cli and EXPLAIN + + + .. py:method:: load_df(df, table, field_dict = None, delimiter = ',', encoding = 'utf8', pandas_kwargs = None, **kwargs) + + Loads a pandas DataFrame into hive. + + Hive data types will be inferred if not passed but column names will + not be sanitized. + + :param df: DataFrame to load into a Hive table + :param table: target Hive table, use dot notation to target a + specific database + :param field_dict: mapping from column name to hive data type. + Note that it must be OrderedDict so as to keep columns' order. + :param delimiter: field delimiter in the file + :param encoding: str encoding to use when writing DataFrame to file + :param pandas_kwargs: passed to DataFrame.to_csv + :param kwargs: passed to self.load_file + + + .. py:method:: load_file(filepath, table, delimiter = ',', field_dict = None, create = True, overwrite = True, partition = None, recreate = False, tblproperties = None) + + Loads a local file into Hive + + Note that the table generated in Hive uses ``STORED AS textfile`` + which isn't the most efficient serialization format. If a + large amount of data is loaded and/or if the tables gets + queried considerably, you may want to use this operator only to + stage the data into a temporary table before loading it into its + final destination using a ``HiveOperator``. + + :param filepath: local filepath of the file to load + :param table: target Hive table, use dot notation to target a + specific database + :param delimiter: field delimiter in the file + :param field_dict: A dictionary of the fields name in the file + as keys and their Hive types as values. + Note that it must be OrderedDict so as to keep columns' order. + :param create: whether to create the table if it doesn't exist + :param overwrite: whether to overwrite the data in table or partition + :param partition: target partition as a dict of partition columns + and values + :param recreate: whether to drop and recreate the table at every + execution + :param tblproperties: TBLPROPERTIES of the hive table being created + + + .. py:method:: kill() + + Kill Hive cli command + + + +.. py:class:: HiveMetastoreHook(metastore_conn_id = default_conn_name) + + Bases: :py:obj:`airflow.hooks.base.BaseHook` + + Wrapper to interact with the Hive Metastore + + :param metastore_conn_id: reference to the + :ref: `metastore thrift service connection id `. + + .. py:attribute:: MAX_PART_COUNT + :annotation: = 32767 + + + + .. py:attribute:: conn_name_attr + :annotation: = metastore_conn_id + + + + .. py:attribute:: default_conn_name + :annotation: = metastore_default + + + + .. py:attribute:: conn_type + :annotation: = hive_metastore + + + + .. py:attribute:: hook_name + :annotation: = Hive Metastore Thrift + + + + .. py:method:: __getstate__() + + + .. py:method:: __setstate__(d) + + + .. py:method:: get_metastore_client() + + Returns a Hive thrift client. + + + .. py:method:: get_conn() + + Returns connection for the hook. + + + .. 
py:method:: check_for_partition(schema, table, partition) + + Checks whether a partition exists + + :param schema: Name of hive schema (database) @table belongs to + :param table: Name of hive table @partition belongs to + :param partition: Expression that matches the partitions to check for + (eg `a = 'b' AND c = 'd'`) + + >>> hh = HiveMetastoreHook() + >>> t = 'static_babynames_partitioned' + >>> hh.check_for_partition('airflow', t, "ds='2015-01-01'") + True + + + .. py:method:: check_for_named_partition(schema, table, partition_name) + + Checks whether a partition with a given name exists + + :param schema: Name of hive schema (database) @table belongs to + :param table: Name of hive table @partition belongs to + :param partition_name: Name of the partitions to check for (eg `a=b/c=d`) + + >>> hh = HiveMetastoreHook() + >>> t = 'static_babynames_partitioned' + >>> hh.check_for_named_partition('airflow', t, "ds=2015-01-01") + True + >>> hh.check_for_named_partition('airflow', t, "ds=xxx") + False + + + .. py:method:: get_table(table_name, db = 'default') + + Get a metastore table object + + >>> hh = HiveMetastoreHook() + >>> t = hh.get_table(db='airflow', table_name='static_babynames') + >>> t.tableName + 'static_babynames' + >>> [col.name for col in t.sd.cols] + ['state', 'year', 'name', 'gender', 'num'] + + + .. py:method:: get_tables(db, pattern = '*') + + Get a metastore table object + + + .. py:method:: get_databases(pattern = '*') + + Get a metastore table object + + + .. py:method:: get_partitions(schema, table_name, partition_filter = None) + + Returns a list of all partitions in a table. Works only + for tables with less than 32767 (java short max val). + For subpartitioned table, the number might easily exceed this. + + >>> hh = HiveMetastoreHook() + >>> t = 'static_babynames_partitioned' + >>> parts = hh.get_partitions(schema='airflow', table_name=t) + >>> len(parts) + 1 + >>> parts + [{'ds': '2015-01-01'}] + + + .. py:method:: max_partition(schema, table_name, field = None, filter_map = None) + + Returns the maximum value for all partitions with given field in a table. + If only one partition key exist in the table, the key will be used as field. + filter_map should be a partition_key:partition_value map and will be used to + filter out partitions. + + :param schema: schema name. + :param table_name: table name. + :param field: partition key to get max partition from. + :param filter_map: partition_key:partition_value map used for partition filtering. + + >>> hh = HiveMetastoreHook() + >>> filter_map = {'ds': '2015-01-01'} + >>> t = 'static_babynames_partitioned' + >>> hh.max_partition(schema='airflow', ... table_name=t, field='ds', filter_map=filter_map) + '2015-01-01' + + + .. py:method:: table_exists(table_name, db = 'default') + + Check if table exists + + >>> hh = HiveMetastoreHook() + >>> hh.table_exists(db='airflow', table_name='static_babynames') + True + >>> hh.table_exists(db='airflow', table_name='does_not_exist') + False + + + .. py:method:: drop_partitions(table_name, part_vals, delete_data=False, db='default') + + Drop partitions from the given table matching the part_vals input + + :param table_name: table name. + :param part_vals: list of partition specs. + :param delete_data: Setting to control if underlying data have to deleted + in addition to dropping partitions. 
+ :param db: Name of hive schema (database) @table belongs to + + >>> hh = HiveMetastoreHook() + >>> hh.drop_partitions(db='airflow', table_name='static_babynames', + part_vals="['2020-05-01']") + True + + + +.. py:class:: HiveServer2Hook(*args, schema = None, log_sql = True, **kwargs) + + Bases: :py:obj:`airflow.providers.common.sql.hooks.sql.DbApiHook` + + Wrapper around the pyhive library + + Notes: + * the default auth_mechanism is PLAIN, to override it you + can specify it in the ``extra`` of your connection in the UI + * the default for run_set_variable_statements is true, if you + are using impala you may need to set it to false in the + ``extra`` of your connection in the UI + + :param hiveserver2_conn_id: Reference to the + :ref: `Hive Server2 thrift service connection id `. + :param schema: Hive database name. + + .. py:attribute:: conn_name_attr + :annotation: = hiveserver2_conn_id + + + + .. py:attribute:: default_conn_name + :annotation: = hiveserver2_default + + + + .. py:attribute:: conn_type + :annotation: = hiveserver2 + + + + .. py:attribute:: hook_name + :annotation: = Hive Server 2 Thrift + + + + .. py:attribute:: supports_autocommit + :annotation: = False + + + + .. py:method:: get_conn(schema = None) + + Returns a Hive connection object. + + + .. py:method:: get_results(sql, schema = 'default', fetch_size = None, hive_conf = None) + + Get results of the provided hql in target schema. + + :param sql: hql to be executed. + :param schema: target schema, default to 'default'. + :param fetch_size: max size of result to fetch. + :param hive_conf: hive_conf to execute alone with the hql. + :return: results of hql execution, dict with data (list of results) and header + + + .. py:method:: to_csv(sql, csv_filepath, schema = 'default', delimiter = ',', lineterminator = '\r\n', output_header = True, fetch_size = 1000, hive_conf = None) + + Execute hql in target schema and write results to a csv file. + + :param sql: hql to be executed. + :param csv_filepath: filepath of csv to write results into. + :param schema: target schema, default to 'default'. + :param delimiter: delimiter of the csv file, default to ','. + :param lineterminator: lineterminator of the csv file. + :param output_header: header of the csv file, default to True. + :param fetch_size: number of result rows to write into the csv file, default to 1000. + :param hive_conf: hive_conf to execute alone with the hql. + + + + .. py:method:: get_records(sql, parameters = None, **kwargs) + + Get a set of records from a Hive query. You can optionally pass 'schema' kwarg + which specifies target schema and default to 'default'. + + :param sql: hql to be executed. + :param parameters: optional configuration passed to get_results + :return: result of hive execution + + >>> hh = HiveServer2Hook() + >>> sql = "SELECT * FROM airflow.static_babynames LIMIT 100" + >>> len(hh.get_records(sql)) + 100 + + + .. py:method:: get_pandas_df(sql, schema = 'default', hive_conf = None, **kwargs) + + Get a pandas dataframe from a Hive query + + :param sql: hql to be executed. + :param schema: target schema, default to 'default'. + :param hive_conf: hive_conf to execute alone with the hql. 
+      :param kwargs: (optional) passed into pandas.DataFrame constructor
+      :return: result of hive execution
+
+      >>> hh = HiveServer2Hook()
+      >>> sql = "SELECT * FROM airflow.static_babynames LIMIT 100"
+      >>> df = hh.get_pandas_df(sql)
+      >>> len(df.index)
+      100
+
+      :return: pandas.DataFrame
+
+
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/hooks/index.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/hooks/index.rst.txt
new file mode 100644
index 00000000000..2759f01fb0b
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/hooks/index.rst.txt
@@ -0,0 +1,15 @@
+:py:mod:`airflow.providers.apache.hive.hooks`
+=============================================
+
+.. py:module:: airflow.providers.apache.hive.hooks
+
+
+Submodules
+----------
+.. toctree::
+   :titlesonly:
+   :maxdepth: 1
+
+   hive/index.rst
+
+
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/index.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/index.rst.txt
new file mode 100644
index 00000000000..41927b27aa0
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/index.rst.txt
@@ -0,0 +1,18 @@
+:py:mod:`airflow.providers.apache.hive`
+=======================================
+
+.. py:module:: airflow.providers.apache.hive
+
+
+Subpackages
+-----------
+.. toctree::
+   :titlesonly:
+   :maxdepth: 3
+
+   hooks/index.rst
+   operators/index.rst
+   sensors/index.rst
+   transfers/index.rst
+
+
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/operators/hive/index.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/operators/hive/index.rst.txt
new file mode 100644
index 00000000000..48425c4a7b5
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/operators/hive/index.rst.txt
@@ -0,0 +1,107 @@
+:py:mod:`airflow.providers.apache.hive.operators.hive`
+======================================================
+
+.. py:module:: airflow.providers.apache.hive.operators.hive
+
+
+Module Contents
+---------------
+
+Classes
+~~~~~~~
+
+.. autoapisummary::
+
+   airflow.providers.apache.hive.operators.hive.HiveOperator
+
+
+
+
+.. py:class:: HiveOperator(*, hql, hive_cli_conn_id = 'hive_cli_default', schema = 'default', hiveconfs = None, hiveconf_jinja_translate = False, script_begin_tag = None, run_as_owner = False, mapred_queue = None, mapred_queue_priority = None, mapred_job_name = None, hive_cli_params = '', **kwargs)
+
+   Bases: :py:obj:`airflow.models.BaseOperator`
+
+   Executes hql code or hive script in a specific Hive database.
+
+   :param hql: the hql to be executed. Note that you may also use
+       a relative path from the dag file of a (template) hive
+       script. (templated)
+   :param hive_cli_conn_id: Reference to the
+       :ref:`Hive CLI connection id `. (templated)
+   :param hiveconfs: if defined, these key value pairs will be passed
+       to hive as ``-hiveconf "key"="value"``
+   :param hiveconf_jinja_translate: when True, hiveconf-type templating
+       ${var} gets translated into jinja-type templating {{ var }} and
+       ${hiveconf:var} gets translated into jinja-type templating {{ var }}.
+       Note that you may want to use this along with the
+       ``DAG(user_defined_macros=myargs)`` parameter. View the DAG
+       object documentation for more details.
+   :param script_begin_tag: If defined, the operator will get rid of the
+       part of the script before the first occurrence of `script_begin_tag`
+   :param run_as_owner: Run HQL code as a DAG's owner.
+   :param mapred_queue: queue used by the Hadoop CapacityScheduler. (templated)
+   :param mapred_queue_priority: priority within CapacityScheduler queue.
+       Possible settings include: VERY_HIGH, HIGH, NORMAL, LOW, VERY_LOW
+   :param mapred_job_name: This name will appear in the jobtracker.
+       This can make monitoring easier.
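+
+   A minimal usage sketch, assuming the operator is created inside a DAG context;
+   the task id and the relative ``.hql`` script path are illustrative values, not
+   defaults of this operator:
+
+   .. code-block:: python
+
+      from airflow.providers.apache.hive.operators.hive import HiveOperator
+
+      load_partition = HiveOperator(
+          task_id="load_daily_partition",
+          hql="hql/load_daily_partition.hql",
+          hive_cli_conn_id="hive_cli_default",
+          schema="default",
+      )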
+
+   .. py:attribute:: template_fields
+      :annotation: :Sequence[str] = ['hql', 'schema', 'hive_cli_conn_id', 'mapred_queue', 'hiveconfs', 'mapred_job_name',...
+
+
+
+   .. py:attribute:: template_ext
+      :annotation: :Sequence[str] = ['.hql', '.sql']
+
+
+
+   .. py:attribute:: template_fields_renderers
+
+
+
+
+   .. py:attribute:: ui_color
+      :annotation: = #f0e4ec
+
+
+
+   .. py:method:: get_hook()
+
+      Get Hive cli hook
+
+
+   .. py:method:: prepare_template()
+
+      Hook triggered after the templated fields get replaced by their content.
+
+      If you need your operator to alter the content of the file before the
+      template is rendered, it should override this method to do so.
+
+
+   .. py:method:: execute(context)
+
+      This is the main method to derive when creating an operator.
+      Context is the same dictionary used as when rendering jinja templates.
+
+      Refer to get_template_context for more context.
+
+
+   .. py:method:: dry_run()
+
+      Performs dry run for the operator - just render template fields.
+
+
+   .. py:method:: on_kill()
+
+      Override this method to cleanup subprocesses when a task instance
+      gets killed. Any use of the threading, subprocess or multiprocessing
+      module within an operator needs to be cleaned up or it will leave
+      ghost processes behind.
+
+
+   .. py:method:: clear_airflow_vars()
+
+      Reset airflow environment variables to prevent existing ones from impacting behavior.
+
+
+
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/operators/hive_stats/index.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/operators/hive_stats/index.rst.txt
new file mode 100644
index 00000000000..f8f59bd6179
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/operators/hive_stats/index.rst.txt
@@ -0,0 +1,72 @@
+:py:mod:`airflow.providers.apache.hive.operators.hive_stats`
+============================================================
+
+.. py:module:: airflow.providers.apache.hive.operators.hive_stats
+
+
+Module Contents
+---------------
+
+Classes
+~~~~~~~
+
+.. autoapisummary::
+
+   airflow.providers.apache.hive.operators.hive_stats.HiveStatsCollectionOperator
+
+
+
+
+.. py:class:: HiveStatsCollectionOperator(*, table, partition, extra_exprs = None, excluded_columns = None, assignment_func = None, metastore_conn_id = 'metastore_default', presto_conn_id = 'presto_default', mysql_conn_id = 'airflow_db', **kwargs)
+
+   Bases: :py:obj:`airflow.models.BaseOperator`
+
+   Gathers partition statistics using a dynamically generated Presto
+   query, inserts the stats into a MySql table with this format. Stats
+   overwrite themselves if you rerun the same date/partition.
+   ::
+
+       CREATE TABLE hive_stats (
+           ds VARCHAR(16),
+           table_name VARCHAR(500),
+           metric VARCHAR(200),
+           value BIGINT
+       );
+
+   :param metastore_conn_id: Reference to the
+       :ref:`Hive Metastore connection id `.
+   :param table: the source table, in the format ``database.table_name``. (templated)
+   :param partition: the source partition. (templated)
+   :param extra_exprs: dict of expression to run against the table where
+       keys are metric names and values are Presto compatible expressions
+   :param excluded_columns: list of columns to exclude, consider
+       excluding blobs, large json columns, ...
+   :param assignment_func: a function that receives a column name and
+       a type, and returns a dict of metric names and Presto expressions.
+       If None is returned, the global defaults are applied. If an
+       empty dictionary is returned, no stats are computed for that
+       column.
+
+   .. py:attribute:: template_fields
+      :annotation: :Sequence[str] = ['table', 'partition', 'ds', 'dttm']
+
+
+
+   .. py:attribute:: ui_color
+      :annotation: = #aff7a6
+
+
+
+   .. py:method:: get_default_exprs(col, col_type)
+
+      Get default expressions
+
+
+   .. py:method:: execute(context)
+
+      This is the main method to derive when creating an operator.
+      Context is the same dictionary used as when rendering jinja templates.
+
+      Refer to get_template_context for more context.
+
+
+
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/operators/index.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/operators/index.rst.txt
new file mode 100644
index 00000000000..f666c82a71a
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/operators/index.rst.txt
@@ -0,0 +1,16 @@
+:py:mod:`airflow.providers.apache.hive.operators`
+=================================================
+
+.. py:module:: airflow.providers.apache.hive.operators
+
+
+Submodules
+----------
+.. toctree::
+   :titlesonly:
+   :maxdepth: 1
+
+   hive/index.rst
+   hive_stats/index.rst
+
+
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/sensors/hive_partition/index.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/sensors/hive_partition/index.rst.txt
new file mode 100644
index 00000000000..e27c5492275
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/sensors/hive_partition/index.rst.txt
@@ -0,0 +1,54 @@
+:py:mod:`airflow.providers.apache.hive.sensors.hive_partition`
+==============================================================
+
+.. py:module:: airflow.providers.apache.hive.sensors.hive_partition
+
+
+Module Contents
+---------------
+
+Classes
+~~~~~~~
+
+.. autoapisummary::
+
+   airflow.providers.apache.hive.sensors.hive_partition.HivePartitionSensor
+
+
+
+
+.. py:class:: HivePartitionSensor(*, table, partition = "ds='{{ ds }}'", metastore_conn_id = 'metastore_default', schema = 'default', poke_interval = 60 * 3, **kwargs)
+
+   Bases: :py:obj:`airflow.sensors.base.BaseSensorOperator`
+
+   Waits for a partition to show up in Hive.
+
+   Note: Because ``partition`` supports general logical operators, it
+   can be inefficient. Consider using NamedHivePartitionSensor instead if
+   you don't need the full flexibility of HivePartitionSensor.
+
+   :param table: The name of the table to wait for, supports the dot
+       notation (my_database.my_table)
+   :param partition: The partition clause to wait for. This is passed as
+       is to the metastore Thrift client ``get_partitions_by_filter`` method,
+       and apparently supports SQL like notation as in ``ds='2015-01-01'
+       AND type='value'`` and comparison operators as in ``"ds>=2015-01-01"``
+   :param metastore_conn_id: reference to the
+       :ref: `metastore thrift service connection id `
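+
+   A minimal usage sketch, assuming the sensor is created inside a DAG context;
+   the task id, table, and partition filter are illustrative values:
+
+   .. code-block:: python
+
+      from airflow.providers.apache.hive.sensors.hive_partition import HivePartitionSensor
+
+      wait_for_partition = HivePartitionSensor(
+          task_id="wait_for_partition",
+          table="airflow.static_babynames_partitioned",
+          partition="ds='{{ ds }}'",
+          metastore_conn_id="metastore_default",
+      )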
+
+   .. py:attribute:: template_fields
+      :annotation: :Sequence[str] = ['schema', 'table', 'partition']
+
+
+
+   .. py:attribute:: ui_color
+      :annotation: = #C5CAE9
+
+
+
+   .. py:method:: poke(context)
+
+      Function defined by the sensors while deriving this class should override.
+
+
+
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/sensors/index.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/sensors/index.rst.txt
new file mode 100644
index 00000000000..7c0ea3abe5e
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/sensors/index.rst.txt
@@ -0,0 +1,17 @@
+:py:mod:`airflow.providers.apache.hive.sensors`
+===============================================
+
+.. py:module:: airflow.providers.apache.hive.sensors
+
+
+Submodules
+----------
+.. toctree::
+   :titlesonly:
+   :maxdepth: 1
+
+   hive_partition/index.rst
+   metastore_partition/index.rst
+   named_hive_partition/index.rst
+
+
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/sensors/metastore_partition/index.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/sensors/metastore_partition/index.rst.txt
new file mode 100644
index 00000000000..413bba8b059
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/sensors/metastore_partition/index.rst.txt
@@ -0,0 +1,53 @@
+:py:mod:`airflow.providers.apache.hive.sensors.metastore_partition`
+===================================================================
+
+.. py:module:: airflow.providers.apache.hive.sensors.metastore_partition
+
+
+Module Contents
+---------------
+
+Classes
+~~~~~~~
+
+.. autoapisummary::
+
+   airflow.providers.apache.hive.sensors.metastore_partition.MetastorePartitionSensor
+
+
+
+
+.. py:class:: MetastorePartitionSensor(*, table, partition_name, schema = 'default', mysql_conn_id = 'metastore_mysql', **kwargs)
+
+   Bases: :py:obj:`airflow.providers.common.sql.sensors.sql.SqlSensor`
+
+   An alternative to the HivePartitionSensor that talks directly to the
+   MySQL db. This was created as a result of observing sub-optimal
+   queries generated by the Metastore thrift service when hitting
+   subpartitioned tables. The Thrift service's queries were written in a
+   way that wouldn't leverage the indexes.
+
+   :param schema: the schema
+   :param table: the table
+   :param partition_name: the partition name, as defined in the PARTITIONS
+       table of the Metastore. Order of the fields does matter.
+       Examples: ``ds=2016-01-01`` or
+       ``ds=2016-01-01/sub=foo`` for a sub partitioned table
+   :param mysql_conn_id: a reference to the MySQL conn_id for the metastore
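+
+   A minimal usage sketch, assuming the sensor is created inside a DAG context;
+   the task id, table, and partition name are illustrative values:
+
+   .. code-block:: python
+
+      from airflow.providers.apache.hive.sensors.metastore_partition import MetastorePartitionSensor
+
+      wait_for_partition_metadata = MetastorePartitionSensor(
+          task_id="wait_for_partition_metadata",
+          table="static_babynames_partitioned",
+          partition_name="ds={{ ds }}",
+          mysql_conn_id="metastore_mysql",
+      )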
+
+   .. py:attribute:: template_fields
+      :annotation: :Sequence[str] = ['partition_name', 'table', 'schema']
+
+
+
+   .. py:attribute:: ui_color
+      :annotation: = #8da7be
+
+
+
+   .. py:method:: poke(context)
+
+      Function defined by the sensors while deriving this class should override.
+
+
+
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/sensors/named_hive_partition/index.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/sensors/named_hive_partition/index.rst.txt
new file mode 100644
index 00000000000..f6dd8102264
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/sensors/named_hive_partition/index.rst.txt
@@ -0,0 +1,62 @@
+:py:mod:`airflow.providers.apache.hive.sensors.named_hive_partition`
+====================================================================
+
+.. py:module:: airflow.providers.apache.hive.sensors.named_hive_partition
+
+
+Module Contents
+---------------
+
+Classes
+~~~~~~~
+
+.. autoapisummary::
+
+   airflow.providers.apache.hive.sensors.named_hive_partition.NamedHivePartitionSensor
+
+
+
+
+.. py:class:: NamedHivePartitionSensor(*, partition_names, metastore_conn_id = 'metastore_default', poke_interval = 60 * 3, hook = None, **kwargs)
+
+   Bases: :py:obj:`airflow.sensors.base.BaseSensorOperator`
+
+   Waits for a set of partitions to show up in Hive.
+
+   :param partition_names: List of fully qualified names of the
+       partitions to wait for. A fully qualified name is of the
+       form ``schema.table/pk1=pv1/pk2=pv2``, for example,
+       default.users/ds=2016-01-01. This is passed as is to the metastore
+       Thrift client ``get_partitions_by_name`` method. Note that
+       you cannot use logical or comparison operators as in
+       HivePartitionSensor.
+   :param metastore_conn_id: Reference to the
+       :ref:`metastore thrift service connection id `.
+
+   .. py:attribute:: template_fields
+      :annotation: :Sequence[str] = ['partition_names']
+
+
+
+   .. py:attribute:: ui_color
+      :annotation: = #8d99ae
+
+
+
+   .. py:method:: parse_partition_name(partition)
+      :staticmethod:
+
+      Get schema, table, and partition info.
+
+
+   .. py:method:: poke_partition(partition)
+
+      Check for a named partition.
+
+
+   .. py:method:: poke(context)
+
+      Function defined by the sensors while deriving this class should override.
+
+
+
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/hive_to_mysql/index.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/hive_to_mysql/index.rst.txt
new file mode 100644
index 00000000000..a6f55f659b1
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/hive_to_mysql/index.rst.txt
@@ -0,0 +1,80 @@
+:py:mod:`airflow.providers.apache.hive.transfers.hive_to_mysql`
+===============================================================
+
+.. py:module:: airflow.providers.apache.hive.transfers.hive_to_mysql
+
+.. autoapi-nested-parse::
+
+   This module contains an operator to move data from Hive to MySQL.
+
+
+
+Module Contents
+---------------
+
+Classes
+~~~~~~~
+
+.. autoapisummary::
+
+   airflow.providers.apache.hive.transfers.hive_to_mysql.HiveToMySqlOperator
+
+
+
+
+.. py:class:: HiveToMySqlOperator(*, sql, mysql_table, hiveserver2_conn_id = 'hiveserver2_default', mysql_conn_id = 'mysql_default', mysql_preoperator = None, mysql_postoperator = None, bulk_load = False, hive_conf = None, **kwargs)
+
+   Bases: :py:obj:`airflow.models.BaseOperator`
+
+   Moves data from Hive to MySQL. Note that for now the data is loaded
+   into memory before being pushed to MySQL, so this operator should
+   be used for small amounts of data.
+
+   :param sql: SQL query to execute against Hive server. (templated)
+   :param mysql_table: target MySQL table, use dot notation to target a
+       specific database. (templated)
+   :param mysql_conn_id: source mysql connection
+   :param hiveserver2_conn_id: Reference to the
+       :ref:`Hive Server2 thrift service connection id `.
+   :param mysql_preoperator: sql statement to run against mysql prior to
+       import, typically used to truncate or delete in place
+       of the data coming in, allowing the task to be idempotent (running
+       the task twice won't double load data). (templated)
+   :param mysql_postoperator: sql statement to run against mysql after the
+       import, typically used to move data from staging to
+       production and issue cleanup commands. (templated)
+   :param bulk_load: flag to use bulk_load option. This loads mysql directly
+       from a tab-delimited text file using the LOAD DATA LOCAL INFILE command.
+       This option requires an extra connection parameter for the
+       destination MySQL connection: {'local_infile': true}.
+   :param hive_conf:
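+
+   A minimal usage sketch, assuming the operator is created inside a DAG context;
+   the query, target table, and pre-operator statement are illustrative values:
+
+   .. code-block:: python
+
+      from airflow.providers.apache.hive.transfers.hive_to_mysql import HiveToMySqlOperator
+
+      hive_to_mysql = HiveToMySqlOperator(
+          task_id="hive_to_mysql",
+          sql="SELECT ds, name, num FROM airflow.static_babynames LIMIT 100",
+          mysql_table="airflow.static_babynames_summary",
+          mysql_preoperator="DELETE FROM airflow.static_babynames_summary WHERE ds = '{{ ds }}'",
+          hiveserver2_conn_id="hiveserver2_default",
+          mysql_conn_id="mysql_default",
+      )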
+
+   .. py:attribute:: template_fields
+      :annotation: :Sequence[str] = ['sql', 'mysql_table', 'mysql_preoperator', 'mysql_postoperator']
+
+
+
+   .. py:attribute:: template_ext
+      :annotation: :Sequence[str] = ['.sql']
+
+
+
+   .. py:attribute:: template_fields_renderers
+
+
+
+
+   .. py:attribute:: ui_color
+      :annotation: = #a0e08c
+
+
+
+   .. py:method:: execute(context)
+
+      This is the main method to derive when creating an operator.
+      Context is the same dictionary used as when rendering jinja templates.
+
+      Refer to get_template_context for more context.
+
+
+
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/hive_to_samba/index.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/hive_to_samba/index.rst.txt
new file mode 100644
index 00000000000..5d448358476
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/hive_to_samba/index.rst.txt
@@ -0,0 +1,61 @@
+:py:mod:`airflow.providers.apache.hive.transfers.hive_to_samba`
+===============================================================
+
+.. py:module:: airflow.providers.apache.hive.transfers.hive_to_samba
+
+.. autoapi-nested-parse::
+
+   This module contains an operator to move data from Hive to Samba.
+
+
+
+Module Contents
+---------------
+
+Classes
+~~~~~~~
+
+.. autoapisummary::
+
+   airflow.providers.apache.hive.transfers.hive_to_samba.HiveToSambaOperator
+
+
+
+
+.. py:class:: HiveToSambaOperator(*, hql, destination_filepath, samba_conn_id = 'samba_default', hiveserver2_conn_id = 'hiveserver2_default', **kwargs)
+
+   Bases: :py:obj:`airflow.models.BaseOperator`
+
+   Executes hql code in a specific Hive database and loads the
+   results of the query as a csv to a Samba location.
+
+   :param hql: the hql to be exported.
(templated) + :param destination_filepath: the file path to where the file will be pushed onto samba + :param samba_conn_id: reference to the samba destination + :param hiveserver2_conn_id: Reference to the + :ref: `Hive Server2 thrift service connection id `. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['hql', 'destination_filepath'] + + + + .. py:attribute:: template_ext + :annotation: :Sequence[str] = ['.hql', '.sql'] + + + + .. py:attribute:: template_fields_renderers + + + + + .. py:method:: execute(context) + + This is the main method to derive when creating an operator. + Context is the same dictionary used as when rendering jinja templates. + + Refer to get_template_context for more context. + + + diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/index.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/index.rst.txt new file mode 100644 index 00000000000..9e9afbc8713 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/index.rst.txt @@ -0,0 +1,20 @@ +:py:mod:`airflow.providers.apache.hive.transfers` +================================================= + +.. py:module:: airflow.providers.apache.hive.transfers + + +Submodules +---------- +.. toctree:: + :titlesonly: + :maxdepth: 1 + + hive_to_mysql/index.rst + hive_to_samba/index.rst + mssql_to_hive/index.rst + mysql_to_hive/index.rst + s3_to_hive/index.rst + vertica_to_hive/index.rst + + diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/mssql_to_hive/index.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/mssql_to_hive/index.rst.txt new file mode 100644 index 00000000000..b82ffbafbaa --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/mssql_to_hive/index.rst.txt @@ -0,0 +1,90 @@ +:py:mod:`airflow.providers.apache.hive.transfers.mssql_to_hive` +=============================================================== + +.. py:module:: airflow.providers.apache.hive.transfers.mssql_to_hive + +.. autoapi-nested-parse:: + + This module contains an operator to move data from MSSQL to Hive. + + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlToHiveOperator + + + + +.. py:class:: MsSqlToHiveOperator(*, sql, hive_table, create = True, recreate = False, partition = None, delimiter = chr(1), mssql_conn_id = 'mssql_default', hive_cli_conn_id = 'hive_cli_default', tblproperties = None, **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Moves data from Microsoft SQL Server to Hive. The operator runs + your query against Microsoft SQL Server, stores the file locally + before loading it into a Hive table. If the ``create`` or + ``recreate`` arguments are set to ``True``, + a ``CREATE TABLE`` and ``DROP TABLE`` statements are generated. + Hive data types are inferred from the cursor's metadata. + Note that the table generated in Hive uses ``STORED AS textfile`` + which isn't the most efficient serialization format. 
If a + large amount of data is loaded and/or if the table gets + queried considerably, you may want to use this operator only to + stage the data into a temporary table before loading it into its + final destination using a ``HiveOperator``. + + :param sql: SQL query to execute against the Microsoft SQL Server + database. (templated) + :param hive_table: target Hive table, use dot notation to target a specific + database. (templated) + :param create: whether to create the table if it doesn't exist + :param recreate: whether to drop and recreate the table at every execution + :param partition: target partition as a dict of partition columns and + values. (templated) + :param delimiter: field delimiter in the file + :param mssql_conn_id: source Microsoft SQL Server connection + :param hive_cli_conn_id: Reference to the + :ref:`Hive CLI connection id `. + :param tblproperties: TBLPROPERTIES of the hive table being created + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['sql', 'partition', 'hive_table'] + + + + .. py:attribute:: template_ext + :annotation: :Sequence[str] = ['.sql'] + + + + .. py:attribute:: template_fields_renderers + + + + + .. py:attribute:: ui_color + :annotation: = #a0e08c + + + + .. py:method:: type_map(mssql_type) + :classmethod: + + Maps MsSQL type to Hive type. + + + .. py:method:: execute(context) + + This is the main method to derive when creating an operator. + Context is the same dictionary used as when rendering jinja templates. + + Refer to get_template_context for more context. + + + diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/mysql_to_hive/index.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/mysql_to_hive/index.rst.txt new file mode 100644 index 00000000000..13fd7535bca --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/mysql_to_hive/index.rst.txt @@ -0,0 +1,95 @@ +:py:mod:`airflow.providers.apache.hive.transfers.mysql_to_hive` +=============================================================== + +.. py:module:: airflow.providers.apache.hive.transfers.mysql_to_hive + +.. autoapi-nested-parse:: + + This module contains an operator to move data from MySQL to Hive. + + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.apache.hive.transfers.mysql_to_hive.MySqlToHiveOperator + + + + +.. py:class:: MySqlToHiveOperator(*, sql, hive_table, create = True, recreate = False, partition = None, delimiter = chr(1), quoting = None, quotechar = '"', escapechar = None, mysql_conn_id = 'mysql_default', hive_cli_conn_id = 'hive_cli_default', tblproperties = None, **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Moves data from MySql to Hive. The operator runs your query against + MySQL, stores the file locally before loading it into a Hive table. + If the ``create`` or ``recreate`` arguments are set to ``True``, + a ``CREATE TABLE`` and ``DROP TABLE`` statements are generated. + Hive data types are inferred from the cursor's metadata. Note that the + table generated in Hive uses ``STORED AS textfile`` + which isn't the most efficient serialization format. 
If a + large amount of data is loaded and/or if the table gets + queried considerably, you may want to use this operator only to + stage the data into a temporary table before loading it into its + final destination using a ``HiveOperator``. + + :param sql: SQL query to execute against the MySQL database. (templated) + :param hive_table: target Hive table, use dot notation to target a + specific database. (templated) + :param create: whether to create the table if it doesn't exist + :param recreate: whether to drop and recreate the table at every + execution + :param partition: target partition as a dict of partition columns + and values. (templated) + :param delimiter: field delimiter in the file + :param quoting: controls when quotes should be generated by csv writer, + It can take on any of the csv.QUOTE_* constants. + :param quotechar: one-character string used to quote fields + containing special characters. + :param escapechar: one-character string used by csv writer to escape + the delimiter or quotechar. + :param mysql_conn_id: source mysql connection + :param hive_cli_conn_id: Reference to the + :ref:`Hive CLI connection id `. + :param tblproperties: TBLPROPERTIES of the hive table being created + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['sql', 'partition', 'hive_table'] + + + + .. py:attribute:: template_ext + :annotation: :Sequence[str] = ['.sql'] + + + + .. py:attribute:: template_fields_renderers + + + + + .. py:attribute:: ui_color + :annotation: = #a0e08c + + + + .. py:method:: type_map(mysql_type) + :classmethod: + + Maps MySQL type to Hive type. + + + .. py:method:: execute(context) + + This is the main method to derive when creating an operator. + Context is the same dictionary used as when rendering jinja templates. + + Refer to get_template_context for more context. + + + diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/s3_to_hive/index.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/s3_to_hive/index.rst.txt new file mode 100644 index 00000000000..8497ba71b1e --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/s3_to_hive/index.rst.txt @@ -0,0 +1,100 @@ +:py:mod:`airflow.providers.apache.hive.transfers.s3_to_hive` +============================================================ + +.. py:module:: airflow.providers.apache.hive.transfers.s3_to_hive + +.. autoapi-nested-parse:: + + This module contains an operator to move data from an S3 bucket to Hive. + + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.apache.hive.transfers.s3_to_hive.S3ToHiveOperator + + + + +.. py:class:: S3ToHiveOperator(*, s3_key, field_dict, hive_table, delimiter = ',', create = True, recreate = False, partition = None, headers = False, check_headers = False, wildcard_match = False, aws_conn_id = 'aws_default', verify = None, hive_cli_conn_id = 'hive_cli_default', input_compressed = False, tblproperties = None, select_expression = None, **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Moves data from S3 to Hive. The operator downloads a file from S3, + stores the file locally before loading it into a Hive table. + If the ``create`` or ``recreate`` arguments are set to ``True``, + a ``CREATE TABLE`` and ``DROP TABLE`` statements are generated. 
+ Hive data types are inferred from the cursor's metadata from. + + Note that the table generated in Hive uses ``STORED AS textfile`` + which isn't the most efficient serialization format. If a + large amount of data is loaded and/or if the tables gets + queried considerably, you may want to use this operator only to + stage the data into a temporary table before loading it into its + final destination using a ``HiveOperator``. + + :param s3_key: The key to be retrieved from S3. (templated) + :param field_dict: A dictionary of the fields name in the file + as keys and their Hive types as values + :param hive_table: target Hive table, use dot notation to target a + specific database. (templated) + :param delimiter: field delimiter in the file + :param create: whether to create the table if it doesn't exist + :param recreate: whether to drop and recreate the table at every + execution + :param partition: target partition as a dict of partition columns + and values. (templated) + :param headers: whether the file contains column names on the first + line + :param check_headers: whether the column names on the first line should be + checked against the keys of field_dict + :param wildcard_match: whether the s3_key should be interpreted as a Unix + wildcard pattern + :param aws_conn_id: source s3 connection + :param verify: Whether or not to verify SSL certificates for S3 connection. + By default SSL certificates are verified. + You can provide the following values: + + - ``False``: do not validate SSL certificates. SSL will still be used + (unless use_ssl is False), but SSL certificates will not be + verified. + - ``path/to/cert/bundle.pem``: A filename of the CA cert bundle to uses. + You can specify this argument if you want to use a different + CA cert bundle than the one used by botocore. + :param hive_cli_conn_id: Reference to the + :ref:`Hive CLI connection id `. + :param input_compressed: Boolean to determine if file decompression is + required to process headers + :param tblproperties: TBLPROPERTIES of the hive table being created + :param select_expression: S3 Select expression + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['s3_key', 'partition', 'hive_table'] + + + + .. py:attribute:: template_ext + :annotation: :Sequence[str] = [] + + + + .. py:attribute:: ui_color + :annotation: = #a0e08c + + + + .. py:method:: execute(context) + + This is the main method to derive when creating an operator. + Context is the same dictionary used as when rendering jinja templates. + + Refer to get_template_context for more context. + + + diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/vertica_to_hive/index.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/vertica_to_hive/index.rst.txt new file mode 100644 index 00000000000..5782d931af2 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/airflow/providers/apache/hive/transfers/vertica_to_hive/index.rst.txt @@ -0,0 +1,90 @@ +:py:mod:`airflow.providers.apache.hive.transfers.vertica_to_hive` +================================================================= + +.. py:module:: airflow.providers.apache.hive.transfers.vertica_to_hive + +.. autoapi-nested-parse:: + + This module contains an operator to move data from Vertica to Hive. + + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. 
autoapisummary:: + + airflow.providers.apache.hive.transfers.vertica_to_hive.VerticaToHiveOperator + + + + +.. py:class:: VerticaToHiveOperator(*, sql, hive_table, create = True, recreate = False, partition = None, delimiter = chr(1), vertica_conn_id = 'vertica_default', hive_cli_conn_id = 'hive_cli_default', **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Moves data from Vertica to Hive. The operator runs + your query against Vertica, stores the file locally + before loading it into a Hive table. If the ``create`` or + ``recreate`` arguments are set to ``True``, + a ``CREATE TABLE`` and ``DROP TABLE`` statements are generated. + Hive data types are inferred from the cursor's metadata. + Note that the table generated in Hive uses ``STORED AS textfile`` + which isn't the most efficient serialization format. If a + large amount of data is loaded and/or if the table gets + queried considerably, you may want to use this operator only to + stage the data into a temporary table before loading it into its + final destination using a ``HiveOperator``. + + :param sql: SQL query to execute against the Vertica database. (templated) + :param hive_table: target Hive table, use dot notation to target a + specific database. (templated) + :param create: whether to create the table if it doesn't exist + :param recreate: whether to drop and recreate the table at every execution + :param partition: target partition as a dict of partition columns + and values. (templated) + :param delimiter: field delimiter in the file + :param vertica_conn_id: source Vertica connection + :param hive_cli_conn_id: Reference to the + :ref:`Hive CLI connection id `. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['sql', 'partition', 'hive_table'] + + + + .. py:attribute:: template_ext + :annotation: :Sequence[str] = ['.sql'] + + + + .. py:attribute:: template_fields_renderers + + + + + .. py:attribute:: ui_color + :annotation: = #b4e0ff + + + + .. py:method:: type_map(vertica_type) + :classmethod: + + Vertica-python datatype.py does not provide the full type mapping access. + Manual hack. Reference: + https://github.com/uber/vertica-python/blob/master/vertica_python/vertica/column.py + + + .. py:method:: execute(context) + + This is the main method to derive when creating an operator. + Context is the same dictionary used as when rendering jinja templates. + + Refer to get_template_context for more context. + + + diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/tests/system/providers/apache/hive/example_twitter_dag/index.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/tests/system/providers/apache/hive/example_twitter_dag/index.rst.txt new file mode 100644 index 00000000000..51446a551c0 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/tests/system/providers/apache/hive/example_twitter_dag/index.rst.txt @@ -0,0 +1,83 @@ +:py:mod:`tests.system.providers.apache.hive.example_twitter_dag` +================================================================ + +.. py:module:: tests.system.providers.apache.hive.example_twitter_dag + +.. autoapi-nested-parse:: + + This is an example dag for managing twitter data. + + + +Module Contents +--------------- + + +Functions +~~~~~~~~~ + +.. 
autoapisummary:: + + tests.system.providers.apache.hive.example_twitter_dag.fetch_tweets + tests.system.providers.apache.hive.example_twitter_dag.clean_tweets + tests.system.providers.apache.hive.example_twitter_dag.analyze_tweets + tests.system.providers.apache.hive.example_twitter_dag.transfer_to_db + + + +Attributes +~~~~~~~~~~ + +.. autoapisummary:: + + tests.system.providers.apache.hive.example_twitter_dag.ENV_ID + tests.system.providers.apache.hive.example_twitter_dag.DAG_ID + tests.system.providers.apache.hive.example_twitter_dag.fetch + tests.system.providers.apache.hive.example_twitter_dag.test_run + + +.. py:data:: ENV_ID + + + + +.. py:data:: DAG_ID + :annotation: = example_twitter_dag + + + +.. py:function:: fetch_tweets() + + This task should call Twitter API and retrieve tweets from yesterday from and to for the four twitter + users (Twitter_A,..,Twitter_D) There should be eight csv output files generated by this task and naming + convention is direction(from or to)_twitterHandle_date.csv + + +.. py:function:: clean_tweets() + + This is a placeholder to clean the eight files. In this step you can get rid of or cherry pick columns + and different parts of the text. + + +.. py:function:: analyze_tweets() + + This is a placeholder to analyze the twitter data. Could simply be a sentiment analysis through algorithms + like bag of words or something more complicated. You can also take a look at Web Services to do such + tasks. + + +.. py:function:: transfer_to_db() + + This is a placeholder to extract summary from Hive data and store it to MySQL. + + +.. py:data:: fetch + + + + +.. py:data:: test_run + + + + diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/tests/system/providers/apache/hive/index.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/tests/system/providers/apache/hive/index.rst.txt new file mode 100644 index 00000000000..43df0aa071b --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/_api/tests/system/providers/apache/hive/index.rst.txt @@ -0,0 +1,15 @@ +:py:mod:`tests.system.providers.apache.hive` +============================================ + +.. py:module:: tests.system.providers.apache.hive + + +Submodules +---------- +.. toctree:: + :titlesonly: + :maxdepth: 1 + + example_twitter_dag/index.rst + + diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/commits.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/commits.rst.txt new file mode 100644 index 00000000000..bf9912540e7 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/commits.rst.txt @@ -0,0 +1,413 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. 
+ + +Package apache-airflow-providers-apache-hive +------------------------------------------------------ + +`Apache Hive `__ + + +This is the detailed commit list of changes for versions of the provider package: ``apache.hive``. +For the high-level changelog, see :doc:`package information including changelog `. + + + +5.0.0 +..... + +Latest change: 2022-12-06 + +================================================================================================= =========== =============================================================== +Commit Committed Subject +================================================================================================= =========== =============================================================== +`5f8481c799 `_ 2022-12-06 ``Move hive_cli_params to hook parameters (#28101)`` +`2d45f9d6c3 `_ 2022-11-27 ``Improve filtering for invalid schemas in Hive hook (#27808)`` +================================================================================================= =========== =============================================================== + +4.1.1 +..... + +Latest change: 2022-11-26 + +================================================================================================= =========== ================================================================ +Commit Committed Subject +================================================================================================= =========== ================================================================ +`25bdbc8e67 `_ 2022-11-26 ``Updated docs for RC3 wave of providers (#27937)`` +`2e20e9f7eb `_ 2022-11-24 ``Prepare for follow-up relase for November providers (#27774)`` +`80c327bd3b `_ 2022-11-24 ``Bump common.sql provider to 1.3.1 (#27888)`` +================================================================================================= =========== ================================================================ + +4.1.0 +..... + +Latest change: 2022-11-15 + +================================================================================================= =========== ========================================================================= +Commit Committed Subject +================================================================================================= =========== ========================================================================= +`12c3c39d1a `_ 2022-11-15 ``pRepare docs for November 2022 wave of Providers (#27613)`` +`150dd927c3 `_ 2022-11-14 ``Filter out invalid schemas in Hive hook (#27647)`` +`9ab1a6a3e7 `_ 2022-10-27 ``Update old style typing (#26872)`` +`78b8ea2f22 `_ 2022-10-24 ``Move min airflow version to 2.3.0 for all providers (#27196)`` +`2a34dc9e84 `_ 2022-10-23 ``Enable string normalization in python formatting - providers (#27205)`` +================================================================================================= =========== ========================================================================= + +4.0.1 +..... 
+ +Latest change: 2022-09-28 + +================================================================================================= =========== ==================================================================================== +Commit Committed Subject +================================================================================================= =========== ==================================================================================== +`f8db64c35c `_ 2022-09-28 ``Update docs for September Provider's release (#26731)`` +`06acf40a43 `_ 2022-09-13 ``Apply PEP-563 (Postponed Evaluation of Annotations) to non-core airflow (#26289)`` +`ca9229b6fe `_ 2022-08-18 ``Add common-sql lower bound for common-sql (#25789)`` +================================================================================================= =========== ==================================================================================== + +4.0.0 +..... + +Latest change: 2022-08-10 + +================================================================================================= =========== =========================================================================== +Commit Committed Subject +================================================================================================= =========== =========================================================================== +`e5ac6c7cfb `_ 2022-08-10 ``Prepare docs for new providers release (August 2022) (#25618)`` +`7e3d2350db `_ 2022-08-04 ``Remove Smart Sensors (#25507)`` +`5d4abbd58c `_ 2022-07-27 ``Deprecate hql parameters and synchronize DBApiHook method APIs (#25299)`` +================================================================================================= =========== =========================================================================== + +3.1.0 +..... + +Latest change: 2022-07-13 + +================================================================================================= =========== ========================================================================================================= +Commit Committed Subject +================================================================================================= =========== ========================================================================================================= +`d2459a241b `_ 2022-07-13 ``Add documentation for July 2022 Provider's release (#25030)`` +`46bbfdade0 `_ 2022-07-07 ``Move all SQL classes to common-sql provider (#24836)`` +`0de31bd73a `_ 2022-06-29 ``Move provider dependencies to inside provider folders (#24672)`` +`cef97fccd5 `_ 2022-06-29 ``fix connection extra parameter 'auth_mechanism' in 'HiveMetastoreHook' and 'HiveServer2Hook' (#24713)`` +`510a6bab45 `_ 2022-06-28 ``Remove 'hook-class-names' from provider.yaml (#24702)`` +================================================================================================= =========== ========================================================================================================= + +3.0.0 +..... 
+ +Latest change: 2022-06-09 + +================================================================================================= =========== ================================================================================== +Commit Committed Subject +================================================================================================= =========== ================================================================================== +`dcdcf3a2b8 `_ 2022-06-09 ``Update release notes for RC2 release of Providers for May 2022 (#24307)`` +`717a7588bc `_ 2022-06-07 ``Update package description to remove double min-airflow specification (#24292)`` +`aeabe994b3 `_ 2022-06-07 ``Prepare docs for May 2022 provider's release (#24231)`` +`b4a5783a2a `_ 2022-06-06 ``chore: Refactoring and Cleaning Apache Providers (#24219)`` +`027b707d21 `_ 2022-06-05 ``Add explanatory note for contributors about updating Changelog (#24229)`` +`100ea9d1fc `_ 2022-06-05 ``AIP-47 - Migrate hive DAGs to new design #22439 (#24204)`` +`71e4deb1b0 `_ 2022-05-16 ``Add typing for airflow/configuration.py (#23716)`` +================================================================================================= =========== ================================================================================== + +2.3.3 +..... + +Latest change: 2022-05-12 + +================================================================================================= =========== ====================================================== +Commit Committed Subject +================================================================================================= =========== ====================================================== +`75c60923e0 `_ 2022-05-12 ``Prepare provider documentation 2022.05.11 (#23631)`` +`2d109401b3 `_ 2022-05-04 ``Bump pre-commit hook versions (#22887)`` +`0c9c1cf94a `_ 2022-04-28 ``Fix HiveToMySqlOperator's wrong docstring (#23316)`` +================================================================================================= =========== ====================================================== + +2.3.2 +..... + +Latest change: 2022-03-22 + +================================================================================================= =========== ============================================================== +Commit Committed Subject +================================================================================================= =========== ============================================================== +`d7dbfb7e26 `_ 2022-03-22 ``Add documentation for bugfix release of Providers (#22383)`` +================================================================================================= =========== ============================================================== + +2.3.1 +..... + +Latest change: 2022-03-14 + +================================================================================================= =========== ==================================================================== +Commit Committed Subject +================================================================================================= =========== ==================================================================== +`16adc035b1 `_ 2022-03-14 ``Add documentation for Classifier release for March 2022 (#22226)`` +================================================================================================= =========== ==================================================================== + +2.3.0 +..... 
+ +Latest change: 2022-03-07 + +================================================================================================= =========== =========================================================================== +Commit Committed Subject +================================================================================================= =========== =========================================================================== +`f5b96315fe `_ 2022-03-07 ``Add documentation for Feb Providers release (#22056)`` +`563ecfa053 `_ 2022-03-01 ``Add Python 3.9 support to Hive (#21893)`` +`f6e0ed0dcc `_ 2022-02-15 ``Add how-to guide for hive operator (#21590)`` +`041babb060 `_ 2022-02-15 ``Fix mypy issues in 'example_twitter_dag' (#21571)`` +`2d6282d6b7 `_ 2022-02-15 ``Remove unnecessary/stale comments (#21572)`` +`06010fa12a `_ 2022-02-11 ``Fix key typo in 'template_fields_renderers' for 'HiveOperator' (#21525)`` +`d927507899 `_ 2022-02-11 ``Set larger limit get_partitions_by_filter in HiveMetastoreHook (#21504)`` +================================================================================================= =========== =========================================================================== + +2.2.0 +..... + +Latest change: 2022-02-08 + +================================================================================================= =========== ================================================================================= +Commit Committed Subject +================================================================================================= =========== ================================================================================= +`d94fa37830 `_ 2022-02-08 ``Fixed changelog for January 2022 (delayed) provider's release (#21439)`` +`8f81b9a01c `_ 2022-02-08 ``Add conditional 'template_fields_renderers' check for new SQL lexers (#21403)`` +`6c3a67d4fc `_ 2022-02-05 ``Add documentation for January 2021 providers release (#21257)`` +`39e395f981 `_ 2022-02-04 ``Add more SQL template fields renderers (#21237)`` +`602abe8394 `_ 2022-01-20 ``Remove ':type' lines now sphinx-autoapi supports typehints (#20951)`` +`5569b868a9 `_ 2022-01-09 ``Fix MyPy Errors for providers: Tableau, CNCF, Apache (#20654)`` +`f77417eb0d `_ 2021-12-31 ``Fix K8S changelog to be PyPI-compatible (#20614)`` +`97496ba2b4 `_ 2021-12-31 ``Update documentation for provider December 2021 release (#20523)`` +`83f8e178ba `_ 2021-12-31 ``Even more typing in operators (template_fields/ext) (#20608)`` +`d56e7b56bb `_ 2021-12-30 ``Fix template_fields type to have MyPy friendly Sequence type (#20571)`` +`a0821235fb `_ 2021-12-30 ``Use typed Context EVERYWHERE (#20565)`` +`485ff6cc64 `_ 2021-12-29 ``Fix MyPy errors in Apache Providers (#20422)`` +`f760823b4a `_ 2021-12-11 ``Add some type hints for Hive providers (#20210)`` +================================================================================================= =========== ================================================================================= + +2.1.0 +..... 
+ +Latest change: 2021-11-30 + +================================================================================================= =========== ============================================================================== +Commit Committed Subject +================================================================================================= =========== ============================================================================== +`853576d901 `_ 2021-11-30 ``Update documentation for November 2021 provider's release (#19882)`` +`16b3ab5860 `_ 2021-11-29 ``Improve various docstrings in Apache Hive providers (#19866)`` +`ac752e777b `_ 2021-11-24 ``hive provider: restore HA support for metastore (#19777)`` +`f50f677514 `_ 2021-11-08 ``Fix typos in Hive transfer operator docstrings (#19474)`` +`ae044884d1 `_ 2021-11-03 ``Cleanup of start_date and default arg use for Apache example DAGs (#18657)`` +================================================================================================= =========== ============================================================================== + +2.0.3 +..... + +Latest change: 2021-10-29 + +================================================================================================= =========== ========================================================================================== +Commit Committed Subject +================================================================================================= =========== ========================================================================================== +`d9567eb106 `_ 2021-10-29 ``Prepare documentation for October Provider's release (#19321)`` +`86a2a19ad2 `_ 2021-10-17 ``More f-strings (#18855)`` +`80b5e65a6a `_ 2021-10-17 ``Remove unnecessary string concatenations in AirflowException in s3_to_hive.py (#19026)`` +`232f7d1587 `_ 2021-10-10 ``fix get_connections deprecation warn in hivemetastore hook (#18854)`` +`840ea3efb9 `_ 2021-09-30 ``Update documentation for September providers release (#18613)`` +`a458fcc573 `_ 2021-09-27 ``Updating miscellaneous provider DAGs to use TaskFlow API where applicable (#18278)`` +================================================================================================= =========== ========================================================================================== + +2.0.2 +..... + +Latest change: 2021-08-30 + +================================================================================================= =========== ====================================================================================== +Commit Committed Subject +================================================================================================= =========== ====================================================================================== +`0a68588479 `_ 2021-08-30 ``Add August 2021 Provider's documentation (#17890)`` +`da99c3fa6c `_ 2021-08-30 ``HiveHook fix get_pandas_df() failure when it tries to read an empty table (#17777)`` +`be75dcd39c `_ 2021-08-23 ``Update description about the new ''connection-types'' provider meta-data`` +`76ed2a49c6 `_ 2021-08-19 ``Import Hooks lazily individually in providers manager (#17682)`` +================================================================================================= =========== ====================================================================================== + +2.0.1 +..... 
+ +Latest change: 2021-07-26 + +================================================================================================= =========== =================================================================== +Commit Committed Subject +================================================================================================= =========== =================================================================== +`87f408b1e7 `_ 2021-07-26 ``Prepares docs for Rc2 release of July providers (#17116)`` +`91f4d80ff0 `_ 2021-07-23 ``Updating Apache example DAGs to use XComArgs (#16869)`` +`d02ded65ea `_ 2021-07-15 ``Fixed wrongly escaped characters in amazon's changelog (#17020)`` +`b916b75079 `_ 2021-07-15 ``Prepare documentation for July release of providers. (#17015)`` +`866a601b76 `_ 2021-06-28 ``Removes pylint from our toolchain (#16682)`` +`ce44b62890 `_ 2021-06-25 ``Add Python 3.9 support (#15515)`` +================================================================================================= =========== =================================================================== + +2.0.0 +..... + +Latest change: 2021-06-18 + +================================================================================================= =========== ================================================================= +Commit Committed Subject +================================================================================================= =========== ================================================================= +`bbc627a3da `_ 2021-06-18 ``Prepares documentation for rc2 release of Providers (#16501)`` +`cbf8001d76 `_ 2021-06-16 ``Synchronizes updated changelog after buggfix release (#16464)`` +`1fba5402bb `_ 2021-06-15 ``More documentation update for June providers release (#16405)`` +`9c94b72d44 `_ 2021-06-07 ``Updated documentation for June 2021 provider release (#16294)`` +`476d0f6e3d `_ 2021-05-22 ``Bump pyupgrade v2.13.0 to v2.18.1 (#15991)`` +`736a62f824 `_ 2021-05-08 ``Remove duplicate key from Python dictionary (#15735)`` +`37681bca00 `_ 2021-05-07 ``Auto-apply apply_default decorator (#15667)`` +`9953a047c4 `_ 2021-05-07 ``Add Connection Documentation for the Hive Provider (#15704)`` +`807ad32ce5 `_ 2021-05-01 ``Prepares provider release after PIP 21 compatibility (#15576)`` +`4b031d39e1 `_ 2021-04-27 ``Make Airflow code Pylint 2.8 compatible (#15534)`` +`e229f3541d `_ 2021-04-27 ``Use Pip 21.* to install airflow officially (#15513)`` +================================================================================================= =========== ================================================================= + +1.0.3 +..... 
+ +Latest change: 2021-04-06 + +================================================================================================= =========== ============================================================================= +Commit Committed Subject +================================================================================================= =========== ============================================================================= +`042be2e4e0 `_ 2021-04-06 ``Updated documentation for provider packages before April release (#15236)`` +`53dafa593f `_ 2021-04-04 ``Fix mistake and typos in doc/docstrings (#15180)`` +`85e0e76074 `_ 2021-03-29 ``Pin flynt to fix failing PRs (#15076)`` +`68e4c4dcb0 `_ 2021-03-20 ``Remove Backport Providers (#14886)`` +`6dc24c95e3 `_ 2021-03-07 ``Fix grammar and remove duplicate words (#14647)`` +`b0d6069d25 `_ 2021-03-05 ``Fix broken static check on Master (#14633)`` +`d9e4454c66 `_ 2021-03-01 ``Resolve issue related to HiveCliHook kill (#14542)`` +================================================================================================= =========== ============================================================================= + +1.0.2 +..... + +Latest change: 2021-02-27 + +================================================================================================= =========== ======================================================================= +Commit Committed Subject +================================================================================================= =========== ======================================================================= +`589d6dec92 `_ 2021-02-27 ``Prepare to release the next wave of providers: (#14487)`` +`10343ec29f `_ 2021-02-05 ``Corrections in docs and tools after releasing provider RCs (#14082)`` +================================================================================================= =========== ======================================================================= + +1.0.1 +..... + +Latest change: 2021-02-04 + +================================================================================================= =========== =========================================================================== +Commit Committed Subject +================================================================================================= =========== =========================================================================== +`88bdcfa0df `_ 2021-02-04 ``Prepare to release a new wave of providers. (#14013)`` +`ac2f72c98d `_ 2021-02-01 ``Implement provider versioning tools (#13767)`` +`a9ac2b040b `_ 2021-01-23 ``Switch to f-strings using flynt. (#13732)`` +`5f81fc73c8 `_ 2021-01-03 ``Fix: Remove password if in LDAP or CUSTOM mode HiveServer2Hook (#11767)`` +`4f494d4d92 `_ 2021-01-03 ``Fix few typos (#13450)`` +`295d66f914 `_ 2020-12-30 ``Fix Grammar in PIP warning (#13380)`` +`6cf76d7ac0 `_ 2020-12-18 ``Fix typo in pip upgrade command :( (#13148)`` +`5090fb0c89 `_ 2020-12-15 ``Add script to generate integrations.json (#13073)`` +================================================================================================= =========== =========================================================================== + +1.0.0 +..... 
+ +Latest change: 2020-12-09 + +================================================================================================= =========== ====================================================================================================================================================================== +Commit Committed Subject +================================================================================================= =========== ====================================================================================================================================================================== +`32971a1a2d `_ 2020-12-09 ``Updates providers versions to 1.0.0 (#12955)`` +`a075b6df99 `_ 2020-12-09 ``Rename remaining Sensors to match AIP-21 (#12927)`` +`b40dffa085 `_ 2020-12-08 ``Rename remaing modules to match AIP-21 (#12917)`` +`9b39f24780 `_ 2020-12-08 ``Add support for dynamic connection form fields per provider (#12558)`` +`2037303eef `_ 2020-11-29 ``Adds support for Connection/Hook discovery from providers (#12466)`` +`c34ef853c8 `_ 2020-11-20 ``Separate out documentation building per provider (#12444)`` +`0080354502 `_ 2020-11-18 ``Update provider READMEs for 1.0.0b2 batch release (#12449)`` +`ae7cb4a1e2 `_ 2020-11-17 ``Update wrong commit hash in backport provider changes (#12390)`` +`6889a333cf `_ 2020-11-15 ``Improvements for operators and hooks ref docs (#12366)`` +`7825e8f590 `_ 2020-11-13 ``Docs installation improvements (#12304)`` +`250436d962 `_ 2020-11-10 ``Fix spelling in Python files (#12230)`` +`502ba309ea `_ 2020-11-10 ``Enable Markdownlint rule - MD022/blanks-around-headings (#12225)`` +`85a18e13d9 `_ 2020-11-09 ``Point at pypi project pages for cross-dependency of provider packages (#12212)`` +`59eb5de78c `_ 2020-11-09 ``Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)`` +`b2a28d1590 `_ 2020-11-09 ``Moves provider packages scripts to dev (#12082)`` +`41bf172c1d `_ 2020-11-04 ``Simplify string expressions (#12093)`` +`4e8f9cc8d0 `_ 2020-11-03 ``Enable Black - Python Auto Formmatter (#9550)`` +`8c42cf1b00 `_ 2020-11-03 ``Use PyUpgrade to use Python 3.6 features (#11447)`` +`5a439e84eb `_ 2020-10-26 ``Prepare providers release 0.0.2a1 (#11855)`` +`872b1566a1 `_ 2020-10-25 ``Generated backport providers readmes/setup for 2020.10.29 (#11826)`` +`349b0811c3 `_ 2020-10-20 ``Add D200 pydocstyle check (#11688)`` +`16e7129719 `_ 2020-10-13 ``Added support for provider packages for Airflow 2.0 (#11487)`` +`0a0e1af800 `_ 2020-10-03 ``Fix Broken Markdown links in Providers README TOC (#11249)`` +`ca4238eb4d `_ 2020-10-02 ``Fixed month in backport packages to October (#11242)`` +`5220e4c384 `_ 2020-10-02 ``Prepare Backport release 2020.09.07 (#11238)`` +`e3f96ce7a8 `_ 2020-09-24 ``Fix incorrect Usage of Optional[bool] (#11138)`` +`f3e87c5030 `_ 2020-09-22 ``Add D202 pydocstyle check (#11032)`` +`9549274d11 `_ 2020-09-09 ``Upgrade black to 20.8b1 (#10818)`` +`ac943c9e18 `_ 2020-09-08 ``[AIRFLOW-3964][AIP-17] Consolidate and de-dup sensor tasks using Smart Sensor (#5499)`` +`fdd9b6f65b `_ 2020-08-25 ``Enable Black on Providers Packages (#10543)`` +`d760265452 `_ 2020-08-25 ``PyDocStyle: No whitespaces allowed surrounding docstring text (#10533)`` +`3696c34c28 `_ 2020-08-24 ``Fix typo in the word "release" (#10528)`` +`ee7ca128a1 `_ 2020-08-22 ``Fix broken Markdown refernces in Providers README (#10483)`` +`27339a5a0f `_ 2020-08-22 ``Remove mentions of Airflow Gitter (#10460)`` +`7c206a82a6 `_ 2020-08-22 ``Replace assigment with Augmented assignment 
(#10468)`` +`8f8db8959e `_ 2020-08-12 ``DbApiHook: Support kwargs in get_pandas_df (#9730)`` +`b43f90abf4 `_ 2020-08-09 ``Fix various typos in the repo (#10263)`` +`3b3287d7ac `_ 2020-08-05 ``Enforce keyword only arguments on apache operators (#10170)`` +`7d24b088cd `_ 2020-07-25 ``Stop using start_date in default_args in example_dags (2) (#9985)`` +`33f0cd2657 `_ 2020-07-22 ``apply_default keeps the function signature for mypy (#9784)`` +`c2db0dfeb1 `_ 2020-07-22 ``More strict rules in mypy (#9705) (#9906)`` +`5013fda8f0 `_ 2020-07-20 ``Add drop_partition functionality for HiveMetastoreHook (#9472)`` +`4d74ac2111 `_ 2020-07-19 ``Increase typing for Apache and http provider package (#9729)`` +`44d4ae809c `_ 2020-07-06 ``Upgrade to latest pre-commit checks (#9686)`` +`e13a14c873 `_ 2020-06-21 ``Enable & Fix Whitespace related PyDocStyle Checks (#9458)`` +`d0e7db4024 `_ 2020-06-19 ``Fixed release number for fresh release (#9408)`` +`12af6a0800 `_ 2020-06-19 ``Final cleanup for 2020.6.23rc1 release preparation (#9404)`` +`c7e5bce57f `_ 2020-06-19 ``Prepare backport release candidate for 2020.6.23rc1 (#9370)`` +`f6bd817a3a `_ 2020-06-16 ``Introduce 'transfers' packages (#9320)`` +`c78e2a5fea `_ 2020-06-16 ``Make hive macros py3 compatible (#8598)`` +`6350fd6ebb `_ 2020-06-08 ``Don't use the term "whitelist" - language matters (#9174)`` +`10796cb7ce `_ 2020-06-03 ``Remove Hive/Hadoop/Java dependency from unit tests (#9029)`` +`0b0e4f7a4c `_ 2020-05-26 ``Preparing for RC3 relase of backports (#9026)`` +`00642a46d0 `_ 2020-05-26 ``Fixed name of 20 remaining wrongly named operators. (#8994)`` +`cdb3f25456 `_ 2020-05-26 ``All classes in backport providers are now importable in Airflow 1.10 (#8991)`` +`375d1ca229 `_ 2020-05-19 ``Release candidate 2 for backport packages 2020.05.20 (#8898)`` +`12c5e5d8ae `_ 2020-05-17 ``Prepare release candidate for backport packages (#8891)`` +`f3521fb0e3 `_ 2020-05-16 ``Regenerate readme files for backport package release (#8886)`` +`92585ca4cb `_ 2020-05-15 ``Added automated release notes generation for backport operators (#8807)`` +`93ea058802 `_ 2020-04-21 ``[AIRFLOW-7059] pass hive_conf to get_pandas_df in HiveServer2Hook (#8380)`` +`87969a350d `_ 2020-04-09 ``[AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170)`` +`cb0bf4a142 `_ 2020-03-30 ``Remove sql like function in base_hook (#7901)`` +`4bde99f132 `_ 2020-03-23 ``Make airflow/providers pylint compatible (#7802)`` +`7e6372a681 `_ 2020-03-23 ``Add call to Super call in apache providers (#7820)`` +`3320e432a1 `_ 2020-02-24 ``[AIRFLOW-6817] Lazy-load 'airflow.DAG' to keep user-facing API untouched (#7517)`` +`4d03e33c11 `_ 2020-02-22 ``[AIRFLOW-6817] remove imports from 'airflow/__init__.py', replaced implicit imports with explicit imports, added entry to 'UPDATING.MD' - squashed/rebased (#7456)`` +`f3ad5cf618 `_ 2020-02-03 ``[AIRFLOW-4681] Make sensors module pylint compatible (#7309)`` +`97a429f9d0 `_ 2020-02-02 ``[AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)`` +`83c037873f `_ 2020-01-30 ``[AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)`` +`057f3ae3a4 `_ 2020-01-29 ``[AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286)`` +`059eda05f8 `_ 2020-01-21 ``[AIRFLOW-6610] Move software classes to providers package (#7231)`` +`0481b9a957 `_ 2020-01-12 ``[AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142)`` +================================================================================================= 
=========== ====================================================================================================================================================================== diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/connections/hive_cli.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/connections/hive_cli.rst.txt new file mode 100644 index 00000000000..cd9e61ccaf1 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/connections/hive_cli.rst.txt @@ -0,0 +1,91 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + + +.. _howto/connection:hive_cli: + +Hive CLI Connection +=================== + +The Hive CLI connection type enables the Hive CLI integrations. + +Authenticating to Hive CLI +-------------------------- + +There are two ways to connect to Hive using Airflow. + +1. Use the `Hive Beeline + `_, + i.e. build a JDBC connection string with host, port, and schema. Optionally you can connect with a proxy user, and specify a login and password. + +2. Use the `Hive CLI + `_, + i.e. specify Hive CLI parameters in the extras field. + +Only one authentication method can be used at a time. If you need to manage multiple credentials or keys then you should +configure multiple connections. + +Default Connection IDs +---------------------- + +All hooks and operators related to the Hive CLI use ``hive_cli_default`` by default. + +Configuring the Connection +-------------------------- + +Login (optional) + Specify your username for a proxy user or for the Beeline CLI. + +Password (optional) + Specify your Beeline CLI password. + +Host (optional) + Specify your JDBC Hive host that is used for Hive Beeline. + +Port (optional) + Specify your JDBC Hive port that is used for Hive Beeline. + +Schema (optional) + Specify your JDBC Hive database that you want to connect to with Beeline + or specify a schema for an HQL statement to run with the Hive CLI. + +Extra (optional) + Specify the extra parameters (as a JSON dictionary) that can be used in the Hive CLI connection. + The following parameters are all optional: + + * ``use_beeline`` + Specify as ``True`` if using the Beeline CLI. Default is ``False``. + * ``auth`` + Specify the auth type for use with the Hive Beeline CLI. + * ``proxy_user`` + Specify a proxy user as an ``owner`` or ``login``, or keep blank if using a + custom proxy user. + * ``principal`` + Specify the JDBC Hive principal to be used with Hive Beeline. + + +When specifying the connection as an environment variable, you should specify +it using URI syntax. + +Note that all components of the URI should be URL-encoded. + +For example: + +.. 
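code-block:: bash + + export AIRFLOW_CONN_HIVE_CLI_DEFAULT='hive-cli://beeline-username:beeline-password@jdbc-hive-host:80/hive-database?hive_cli_params=params&use_beeline=True&auth=noSasl&principal=hive%2F_HOST%40EXAMPLE.COM'
+
+For illustration, the snippet below is a minimal sketch of a task that uses this connection through the
+``HiveOperator``; the DAG id, schedule, and HQL statement are placeholders, and the task assumes the default
+``hive_cli_default`` connection ID configured as above.
+
+.. code-block:: python
+
+    from datetime import datetime
+
+    from airflow import DAG
+    from airflow.providers.apache.hive.operators.hive import HiveOperator
+
+    with DAG(
+        dag_id="example_hive_cli_connection",  # placeholder DAG id
+        start_date=datetime(2022, 1, 1),
+        schedule_interval=None,
+        catchup=False,
+    ) as dag:
+        # Run a simple HQL statement through the Hive CLI / Beeline connection.
+        show_tables = HiveOperator(
+            task_id="show_tables",
+            hql="SHOW TABLES;",
+            hive_cli_conn_id="hive_cli_default",
+        )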
diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/connections/hive_metastore.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/connections/hive_metastore.rst.txt new file mode 100644 index 00000000000..45316d4651c --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/connections/hive_metastore.rst.txt @@ -0,0 +1,69 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + + +.. _howto/connection:hive_metastore: + +Hive Metastore Connection +========================= + +The Hive Metastore connection type enables the Hive Metastore integrations. + +Authenticating to Hive Metastore +-------------------------------- + +Authentication with the Hive Metastore is done through the `Apache Thrift Hive Server + `_ +and the `hmsclient + `_ library. + + +Default Connection IDs +---------------------- + +All hooks and operators related to the Hive Metastore use ``metastore_default`` by default. + +Configuring the Connection +-------------------------- + +Host (optional) + The host of your Hive Metastore node. It is possible to specify multiple hosts as a comma-separated list. + +Port (optional) + Your Hive Metastore port number. + +Extra (optional) + Specify the extra parameters (as a JSON dictionary) that can be used in the Hive Metastore connection. + The following parameters are all optional: + + * ``auth_mechanism`` + Specify the mechanism for authentication. Default is ``NOSASL``. + * ``kerberos_service_name`` + Specify the Kerberos service name. Default is ``hive``. + + +When specifying the connection as an environment variable, you should specify +it using URI syntax. + +Note that all components of the URI should be URL-encoded. + +For example: + +.. code-block:: bash + + export AIRFLOW_CONN_METASTORE_DEFAULT='hive-metastore://hive-metastore-node:80?auth_mechanism=NOSASL' diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/connections/hiveserver2.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/connections/hiveserver2.rst.txt new file mode 100644 index 00000000000..1d16177d97e --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/connections/hiveserver2.rst.txt @@ -0,0 +1,78 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. 
The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + + +.. _howto/connection:hiveserver2: + +Hive Server2 Connection +========================= + +The Hive Server2 connection type enables the Hive Server2 integrations. + +Authenticating to Hive Server2 +------------------------------ + +Connect to Hive Server2 using `PyHive + `_. +Choose between authenticating via LDAP, Kerberos, or custom. + +Default Connection IDs +---------------------- + +All hooks and operators related to Hive Server2 use ``hiveserver2_default`` by default. + +Configuring the Connection +-------------------------- + +Login (optional) + Specify your Hive Server2 username. + +Password (optional) + Specify your Hive password for use with LDAP and custom authentication. + +Host (optional) + Specify the host node for Hive Server2. + +Port (optional) + Specify your Hive Server2 port number. + +Schema (optional) + Specify the name for the database you would like to connect to with Hive Server2. + +Extra (optional) + Specify the extra parameters (as a JSON dictionary) that can be used in the Hive Server2 connection. + The following parameters are all optional: + + * ``auth_mechanism`` + Specify the authentication method for PyHive. Choose between ``PLAIN``, ``LDAP``, ``KERBEROS``, or ``Custom``. Default is ``PLAIN``. + * ``kerberos_service_name`` + If authenticating with Kerberos, specify the Kerberos service name. Default is ``hive``. + * ``run_set_variable_statements`` + Specify whether you want to run set variable statements. Default is ``True``. + + +When specifying the connection as an environment variable, you should specify +it using URI syntax. + +Note that all components of the URI should be URL-encoded. + +For example: + +.. code-block:: bash + + export AIRFLOW_CONN_HIVESERVER2_DEFAULT='hiveserver2://username:password@hiveserver2-node:80/database?auth_mechanism=LDAP' diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/connections/index.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/connections/index.rst.txt new file mode 100644 index 00000000000..2b8afb41df9 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/connections/index.rst.txt @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. 
+ +Connection Types +---------------- + +.. toctree:: + :maxdepth: 1 + :glob: + + * diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/index.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/index.rst.txt new file mode 100644 index 00000000000..be8089d9b23 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/index.rst.txt @@ -0,0 +1,132 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-apache-hive`` +======================================== + + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: Guides + + Connection types + Operators + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/apache/hive/index> + +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/apache/hive/index> + +.. toctree:: + :maxdepth: 1 + :caption: Resources + + Example DAGs + PyPI Repository + Installing from sources + +.. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! + + +.. toctree:: + :maxdepth: 1 + :caption: Commits + + Detailed list of commits + + +Package apache-airflow-providers-apache-hive +------------------------------------------------------ + +`Apache Hive `__ + + +Release: 5.0.0 + +Provider package +---------------- + +This is a provider package for the ``apache.hive`` provider. All classes for this provider package +are in the ``airflow.providers.apache.hive`` Python package. + +Installation +------------ + +You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below +for the minimum Airflow version supported) via +``pip install apache-airflow-providers-apache-hive`` + +Requirements +------------ + +======================================= ================================== +PIP package Version required +======================================= ================================== +``apache-airflow`` ``>=2.3.0`` +``apache-airflow-providers-common-sql`` ``>=1.3.1`` +``hmsclient`` ``>=0.1.0`` +``pandas`` ``>=0.17.1`` +``pyhive[hive]`` ``>=0.6.0`` +``sasl`` ``>=0.3.1; python_version>="3.9"`` +``thrift`` ``>=0.9.2`` +======================================= ================================== + +Cross provider package dependencies +----------------------------------- + +These are dependencies that might be needed in order to use all the features of the package. +You need to install the specified provider packages in order to use them. + +You can install such cross-provider dependencies when installing from PyPI. For example: + +.. 
code-block:: bash + + pip install apache-airflow-providers-apache-hive[amazon] + + +====================================================================================================================== =================== +Dependent package Extra +====================================================================================================================== =================== +`apache-airflow-providers-amazon `_ ``amazon`` +`apache-airflow-providers-common-sql `_ ``common.sql`` +`apache-airflow-providers-microsoft-mssql `_ ``microsoft.mssql`` +`apache-airflow-providers-mysql `_ ``mysql`` +`apache-airflow-providers-presto `_ ``presto`` +`apache-airflow-providers-samba `_ ``samba`` +`apache-airflow-providers-vertica `_ ``vertica`` +====================================================================================================================== =================== + +Downloading official packages +----------------------------- + +You can download officially released packages and verify their checksums and signatures from the +`Official Apache Download site `_ + +* `The apache-airflow-providers-apache-hive 5.0.0 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-apache-hive 5.0.0 wheel package `_ (`asc `__, `sha512 `__) + +.. include:: ../../airflow/providers/apache/hive/CHANGELOG.rst diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/installing-providers-from-sources.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/installing-providers-from-sources.rst.txt new file mode 100644 index 00000000000..1c90205d15b --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/installing-providers-from-sources.rst.txt @@ -0,0 +1,18 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +.. include:: ../installing-providers-from-sources.rst diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/operators.rst.txt b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/operators.rst.txt new file mode 100644 index 00000000000..7a92cba9f2d --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_sources/operators.rst.txt @@ -0,0 +1,40 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. 
Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Apache Hive Operators +===================== + +The Apache Hive data warehouse software facilitates reading, writing, +and managing large datasets residing in distributed storage using SQL. +Structure can be projected onto data already in storage. + +HiveOperator +------------ + +This operator executes hql code or hive script in a specific Hive database. + +.. exampleinclude:: /../../tests/system/providers/apache/hive/example_twitter_dag.py + :language: python + :dedent: 4 + :start-after: [START create_hive] + :end-before: [END create_hive] + + +Reference +^^^^^^^^^ + +For more information check `Apache Hive documentation `__. diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/_gen/css/main-custom.min.css b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/_gen/css/main-custom.min.css new file mode 100644 index 00000000000..f3ff099c333 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/_gen/css/main-custom.min.css @@ -0,0 +1 @@ +@charset "UTF-8";@import "https://fonts.googleapis.com/css?family=Rubik:500&display=swap";@import "https://fonts.googleapis.com/css?family=Roboto:400,400i,500,700&display=swap";@import "https://fonts.googleapis.com/css?family=Roboto+Mono:400,700&display=swap";.header__large--cerulean-blue{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#017cee}.header__large--shamrock{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#00ad46}.header__large--bright-sky-blue{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#0cb6ff}.header__large--melon{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#ff7557}.header__large--vermillion{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#e43921}.header__large--aqua{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#11e1ee}.header__large--shamrock-green{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#04d659}.header__large--aqua-blue{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#00c7d4}.header__large--white{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#fff}.header__large--brownish-grey{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#707070}.header__large--very-light-pink{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#cbcbcb}.header__large--slate-grey{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#636365}.header__large--greyish-brown,#header-canvas 
.text-area--header{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#51504f}.header__medium--cerulean-blue{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#017cee}.header__medium--shamrock{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#00ad46}.header__medium--bright-sky-blue{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#0cb6ff}.header__medium--melon{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#ff7557}.header__medium--vermillion{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#e43921}.header__medium--aqua{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#11e1ee}.header__medium--shamrock-green{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#04d659}.header__medium--aqua-blue{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#00c7d4}.header__medium--white{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#fff}.header__medium--brownish-grey{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#707070}.header__medium--very-light-pink{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#cbcbcb}.header__medium--slate-grey{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#636365}.header__medium--greyish-brown,.page-header{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#51504f}.header__small--cerulean-blue{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#017cee}.header__small--shamrock{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#00ad46}.header__small--bright-sky-blue{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#0cb6ff}.header__small--melon{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#ff7557}.header__small--vermillion{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#e43921}.header__small--aqua{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#11e1ee}.header__small--shamrock-green{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#04d659}.header__small--aqua-blue{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#00c7d4}.header__small--white{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#fff}.header__small--brownish-grey{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#707070}.header__small--very-light-pink{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#cbcbcb}.header__small--slate-grey{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#636365}.header__small--greyish-brown,.community--committers-header,.community--header-join,.blogpost-content__metadata--title{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#51504f}.header__xsmall--cerulean-blue{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#017cee}.header__xsmall--shamrock{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#00ad46}.header__xsmall--brig
ht-sky-blue{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#0cb6ff}.header__xsmall--melon{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#ff7557}.header__xsmall--vermillion{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#e43921}.header__xsmall--aqua{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#11e1ee}.header__xsmall--shamrock-green{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#04d659}.header__xsmall--aqua-blue{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#00c7d4}.header__xsmall--white{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#fff}.header__xsmall--brownish-grey{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#707070}.header__xsmall--very-light-pink{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#cbcbcb}.header__xsmall--slate-grey{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#636365}.header__xsmall--greyish-brown,.text-with-icon-item--header,.feature-item--header{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#51504f}.subtitle__large--cerulean-blue,.box-event__meetup--location{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#017cee}.subtitle__large--shamrock{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#00ad46}.subtitle__large--bright-sky-blue{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#0cb6ff}.subtitle__large--melon{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#ff7557}.subtitle__large--vermillion{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#e43921}.subtitle__large--aqua{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#11e1ee}.subtitle__large--shamrock-green{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#04d659}.subtitle__large--aqua-blue{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#00c7d4}.subtitle__large--white{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#fff}.subtitle__large--brownish-grey,#header-canvas .text-area--subheader,.blogpost-content__metadata--description,.page-subtitle,.quote--text{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#707070}.subtitle__large--very-light-pink{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#cbcbcb}.subtitle__large--slate-grey{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#636365}.subtitle__large--greyish-brown,.markdown-content h1,.markdown-content h2,.markdown-content h3,.markdown-content h4,.markdown-content h5,.box-event__blogpost--header{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#51504f}.subtitle__medium--cerulean-blue,ol.counter-blue 
li::before{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#017cee}.subtitle__medium--shamrock{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#00ad46}.subtitle__medium--bright-sky-blue{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#0cb6ff}.subtitle__medium--melon{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#ff7557}.subtitle__medium--vermillion{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#e43921}.subtitle__medium--aqua{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#11e1ee}.subtitle__medium--shamrock-green{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#04d659}.subtitle__medium--aqua-blue{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#00c7d4}.subtitle__medium--white{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#fff}.subtitle__medium--brownish-grey,.box-event__integration--name{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#707070}.subtitle__medium--very-light-pink{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#cbcbcb}.subtitle__medium--slate-grey{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#636365}.subtitle__medium--greyish-brown,.roadmap .td-sidebar nav>ul>li>a{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#51504f}.bodytext__medium--cerulean-blue,.blogpost-content__metadata--author,.new-entry--link,.tag,.box-event__meetup--next-meetup,.box-event__blogpost--author{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#017cee}.bodytext__medium--shamrock{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#00ad46}.bodytext__medium--bright-sky-blue{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#0cb6ff}.bodytext__medium--melon{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#ff7557}.bodytext__medium--vermillion{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#e43921}.bodytext__medium--aqua{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#11e1ee}.bodytext__medium--shamrock-green{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#04d659}.bodytext__medium--aqua-blue{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#00c7d4}.bodytext__medium--white,footer .footer-section span{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#fff}.bodytext__medium--brownish-grey,.sidebar__version-selector a,.roadmap .td-sidebar li>a,.roadmap .breadcrumb-item a,.install--description,.blogpost-content__metadata--date,.video-list__item .video-list__item--title,.text-with-icon-item--text,.feature-item--text,.markdown-content p,.markdown-content span,.box-event__meetup--members,.box-event__case-study--quote,.box-event__blogpost--date,.box-event__blogpost--description,ol.counter-blue li,ul.ticks-blue 
li{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#707070}.bodytext__medium--very-light-pink{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#cbcbcb}.bodytext__medium--slate-grey{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#636365}.bodytext__medium--greyish-brown,.navbar__text-link,.install__accordions-content--header,.list-link,.quote--author,.box-event__committer--nick{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#51504f}.bodytext__mobile--cerulean-blue{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#017cee}.bodytext__mobile--shamrock{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#00ad46}.bodytext__mobile--bright-sky-blue{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#0cb6ff}.bodytext__mobile--melon{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#ff7557}.bodytext__mobile--vermillion{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#e43921}.bodytext__mobile--aqua{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#11e1ee}.bodytext__mobile--shamrock-green{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#04d659}.bodytext__mobile--aqua-blue{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#00c7d4}.bodytext__mobile--white{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#fff}.bodytext__mobile--brownish-grey{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#707070}.bodytext__mobile--very-light-pink{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#cbcbcb}.bodytext__mobile--slate-grey{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#636365}.bodytext__mobile--greyish-brown{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#51504f}.bodytext__small--cerulean-blue{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#017cee}.bodytext__small--shamrock{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#00ad46}.bodytext__small--bright-sky-blue{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#0cb6ff}.bodytext__small--melon{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#ff7557}.bodytext__small--vermillion{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#e43921}.bodytext__small--aqua{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#11e1ee}.bodytext__small--shamrock-green{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#04d659}.bodytext__small--aqua-blue{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#00c7d4}.bodytext__small--white{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#fff}.bodytext__small--brownish-grey{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#707070}.bodytext__small--very-light-pink{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#cbcbcb}.bodytext__small--slate-grey{font-family:roboto,sans-serif;f
ont-weight:400;font-size:12px;line-height:1.33;color:#636365}.bodytext__small--greyish-brown{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#51504f}.bodytext__xsmall--cerulean-blue{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#017cee}.bodytext__xsmall--shamrock{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#00ad46}.bodytext__xsmall--bright-sky-blue{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#0cb6ff}.bodytext__xsmall--melon{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#ff7557}.bodytext__xsmall--vermillion{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#e43921}.bodytext__xsmall--aqua{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#11e1ee}.bodytext__xsmall--shamrock-green{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#04d659}.bodytext__xsmall--aqua-blue{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#00c7d4}.bodytext__xsmall--white{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#fff}.bodytext__xsmall--brownish-grey{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#707070}.bodytext__xsmall--very-light-pink{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#cbcbcb}.bodytext__xsmall--slate-grey{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#636365}.bodytext__xsmall--greyish-brown{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#51504f}.monotext--cerulean-blue{font-family:roboto mono,monospace;color:#017cee;font-size:14px;line-height:1.71}.monotext--shamrock{font-family:roboto mono,monospace;color:#00ad46;font-size:14px;line-height:1.71}.monotext--bright-sky-blue{font-family:roboto mono,monospace;color:#0cb6ff;font-size:14px;line-height:1.71}.monotext--melon{font-family:roboto mono,monospace;color:#ff7557;font-size:14px;line-height:1.71}.monotext--vermillion{font-family:roboto mono,monospace;color:#e43921;font-size:14px;line-height:1.71}.monotext--aqua{font-family:roboto mono,monospace;color:#11e1ee;font-size:14px;line-height:1.71}.monotext--shamrock-green{font-family:roboto mono,monospace;color:#04d659;font-size:14px;line-height:1.71}.monotext--aqua-blue{font-family:roboto mono,monospace;color:#00c7d4;font-size:14px;line-height:1.71}.monotext--white{font-family:roboto mono,monospace;color:#fff;font-size:14px;line-height:1.71}.monotext--brownish-grey,pre span,.markdown-content pre span{font-family:roboto mono,monospace;color:#707070;font-size:14px;line-height:1.71}.monotext--very-light-pink{font-family:roboto mono,monospace;color:#cbcbcb;font-size:14px;line-height:1.71}.monotext--slate-grey{font-family:roboto mono,monospace;color:#636365;font-size:14px;line-height:1.71}.monotext--greyish-brown{font-family:roboto mono,monospace;color:#51504f;font-size:14px;line-height:1.71}.font-weight-normal{font-weight:400!important}.font-weight-500{font-weight:500!important}.font-weight-bold{font-weight:700!important}details.accordion{padding:40px 30px;border-bottom:solid 1px #cbcbcb;-webkit-transition:ease 1s;-o-transition:ease 1s;transition:ease 1s}details.accordion:first-of-type{border-top:solid 1px #cbcbcb}details.accordion summary{position:relative;display:block;outline:none}details.accordion 
summary::-webkit-details-marker{display:none}details.accordion .accordion__summary-content{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;max-width:750px;margin-right:40px}details.accordion .accordion__summary-content--icon{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;width:60px;margin-right:42px}details.accordion .accordion__summary-content--header{margin-bottom:20px}details.accordion .accordion__arrow{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;position:absolute;width:36px;height:36px;top:0;right:0;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;border:solid 1px #017cee;border-radius:50%}details.accordion .accordion__arrow svg{-webkit-transition:ease-out .2s;-o-transition:ease-out .2s;transition:ease-out .2s}details.accordion[open] .accordion__arrow svg{-webkit-transform:rotate(180deg);-ms-transform:rotate(180deg);-o-transform:rotate(180deg);transform:rotate(180deg)}details.accordion .accordion__content{margin-top:30px;margin-right:36px}details.accordion .accordion__content.indented{margin-left:102px}@media(max-width:1280px){details.accordion{padding:30px 0}details.accordion .accordion__summary-content--icon{-webkit-box-align:unset;-webkit-align-items:unset;-ms-flex-align:unset;align-items:unset;margin-right:20px;margin-top:28px}details.accordion .accordion__arrow{width:28px;height:28px;top:5px}details.accordion .accordion__content.indented{margin-left:80px}details.accordion .accordion__content ol.counter-blue{margin-left:-38px!important}}button{cursor:pointer;border:1px solid;border-radius:5px;padding:9px 29px;-webkit-transition:all ease-out .2s;-o-transition:all ease-out .2s;transition:all ease-out .2s}button:disabled{cursor:not-allowed}button.btn-filled{border-color:#017cee;background-color:#017cee}button.btn-filled:hover{border-color:#0cb6ff;background-color:#0cb6ff}button.btn-with-icon{padding:14px 20px}button.btn-with-icon svg{height:30px;width:auto;padding-right:15px}button.btn-with-icon span{display:inline-block;line-height:30px;vertical-align:middle}button.btn-hollow{background-color:#fff}button.btn-hollow.btn-blue{color:#017cee;border-color:#017cee}button.btn-hollow.btn-blue:disabled{color:#cbcbcb;border-color:#cbcbcb}button.btn-hollow.btn-blue:hover:enabled{color:#fff;background-color:#017cee}button.btn-hollow.btn-brown{border-color:#cbcbcb}button.btn-hollow.btn-brown:hover{background-color:#51504f;border-color:#51504f}button.btn-hollow.btn-brown:hover span{color:#fff}button.btn-hollow.btn-brown:hover svg path{fill:#fff}button.with-box-shadow{-webkit-box-shadow:0 2px 6px 0 rgba(0,0,0,.12);box-shadow:0 2px 6px rgba(0,0,0,.12)}@media(max-width:1280px){button{padding:4px 17px}}ol.counter-blue,ul.ticks-blue{list-style:none;margin-bottom:0}ol.counter-blue li,ul.ticks-blue li{position:relative;padding-left:10px}ol.counter-blue li::before,ul.ticks-blue li::before{position:absolute;border:solid 1px #017cee;border-radius:50%}ol.counter-blue{counter-reset:custom-counter;padding-left:-webkit-calc(26px + 2px);padding-left:calc(26px + 2px)}ol.counter-blue li{counter-increment:custom-counter;margin-bottom:25px}ol.counter-blue 
li::before{content:counter(custom-counter);-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;top:-2px;left:-webkit-calc(-1 * 26px);left:calc(-1 * 26px);width:26px;height:26px;text-align:center;line-height:26px}ul.ticks-blue{padding-left:-webkit-calc(24px + 2px);padding-left:calc(24px + 2px)}ul.ticks-blue li{margin-bottom:22px}ul.ticks-blue li::before{content:"";left:-webkit-calc(-1 * 24px);left:calc(-1 * 24px);width:24px;height:24px;background-position:50%;background-repeat:no-repeat;background-image:url(/images/tick.svg)}.list-items{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;margin:-20px;-webkit-box-align:stretch;-webkit-align-items:stretch;-ms-flex-align:stretch;align-items:stretch}@media(max-width:1280px){.list-items{margin:auto;max-width:580px}}@media(max-width:640px){.list-items{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center}}.list-item{width:25%;padding:20px}@media(min-width:1920px){.list-item{width:20%}}@media(max-width:1280px){.list-item{padding:10px;width:50%}}@media(max-width:640px){.list-item{width:100%}}.list-item--wide{width:50%}@media(max-width:1280px){.list-item--wide{width:100%}}.card{border:solid 1px #cbcbcb;border-radius:5px;padding:30px 10px;height:100%}.box-event{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center}.box-event__blogpost{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-flex:1;-webkit-flex:1;-ms-flex:1;flex:1;padding:0 20px}.box-event__blogpost--metadata{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between;margin-bottom:20px}.box-event__blogpost--header{margin-bottom:4px}.box-event__blogpost--author{font-weight:500}.box-event__blogpost--description{margin-bottom:20px}.box-event__case-study{padding:18px 18px 0;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between}.box-event__case-study--logo{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;height:60px;width:100%;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center}.box-event__case-study--logo svg,.box-event__case-study--logo img{max-height:100%;max-width:100%}.box-event__case-study--quote{font-style:italic;margin:30px 0 
20px;text-align:center}.box-event__case-study--quote::before{content:"“"}.box-event__case-study--quote::after{content:"”"}.box-event__committer--nick{font-weight:500;margin-top:12px}.box-event__committer--social-media-container{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center}.box-event__committer--social-media-icon{margin:0 6px}.box-event__meetup--location{margin-bottom:10px;text-align:center}.box-event__meetup--members{margin-bottom:30px}.box-event__meetup--members span{vertical-align:middle}.box-event__meetup--next-meetup{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;min-height:52px;margin-bottom:20px;text-align:center}.box-event__integration{height:208px;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center}.box-event__integration--logo{margin:auto 0;-webkit-filter:grayscale(1);filter:grayscale(1);opacity:.6;max-width:100%;max-height:100%}.box-event__integration--name{font-weight:700;text-align:center}.box-event__integration:hover .box-event__integration--logo{-webkit-filter:none;filter:none;opacity:1}.box-event.hoverable-icon svg,.box-event.hoverable-icon img{-webkit-filter:grayscale(1);filter:grayscale(1);opacity:.6;-webkit-transition:all .2s;-o-transition:all .2s;transition:all .2s}.box-event.hoverable-icon:hover svg,.box-event.hoverable-icon:hover img{-webkit-filter:none;filter:none;opacity:1}@media(max-width:640px){.box-event__blogpost--metadata{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column}.box-event__blogpost--date{margin-top:17px}}.avatar{border-radius:50%;width:80px;height:80px}.quote{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;border-bottom:solid 1px #cbcbcb;padding:0 78px 60px}.quote--text{text-align:center;font-weight:400}.quote--text::before{content:"“"}.quote--text::after{content:"”"}.quote--author{text-align:center;font-weight:500;margin-bottom:32px}.quote--logo{max-height:140px;margin:0 auto}@media(max-width:640px){.quote{padding:0 0 40px}}.pager{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;width:100%;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between;margin-top:60px}.case-study-page{max-width:790px!important;margin:60px auto 0}@media(max-width:640px){.case-study-page{margin-top:40px}}.markdown-content h1,.markdown-content h2,.markdown-content h3,.markdown-content h4,.markdown-content h5{margin-top:40px;margin-bottom:20px}.markdown-content p,.markdown-content span{margin-bottom:30px;margin-top:20px}.markdown-content img{width:100%}.markdown-content table{border-collapse:collapse;width:100%}.markdown-content th{background:#ccc}.markdown-content th,.markdown-content td{border:1px solid #ccc;padding:8px}.markdown-content tr:nth-child(even){background:#efefef}.markdown-content tr:hover{background:#d1d1d1}.markdown-content 
li{color:#707070}.base-layout{padding:123px 0 40px}.base-layout--button{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;position:-webkit-sticky;position:sticky;width:-webkit-fit-content;width:-moz-fit-content;width:fit-content;margin-left:auto;margin-right:40px;bottom:40px;-webkit-box-pack:end;-webkit-justify-content:flex-end;-ms-flex-pack:end;justify-content:flex-end;z-index:1}@media(max-width:1280px){.base-layout{padding:76px 0 60px}.base-layout--button{display:none}}.page-header{text-align:center;margin-bottom:16px}@media(max-width:1280px){.page-header{font-size:36px!important;line-height:1.22!important}}.page-subtitle{text-align:center;font-weight:400!important;margin-bottom:80px}@media(max-width:1280px){.page-subtitle{font-family:roboto,sans-serif!important;font-size:16px!important;line-height:1.63!important;margin-bottom:30px}}.container{margin-top:44px;max-width:1200px}@media(min-width:1920px){.container{max-width:1510px}}@media(max-width:1280px){.container>*{max-width:630px;margin-left:auto;margin-right:auto}.container .no-width-restriction{max-width:none}}@media(max-width:640px){.container>*{max-width:306px}}.container-fluid{padding-left:20px;padding-right:20px}.show-more-button{width:-webkit-fit-content;width:-moz-fit-content;width:fit-content;margin:60px auto 0}@media(max-width:1280px){.show-more-button{margin-top:30px}}@media(max-width:1280px){.desktop-only{display:none}}@media(min-width:calc(1280px + 1px)){.no-desktop{display:none}}@media(min-width:calc(640px + 1px)){.mobile-only{display:none}}.features-list{margin:76px auto 100px;max-width:720px;display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between}@media(min-width:1920px){.features-list{max-width:unset;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;margin-top:0}}@media(max-width:640px){.features-list{margin-top:0}}.feature-item{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;margin:44px auto 0}.feature-item--icon-box{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;max-width:150px;width:100%;height:-webkit-fit-content;height:-moz-fit-content;height:fit-content;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;margin-top:18px}.feature-item--text-box{margin-left:60px}@media(min-width:1920px),(max-width:1280px){.feature-item{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column}.feature-item--icon-box{margin-top:0;margin-bottom:20px;max-width:unset}.feature-item--text-box{margin-left:0;text-align:center}}@media(min-width:1920px){.feature-item{margin-top:60px;max-width:392px;margin-left:40px;margin-right:40px}}@media(max-width:1280px){.feature-item{margin-top:40px;max-width:260px}.feature-item svg{height:60px;width:auto}.feature-item--header{font-family:roboto,sans-serif!important;font-size:24px!important;line-height:1.5!important}.feature-item--text{font-size:14px!important;line-height:1.57!important}}.text-with-icon-list{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;max-width:940px;margin:0 auto 
100px;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between}.text-with-icon-item{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;max-width:410px;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;margin-top:54px}.text-with-icon-item svg{height:100px}.text-with-icon-item--header{text-align:center;margin-top:23px}.text-with-icon-item--text{text-align:center}@media(min-width:1920px){.text-with-icon-list{max-width:unset}.text-with-icon-item{max-width:305px}}@media(max-width:1280px){.text-with-icon-item{margin-top:30px;max-width:276px}.text-with-icon-item svg{width:70px;height:70px}.text-with-icon-item--header{font-family:roboto,sans-serif;font-size:24px;line-height:1.5;margin-top:16px}.text-with-icon-item--text{font-size:14px;line-height:1.57}}@media(max-width:640px){.text-with-icon-list{-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center}}.video-section{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;border:solid 1px #cbcbcb;padding:40px}@media(max-width:1280px){.video-section{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;max-width:560px;margin:0 auto;padding:20px}}.video-wrapper{-webkit-box-flex:1;-webkit-flex:1;-ms-flex:1;flex:1}.video-wrapper .video-container{display:none}.video-wrapper .video-container:last-child{display:block}.video-wrapper .anchor{position:fixed}.video-wrapper .anchor:target+.video-container{display:block}.video-wrapper .anchor:target+.video-container~.video-container{display:none}.video-list-wrapper{overflow-y:auto;max-height:403px;max-width:370px;width:100%;margin-left:40px}@media(max-width:1280px){.video-list-wrapper{max-width:unset;margin-left:0}}.video-list-wrapper::-webkit-scrollbar{-webkit-appearance:none}.video-list-wrapper::-webkit-scrollbar-thumb{border-radius:8px;border:2px solid #fff;background-color:rgba(0,0,0,.5)}.video-list-wrapper::-webkit-scrollbar:vertical{width:9px}.video-list{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:reverse;-webkit-flex-direction:column-reverse;-ms-flex-direction:column-reverse;flex-direction:column-reverse;-webkit-box-pack:end;-webkit-justify-content:flex-end;-ms-flex-pack:end;justify-content:flex-end}.video-list__item{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;border-bottom:solid 1px #cbcbcb;padding:16px 0}.video-list__item .video-list__item--title{margin-left:9px;vertical-align:middle}.video-list__item--icon-container{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex}.video-list__item:hover .video-list__item--title,.video-list__item.active .video-list__item--title{font-weight:500}.video-list__item:hover svg path,.video-list__item.active svg path{fill:#707070;stroke:none}.tag{display:block;background-color:rgba(1,124,238,.25);padding:1px 
15px;border-radius:5px;-webkit-transition:.2s;-o-transition:.2s;transition:.2s;margin:7px}.tag.active,.tag:hover{background-color:#017cee;color:#fff}.tags-container{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;width:-webkit-fit-content;width:-moz-fit-content;width:fit-content;margin:-7px}@media(max-width:640px){.tags-container{-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;margin-bottom:7px}}.all-tags-container{margin:30px 0}.blog__list-items-wrapper{max-width:1200px;margin:30px auto 0}@media(max-width:1280px){.blog__list-items-wrapper{max-width:580px}}.new-entry{margin-bottom:20px}.new-entry--link{font-weight:500}@media(max-width:1280px){.new-entry{margin-bottom:10px;padding-left:10px}}.blogpost-content{max-width:790px;margin:0 auto}.blogpost-content--header-wrapper{border-bottom:solid 1px #cbcbcb}.blogpost-content__metadata--container{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between;margin-bottom:30px}.blogpost-content__metadata--title{margin-bottom:20px}.blogpost-content__metadata--author{font-weight:500;margin-bottom:30px;margin-right:17px}.blogpost-content__metadata--social-media-icon{margin-right:7px}.blogpost-content__metadata--description{font-weight:400;margin-bottom:30px}.blogpost-content__metadata--date{margin-top:17px}@media(max-width:640px){.blogpost-content__metadata--container{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;margin-bottom:19px}.blogpost-content__metadata--title{font-family:roboto,sans-serif!important;font-size:24px!important;line-height:1.5!important;max-width:272px;margin-bottom:13px}.blogpost-content__metadata--author{margin-bottom:20px}.blogpost-content__metadata--description{font-family:roboto,sans-serif!important;font-weight:400!important;font-size:16px!important;line-height:1.63!important;margin-bottom:20px}}.blog-pager{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;max-width:1200px;margin:60px auto 0}.blog-pager .pager{margin-top:40px;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center}.home-page-layout.base-layout{padding-top:70px}@media(max-width:640px){.home-page-layout.base-layout{padding-top:16px}}.principles-header{margin-top:20px;margin-bottom:4px}.integrations-header{margin-bottom:60px}@media(max-width:640px){.integrations-header{margin-bottom:30px}}#integrations .list-items{margin-top:40px}@media(max-width:640px){#integrations .list-items{margin-top:20px}}.video-section-container{margin:80px auto;max-width:1200px}@media(max-width:640px){.video-section-container{margin:60px 
0}}.host-header{margin-bottom:6px}.meetups{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center}.meetups>.list-items{margin-top:40px;margin-bottom:40px;width:100%}@media(max-width:1280px){.meetups>.list-items{margin-top:30px;margin-bottom:30px}}@media(max-width:640px){.meetups>.list-items{margin-bottom:10px}}.meetups-page.page-subtitle{margin-bottom:0}.list-link{text-decoration:underline}.community--header-container{max-width:936px;margin:0 auto}.community--header-join{text-align:center}.community--accordion-container{margin:60px 18px}.community--committers-header{text-align:center;margin-top:70px;margin-bottom:40px}.community--committers-header.large-margin{margin-top:100px;margin-bottom:40px}@media(min-width:1920px){.community .list-items{max-width:1220px;margin-right:auto;margin-left:auto}.community .list-item{width:25%}}@media(max-width:1280px){.community--header-container{max-width:580px}.community--header-join{font-family:roboto,sans-serif!important;font-size:24px!important;line-height:1.5!important}.community--accordion-container{margin:40px 0}.community--committers-header{font-size:36px!important;line-height:1.22!important;margin-top:60px}.community--committers-header.large-margin{margin-top:60px;margin-bottom:40px}.community--button-container{margin-top:20px}}.install .page-subtitle{margin-bottom:45px}.install--headers-wrapper{max-width:936px;margin:0 auto}.install--description{text-align:center;margin:45px 0 60px}.install--description a{color:#017cee;text-decoration:underline}.install__accordions--wrapper{max-width:900px;margin:60px auto}.install__accordions-content--header{font-weight:500}.install__accordions-content--list-wrapper{margin-bottom:40px}.install__accordions-content--methods-wrapper{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;margin-bottom:20px}.install__accordions-content--method-box{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;width:270px;height:160px;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;border:solid 1px #cbcbcb;border-radius:5px;margin-right:40px}@media(max-width:1280px){.install .page-subtitle{margin-bottom:30px}.install--headers-wrapper{max-width:580px}.install--description{margin-top:30px;margin-bottom:30px}.install__accordions--wrapper{max-width:580px;margin-top:40px;margin-bottom:0}}@media(max-width:640px){.install__accordions-content--methods-wrapper{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;margin-bottom:0;margin-right:-36px}.install__accordions-content--method-box{margin-right:0;margin-bottom:20px}}footer{min-height:unset}footer 
.footer-section{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between}footer .footer-section__media-section{padding:60px 60px 30px;background-color:#51504f}footer .footer-section__media-section--link{margin-right:30px}footer .footer-section__media-section--button-with-text{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center}footer .footer-section__media-section--text{margin-right:20px}footer .footer-section__policies-section{padding:30px 60px;background-color:#636365;font-size:12px;-webkit-box-orient:vertical!important;-webkit-box-direction:normal!important;-webkit-flex-direction:column!important;-ms-flex-direction:column!important;flex-direction:column!important}footer .footer-section__policies-section span{font-size:12px}footer .footer-section__policies-section--policies{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex}footer .footer-section__policies-section--policy-item::before{content:"\00a0\00a0"}footer .footer-section__policies-section--policy-item::after{content:"\00a0\00a0|";color:#fff}footer .footer-section__policies-section--policy-item:last-of-type::after{content:"\00a0\00a0|\00a0\00a0"}footer .footer-section__policies-section--disclaimer{display:block;max-width:600px;color:#cbcbcb!important;margin-top:16px}@media(min-width:1920px){footer .footer-section__policies-section--disclaimer{max-width:800px}}footer .dropdown-toggle::after{color:#fff;vertical-align:middle}@media(max-width:1280px){footer .footer-section{-webkit-box-orient:vertical;-webkit-box-direction:reverse;-webkit-flex-direction:column-reverse;-ms-flex-direction:column-reverse;flex-direction:column-reverse;-webkit-box-align:start;-webkit-align-items:flex-start;-ms-flex-align:start;align-items:flex-start}footer .footer-section span{font-size:14px!important;line-height:1.57!important}footer .footer-section__media-section{padding:30px 40px}footer .footer-section__media-section svg{height:31px;width:auto}footer .footer-section__media-section--link{margin-right:20px}footer .footer-section__media-section--button-with-text{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:start;-webkit-align-items:flex-start;-ms-flex-align:start;align-items:flex-start;margin-bottom:47px}footer .footer-section__media-section--text{margin-right:0;margin-bottom:16px}footer .footer-section__policies-section{padding:30px 40px}footer .footer-section__policies-section--policies{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column}footer .footer-section__policies-section--policy-item::before,footer .footer-section__policies-section--policy-item::after,footer .footer-section__policies-section--policy-item:last-of-type::before,footer .footer-section__policies-section--policy-item:last-of-type::after{content:""}footer .footer-section__policies-section--language-toggle{margin:17px 0 35px}}.navbar{position:fixed;top:0;width:100%;background-color:#fff;-webkit-box-pack:start;-webkit-justify-content:flex-start;-ms-flex-pack:start;justify-content:flex-start;border-bottom:solid 1px #cbcbcb;z-index:32;padding:30px 
60px}.navbar__menu-container{-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1}.navbar__menu-content{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;padding-top:16px;padding-left:88px}.navbar__links-container{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex}.navbar__text-link{margin-right:30px;position:relative;width:-webkit-fit-content;width:-moz-fit-content;width:fit-content}.navbar__text-link::before,.navbar__text-link::after{content:"";position:absolute;top:100%;width:0;right:0;height:2px;background-color:#017cee;-webkit-transition:.2s ease-out;-o-transition:.2s ease-out;transition:.2s ease-out}.navbar__text-link:hover,.navbar__text-link.active{color:#51504f}.navbar__text-link:hover::before,.navbar__text-link:hover::after,.navbar__text-link.active::before,.navbar__text-link.active::after{width:100%;left:0}.navbar--box-shadow{-webkit-box-shadow:0 2px 6px 0 rgba(0,0,0,.12);box-shadow:0 2px 6px rgba(0,0,0,.12)}@media(max-width:1280px){.navbar{padding:20px}.navbar__icon-container svg{width:93px;height:auto}.navbar__menu-content{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:start;-webkit-align-items:flex-start;-ms-flex-align:start;align-items:flex-start;padding-left:0;padding-top:0}.navbar__drawer-container{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;-webkit-box-pack:end;-webkit-justify-content:flex-end;-ms-flex-pack:end;justify-content:flex-end}.navbar__toggle-button{border:none;background:0 0;padding:0;position:relative;width:26px;height:20px}.navbar__toggle-button--icon{position:absolute;top:0;bottom:0;left:0;right:0;visibility:hidden;opacity:0;-webkit-transition:.2s ease-out;-o-transition:.2s ease-out;transition:.2s ease-out}.navbar__toggle-button--icon.visible{visibility:visible;opacity:1}.navbar__drawer{position:fixed;top:77px;left:0;width:100%;height:-webkit-calc(100% - 77px);height:calc(100% - 77px);background-color:#fff;-webkit-transform:translateX(100%);-ms-transform:translateX(100%);-o-transform:translateX(100%);transform:translateX(100%);-webkit-transition:.2s ease-out;-o-transition:.2s ease-out;transition:.2s ease-out;padding:40px 40px 30px}.navbar__drawer--open{-webkit-transform:translateX(0);-ms-transform:translateX(0);-o-transform:translateX(0);transform:translateX(0)}.navbar__links-container{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column}.navbar__text-link{margin-right:0;margin-bottom:10px}.navbar__text-link:last-child{margin-bottom:20px}}#header{position:relative;margin:123px -20px 0;min-height:-webkit-calc(100vh - 123px);min-height:calc(100vh - 123px)}#header-canvas{padding:0;margin:0;position:absolute;width:100%;height:100%;top:0;left:0;right:0;bottom:0}#header-canvas 
.text-area{max-width:706px;width:100%;display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;position:absolute;top:50%;left:50%;-webkit-transform:translate(-50%,-50%);-ms-transform:translate(-50%,-50%);-o-transform:translate(-50%,-50%);transform:translate(-50%,-50%)}#header-canvas .text-area--header{text-align:center;margin-bottom:20px}#header-canvas .text-area--subheader{font-weight:400!important;text-align:center;margin-bottom:20px}#header-canvas canvas{position:absolute;top:0;left:0;width:100%;height:100%;z-index:-1}@media(min-width:1920px){#header-canvas .text-area--header{font-size:90px}}@media(max-width:1280px){#header{margin:77px -20px 0;min-height:-webkit-calc(100vh - 77px);min-height:calc(100vh - 77px)}#header-canvas .text-area{max-width:450px}#header-canvas .text-area--header{margin-bottom:14px}#header-canvas .text-area--subheader{margin-bottom:26px}}@media(max-width:640px){#header-canvas .text-area{max-width:290px}#header-canvas .text-area--header{font-size:48px!important;line-height:1.25!important}#header-canvas .text-area--subheader{font-size:16px!important;line-height:1.63!important}}.roadmap{margin-top:40px}.roadmap main{padding-left:40px}.roadmap .breadcrumb{padding-bottom:0;margin-bottom:30px}.roadmap .breadcrumb-item+.breadcrumb-item{padding-left:4px}.roadmap .breadcrumb-item+.breadcrumb-item::before{color:#707070;padding-right:0}.roadmap .td-sidebar{position:-webkit-sticky;position:sticky;top:163px;height:-webkit-fit-content;height:-moz-fit-content;height:fit-content;max-height:-webkit-calc(100vh - 163px);max-height:calc(100vh - 163px);max-width:270px;min-width:270px;width:100%;overflow-y:auto;padding-top:12px;padding-left:15px;padding-bottom:30px;margin-left:40px;background-color:#f9f9f9;border:none}.roadmap .td-sidebar::-webkit-scrollbar{-webkit-appearance:none}.roadmap .td-sidebar::-webkit-scrollbar-thumb{border-radius:8px;border:2px solid #fff;background-color:rgba(0,0,0,.5)}.roadmap .td-sidebar::-webkit-scrollbar:vertical{width:9px}.roadmap .td-sidebar__inner{position:static;height:unset}.roadmap .td-sidebar li>a{width:-webkit-fit-content;width:-moz-fit-content;width:fit-content}.roadmap .td-sidebar li>a.active{color:#017cee}.roadmap .td-sidebar .searchb-box{margin-bottom:26px}.roadmap .td-sidebar .searchb-box .search-form{width:100%}.roadmap .td-sidebar .toctree li{list-style:none;font-family:roboto,sans-serif;font-size:16px;font-weight:400;font-stretch:normal;font-style:normal;line-height:1.63;letter-spacing:normal;color:#707070}.roadmap .td-sidebar .toctree ul{padding-left:15px;display:none}.roadmap .td-sidebar .toctree>ul,.roadmap .td-sidebar .toctree li.current>ul{display:block}.roadmap .td-sidebar .toctree .caption{font-family:roboto,sans-serif;font-size:18px;font-weight:700;font-stretch:normal;font-style:normal;line-height:1.33;letter-spacing:normal;color:#51504f;padding-bottom:13px;text-transform:uppercase;margin-bottom:0}.roadmap .td-sidebar .toctree .current{color:#017cee;font-weight:500}.roadmap .td-sidebar .toctree .current>a:not([href="#"]){color:#017cee}.roadmap .td-sidebar .toctree a .toctree-expand{display:inline-block;position:relative;height:1em}.roadmap .td-sidebar .toctree a 
.toctree-expand::before{position:absolute;top:6px;left:-12px;content:'►';font-size:7px}.roadmap .td-sidebar .toctree .current>a>.toctree-expand:before{content:'▼'}.roadmap .td-sidebar-nav__section{padding-right:0}.roadmap .td-sidebar-nav>.td-sidebar-nav__section .td-sidebar-nav__section{list-style:none;position:relative;margin-left:10px}.roadmap .td-sidebar-nav>.td-sidebar-nav__section .td-sidebar-nav__section::before{content:'►';position:absolute;top:6px;left:-12px;font-size:8px;color:#51504f}.roadmap .td-sidebar-nav>.td-sidebar-nav__section .td-sidebar-nav__section>ul{display:none}.roadmap .td-sidebar-nav>.td-sidebar-nav__section .td-sidebar-nav__section.current-section::before{content:'▼'}.roadmap .td-sidebar-nav>.td-sidebar-nav__section .td-sidebar-nav__section.current-section>ul{display:block}.roadmap .wy-nav-side-toc{position:-webkit-sticky;position:sticky;top:163px;overflow-x:hidden;overflow-y:auto;width:280px;height:-webkit-fit-content;height:-moz-fit-content;height:fit-content;max-height:-webkit-calc(100vh - 163px);max-height:calc(100vh - 163px);font-size:14px;line-height:1.43}.roadmap .wy-nav-side-toc::-webkit-scrollbar{-webkit-appearance:none}.roadmap .wy-nav-side-toc::-webkit-scrollbar-thumb{border-radius:8px;border:2px solid #fff;background-color:rgba(0,0,0,.5)}.roadmap .wy-nav-side-toc::-webkit-scrollbar:vertical{width:9px}.roadmap .wy-nav-side-toc ul,.roadmap .wy-nav-side-toc ol{padding:0;list-style:none none}.roadmap .wy-nav-side-toc li{list-style:none}.roadmap .wy-nav-side-toc .wy-menu-vertical>ul,.roadmap .wy-nav-side-toc .wy-menu-vertical li.current>ul{padding-left:1px}.roadmap .wy-nav-side-toc .wy-menu-vertical a{display:inline-block;padding:.4045em 0;color:#707070}.roadmap .wy-nav-side-toc .wy-menu-vertical li a{border-left:solid 2px #cbcbcb;padding-left:-webkit-calc(1.25em + 1px);padding-left:calc(1.25em + 1px)}.roadmap .wy-nav-side-toc .wy-menu-vertical li.current{margin-left:-1px}.roadmap .wy-nav-side-toc .wy-menu-vertical li.current>a{border-left:solid 4px #017cee;color:#017cee}.roadmap .wy-nav-side-toc .wy-menu-vertical li li>a{padding-left:2.427em}.roadmap .wy-nav-side-toc .wy-menu-vertical li li li>a{padding-left:4.045em}.roadmap .wy-nav-side-toc .wy-menu-vertical li li li li>a{padding-left:5.663em}@media(max-width:1280px){.roadmap main{padding-left:0}.roadmap .td-sidebar{position:static;background-color:transparent;padding:0;margin:0;max-width:unset;height:530px}}.search-form{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;width:344px;padding:8px 20px;border:solid 1px #cbcbcb;border-radius:5px;margin:60px auto 0}.search-form__input{font-family:roboto,sans-serif;font-size:16px;color:#707070;line-height:1.63;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;padding-right:10px;border:none;background:0 0;outline:none;float:left}.search-form__button{border:none;background-color:transparent;padding:0}@media(max-width:1280px){.search-form{width:270px;padding:3px 20px;margin-top:30px}}.rating-container{margin-top:40px}.rating{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:horizontal;-webkit-box-direction:reverse;-webkit-flex-direction:row-reverse;-ms-flex-direction:row-reverse;flex-direction:row-reverse;width:-webkit-fit-content;width:-moz-fit-content;width:fit-content}.rate-star{cursor:pointer;margin-right:13px}.rate-star svg path{fill:none;stroke:#51504f}.rate-star:hover svg path,.rate-star:hover~.rate-star svg 
path{fill:#017cee;stroke:none}.rst-content{color:#707070}.rst-content h1{margin-top:0;margin-bottom:30px;font-weight:500;font-family:rubik,sans-serif;color:#51504f;font-size:225%}.rst-content h2,.rst-content h3,.rst-content h4,.rst-content h5,.rst-content h6,.rst-content p{font-family:roboto,sans-serif;color:#707070}.rst-content h2,.rst-content h3,.rst-content h4,.rst-content h5,.rst-content h6{margin-top:40px;margin-bottom:20px;font-weight:500}.rst-content p{line-height:1.63;margin:0 0 30px;font-size:16px;overflow-wrap:break-word}.rst-content h2{font-size:150%}.rst-content h3{font-size:125%}.rst-content h4{font-size:115%}.rst-content h5{font-size:110%}.rst-content h6{font-size:100%}.rst-content code{max-width:100%;color:#51504f;padding:0 5px;font-family:roboto mono,monospace;overflow-x:auto}.rst-content .note,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .tip,.rst-content .warning,.rst-content .admonition-todo,.rst-content .admonition{padding:9px 10px;line-height:24px;margin-bottom:24px;background:#e7f2fa}@media(max-width:768px){.rst-content .admonition p:not(.admonition-title){font-size:12px;line-height:1.83}}.rst-content .admonition-title:before{content:"!";background-color:#fff;border-radius:50%;padding:0 4px;margin-right:5px}.rst-content .admonition-title{color:#fff;font-weight:500;font-size:10px;line-height:2.1;display:block;background:#68d1ff;margin:-10px;padding:0 12px;margin-bottom:9px}.rst-content .danger,.rst-content .error{background:#fdece9}.rst-content .danger::before,.rst-content .error::before{color:#fdece9}.rst-content .danger .admonition-title,.rst-content .error .admonition-title{background:#ee8170}.rst-content .danger .admonition-title::before,.rst-content .error .admonition-title::before{color:#ee8170}.rst-content .attention,.rst-content .caution{background:#fff8f6}.rst-content .warning{background:#f8f8f8}.rst-content .attention .admonition-title,.rst-content .caution .admonition-title{background:#ffa996}.rst-content .attention .admonition-title::before,.rst-content .caution .admonition-title::before{color:#ffa996}.rst-content .warning .admonition-title{background:#a6a6a6}.rst-content .warning .admonition-title::before{color:#a6a6a6}.rst-content .note,.rst-content .seealso{background:#f3fbff}.rst-content .note .admonition-title,.rst-content .seealso .admonition-title{background:#68d2fe}.rst-content .note .admonition-title::before,.rst-content .seealso .admonition-title::before{color:#68d2fe}.rst-content .hint{background:#f2fef6}.rst-content .important{background:#e6f9fc}.rst-content .tip{background:#e5f7ec}.rst-content .hint .admonition-title{background:#63e598}.rst-content .hint .admonition-title::before{color:#63e598}.rst-content .important .admonition-title{background:#5bdae3}.rst-content .important .admonition-title::before{color:#5bdae3}.rst-content .tip .admonition-title{background:#5bcb88}.rst-content .tip .admonition-title::before{color:#5bcb88}.rst-content .note p:last-child,.rst-content .attention p:last-child,.rst-content .caution p:last-child,.rst-content .danger p:last-child,.rst-content .error p:last-child,.rst-content .hint p:last-child,.rst-content .important p:last-child,.rst-content .tip p:last-child,.rst-content .warning p:last-child,.rst-content .seealso p:last-child,.rst-content .admonition p:last-child{margin-bottom:0}.rst-content img{max-width:100%;height:auto}.rst-content div.figure{margin-bottom:24px}.rst-content div.figure 
p.caption{font-style:italic}.rst-content div.figure p:last-child.caption{margin-bottom:0}.rst-content div.figure.align-center{text-align:center}.rst-content .section>img,.rst-content .section>a>img{margin-bottom:24px}.rst-content abbr[title]{text-decoration:none}.rst-content blockquote{margin-left:24px;line-height:24px;margin-bottom:24px}.rst-content pre{background-color:#f2f8fe}.rst-content pre.literal-block,.rst-content .linenodiv pre{font-family:roboto mono,monospace;font-size:12px;line-height:1.4}@media print{.rst-content div[class^=highlight],.rst-content div[class^=highlight] pre{white-space:pre-wrap}}.rst-content .admonition table{border-color:rgba(0,0,0,.1)}.rst-content .admonition table td,.rst-content .admonition table th{background:0 0!important;border-color:rgba(0,0,0,.1)!important}.rst-content .section ol p,.rst-content .section ul p{margin-bottom:12px}.rst-content .section ol li p:last-child,.rst-content .section ul li p:last-child{margin-bottom:0}.rst-content .section ol p:last-child,.rst-content .section ul p:last-child{margin-bottom:24px}.rst-content .topic-title{font-weight:700;margin-bottom:12px}.rst-content .toc-backref{color:inherit}.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content .toctree-wrapper p.caption .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content dl dt .headerlink,.rst-content p.caption .headerlink,.rst-content table>caption .headerlink,.rst-content .code-block-caption .headerlink{visibility:hidden;font-size:0}.rst-content h1 .headerlink:after,.rst-content h2 .headerlink:after,.rst-content .toctree-wrapper p.caption .headerlink:after,.rst-content h3 .headerlink:after,.rst-content h4 .headerlink:after,.rst-content h5 .headerlink:after,.rst-content h6 .headerlink:after,.rst-content dl dt .headerlink:after,.rst-content p.caption .headerlink:after,.rst-content table>caption .headerlink:after,.rst-content .code-block-caption .headerlink:after{content:url(/images/anchor.svg);vertical-align:bottom;padding-left:8px}.rst-content h1:hover .headerlink:after,.rst-content h2:hover .headerlink:after,.rst-content .toctree-wrapper p.caption:hover .headerlink:after,.rst-content h3:hover .headerlink:after,.rst-content h4:hover .headerlink:after,.rst-content h5:hover .headerlink:after,.rst-content h6:hover .headerlink:after,.rst-content dl dt:hover .headerlink:after,.rst-content p.caption:hover .headerlink:after,.rst-content table>caption:hover .headerlink:after,.rst-content .code-block-caption:hover .headerlink:after{visibility:visible}.rst-content table>caption .headerlink:after{font-size:12px}.rst-content .footnote-reference,.rst-content .citation-reference{vertical-align:baseline;position:relative;top:-.4em;line-height:0;font-size:90%}.rst-content table.docutils,.rst-content table.field-list{border-collapse:collapse;border-spacing:0;empty-cells:show;margin-bottom:24px}.rst-content table.docutils caption,.rst-content table.field-list caption{padding:1em 0;text-align:center}.rst-content table.docutils td,.rst-content table.docutils th,.rst-content table.field-list th{margin:0;overflow:visible}.rst-content table.docutils td{padding:10px 31px}.rst-content table.docutils th,.rst-content table.field-list th{padding:11px 31px}.rst-content table.docutils td:first-child,.rst-content table.field-list td:first-child,.rst-content table.docutils th:first-child,.rst-content table.field-list th:first-child{border-left-width:0}.rst-content table.docutils thead,.rst-content 
table.field-list thead{text-align:left;vertical-align:bottom;white-space:nowrap}.rst-content table.docutils thead th,.rst-content table.field-list thead th{border-bottom:solid 1px rgba(81,80,79,.3);border-left:solid 1px rgba(81,80,79,.3)}.rst-content table.docutils thead th p,.rst-content table.field-list thead th p{font-weight:700;font-size:18px;color:#51504f;line-height:1.33;margin-bottom:0}.rst-content table.docutils td,.rst-content table.field-list td{background-color:transparent;vertical-align:middle}.rst-content td p:last-child,.rst-content table.docutils td p:last-child,.rst-content table.field-list td p:last-child{margin-bottom:0}.rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td{background-color:rgba(112,112,112,5%)}.rst-content table.docutils{border:1px solid rgba(81,80,79,.3)}.rst-content table.docutils td{border-bottom:1px solid rgba(81,80,79,.3);border-left:1px solid rgba(81,80,79,.3)}.rst-content table.docutils tbody>tr:last-child td{border-bottom-width:0}.rst-content .wy-table-responsive{margin-bottom:24px;max-width:100%;overflow:auto}.rst-content .wy-table-responsive table{margin-bottom:0!important}.rst-content .wy-table-responsive table th{white-space:nowrap}.rst-content code big,.rst-content tt em,.rst-content code em{font-size:100%!important;line-height:normal}.rst-content code.literal{color:#e74c3c}.rst-content code.xref,.rst-content a code{font-weight:700;color:#707070}.rst-content pre,.rst-content kbd{font-family:roboto mono,monospace}.rst-content kbd{background-color:inherit;color:inherit;-webkit-box-shadow:none;box-shadow:none;border:none;font-size:100%}.rst-content a code{color:#2980b9}.rst-content dl{margin-bottom:24px}.rst-content dl dt{font-weight:700;margin-bottom:12px;word-break:break-word}.rst-content dl p,.rst-content dl table,.rst-content dl ul,.rst-content dl ol{margin-bottom:12px!important}.rst-content dl dd{margin:0 0 12px 24px;line-height:24px}.rst-content dl:not(.docutils){margin-bottom:24px}.rst-content dl:not(.docutils) dt{font-family:roboto mono,monospace;display:table;margin:6px 0;font-size:100%;line-height:1.63;background:#f3fbff;color:#51504f;border-top:solid 4px #68d1ff;padding:8px 10px;position:relative}@media(max-width:768px){.rst-content dl:not(.docutils) dt{font-size:10px}}.rst-content dl:not(.docutils) dt:before{color:#68d1ff}.rst-content dl:not(.docutils) dt .headerlink{color:#707070;font-size:100%!important}.rst-content dl:not(.docutils) dt .fn-backref{color:#0cb6ff}.rst-content dl:not(.docutils) dl dt{margin-bottom:6px;border:none;border-left:solid 8px #a6a6a6;background:#f8f8f8;color:#707070}.rst-content dl:not(.docutils) dl dt .headerlink{color:#707070;font-size:100%!important}.rst-content dl:not(.docutils) dt:first-child{margin-top:0}.rst-content dl:not(.docutils) code{font-weight:700}.rst-content dl:not(.docutils) code.descname,.rst-content dl:not(.docutils) code.descclassname{background-color:transparent;border:none;padding:0;font-size:100%!important}.rst-content dl:not(.docutils) code.descname{font-weight:700}.rst-content dl:not(.docutils) .optional{display:inline-block;padding:0 4px;color:#51504f;font-weight:700}.rst-content dl:not(.docutils) .property{display:inline-block;padding-right:8px}.rst-content p.rubric{margin-bottom:12px;font-weight:700}.rst-content .versionmodified{font-style:italic}.rst-content .example-header{position:relative;background:#017cee;padding:8px 16px;margin-bottom:0}.rst-content .example-block-wrapper pre{margin:0;width:unset;border-top-left-radius:0;border-top-right-radius:0}.rst-content 
.example-header--with-button{padding-right:166px}@media(max-width:768px){.rst-content .example-header--with-button{padding-right:100px}}.rst-content .example-header:after{content:'';display:table;clear:both}.rst-content .example-title{display:block;padding:4px;margin-right:16px;color:#fff;overflow-x:auto}@media(max-width:768px){.rst-content .example-title{overflow-wrap:initial;font-size:12px}}.rst-content .example-header-button{top:8px;right:16px;position:absolute}.rst-content .example-header+.highlight-python{margin-top:0!important}.rst-content .viewcode-button{display:inline-block;padding:8px 16px;border:0;margin:0;color:#fff;font-weight:700;line-height:1;text-decoration:underline;text-overflow:ellipsis;overflow:hidden;text-transform:lowercase;vertical-align:middle;white-space:nowrap}@media(max-width:768px){.rst-content .viewcode-button{font-size:12px;padding:7px 0}}.rst-content .viewcode-button:visited{color:#404040}.rst-content .viewcode-button:hover,.rst-content .viewcode-button:focus{color:#404040}@media(min-width:1024px){.rst-content .section::before{display:block;content:" ";margin-top:-83px;height:83px;visibility:hidden}}.content-drawer-wrapper{display:none}@media(max-width:1280px){.content-drawer-wrapper{display:block;margin-bottom:30px}}.content-drawer-wrapper .navbar{-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between;left:0}.content-drawer-wrapper .navbar--hidden{display:none}.content-drawer-container{position:fixed;top:77px;left:0;width:100%;height:100%;background-color:#fff;-webkit-transform:translateX(-100%);-ms-transform:translateX(-100%);-o-transform:translateX(-100%);transform:translateX(-100%);-webkit-transition:.2s ease-out;-o-transition:.2s ease-out;transition:.2s ease-out;z-index:100}.content-drawer-container--open{-webkit-transform:translateX(0);-ms-transform:translateX(0);-o-transform:translateX(0);transform:translateX(0)}.content-drawer{padding:20px 40px}.content-drawer__toggle-button{border:none;background:0 0;padding:0;position:relative;width:26px;height:20px}.content-drawer__toggle-button--icon{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;width:-webkit-fit-content;width:-moz-fit-content;width:fit-content;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;visibility:hidden;opacity:0;-webkit-transition:.2s ease-out;-o-transition:.2s ease-out;transition:.2s ease-out}.content-drawer__toggle-button--icon svg{margin-right:10px}.content-drawer__toggle-button--icon.visible{visibility:visible;opacity:1}.dropdown-menu{font-family:roboto,sans-serif}.dropdown-toggle::after{color:#51504f}.sidebar__version-selector{margin-bottom:22px}.chroma,.highlight{background-color:#fff}.chroma .lntd,.highlight .lntd{vertical-align:top;padding:0;margin:0;border:0}.chroma .lntable,.highlight .lntable{border-spacing:0;padding:0;margin:0;border:0;width:auto;overflow:auto;display:block}.chroma .hl,.highlight .hl{display:block;width:100%;background-color:#ffc}.chroma .lnt,.chroma .ln,.highlight .lnt,.highlight .ln{margin-right:.4em;padding:0 .4em;color:#7f7f7f}.chroma .k,.chroma .kc,.chroma .kd,.chroma .kn,.chroma .kp,.chroma .kr,.highlight .k,.highlight .kc,.highlight .kd,.highlight .kn,.highlight .kp,.highlight .kr{color:#00f}.chroma .kt,.chroma .nc,.highlight .kt,.highlight .nc{color:#2b91af}.chroma .s,.chroma .sa,.chroma .sb,.chroma .sc,.chroma .dl,.chroma .sd,.chroma .s2,.chroma .se,.chroma .sh,.chroma .si,.chroma .sx,.chroma .sr,.chroma .s1,.chroma 
.ss,.highlight .s,.highlight .sa,.highlight .sb,.highlight .sc,.highlight .dl,.highlight .sd,.highlight .s2,.highlight .se,.highlight .sh,.highlight .si,.highlight .sx,.highlight .sr,.highlight .s1,.highlight .ss{color:#a31515}.chroma .ow,.highlight .ow{color:#00f}.chroma .c,.chroma .ch,.chroma .cm,.chroma .c1,.chroma .cs,.highlight .c,.highlight .ch,.highlight .cm,.highlight .c1,.highlight .cs{color:green}.chroma .cp,.chroma .cpf,.highlight .cp,.highlight .cpf{color:#00f}.chroma .ge,.highlight .ge{font-style:italic}.chroma .gh,.chroma .gp,.chroma .gs,.chroma .gu,.highlight .gh,.highlight .gp,.highlight .gs,.highlight .gu{font-weight:700}pre{margin:40px 0;padding:16px 20px;border:solid 1px #cbcbcb;border-radius:5px;width:100%}.share-section--icons-wrapper{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex}.share-section--icon{margin-right:12px;cursor:pointer}.four-oh-four{height:-webkit-calc(100vh - 123px);height:calc(100vh - 123px);position:relative}.four-oh-four__text-container{position:relative;top:50%;left:50%;-webkit-transform:translate(-50%,-50%);-ms-transform:translate(-50%,-50%);-o-transform:translate(-50%,-50%);transform:translate(-50%,-50%);text-align:center}@media(min-width:768px){.list-providers{-webkit-column-count:2;-moz-column-count:2;column-count:2}}@media(min-width:1000px){.list-providers{-webkit-column-count:3;-moz-column-count:3;column-count:3}} \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/_gen/css/main.min.css b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/_gen/css/main.min.css new file mode 100644 index 00000000000..0ccc485909e --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/_gen/css/main.min.css @@ -0,0 +1,7 @@ +@import "https://fonts.googleapis.com/css?family=Open+Sans:300,300i,400,400i,700,700i";@fa-font-path:"../webfonts";/*!* Bootstrap v4.1.3 (https://getbootstrap.com/) +* Copyright 2011-2018 The Bootstrap Authors +* Copyright 2011-2018 Twitter, Inc. 
+* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)*/:root{--blue:#72A1E5;--indigo:#6610f2;--purple:#6f42c1;--pink:#e83e8c;--red:#dc3545;--orange:#BA5A31;--yellow:#ffc107;--green:#28a745;--teal:#20c997;--cyan:#17a2b8;--white:#fff;--gray:#888;--gray-dark:#333;--primary:#30638E;--secondary:#FFA630;--success:#3772FF;--info:#C0E0DE;--warning:#ED6A5A;--danger:#ED6A5A;--light:#D3F3EE;--dark:#403F4C;--breakpoint-xs:0;--breakpoint-sm:576px;--breakpoint-md:768px;--breakpoint-lg:992px;--breakpoint-xl:1200px;--font-family-sans-serif:"Open Sans", -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";--font-family-monospace:SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace}*,*::before,*::after{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-family:sans-serif;line-height:1.15;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%;-ms-overflow-style:scrollbar;-webkit-tap-highlight-color:transparent}@-ms-viewport{width: device-width; }article,aside,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}body{margin:0;font-family:open sans,-apple-system,BlinkMacSystemFont,segoe ui,Roboto,helvetica neue,Arial,sans-serif,apple color emoji,segoe ui emoji,segoe ui symbol;font-size:1rem;font-weight:400;line-height:1.5;color:#222;text-align:left;background-color:#fff}[tabindex="-1"]:focus{outline:0!important}hr{-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;height:0;overflow:visible}h1,h2,h3,h4,h5,h6{margin-top:0;margin-bottom:.5rem}p{margin-top:0;margin-bottom:1rem}abbr[title],abbr[data-original-title]{text-decoration:underline;-webkit-text-decoration:underline dotted;-moz-text-decoration:underline dotted;text-decoration:underline dotted;cursor:help;border-bottom:0}address{margin-bottom:1rem;font-style:normal;line-height:inherit}ol,ul,dl{margin-top:0;margin-bottom:1rem}ol ol,ul ul,ol ul,ul ol{margin-bottom:0}dt{font-weight:700}dd{margin-bottom:.5rem;margin-left:0}blockquote{margin:0 0 1rem}dfn{font-style:italic}b,strong{font-weight:bolder}small{font-size:80%}sub,sup{position:relative;font-size:75%;line-height:0;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}a{color:#3176d9;text-decoration:none;background-color:transparent;-webkit-text-decoration-skip:objects}a:hover{color:#1e53a0;text-decoration:none}a:not([href]):not([tabindex]){color:inherit;text-decoration:none}a:not([href]):not([tabindex]):hover,a:not([href]):not([tabindex]):focus{color:inherit;text-decoration:none}a:not([href]):not([tabindex]):focus{outline:0}pre,code,kbd,samp{font-family:SFMono-Regular,Menlo,Monaco,Consolas,liberation mono,courier new,monospace;font-size:1em}pre{margin-top:0;margin-bottom:1rem;overflow:auto;-ms-overflow-style:scrollbar}figure{margin:0 0 1rem}img{vertical-align:middle;border-style:none}svg{overflow:hidden;vertical-align:middle}table{border-collapse:collapse}caption{padding-top:.75rem;padding-bottom:.75rem;color:#888;text-align:left;caption-side:bottom}th{text-align:inherit}label{display:inline-block;margin-bottom:.5rem}button{border-radius:0}button:focus{outline:1px dotted;outline:5px auto -webkit-focus-ring-color}input,button,select,optgroup,textarea{margin:0;font-family:inherit;font-size:inherit;line-height:inherit}button,input{overflow:visible}button,select{text-transform:none}button,html 
[type=button],[type=reset],[type=submit]{-webkit-appearance:button}button::-moz-focus-inner,[type=button]::-moz-focus-inner,[type=reset]::-moz-focus-inner,[type=submit]::-moz-focus-inner{padding:0;border-style:none}input[type=radio],input[type=checkbox]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:0}input[type=date],input[type=time],input[type=datetime-local],input[type=month]{-webkit-appearance:listbox}textarea{overflow:auto;resize:vertical}fieldset{min-width:0;padding:0;margin:0;border:0}legend{display:block;width:100%;max-width:100%;padding:0;margin-bottom:.5rem;font-size:1.5rem;line-height:inherit;color:inherit;white-space:normal}progress{vertical-align:baseline}[type=number]::-webkit-inner-spin-button,[type=number]::-webkit-outer-spin-button{height:auto}[type=search]{outline-offset:-2px;-webkit-appearance:none}[type=search]::-webkit-search-cancel-button,[type=search]::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{font:inherit;-webkit-appearance:button}output{display:inline-block}summary{display:list-item;cursor:pointer}template{display:none}[hidden]{display:none!important}h1,h2,h3,h4,h5,h6,.h1,.h2,.h3,.h4,.h5,.h6{margin-bottom:.5rem;font-family:inherit;font-weight:500;line-height:1.2;color:inherit}h1,.h1{font-size:2.25rem}h2,.h2{font-size:2rem}h3,.h3{font-size:1.5rem}h4,.h4{font-size:1.35rem}h5,.h5{font-size:1.15rem}h6,.h6{font-size:1rem}.lead{font-size:1.25rem;font-weight:300}.display-1{font-size:3rem;font-weight:700;line-height:1.2}.display-2{font-size:2.5rem;font-weight:700;line-height:1.2}.display-3{font-size:2rem;font-weight:700;line-height:1.2}.display-4{font-size:1.75rem;font-weight:700;line-height:1.2}hr{margin-top:1rem;margin-bottom:1rem;border:0;border-top:1px solid rgba(0,0,0,.1)}small,.small{font-size:80%;font-weight:400}mark,.mark{padding:.2em;background-color:#fcf8e3}.list-unstyled{padding-left:0;list-style:none}.list-inline{padding-left:0;list-style:none}.list-inline-item{display:inline-block}.list-inline-item:not(:last-child){margin-right:.5rem}.initialism{font-size:90%;text-transform:uppercase}.blockquote{margin-bottom:1rem;font-size:1.25rem}.blockquote-footer{display:block;font-size:80%;color:#888}.blockquote-footer::before{content:"\2014 \00A0"}.img-fluid,.td-content img{max-width:100%;height:auto}.img-thumbnail{padding:.25rem;background-color:#fff;border:1px solid #dee2e6;border-radius:.25rem;-webkit-box-shadow:0 1px 2px rgba(0,0,0,.075);box-shadow:0 1px 2px rgba(0,0,0,.075);max-width:100%;height:auto}.figure{display:inline-block}.figure-img{margin-bottom:.5rem;line-height:1}.figure-caption{font-size:90%;color:#888}code{font-size:87.5%;color:#c97300;word-break:break-word}a>code{color:inherit}kbd{padding:.2rem .4rem;font-size:87.5%;color:#fff;background-color:#222;border-radius:.2rem;-webkit-box-shadow:inset 0 -.1rem 0 rgba(0,0,0,.25);box-shadow:inset 0 -.1rem rgba(0,0,0,.25)}kbd kbd{padding:0;font-size:100%;font-weight:700;-webkit-box-shadow:none;box-shadow:none}pre{display:block;font-size:87.5%;color:#222}pre 
code{font-size:inherit;color:inherit;word-break:normal}.pre-scrollable{max-height:340px;overflow-y:scroll}.container{width:100%;padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}@media(min-width:576px){.container{max-width:540px}}@media(min-width:768px){.container{max-width:720px}}@media(min-width:992px){.container{max-width:960px}}@media(min-width:1200px){.container{max-width:1140px}}.container-fluid{width:100%;padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}.row{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;margin-right:-15px;margin-left:-15px}.no-gutters{margin-right:0;margin-left:0}.no-gutters>.col,.no-gutters>[class*=col-]{padding-right:0;padding-left:0}.col-xl,.col-xl-auto,.col-xl-12,.col-xl-11,.col-xl-10,.col-xl-9,.col-xl-8,.col-xl-7,.col-xl-6,.col-xl-5,.col-xl-4,.col-xl-3,.col-xl-2,.col-xl-1,.col-lg,.col-lg-auto,.col-lg-12,.col-lg-11,.col-lg-10,.col-lg-9,.col-lg-8,.col-lg-7,.col-lg-6,.col-lg-5,.col-lg-4,.col-lg-3,.col-lg-2,.col-lg-1,.col-md,.col-md-auto,.col-md-12,.col-md-11,.col-md-10,.col-md-9,.col-md-8,.col-md-7,.col-md-6,.col-md-5,.col-md-4,.col-md-3,.col-md-2,.col-md-1,.col-sm,.col-sm-auto,.col-sm-12,.col-sm-11,.col-sm-10,.col-sm-9,.col-sm-8,.col-sm-7,.col-sm-6,.col-sm-5,.col-sm-4,.col-sm-3,.col-sm-2,.col-sm-1,.col,.col-auto,.col-12,.col-11,.col-10,.col-9,.col-8,.col-7,.col-6,.col-5,.col-4,.col-3,.col-2,.col-1{position:relative;width:100%;min-height:1px;padding-right:15px;padding-left:15px}.col{-webkit-flex-basis:0;-ms-flex-preferred-size:0;flex-basis:0;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;max-width:100%}.col-auto{-webkit-box-flex:0;-webkit-flex:0 0 auto;-ms-flex:0 0 auto;flex:none;width:auto;max-width:none}.col-1{-webkit-box-flex:0;-webkit-flex:0 0 8.33333333%;-ms-flex:0 0 8.33333333%;flex:0 0 8.33333333%;max-width:8.33333333%}.col-2{-webkit-box-flex:0;-webkit-flex:0 0 16.66666667%;-ms-flex:0 0 16.66666667%;flex:0 0 16.66666667%;max-width:16.66666667%}.col-3{-webkit-box-flex:0;-webkit-flex:0 0 25%;-ms-flex:0 0 25%;flex:0 0 25%;max-width:25%}.col-4{-webkit-box-flex:0;-webkit-flex:0 0 33.33333333%;-ms-flex:0 0 33.33333333%;flex:0 0 33.33333333%;max-width:33.33333333%}.col-5{-webkit-box-flex:0;-webkit-flex:0 0 41.66666667%;-ms-flex:0 0 41.66666667%;flex:0 0 41.66666667%;max-width:41.66666667%}.col-6{-webkit-box-flex:0;-webkit-flex:0 0 50%;-ms-flex:0 0 50%;flex:0 0 50%;max-width:50%}.col-7{-webkit-box-flex:0;-webkit-flex:0 0 58.33333333%;-ms-flex:0 0 58.33333333%;flex:0 0 58.33333333%;max-width:58.33333333%}.col-8{-webkit-box-flex:0;-webkit-flex:0 0 66.66666667%;-ms-flex:0 0 66.66666667%;flex:0 0 66.66666667%;max-width:66.66666667%}.col-9{-webkit-box-flex:0;-webkit-flex:0 0 75%;-ms-flex:0 0 75%;flex:0 0 75%;max-width:75%}.col-10{-webkit-box-flex:0;-webkit-flex:0 0 83.33333333%;-ms-flex:0 0 83.33333333%;flex:0 0 83.33333333%;max-width:83.33333333%}.col-11{-webkit-box-flex:0;-webkit-flex:0 0 91.66666667%;-ms-flex:0 0 91.66666667%;flex:0 0 91.66666667%;max-width:91.66666667%}.col-12{-webkit-box-flex:0;-webkit-flex:0 0 100%;-ms-flex:0 0 100%;flex:0 0 
100%;max-width:100%}.order-first{-webkit-box-ordinal-group:0;-webkit-order:-1;-ms-flex-order:-1;order:-1}.order-last{-webkit-box-ordinal-group:14;-webkit-order:13;-ms-flex-order:13;order:13}.order-0{-webkit-box-ordinal-group:1;-webkit-order:0;-ms-flex-order:0;order:0}.order-1{-webkit-box-ordinal-group:2;-webkit-order:1;-ms-flex-order:1;order:1}.order-2{-webkit-box-ordinal-group:3;-webkit-order:2;-ms-flex-order:2;order:2}.order-3{-webkit-box-ordinal-group:4;-webkit-order:3;-ms-flex-order:3;order:3}.order-4{-webkit-box-ordinal-group:5;-webkit-order:4;-ms-flex-order:4;order:4}.order-5{-webkit-box-ordinal-group:6;-webkit-order:5;-ms-flex-order:5;order:5}.order-6{-webkit-box-ordinal-group:7;-webkit-order:6;-ms-flex-order:6;order:6}.order-7{-webkit-box-ordinal-group:8;-webkit-order:7;-ms-flex-order:7;order:7}.order-8{-webkit-box-ordinal-group:9;-webkit-order:8;-ms-flex-order:8;order:8}.order-9{-webkit-box-ordinal-group:10;-webkit-order:9;-ms-flex-order:9;order:9}.order-10{-webkit-box-ordinal-group:11;-webkit-order:10;-ms-flex-order:10;order:10}.order-11{-webkit-box-ordinal-group:12;-webkit-order:11;-ms-flex-order:11;order:11}.order-12{-webkit-box-ordinal-group:13;-webkit-order:12;-ms-flex-order:12;order:12}.offset-1{margin-left:8.33333333%}.offset-2{margin-left:16.66666667%}.offset-3{margin-left:25%}.offset-4{margin-left:33.33333333%}.offset-5{margin-left:41.66666667%}.offset-6{margin-left:50%}.offset-7{margin-left:58.33333333%}.offset-8{margin-left:66.66666667%}.offset-9{margin-left:75%}.offset-10{margin-left:83.33333333%}.offset-11{margin-left:91.66666667%}@media(min-width:576px){.col-sm{-webkit-flex-basis:0;-ms-flex-preferred-size:0;flex-basis:0;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;max-width:100%}.col-sm-auto{-webkit-box-flex:0;-webkit-flex:0 0 auto;-ms-flex:0 0 auto;flex:none;width:auto;max-width:none}.col-sm-1{-webkit-box-flex:0;-webkit-flex:0 0 8.33333333%;-ms-flex:0 0 8.33333333%;flex:0 0 8.33333333%;max-width:8.33333333%}.col-sm-2{-webkit-box-flex:0;-webkit-flex:0 0 16.66666667%;-ms-flex:0 0 16.66666667%;flex:0 0 16.66666667%;max-width:16.66666667%}.col-sm-3{-webkit-box-flex:0;-webkit-flex:0 0 25%;-ms-flex:0 0 25%;flex:0 0 25%;max-width:25%}.col-sm-4{-webkit-box-flex:0;-webkit-flex:0 0 33.33333333%;-ms-flex:0 0 33.33333333%;flex:0 0 33.33333333%;max-width:33.33333333%}.col-sm-5{-webkit-box-flex:0;-webkit-flex:0 0 41.66666667%;-ms-flex:0 0 41.66666667%;flex:0 0 41.66666667%;max-width:41.66666667%}.col-sm-6{-webkit-box-flex:0;-webkit-flex:0 0 50%;-ms-flex:0 0 50%;flex:0 0 50%;max-width:50%}.col-sm-7{-webkit-box-flex:0;-webkit-flex:0 0 58.33333333%;-ms-flex:0 0 58.33333333%;flex:0 0 58.33333333%;max-width:58.33333333%}.col-sm-8{-webkit-box-flex:0;-webkit-flex:0 0 66.66666667%;-ms-flex:0 0 66.66666667%;flex:0 0 66.66666667%;max-width:66.66666667%}.col-sm-9{-webkit-box-flex:0;-webkit-flex:0 0 75%;-ms-flex:0 0 75%;flex:0 0 75%;max-width:75%}.col-sm-10{-webkit-box-flex:0;-webkit-flex:0 0 83.33333333%;-ms-flex:0 0 83.33333333%;flex:0 0 83.33333333%;max-width:83.33333333%}.col-sm-11{-webkit-box-flex:0;-webkit-flex:0 0 91.66666667%;-ms-flex:0 0 91.66666667%;flex:0 0 91.66666667%;max-width:91.66666667%}.col-sm-12{-webkit-box-flex:0;-webkit-flex:0 0 100%;-ms-flex:0 0 100%;flex:0 0 
100%;max-width:100%}.order-sm-first{-webkit-box-ordinal-group:0;-webkit-order:-1;-ms-flex-order:-1;order:-1}.order-sm-last{-webkit-box-ordinal-group:14;-webkit-order:13;-ms-flex-order:13;order:13}.order-sm-0{-webkit-box-ordinal-group:1;-webkit-order:0;-ms-flex-order:0;order:0}.order-sm-1{-webkit-box-ordinal-group:2;-webkit-order:1;-ms-flex-order:1;order:1}.order-sm-2{-webkit-box-ordinal-group:3;-webkit-order:2;-ms-flex-order:2;order:2}.order-sm-3{-webkit-box-ordinal-group:4;-webkit-order:3;-ms-flex-order:3;order:3}.order-sm-4{-webkit-box-ordinal-group:5;-webkit-order:4;-ms-flex-order:4;order:4}.order-sm-5{-webkit-box-ordinal-group:6;-webkit-order:5;-ms-flex-order:5;order:5}.order-sm-6{-webkit-box-ordinal-group:7;-webkit-order:6;-ms-flex-order:6;order:6}.order-sm-7{-webkit-box-ordinal-group:8;-webkit-order:7;-ms-flex-order:7;order:7}.order-sm-8{-webkit-box-ordinal-group:9;-webkit-order:8;-ms-flex-order:8;order:8}.order-sm-9{-webkit-box-ordinal-group:10;-webkit-order:9;-ms-flex-order:9;order:9}.order-sm-10{-webkit-box-ordinal-group:11;-webkit-order:10;-ms-flex-order:10;order:10}.order-sm-11{-webkit-box-ordinal-group:12;-webkit-order:11;-ms-flex-order:11;order:11}.order-sm-12{-webkit-box-ordinal-group:13;-webkit-order:12;-ms-flex-order:12;order:12}.offset-sm-0{margin-left:0}.offset-sm-1{margin-left:8.33333333%}.offset-sm-2{margin-left:16.66666667%}.offset-sm-3{margin-left:25%}.offset-sm-4{margin-left:33.33333333%}.offset-sm-5{margin-left:41.66666667%}.offset-sm-6{margin-left:50%}.offset-sm-7{margin-left:58.33333333%}.offset-sm-8{margin-left:66.66666667%}.offset-sm-9{margin-left:75%}.offset-sm-10{margin-left:83.33333333%}.offset-sm-11{margin-left:91.66666667%}}@media(min-width:768px){.col-md{-webkit-flex-basis:0;-ms-flex-preferred-size:0;flex-basis:0;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;max-width:100%}.col-md-auto{-webkit-box-flex:0;-webkit-flex:0 0 auto;-ms-flex:0 0 auto;flex:none;width:auto;max-width:none}.col-md-1{-webkit-box-flex:0;-webkit-flex:0 0 8.33333333%;-ms-flex:0 0 8.33333333%;flex:0 0 8.33333333%;max-width:8.33333333%}.col-md-2{-webkit-box-flex:0;-webkit-flex:0 0 16.66666667%;-ms-flex:0 0 16.66666667%;flex:0 0 16.66666667%;max-width:16.66666667%}.col-md-3{-webkit-box-flex:0;-webkit-flex:0 0 25%;-ms-flex:0 0 25%;flex:0 0 25%;max-width:25%}.col-md-4{-webkit-box-flex:0;-webkit-flex:0 0 33.33333333%;-ms-flex:0 0 33.33333333%;flex:0 0 33.33333333%;max-width:33.33333333%}.col-md-5{-webkit-box-flex:0;-webkit-flex:0 0 41.66666667%;-ms-flex:0 0 41.66666667%;flex:0 0 41.66666667%;max-width:41.66666667%}.col-md-6{-webkit-box-flex:0;-webkit-flex:0 0 50%;-ms-flex:0 0 50%;flex:0 0 50%;max-width:50%}.col-md-7{-webkit-box-flex:0;-webkit-flex:0 0 58.33333333%;-ms-flex:0 0 58.33333333%;flex:0 0 58.33333333%;max-width:58.33333333%}.col-md-8{-webkit-box-flex:0;-webkit-flex:0 0 66.66666667%;-ms-flex:0 0 66.66666667%;flex:0 0 66.66666667%;max-width:66.66666667%}.col-md-9{-webkit-box-flex:0;-webkit-flex:0 0 75%;-ms-flex:0 0 75%;flex:0 0 75%;max-width:75%}.col-md-10{-webkit-box-flex:0;-webkit-flex:0 0 83.33333333%;-ms-flex:0 0 83.33333333%;flex:0 0 83.33333333%;max-width:83.33333333%}.col-md-11{-webkit-box-flex:0;-webkit-flex:0 0 91.66666667%;-ms-flex:0 0 91.66666667%;flex:0 0 91.66666667%;max-width:91.66666667%}.col-md-12{-webkit-box-flex:0;-webkit-flex:0 0 100%;-ms-flex:0 0 100%;flex:0 0 
100%;max-width:100%}.order-md-first{-webkit-box-ordinal-group:0;-webkit-order:-1;-ms-flex-order:-1;order:-1}.order-md-last{-webkit-box-ordinal-group:14;-webkit-order:13;-ms-flex-order:13;order:13}.order-md-0{-webkit-box-ordinal-group:1;-webkit-order:0;-ms-flex-order:0;order:0}.order-md-1{-webkit-box-ordinal-group:2;-webkit-order:1;-ms-flex-order:1;order:1}.order-md-2{-webkit-box-ordinal-group:3;-webkit-order:2;-ms-flex-order:2;order:2}.order-md-3{-webkit-box-ordinal-group:4;-webkit-order:3;-ms-flex-order:3;order:3}.order-md-4{-webkit-box-ordinal-group:5;-webkit-order:4;-ms-flex-order:4;order:4}.order-md-5{-webkit-box-ordinal-group:6;-webkit-order:5;-ms-flex-order:5;order:5}.order-md-6{-webkit-box-ordinal-group:7;-webkit-order:6;-ms-flex-order:6;order:6}.order-md-7{-webkit-box-ordinal-group:8;-webkit-order:7;-ms-flex-order:7;order:7}.order-md-8{-webkit-box-ordinal-group:9;-webkit-order:8;-ms-flex-order:8;order:8}.order-md-9{-webkit-box-ordinal-group:10;-webkit-order:9;-ms-flex-order:9;order:9}.order-md-10{-webkit-box-ordinal-group:11;-webkit-order:10;-ms-flex-order:10;order:10}.order-md-11{-webkit-box-ordinal-group:12;-webkit-order:11;-ms-flex-order:11;order:11}.order-md-12{-webkit-box-ordinal-group:13;-webkit-order:12;-ms-flex-order:12;order:12}.offset-md-0{margin-left:0}.offset-md-1{margin-left:8.33333333%}.offset-md-2{margin-left:16.66666667%}.offset-md-3{margin-left:25%}.offset-md-4{margin-left:33.33333333%}.offset-md-5{margin-left:41.66666667%}.offset-md-6{margin-left:50%}.offset-md-7{margin-left:58.33333333%}.offset-md-8{margin-left:66.66666667%}.offset-md-9{margin-left:75%}.offset-md-10{margin-left:83.33333333%}.offset-md-11{margin-left:91.66666667%}}@media(min-width:992px){.col-lg{-webkit-flex-basis:0;-ms-flex-preferred-size:0;flex-basis:0;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;max-width:100%}.col-lg-auto{-webkit-box-flex:0;-webkit-flex:0 0 auto;-ms-flex:0 0 auto;flex:none;width:auto;max-width:none}.col-lg-1{-webkit-box-flex:0;-webkit-flex:0 0 8.33333333%;-ms-flex:0 0 8.33333333%;flex:0 0 8.33333333%;max-width:8.33333333%}.col-lg-2{-webkit-box-flex:0;-webkit-flex:0 0 16.66666667%;-ms-flex:0 0 16.66666667%;flex:0 0 16.66666667%;max-width:16.66666667%}.col-lg-3{-webkit-box-flex:0;-webkit-flex:0 0 25%;-ms-flex:0 0 25%;flex:0 0 25%;max-width:25%}.col-lg-4{-webkit-box-flex:0;-webkit-flex:0 0 33.33333333%;-ms-flex:0 0 33.33333333%;flex:0 0 33.33333333%;max-width:33.33333333%}.col-lg-5{-webkit-box-flex:0;-webkit-flex:0 0 41.66666667%;-ms-flex:0 0 41.66666667%;flex:0 0 41.66666667%;max-width:41.66666667%}.col-lg-6{-webkit-box-flex:0;-webkit-flex:0 0 50%;-ms-flex:0 0 50%;flex:0 0 50%;max-width:50%}.col-lg-7{-webkit-box-flex:0;-webkit-flex:0 0 58.33333333%;-ms-flex:0 0 58.33333333%;flex:0 0 58.33333333%;max-width:58.33333333%}.col-lg-8{-webkit-box-flex:0;-webkit-flex:0 0 66.66666667%;-ms-flex:0 0 66.66666667%;flex:0 0 66.66666667%;max-width:66.66666667%}.col-lg-9{-webkit-box-flex:0;-webkit-flex:0 0 75%;-ms-flex:0 0 75%;flex:0 0 75%;max-width:75%}.col-lg-10{-webkit-box-flex:0;-webkit-flex:0 0 83.33333333%;-ms-flex:0 0 83.33333333%;flex:0 0 83.33333333%;max-width:83.33333333%}.col-lg-11{-webkit-box-flex:0;-webkit-flex:0 0 91.66666667%;-ms-flex:0 0 91.66666667%;flex:0 0 91.66666667%;max-width:91.66666667%}.col-lg-12{-webkit-box-flex:0;-webkit-flex:0 0 100%;-ms-flex:0 0 100%;flex:0 0 
100%;max-width:100%}.order-lg-first{-webkit-box-ordinal-group:0;-webkit-order:-1;-ms-flex-order:-1;order:-1}.order-lg-last{-webkit-box-ordinal-group:14;-webkit-order:13;-ms-flex-order:13;order:13}.order-lg-0{-webkit-box-ordinal-group:1;-webkit-order:0;-ms-flex-order:0;order:0}.order-lg-1{-webkit-box-ordinal-group:2;-webkit-order:1;-ms-flex-order:1;order:1}.order-lg-2{-webkit-box-ordinal-group:3;-webkit-order:2;-ms-flex-order:2;order:2}.order-lg-3{-webkit-box-ordinal-group:4;-webkit-order:3;-ms-flex-order:3;order:3}.order-lg-4{-webkit-box-ordinal-group:5;-webkit-order:4;-ms-flex-order:4;order:4}.order-lg-5{-webkit-box-ordinal-group:6;-webkit-order:5;-ms-flex-order:5;order:5}.order-lg-6{-webkit-box-ordinal-group:7;-webkit-order:6;-ms-flex-order:6;order:6}.order-lg-7{-webkit-box-ordinal-group:8;-webkit-order:7;-ms-flex-order:7;order:7}.order-lg-8{-webkit-box-ordinal-group:9;-webkit-order:8;-ms-flex-order:8;order:8}.order-lg-9{-webkit-box-ordinal-group:10;-webkit-order:9;-ms-flex-order:9;order:9}.order-lg-10{-webkit-box-ordinal-group:11;-webkit-order:10;-ms-flex-order:10;order:10}.order-lg-11{-webkit-box-ordinal-group:12;-webkit-order:11;-ms-flex-order:11;order:11}.order-lg-12{-webkit-box-ordinal-group:13;-webkit-order:12;-ms-flex-order:12;order:12}.offset-lg-0{margin-left:0}.offset-lg-1{margin-left:8.33333333%}.offset-lg-2{margin-left:16.66666667%}.offset-lg-3{margin-left:25%}.offset-lg-4{margin-left:33.33333333%}.offset-lg-5{margin-left:41.66666667%}.offset-lg-6{margin-left:50%}.offset-lg-7{margin-left:58.33333333%}.offset-lg-8{margin-left:66.66666667%}.offset-lg-9{margin-left:75%}.offset-lg-10{margin-left:83.33333333%}.offset-lg-11{margin-left:91.66666667%}}@media(min-width:1200px){.col-xl{-webkit-flex-basis:0;-ms-flex-preferred-size:0;flex-basis:0;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;max-width:100%}.col-xl-auto{-webkit-box-flex:0;-webkit-flex:0 0 auto;-ms-flex:0 0 auto;flex:none;width:auto;max-width:none}.col-xl-1{-webkit-box-flex:0;-webkit-flex:0 0 8.33333333%;-ms-flex:0 0 8.33333333%;flex:0 0 8.33333333%;max-width:8.33333333%}.col-xl-2{-webkit-box-flex:0;-webkit-flex:0 0 16.66666667%;-ms-flex:0 0 16.66666667%;flex:0 0 16.66666667%;max-width:16.66666667%}.col-xl-3{-webkit-box-flex:0;-webkit-flex:0 0 25%;-ms-flex:0 0 25%;flex:0 0 25%;max-width:25%}.col-xl-4{-webkit-box-flex:0;-webkit-flex:0 0 33.33333333%;-ms-flex:0 0 33.33333333%;flex:0 0 33.33333333%;max-width:33.33333333%}.col-xl-5{-webkit-box-flex:0;-webkit-flex:0 0 41.66666667%;-ms-flex:0 0 41.66666667%;flex:0 0 41.66666667%;max-width:41.66666667%}.col-xl-6{-webkit-box-flex:0;-webkit-flex:0 0 50%;-ms-flex:0 0 50%;flex:0 0 50%;max-width:50%}.col-xl-7{-webkit-box-flex:0;-webkit-flex:0 0 58.33333333%;-ms-flex:0 0 58.33333333%;flex:0 0 58.33333333%;max-width:58.33333333%}.col-xl-8{-webkit-box-flex:0;-webkit-flex:0 0 66.66666667%;-ms-flex:0 0 66.66666667%;flex:0 0 66.66666667%;max-width:66.66666667%}.col-xl-9{-webkit-box-flex:0;-webkit-flex:0 0 75%;-ms-flex:0 0 75%;flex:0 0 75%;max-width:75%}.col-xl-10{-webkit-box-flex:0;-webkit-flex:0 0 83.33333333%;-ms-flex:0 0 83.33333333%;flex:0 0 83.33333333%;max-width:83.33333333%}.col-xl-11{-webkit-box-flex:0;-webkit-flex:0 0 91.66666667%;-ms-flex:0 0 91.66666667%;flex:0 0 91.66666667%;max-width:91.66666667%}.col-xl-12{-webkit-box-flex:0;-webkit-flex:0 0 100%;-ms-flex:0 0 100%;flex:0 0 
100%;max-width:100%}.order-xl-first{-webkit-box-ordinal-group:0;-webkit-order:-1;-ms-flex-order:-1;order:-1}.order-xl-last{-webkit-box-ordinal-group:14;-webkit-order:13;-ms-flex-order:13;order:13}.order-xl-0{-webkit-box-ordinal-group:1;-webkit-order:0;-ms-flex-order:0;order:0}.order-xl-1{-webkit-box-ordinal-group:2;-webkit-order:1;-ms-flex-order:1;order:1}.order-xl-2{-webkit-box-ordinal-group:3;-webkit-order:2;-ms-flex-order:2;order:2}.order-xl-3{-webkit-box-ordinal-group:4;-webkit-order:3;-ms-flex-order:3;order:3}.order-xl-4{-webkit-box-ordinal-group:5;-webkit-order:4;-ms-flex-order:4;order:4}.order-xl-5{-webkit-box-ordinal-group:6;-webkit-order:5;-ms-flex-order:5;order:5}.order-xl-6{-webkit-box-ordinal-group:7;-webkit-order:6;-ms-flex-order:6;order:6}.order-xl-7{-webkit-box-ordinal-group:8;-webkit-order:7;-ms-flex-order:7;order:7}.order-xl-8{-webkit-box-ordinal-group:9;-webkit-order:8;-ms-flex-order:8;order:8}.order-xl-9{-webkit-box-ordinal-group:10;-webkit-order:9;-ms-flex-order:9;order:9}.order-xl-10{-webkit-box-ordinal-group:11;-webkit-order:10;-ms-flex-order:10;order:10}.order-xl-11{-webkit-box-ordinal-group:12;-webkit-order:11;-ms-flex-order:11;order:11}.order-xl-12{-webkit-box-ordinal-group:13;-webkit-order:12;-ms-flex-order:12;order:12}.offset-xl-0{margin-left:0}.offset-xl-1{margin-left:8.33333333%}.offset-xl-2{margin-left:16.66666667%}.offset-xl-3{margin-left:25%}.offset-xl-4{margin-left:33.33333333%}.offset-xl-5{margin-left:41.66666667%}.offset-xl-6{margin-left:50%}.offset-xl-7{margin-left:58.33333333%}.offset-xl-8{margin-left:66.66666667%}.offset-xl-9{margin-left:75%}.offset-xl-10{margin-left:83.33333333%}.offset-xl-11{margin-left:91.66666667%}}.table,.td-content>table,.td-box .row.section>table{width:100%;margin-bottom:1rem;background-color:transparent}.table th,.td-content>table th,.td-box .row.section>table th,.table td,.td-content>table td,.td-box .row.section>table td{padding:.75rem;vertical-align:top;border-top:1px solid #dee2e6}.table thead th,.td-content>table thead th,.td-box .row.section>table thead th{vertical-align:bottom;border-bottom:2px solid #dee2e6}.table tbody+tbody,.td-content>table tbody+tbody,.td-box .row.section>table tbody+tbody{border-top:2px solid #dee2e6}.table .table,.td-content>table .table,.table .td-content>table,.td-content>table .td-content>table,.td-box .row.section>table .table,.td-box .row.section>table .td-content>table,.table .td-box .row.section>table,.td-content>table .td-box .row.section>table,.td-box .table .row.section>table,.td-box .td-content>table .row.section>table,.td-box .row.section>table .row.section>table{background-color:#fff}.table-sm th,.table-sm td{padding:.3rem}.table-bordered{border:1px solid #dee2e6}.table-bordered th,.table-bordered td{border:1px solid #dee2e6}.table-bordered thead th,.table-bordered thead td{border-bottom-width:2px}.table-borderless th,.table-borderless td,.table-borderless thead th,.table-borderless tbody+tbody{border:0}.table-striped tbody tr:nth-of-type(odd),.td-content>table tbody tr:nth-of-type(odd),.td-box .row.section>table tbody tr:nth-of-type(odd){background-color:rgba(0,0,0,5%)}.table-hover tbody tr:hover{background-color:rgba(0,0,0,.075)}.table-primary,.table-primary>th,.table-primary>td{background-color:#c5d3df}.table-hover .table-primary:hover{background-color:#b5c7d6}.table-hover .table-primary:hover>td,.table-hover .table-primary:hover>th{background-color:#b5c7d6}.table-secondary,.table-secondary>th,.table-secondary>td{background-color:#ffe6c5}.table-hover 
.table-secondary:hover{background-color:#ffdbac}.table-hover .table-secondary:hover>td,.table-hover .table-secondary:hover>th{background-color:#ffdbac}.table-success,.table-success>th,.table-success>td{background-color:#c7d8ff}.table-hover .table-success:hover{background-color:#aec6ff}.table-hover .table-success:hover>td,.table-hover .table-success:hover>th{background-color:#aec6ff}.table-info,.table-info>th,.table-info>td{background-color:#edf6f6}.table-hover .table-info:hover{background-color:#dceeee}.table-hover .table-info:hover>td,.table-hover .table-info:hover>th{background-color:#dceeee}.table-warning,.table-warning>th,.table-warning>td{background-color:#fad5d1}.table-hover .table-warning:hover{background-color:#f8c0ba}.table-hover .table-warning:hover>td,.table-hover .table-warning:hover>th{background-color:#f8c0ba}.table-danger,.table-danger>th,.table-danger>td{background-color:#fad5d1}.table-hover .table-danger:hover{background-color:#f8c0ba}.table-hover .table-danger:hover>td,.table-hover .table-danger:hover>th{background-color:#f8c0ba}.table-light,.table-light>th,.table-light>td{background-color:#f3fcfa}.table-hover .table-light:hover{background-color:#dff7f2}.table-hover .table-light:hover>td,.table-hover .table-light:hover>th{background-color:#dff7f2}.table-dark,.table-dark>th,.table-dark>td{background-color:#cac9cd}.table-hover .table-dark:hover{background-color:#bdbcc1}.table-hover .table-dark:hover>td,.table-hover .table-dark:hover>th{background-color:#bdbcc1}.table-active,.table-active>th,.table-active>td{background-color:rgba(0,0,0,.075)}.table-hover .table-active:hover{background-color:rgba(0,0,0,.075)}.table-hover .table-active:hover>td,.table-hover .table-active:hover>th{background-color:rgba(0,0,0,.075)}.table .thead-dark th,.td-content>table .thead-dark th,.td-box .row.section>table .thead-dark th{color:#fff;background-color:#222;border-color:#353535}.table .thead-light th,.td-content>table .thead-light th,.td-box .row.section>table .thead-light th{color:#495057;background-color:#eee;border-color:#dee2e6}.table-dark{color:#fff;background-color:#222}.table-dark th,.table-dark td,.table-dark thead th{border-color:#353535}.table-dark.table-bordered{border:0}.table-dark.table-striped tbody tr:nth-of-type(odd),.td-content>table.table-dark tbody tr:nth-of-type(odd),.td-box .row.section>table.table-dark tbody tr:nth-of-type(odd){background-color:rgba(255,255,255,5%)}.table-dark.table-hover tbody tr:hover{background-color:rgba(255,255,255,.075)}@media(max-width:575.98px){.table-responsive-sm{display:block;width:100%;overflow-x:auto;-webkit-overflow-scrolling:touch;-ms-overflow-style:-ms-autohiding-scrollbar}.table-responsive-sm>.table-bordered{border:0}}@media(max-width:767.98px){.table-responsive-md{display:block;width:100%;overflow-x:auto;-webkit-overflow-scrolling:touch;-ms-overflow-style:-ms-autohiding-scrollbar}.table-responsive-md>.table-bordered{border:0}}@media(max-width:991.98px){.table-responsive-lg{display:block;width:100%;overflow-x:auto;-webkit-overflow-scrolling:touch;-ms-overflow-style:-ms-autohiding-scrollbar}.table-responsive-lg>.table-bordered{border:0}}@media(max-width:1199.98px){.table-responsive-xl{display:block;width:100%;overflow-x:auto;-webkit-overflow-scrolling:touch;-ms-overflow-style:-ms-autohiding-scrollbar}.table-responsive-xl>.table-bordered{border:0}}.table-responsive,.td-content>table,.td-box 
.row.section>table{display:block;width:100%;overflow-x:auto;-webkit-overflow-scrolling:touch;-ms-overflow-style:-ms-autohiding-scrollbar}.table-responsive>.table-bordered,.td-content>table>.table-bordered,.td-box .row.section>table>.table-bordered{border:0}.form-control{display:block;width:100%;height:-webkit-calc(2.25rem + 2px);height:calc(2.25rem + 2px);padding:.375rem .75rem;font-size:1rem;line-height:1.5;color:#495057;background-color:#fff;background-clip:padding-box;border:1px solid #ccc;border-radius:.25rem;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075);-webkit-transition:border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;transition:border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;-o-transition:border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:border-color .15s ease-in-out,box-shadow .15s ease-in-out,-webkit-box-shadow .15s ease-in-out}@media screen and (prefers-reduced-motion:reduce){.form-control{-webkit-transition:none;-o-transition:none;transition:none}}.form-control::-ms-expand{background-color:transparent;border:0}.form-control:focus{color:#495057;background-color:#fff;border-color:#6fa3ce;outline:0;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(48,99,142,.25);box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(48,99,142,.25)}.form-control::-webkit-input-placeholder{color:#888;opacity:1}.form-control::-moz-placeholder{color:#888;opacity:1}.form-control:-ms-input-placeholder{color:#888;opacity:1}.form-control::placeholder{color:#888;opacity:1}.form-control:disabled,.form-control[readonly]{background-color:#eee;opacity:1}select.form-control:focus::-ms-value{color:#495057;background-color:#fff}.form-control-file,.form-control-range{display:block;width:100%}.col-form-label{padding-top:-webkit-calc(.375rem + 1px);padding-top:calc(.375rem + 1px);padding-bottom:-webkit-calc(.375rem + 1px);padding-bottom:calc(.375rem + 1px);margin-bottom:0;font-size:inherit;line-height:1.5}.col-form-label-lg{padding-top:-webkit-calc(.5rem + 1px);padding-top:calc(.5rem + 1px);padding-bottom:-webkit-calc(.5rem + 1px);padding-bottom:calc(.5rem + 1px);font-size:1.25rem;line-height:1.5}.col-form-label-sm{padding-top:-webkit-calc(.25rem + 1px);padding-top:calc(.25rem + 1px);padding-bottom:-webkit-calc(.25rem + 1px);padding-bottom:calc(.25rem + 1px);font-size:.875rem;line-height:1.5}.form-control-plaintext{display:block;width:100%;padding-top:.375rem;padding-bottom:.375rem;margin-bottom:0;line-height:1.5;color:#222;background-color:transparent;border:solid transparent;border-width:1px 0}.form-control-plaintext.form-control-sm,.form-control-plaintext.form-control-lg{padding-right:0;padding-left:0}.form-control-sm{height:-webkit-calc(1.8125rem + 2px);height:calc(1.8125rem + 2px);padding:.25rem .5rem;font-size:.875rem;line-height:1.5;border-radius:.2rem}.form-control-lg{height:-webkit-calc(2.875rem + 2px);height:calc(2.875rem + 2px);padding:.5rem 
1rem;font-size:1.25rem;line-height:1.5;border-radius:.3rem}select.form-control[size],select.form-control[multiple]{height:auto}textarea.form-control{height:auto}.form-group{margin-bottom:1rem}.form-text{display:block;margin-top:.25rem}.form-row{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;margin-right:-5px;margin-left:-5px}.form-row>.col,.form-row>[class*=col-]{padding-right:5px;padding-left:5px}.form-check{position:relative;display:block;padding-left:1.25rem}.form-check-input{position:absolute;margin-top:.3rem;margin-left:-1.25rem}.form-check-input:disabled~.form-check-label{color:#888}.form-check-label{margin-bottom:0}.form-check-inline{display:-webkit-inline-box;display:-webkit-inline-flex;display:-ms-inline-flexbox;display:inline-flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;padding-left:0;margin-right:.75rem}.form-check-inline .form-check-input{position:static;margin-top:0;margin-right:.3125rem;margin-left:0}.valid-feedback{display:none;width:100%;margin-top:.25rem;font-size:80%;color:#3772ff}.valid-tooltip{position:absolute;top:100%;z-index:5;display:none;max-width:100%;padding:.25rem .5rem;margin-top:.1rem;font-size:.875rem;line-height:1.5;color:#fff;background-color:rgba(55,114,255,.9);border-radius:.25rem}.was-validated .form-control:valid,.form-control.is-valid,.was-validated .custom-select:valid,.custom-select.is-valid{border-color:#3772ff}.was-validated .form-control:valid:focus,.form-control.is-valid:focus,.was-validated .custom-select:valid:focus,.custom-select.is-valid:focus{border-color:#3772ff;-webkit-box-shadow:0 0 0 .2rem rgba(55,114,255,.25);box-shadow:0 0 0 .2rem rgba(55,114,255,.25)}.was-validated .form-control:valid~.valid-feedback,.was-validated .form-control:valid~.valid-tooltip,.form-control.is-valid~.valid-feedback,.form-control.is-valid~.valid-tooltip,.was-validated .custom-select:valid~.valid-feedback,.was-validated .custom-select:valid~.valid-tooltip,.custom-select.is-valid~.valid-feedback,.custom-select.is-valid~.valid-tooltip{display:block}.was-validated .form-control-file:valid~.valid-feedback,.was-validated .form-control-file:valid~.valid-tooltip,.form-control-file.is-valid~.valid-feedback,.form-control-file.is-valid~.valid-tooltip{display:block}.was-validated .form-check-input:valid~.form-check-label,.form-check-input.is-valid~.form-check-label{color:#3772ff}.was-validated .form-check-input:valid~.valid-feedback,.was-validated .form-check-input:valid~.valid-tooltip,.form-check-input.is-valid~.valid-feedback,.form-check-input.is-valid~.valid-tooltip{display:block}.was-validated .custom-control-input:valid~.custom-control-label,.custom-control-input.is-valid~.custom-control-label{color:#3772ff}.was-validated .custom-control-input:valid~.custom-control-label::before,.custom-control-input.is-valid~.custom-control-label::before{background-color:#b7ccff}.was-validated .custom-control-input:valid~.valid-feedback,.was-validated .custom-control-input:valid~.valid-tooltip,.custom-control-input.is-valid~.valid-feedback,.custom-control-input.is-valid~.valid-tooltip{display:block}.was-validated .custom-control-input:valid:checked~.custom-control-label::before,.custom-control-input.is-valid:checked~.custom-control-label::before{background:#6a96ff -webkit-gradient(linear,left top,left bottom,from(#80a6ff),to(#6a96ff))repeat-x;background:#6a96ff -webkit-linear-gradient(top,#80a6ff,#6a96ff)repeat-x;background:#6a96ff 
-o-linear-gradient(top,#80a6ff,#6a96ff)repeat-x;background:#6a96ff linear-gradient(180deg,#80a6ff,#6a96ff)repeat-x}.was-validated .custom-control-input:valid:focus~.custom-control-label::before,.custom-control-input.is-valid:focus~.custom-control-label::before{-webkit-box-shadow:0 0 0 1px #fff,0 0 0 .2rem rgba(55,114,255,.25);box-shadow:0 0 0 1px #fff,0 0 0 .2rem rgba(55,114,255,.25)}.was-validated .custom-file-input:valid~.custom-file-label,.custom-file-input.is-valid~.custom-file-label{border-color:#3772ff}.was-validated .custom-file-input:valid~.custom-file-label::after,.custom-file-input.is-valid~.custom-file-label::after{border-color:inherit}.was-validated .custom-file-input:valid~.valid-feedback,.was-validated .custom-file-input:valid~.valid-tooltip,.custom-file-input.is-valid~.valid-feedback,.custom-file-input.is-valid~.valid-tooltip{display:block}.was-validated .custom-file-input:valid:focus~.custom-file-label,.custom-file-input.is-valid:focus~.custom-file-label{-webkit-box-shadow:0 0 0 .2rem rgba(55,114,255,.25);box-shadow:0 0 0 .2rem rgba(55,114,255,.25)}.invalid-feedback{display:none;width:100%;margin-top:.25rem;font-size:80%;color:#ed6a5a}.invalid-tooltip{position:absolute;top:100%;z-index:5;display:none;max-width:100%;padding:.25rem .5rem;margin-top:.1rem;font-size:.875rem;line-height:1.5;color:#fff;background-color:rgba(237,106,90,.9);border-radius:.25rem}.was-validated .form-control:invalid,.form-control.is-invalid,.was-validated .custom-select:invalid,.custom-select.is-invalid{border-color:#ed6a5a}.was-validated .form-control:invalid:focus,.form-control.is-invalid:focus,.was-validated .custom-select:invalid:focus,.custom-select.is-invalid:focus{border-color:#ed6a5a;-webkit-box-shadow:0 0 0 .2rem rgba(237,106,90,.25);box-shadow:0 0 0 .2rem rgba(237,106,90,.25)}.was-validated .form-control:invalid~.invalid-feedback,.was-validated .form-control:invalid~.invalid-tooltip,.form-control.is-invalid~.invalid-feedback,.form-control.is-invalid~.invalid-tooltip,.was-validated .custom-select:invalid~.invalid-feedback,.was-validated .custom-select:invalid~.invalid-tooltip,.custom-select.is-invalid~.invalid-feedback,.custom-select.is-invalid~.invalid-tooltip{display:block}.was-validated .form-control-file:invalid~.invalid-feedback,.was-validated .form-control-file:invalid~.invalid-tooltip,.form-control-file.is-invalid~.invalid-feedback,.form-control-file.is-invalid~.invalid-tooltip{display:block}.was-validated .form-check-input:invalid~.form-check-label,.form-check-input.is-invalid~.form-check-label{color:#ed6a5a}.was-validated .form-check-input:invalid~.invalid-feedback,.was-validated .form-check-input:invalid~.invalid-tooltip,.form-check-input.is-invalid~.invalid-feedback,.form-check-input.is-invalid~.invalid-tooltip{display:block}.was-validated .custom-control-input:invalid~.custom-control-label,.custom-control-input.is-invalid~.custom-control-label{color:#ed6a5a}.was-validated .custom-control-input:invalid~.custom-control-label::before,.custom-control-input.is-invalid~.custom-control-label::before{background-color:#fad2cd}.was-validated .custom-control-input:invalid~.invalid-feedback,.was-validated .custom-control-input:invalid~.invalid-tooltip,.custom-control-input.is-invalid~.invalid-feedback,.custom-control-input.is-invalid~.invalid-tooltip{display:block}.was-validated .custom-control-input:invalid:checked~.custom-control-label::before,.custom-control-input.is-invalid:checked~.custom-control-label::before{background:#f29488 -webkit-gradient(linear,left top,left 
bottom,from(#f4a49a),to(#f29488))repeat-x;background:#f29488 -webkit-linear-gradient(top,#f4a49a,#f29488)repeat-x;background:#f29488 -o-linear-gradient(top,#f4a49a,#f29488)repeat-x;background:#f29488 linear-gradient(180deg,#f4a49a,#f29488)repeat-x}.was-validated .custom-control-input:invalid:focus~.custom-control-label::before,.custom-control-input.is-invalid:focus~.custom-control-label::before{-webkit-box-shadow:0 0 0 1px #fff,0 0 0 .2rem rgba(237,106,90,.25);box-shadow:0 0 0 1px #fff,0 0 0 .2rem rgba(237,106,90,.25)}.was-validated .custom-file-input:invalid~.custom-file-label,.custom-file-input.is-invalid~.custom-file-label{border-color:#ed6a5a}.was-validated .custom-file-input:invalid~.custom-file-label::after,.custom-file-input.is-invalid~.custom-file-label::after{border-color:inherit}.was-validated .custom-file-input:invalid~.invalid-feedback,.was-validated .custom-file-input:invalid~.invalid-tooltip,.custom-file-input.is-invalid~.invalid-feedback,.custom-file-input.is-invalid~.invalid-tooltip{display:block}.was-validated .custom-file-input:invalid:focus~.custom-file-label,.custom-file-input.is-invalid:focus~.custom-file-label{-webkit-box-shadow:0 0 0 .2rem rgba(237,106,90,.25);box-shadow:0 0 0 .2rem rgba(237,106,90,.25)}.form-inline{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-flow:row wrap;-ms-flex-flow:row wrap;flex-flow:row wrap;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center}.form-inline .form-check{width:100%}@media(min-width:576px){.form-inline label{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;margin-bottom:0}.form-inline .form-group{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-flex:0;-webkit-flex:0 0 auto;-ms-flex:0 0 auto;flex:none;-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-flow:row wrap;-ms-flex-flow:row wrap;flex-flow:row wrap;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;margin-bottom:0}.form-inline .form-control{display:inline-block;width:auto;vertical-align:middle}.form-inline .form-control-plaintext{display:inline-block}.form-inline .input-group,.form-inline .custom-select{width:auto}.form-inline .form-check{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;width:auto;padding-left:0}.form-inline .form-check-input{position:relative;margin-top:0;margin-right:.25rem;margin-left:0}.form-inline .custom-control{-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center}.form-inline .custom-control-label{margin-bottom:0}}.btn{display:inline-block;font-weight:400;text-align:center;white-space:nowrap;vertical-align:middle;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;border:1px solid transparent;padding:.375rem .75rem;font-size:1rem;line-height:1.5;border-radius:.25rem;-webkit-transition:color .15s 
ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;-o-transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out,-webkit-box-shadow .15s ease-in-out}@media screen and (prefers-reduced-motion:reduce){.btn{-webkit-transition:none;-o-transition:none;transition:none}}.btn:hover,.btn:focus{text-decoration:none}.btn:focus,.btn.focus{outline:0;-webkit-box-shadow:0 0 0 .2rem rgba(48,99,142,.25);box-shadow:0 0 0 .2rem rgba(48,99,142,.25)}.btn.disabled,.btn:disabled{opacity:.65;-webkit-box-shadow:none;box-shadow:none}.btn:not(:disabled):not(.disabled){cursor:pointer}.btn:not(:disabled):not(.disabled):active,.btn:not(:disabled):not(.disabled).active{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn:not(:disabled):not(.disabled):active:focus,.btn:not(:disabled):not(.disabled).active:focus{-webkit-box-shadow:0 0 0 .2rem rgba(48,99,142,.25),inset 0 3px 5px rgba(0,0,0,.125);box-shadow:0 0 0 .2rem rgba(48,99,142,.25),inset 0 3px 5px rgba(0,0,0,.125)}a.btn.disabled,fieldset:disabled a.btn{pointer-events:none}.btn-primary{color:#fff;background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x;background:#30638e -webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e -o-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x;border-color:#30638e;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075)}.btn-primary:hover{color:#fff;background:#264f71 -webkit-gradient(linear,left top,left bottom,from(#476987),to(#264f71))repeat-x;background:#264f71 -webkit-linear-gradient(top,#476987,#264f71)repeat-x;background:#264f71 -o-linear-gradient(top,#476987,#264f71)repeat-x;background:#264f71 linear-gradient(180deg,#476987,#264f71)repeat-x;border-color:#234868}.btn-primary:focus,.btn-primary.focus{-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(48,99,142,.5);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(48,99,142,.5)}.btn-primary.disabled,.btn-primary:disabled{color:#fff;background-color:#30638e;border-color:#30638e}.btn-primary:not(:disabled):not(.disabled):active,.btn-primary:not(:disabled):not(.disabled).active,.show>.btn-primary.dropdown-toggle{color:#fff;background-color:#234868;background-image:none;border-color:#20425e}.btn-primary:not(:disabled):not(.disabled):active:focus,.btn-primary:not(:disabled):not(.disabled).active:focus,.show>.btn-primary.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(48,99,142,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(48,99,142,.5)}.btn-secondary{color:#fff;background:#ffa630 -webkit-gradient(linear,left top,left bottom,from(#ffb34f),to(#FFA630))repeat-x;background:#ffa630 -webkit-linear-gradient(top,#ffb34f,#FFA630)repeat-x;background:#ffa630 
-o-linear-gradient(top,#ffb34f,#FFA630)repeat-x;background:#ffa630 linear-gradient(180deg,#ffb34f,#FFA630)repeat-x;border-color:#ffa630;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075)}.btn-secondary:hover{color:#fff;background:#ff960a -webkit-gradient(linear,left top,left bottom,from(#ffa52f),to(#ff960a))repeat-x;background:#ff960a -webkit-linear-gradient(top,#ffa52f,#ff960a)repeat-x;background:#ff960a -o-linear-gradient(top,#ffa52f,#ff960a)repeat-x;background:#ff960a linear-gradient(180deg,#ffa52f,#ff960a)repeat-x;border-color:#fc9000}.btn-secondary:focus,.btn-secondary.focus{-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(255,166,48,.5);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(255,166,48,.5)}.btn-secondary.disabled,.btn-secondary:disabled{color:#fff;background-color:#ffa630;border-color:#ffa630}.btn-secondary:not(:disabled):not(.disabled):active,.btn-secondary:not(:disabled):not(.disabled).active,.show>.btn-secondary.dropdown-toggle{color:#fff;background-color:#fc9000;background-image:none;border-color:#ef8800}.btn-secondary:not(:disabled):not(.disabled):active:focus,.btn-secondary:not(:disabled):not(.disabled).active:focus,.show>.btn-secondary.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(255,166,48,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(255,166,48,.5)}.btn-success{color:#fff;background:#3772ff -webkit-gradient(linear,left top,left bottom,from(#5587ff),to(#3772FF))repeat-x;background:#3772ff -webkit-linear-gradient(top,#5587ff,#3772FF)repeat-x;background:#3772ff -o-linear-gradient(top,#5587ff,#3772FF)repeat-x;background:#3772ff linear-gradient(180deg,#5587ff,#3772FF)repeat-x;border-color:#3772ff;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075)}.btn-success:hover{color:#fff;background:#1157ff -webkit-gradient(linear,left top,left bottom,from(#3470ff),to(#1157ff))repeat-x;background:#1157ff -webkit-linear-gradient(top,#3470ff,#1157ff)repeat-x;background:#1157ff -o-linear-gradient(top,#3470ff,#1157ff)repeat-x;background:#1157ff linear-gradient(180deg,#3470ff,#1157ff)repeat-x;border-color:#044eff}.btn-success:focus,.btn-success.focus{-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(55,114,255,.5);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(55,114,255,.5)}.btn-success.disabled,.btn-success:disabled{color:#fff;background-color:#3772ff;border-color:#3772ff}.btn-success:not(:disabled):not(.disabled):active,.btn-success:not(:disabled):not(.disabled).active,.show>.btn-success.dropdown-toggle{color:#fff;background-color:#044eff;background-image:none;border-color:#0049f6}.btn-success:not(:disabled):not(.disabled):active:focus,.btn-success:not(:disabled):not(.disabled).active:focus,.show>.btn-success.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(55,114,255,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(55,114,255,.5)}.btn-info{color:#222;background:#c0e0de -webkit-gradient(linear,left top,left bottom,from(#c9e5e3),to(#C0E0DE))repeat-x;background:#c0e0de -webkit-linear-gradient(top,#c9e5e3,#C0E0DE)repeat-x;background:#c0e0de 
-o-linear-gradient(top,#c9e5e3,#C0E0DE)repeat-x;background:#c0e0de linear-gradient(180deg,#c9e5e3,#C0E0DE)repeat-x;border-color:#c0e0de;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075)}.btn-info:hover{color:#fff;background:#a6d3d1 -webkit-gradient(linear,left top,left bottom,from(#b4dad8),to(#a6d3d1))repeat-x;background:#a6d3d1 -webkit-linear-gradient(top,#b4dad8,#a6d3d1)repeat-x;background:#a6d3d1 -o-linear-gradient(top,#b4dad8,#a6d3d1)repeat-x;background:#a6d3d1 linear-gradient(180deg,#b4dad8,#a6d3d1)repeat-x;border-color:#9ecfcc}.btn-info:focus,.btn-info.focus{-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(192,224,222,.5);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(192,224,222,.5)}.btn-info.disabled,.btn-info:disabled{color:#222;background-color:#c0e0de;border-color:#c0e0de}.btn-info:not(:disabled):not(.disabled):active,.btn-info:not(:disabled):not(.disabled).active,.show>.btn-info.dropdown-toggle{color:#fff;background-color:#9ecfcc;background-image:none;border-color:#95cbc8}.btn-info:not(:disabled):not(.disabled):active:focus,.btn-info:not(:disabled):not(.disabled).active:focus,.show>.btn-info.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(192,224,222,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(192,224,222,.5)}.btn-warning{color:#fff;background:#ed6a5a -webkit-gradient(linear,left top,left bottom,from(#f08073),to(#ED6A5A))repeat-x;background:#ed6a5a -webkit-linear-gradient(top,#f08073,#ED6A5A)repeat-x;background:#ed6a5a -o-linear-gradient(top,#f08073,#ED6A5A)repeat-x;background:#ed6a5a linear-gradient(180deg,#f08073,#ED6A5A)repeat-x;border-color:#ed6a5a;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075)}.btn-warning:hover{color:#fff;background:#e94b38 -webkit-gradient(linear,left top,left bottom,from(#ed6655),to(#e94b38))repeat-x;background:#e94b38 -webkit-linear-gradient(top,#ed6655,#e94b38)repeat-x;background:#e94b38 -o-linear-gradient(top,#ed6655,#e94b38)repeat-x;background:#e94b38 linear-gradient(180deg,#ed6655,#e94b38)repeat-x;border-color:#e8402c}.btn-warning:focus,.btn-warning.focus{-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(237,106,90,.5);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(237,106,90,.5)}.btn-warning.disabled,.btn-warning:disabled{color:#fff;background-color:#ed6a5a;border-color:#ed6a5a}.btn-warning:not(:disabled):not(.disabled):active,.btn-warning:not(:disabled):not(.disabled).active,.show>.btn-warning.dropdown-toggle{color:#fff;background-color:#e8402c;background-image:none;border-color:#e73621}.btn-warning:not(:disabled):not(.disabled):active:focus,.btn-warning:not(:disabled):not(.disabled).active:focus,.show>.btn-warning.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(237,106,90,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(237,106,90,.5)}.btn-danger{color:#fff;background:#ed6a5a -webkit-gradient(linear,left top,left bottom,from(#f08073),to(#ED6A5A))repeat-x;background:#ed6a5a -webkit-linear-gradient(top,#f08073,#ED6A5A)repeat-x;background:#ed6a5a -o-linear-gradient(top,#f08073,#ED6A5A)repeat-x;background:#ed6a5a 
linear-gradient(180deg,#f08073,#ED6A5A)repeat-x;border-color:#ed6a5a;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075)}.btn-danger:hover{color:#fff;background:#e94b38 -webkit-gradient(linear,left top,left bottom,from(#ed6655),to(#e94b38))repeat-x;background:#e94b38 -webkit-linear-gradient(top,#ed6655,#e94b38)repeat-x;background:#e94b38 -o-linear-gradient(top,#ed6655,#e94b38)repeat-x;background:#e94b38 linear-gradient(180deg,#ed6655,#e94b38)repeat-x;border-color:#e8402c}.btn-danger:focus,.btn-danger.focus{-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(237,106,90,.5);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(237,106,90,.5)}.btn-danger.disabled,.btn-danger:disabled{color:#fff;background-color:#ed6a5a;border-color:#ed6a5a}.btn-danger:not(:disabled):not(.disabled):active,.btn-danger:not(:disabled):not(.disabled).active,.show>.btn-danger.dropdown-toggle{color:#fff;background-color:#e8402c;background-image:none;border-color:#e73621}.btn-danger:not(:disabled):not(.disabled):active:focus,.btn-danger:not(:disabled):not(.disabled).active:focus,.show>.btn-danger.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(237,106,90,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(237,106,90,.5)}.btn-light{color:#222;background:#d3f3ee -webkit-gradient(linear,left top,left bottom,from(#daf5f1),to(#D3F3EE))repeat-x;background:#d3f3ee -webkit-linear-gradient(top,#daf5f1,#D3F3EE)repeat-x;background:#d3f3ee -o-linear-gradient(top,#daf5f1,#D3F3EE)repeat-x;background:#d3f3ee linear-gradient(180deg,#daf5f1,#D3F3EE)repeat-x;border-color:#d3f3ee;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075)}.btn-light:hover{color:#222;background:#b5ebe2 -webkit-gradient(linear,left top,left bottom,from(#c0eee7),to(#b5ebe2))repeat-x;background:#b5ebe2 -webkit-linear-gradient(top,#c0eee7,#b5ebe2)repeat-x;background:#b5ebe2 -o-linear-gradient(top,#c0eee7,#b5ebe2)repeat-x;background:#b5ebe2 linear-gradient(180deg,#c0eee7,#b5ebe2)repeat-x;border-color:#abe8df}.btn-light:focus,.btn-light.focus{-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(211,243,238,.5);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(211,243,238,.5)}.btn-light.disabled,.btn-light:disabled{color:#222;background-color:#d3f3ee;border-color:#d3f3ee}.btn-light:not(:disabled):not(.disabled):active,.btn-light:not(:disabled):not(.disabled).active,.show>.btn-light.dropdown-toggle{color:#222;background-color:#abe8df;background-image:none;border-color:#a1e5db}.btn-light:not(:disabled):not(.disabled):active:focus,.btn-light:not(:disabled):not(.disabled).active:focus,.show>.btn-light.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(211,243,238,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(211,243,238,.5)}.btn-dark{color:#fff;background:#403f4c -webkit-gradient(linear,left top,left bottom,from(#5d5c67),to(#403F4C))repeat-x;background:#403f4c -webkit-linear-gradient(top,#5d5c67,#403F4C)repeat-x;background:#403f4c -o-linear-gradient(top,#5d5c67,#403F4C)repeat-x;background:#403f4c linear-gradient(180deg,#5d5c67,#403F4C)repeat-x;border-color:#403f4c;-webkit-box-shadow:inset 0 1px 
0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075)}.btn-dark:hover{color:#fff;background:#2e2e37 -webkit-gradient(linear,left top,left bottom,from(#4e4d55),to(#2e2e37))repeat-x;background:#2e2e37 -webkit-linear-gradient(top,#4e4d55,#2e2e37)repeat-x;background:#2e2e37 -o-linear-gradient(top,#4e4d55,#2e2e37)repeat-x;background:#2e2e37 linear-gradient(180deg,#4e4d55,#2e2e37)repeat-x;border-color:#292830}.btn-dark:focus,.btn-dark.focus{-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(64,63,76,.5);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(64,63,76,.5)}.btn-dark.disabled,.btn-dark:disabled{color:#fff;background-color:#403f4c;border-color:#403f4c}.btn-dark:not(:disabled):not(.disabled):active,.btn-dark:not(:disabled):not(.disabled).active,.show>.btn-dark.dropdown-toggle{color:#fff;background-color:#292830;background-image:none;border-color:#232229}.btn-dark:not(:disabled):not(.disabled):active:focus,.btn-dark:not(:disabled):not(.disabled).active:focus,.show>.btn-dark.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(64,63,76,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(64,63,76,.5)}.btn-outline-primary{color:#30638e;background-color:transparent;background-image:none;border-color:#30638e}.btn-outline-primary:hover{color:#fff;background-color:#30638e;border-color:#30638e}.btn-outline-primary:focus,.btn-outline-primary.focus{-webkit-box-shadow:0 0 0 .2rem rgba(48,99,142,.5);box-shadow:0 0 0 .2rem rgba(48,99,142,.5)}.btn-outline-primary.disabled,.btn-outline-primary:disabled{color:#30638e;background-color:transparent}.btn-outline-primary:not(:disabled):not(.disabled):active,.btn-outline-primary:not(:disabled):not(.disabled).active,.show>.btn-outline-primary.dropdown-toggle{color:#fff;background-color:#30638e;border-color:#30638e}.btn-outline-primary:not(:disabled):not(.disabled):active:focus,.btn-outline-primary:not(:disabled):not(.disabled).active:focus,.show>.btn-outline-primary.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(48,99,142,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(48,99,142,.5)}.btn-outline-secondary{color:#ffa630;background-color:transparent;background-image:none;border-color:#ffa630}.btn-outline-secondary:hover{color:#fff;background-color:#ffa630;border-color:#ffa630}.btn-outline-secondary:focus,.btn-outline-secondary.focus{-webkit-box-shadow:0 0 0 .2rem rgba(255,166,48,.5);box-shadow:0 0 0 .2rem rgba(255,166,48,.5)}.btn-outline-secondary.disabled,.btn-outline-secondary:disabled{color:#ffa630;background-color:transparent}.btn-outline-secondary:not(:disabled):not(.disabled):active,.btn-outline-secondary:not(:disabled):not(.disabled).active,.show>.btn-outline-secondary.dropdown-toggle{color:#fff;background-color:#ffa630;border-color:#ffa630}.btn-outline-secondary:not(:disabled):not(.disabled):active:focus,.btn-outline-secondary:not(:disabled):not(.disabled).active:focus,.show>.btn-outline-secondary.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(255,166,48,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem 
rgba(255,166,48,.5)}.btn-outline-success{color:#3772ff;background-color:transparent;background-image:none;border-color:#3772ff}.btn-outline-success:hover{color:#fff;background-color:#3772ff;border-color:#3772ff}.btn-outline-success:focus,.btn-outline-success.focus{-webkit-box-shadow:0 0 0 .2rem rgba(55,114,255,.5);box-shadow:0 0 0 .2rem rgba(55,114,255,.5)}.btn-outline-success.disabled,.btn-outline-success:disabled{color:#3772ff;background-color:transparent}.btn-outline-success:not(:disabled):not(.disabled):active,.btn-outline-success:not(:disabled):not(.disabled).active,.show>.btn-outline-success.dropdown-toggle{color:#fff;background-color:#3772ff;border-color:#3772ff}.btn-outline-success:not(:disabled):not(.disabled):active:focus,.btn-outline-success:not(:disabled):not(.disabled).active:focus,.show>.btn-outline-success.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(55,114,255,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(55,114,255,.5)}.btn-outline-info{color:#c0e0de;background-color:transparent;background-image:none;border-color:#c0e0de}.btn-outline-info:hover{color:#222;background-color:#c0e0de;border-color:#c0e0de}.btn-outline-info:focus,.btn-outline-info.focus{-webkit-box-shadow:0 0 0 .2rem rgba(192,224,222,.5);box-shadow:0 0 0 .2rem rgba(192,224,222,.5)}.btn-outline-info.disabled,.btn-outline-info:disabled{color:#c0e0de;background-color:transparent}.btn-outline-info:not(:disabled):not(.disabled):active,.btn-outline-info:not(:disabled):not(.disabled).active,.show>.btn-outline-info.dropdown-toggle{color:#222;background-color:#c0e0de;border-color:#c0e0de}.btn-outline-info:not(:disabled):not(.disabled):active:focus,.btn-outline-info:not(:disabled):not(.disabled).active:focus,.show>.btn-outline-info.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(192,224,222,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(192,224,222,.5)}.btn-outline-warning{color:#ed6a5a;background-color:transparent;background-image:none;border-color:#ed6a5a}.btn-outline-warning:hover{color:#fff;background-color:#ed6a5a;border-color:#ed6a5a}.btn-outline-warning:focus,.btn-outline-warning.focus{-webkit-box-shadow:0 0 0 .2rem rgba(237,106,90,.5);box-shadow:0 0 0 .2rem rgba(237,106,90,.5)}.btn-outline-warning.disabled,.btn-outline-warning:disabled{color:#ed6a5a;background-color:transparent}.btn-outline-warning:not(:disabled):not(.disabled):active,.btn-outline-warning:not(:disabled):not(.disabled).active,.show>.btn-outline-warning.dropdown-toggle{color:#fff;background-color:#ed6a5a;border-color:#ed6a5a}.btn-outline-warning:not(:disabled):not(.disabled):active:focus,.btn-outline-warning:not(:disabled):not(.disabled).active:focus,.show>.btn-outline-warning.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(237,106,90,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(237,106,90,.5)}.btn-outline-danger{color:#ed6a5a;background-color:transparent;background-image:none;border-color:#ed6a5a}.btn-outline-danger:hover{color:#fff;background-color:#ed6a5a;border-color:#ed6a5a}.btn-outline-danger:focus,.btn-outline-danger.focus{-webkit-box-shadow:0 0 0 .2rem rgba(237,106,90,.5);box-shadow:0 0 0 .2rem 
rgba(237,106,90,.5)}.btn-outline-danger.disabled,.btn-outline-danger:disabled{color:#ed6a5a;background-color:transparent}.btn-outline-danger:not(:disabled):not(.disabled):active,.btn-outline-danger:not(:disabled):not(.disabled).active,.show>.btn-outline-danger.dropdown-toggle{color:#fff;background-color:#ed6a5a;border-color:#ed6a5a}.btn-outline-danger:not(:disabled):not(.disabled):active:focus,.btn-outline-danger:not(:disabled):not(.disabled).active:focus,.show>.btn-outline-danger.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(237,106,90,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(237,106,90,.5)}.btn-outline-light{color:#d3f3ee;background-color:transparent;background-image:none;border-color:#d3f3ee}.btn-outline-light:hover{color:#222;background-color:#d3f3ee;border-color:#d3f3ee}.btn-outline-light:focus,.btn-outline-light.focus{-webkit-box-shadow:0 0 0 .2rem rgba(211,243,238,.5);box-shadow:0 0 0 .2rem rgba(211,243,238,.5)}.btn-outline-light.disabled,.btn-outline-light:disabled{color:#d3f3ee;background-color:transparent}.btn-outline-light:not(:disabled):not(.disabled):active,.btn-outline-light:not(:disabled):not(.disabled).active,.show>.btn-outline-light.dropdown-toggle{color:#222;background-color:#d3f3ee;border-color:#d3f3ee}.btn-outline-light:not(:disabled):not(.disabled):active:focus,.btn-outline-light:not(:disabled):not(.disabled).active:focus,.show>.btn-outline-light.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(211,243,238,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(211,243,238,.5)}.btn-outline-dark{color:#403f4c;background-color:transparent;background-image:none;border-color:#403f4c}.btn-outline-dark:hover{color:#fff;background-color:#403f4c;border-color:#403f4c}.btn-outline-dark:focus,.btn-outline-dark.focus{-webkit-box-shadow:0 0 0 .2rem rgba(64,63,76,.5);box-shadow:0 0 0 .2rem rgba(64,63,76,.5)}.btn-outline-dark.disabled,.btn-outline-dark:disabled{color:#403f4c;background-color:transparent}.btn-outline-dark:not(:disabled):not(.disabled):active,.btn-outline-dark:not(:disabled):not(.disabled).active,.show>.btn-outline-dark.dropdown-toggle{color:#fff;background-color:#403f4c;border-color:#403f4c}.btn-outline-dark:not(:disabled):not(.disabled):active:focus,.btn-outline-dark:not(:disabled):not(.disabled).active:focus,.show>.btn-outline-dark.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(64,63,76,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(64,63,76,.5)}.btn-link{font-weight:400;color:#3176d9;background-color:transparent}.btn-link:hover{color:#1e53a0;text-decoration:none;background-color:transparent;border-color:transparent}.btn-link:focus,.btn-link.focus{text-decoration:none;border-color:transparent;-webkit-box-shadow:none;box-shadow:none}.btn-link:disabled,.btn-link.disabled{color:#888;pointer-events:none}.btn-lg,.btn-group-lg>.btn{padding:.5rem 1rem;font-size:1.25rem;line-height:1.5;border-radius:.3rem}.btn-sm,.btn-group-sm>.btn{padding:.25rem .5rem;font-size:.875rem;line-height:1.5;border-radius:.2rem}.btn-block{display:block;width:100%}.btn-block+.btn-block{margin-top:.5rem}input[type=submit].btn-block,input[type=reset].btn-block,input[type=button].btn-block{width:100%}.fade{-webkit-transition:opacity .15s linear;-o-transition:opacity .15s linear;transition:opacity .15s linear}@media screen and 
(prefers-reduced-motion:reduce){.fade{-webkit-transition:none;-o-transition:none;transition:none}}.fade:not(.show){opacity:0}.collapse:not(.show){display:none}.collapsing{position:relative;height:0;overflow:hidden;-webkit-transition:height .35s ease;-o-transition:height .35s ease;transition:height .35s ease}@media screen and (prefers-reduced-motion:reduce){.collapsing{-webkit-transition:none;-o-transition:none;transition:none}}.dropup,.dropright,.dropdown,.dropleft{position:relative}.dropdown-toggle::after{display:inline-block;width:0;height:0;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.dropdown-toggle:empty::after{margin-left:0}.dropdown-menu{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:10rem;padding:.5rem 0;margin:.125rem 0 0;font-size:1rem;color:#222;text-align:left;list-style:none;background-color:#fff;background-clip:padding-box;border:1px solid rgba(0,0,0,.15);border-radius:.25rem;-webkit-box-shadow:0 .5rem 1rem rgba(0,0,0,.175);box-shadow:0 .5rem 1rem rgba(0,0,0,.175)}.dropdown-menu-right{right:0;left:auto}.dropup .dropdown-menu{top:auto;bottom:100%;margin-top:0;margin-bottom:.125rem}.dropup .dropdown-toggle::after{display:inline-block;width:0;height:0;margin-left:.255em;vertical-align:.255em;content:"";border-top:0;border-right:.3em solid transparent;border-bottom:.3em solid;border-left:.3em solid transparent}.dropup .dropdown-toggle:empty::after{margin-left:0}.dropright .dropdown-menu{top:0;right:auto;left:100%;margin-top:0;margin-left:.125rem}.dropright .dropdown-toggle::after{display:inline-block;width:0;height:0;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid transparent;border-right:0;border-bottom:.3em solid transparent;border-left:.3em solid}.dropright .dropdown-toggle:empty::after{margin-left:0}.dropright .dropdown-toggle::after{vertical-align:0}.dropleft .dropdown-menu{top:0;right:100%;left:auto;margin-top:0;margin-right:.125rem}.dropleft .dropdown-toggle::after{display:inline-block;width:0;height:0;margin-left:.255em;vertical-align:.255em;content:""}.dropleft .dropdown-toggle::after{display:none}.dropleft .dropdown-toggle::before{display:inline-block;width:0;height:0;margin-right:.255em;vertical-align:.255em;content:"";border-top:.3em solid transparent;border-right:.3em solid;border-bottom:.3em solid transparent}.dropleft .dropdown-toggle:empty::after{margin-left:0}.dropleft .dropdown-toggle::before{vertical-align:0}.dropdown-menu[x-placement^=top],.dropdown-menu[x-placement^=right],.dropdown-menu[x-placement^=bottom],.dropdown-menu[x-placement^=left]{right:auto;bottom:auto}.dropdown-divider{height:0;margin:.5rem 0;overflow:hidden;border-top:1px solid #eee}.dropdown-item{display:block;width:100%;padding:.25rem 1.5rem;clear:both;font-weight:400;color:#222;text-align:inherit;white-space:nowrap;background-color:transparent;border:0}.dropdown-item:hover,.dropdown-item:focus{color:#151515;text-decoration:none;background:#f8f9fa -webkit-gradient(linear,left top,left bottom,from(#f9fafb),to(#f8f9fa))repeat-x;background:#f8f9fa -webkit-linear-gradient(top,#f9fafb,#f8f9fa)repeat-x;background:#f8f9fa -o-linear-gradient(top,#f9fafb,#f8f9fa)repeat-x;background:#f8f9fa linear-gradient(180deg,#f9fafb,#f8f9fa)repeat-x}.dropdown-item.active,.dropdown-item:active{color:#fff;text-decoration:none;background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x;background:#30638e 
-webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e -o-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x}.dropdown-item.disabled,.dropdown-item:disabled{color:#888;background-color:transparent;background-image:none}.dropdown-menu.show{display:block}.dropdown-header{display:block;padding:.5rem 1.5rem;margin-bottom:0;font-size:.875rem;color:#888;white-space:nowrap}.dropdown-item-text{display:block;padding:.25rem 1.5rem;color:#222}.btn-group,.btn-group-vertical{position:relative;display:-webkit-inline-box;display:-webkit-inline-flex;display:-ms-inline-flexbox;display:inline-flex;vertical-align:middle}.btn-group>.btn,.btn-group-vertical>.btn{position:relative;-webkit-box-flex:0;-webkit-flex:0 1 auto;-ms-flex:0 1 auto;flex:initial}.btn-group>.btn:hover,.btn-group-vertical>.btn:hover{z-index:1}.btn-group>.btn:focus,.btn-group>.btn:active,.btn-group>.btn.active,.btn-group-vertical>.btn:focus,.btn-group-vertical>.btn:active,.btn-group-vertical>.btn.active{z-index:1}.btn-group .btn+.btn,.btn-group .btn+.btn-group,.btn-group .btn-group+.btn,.btn-group .btn-group+.btn-group,.btn-group-vertical .btn+.btn,.btn-group-vertical .btn+.btn-group,.btn-group-vertical .btn-group+.btn,.btn-group-vertical .btn-group+.btn-group{margin-left:-1px}.btn-toolbar{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;-webkit-box-pack:start;-webkit-justify-content:flex-start;-ms-flex-pack:start;justify-content:flex-start}.btn-toolbar .input-group{width:auto}.btn-group>.btn:first-child{margin-left:0}.btn-group>.btn:not(:last-child):not(.dropdown-toggle),.btn-group>.btn-group:not(:last-child)>.btn{border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn:not(:first-child),.btn-group>.btn-group:not(:first-child)>.btn{border-top-left-radius:0;border-bottom-left-radius:0}.dropdown-toggle-split{padding-right:.5625rem;padding-left:.5625rem}.dropdown-toggle-split::after,.dropup .dropdown-toggle-split::after,.dropright .dropdown-toggle-split::after{margin-left:0}.dropleft .dropdown-toggle-split::before{margin-right:0}.btn-sm+.dropdown-toggle-split,.btn-group-sm>.btn+.dropdown-toggle-split{padding-right:.375rem;padding-left:.375rem}.btn-lg+.dropdown-toggle-split,.btn-group-lg>.btn+.dropdown-toggle-split{padding-right:.75rem;padding-left:.75rem}.btn-group.show .dropdown-toggle{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn-group.show .dropdown-toggle.btn-link{-webkit-box-shadow:none;box-shadow:none}.btn-group-vertical{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:start;-webkit-align-items:flex-start;-ms-flex-align:start;align-items:flex-start;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center}.btn-group-vertical .btn,.btn-group-vertical 
.btn-group{width:100%}.btn-group-vertical>.btn+.btn,.btn-group-vertical>.btn+.btn-group,.btn-group-vertical>.btn-group+.btn,.btn-group-vertical>.btn-group+.btn-group{margin-top:-1px;margin-left:0}.btn-group-vertical>.btn:not(:last-child):not(.dropdown-toggle),.btn-group-vertical>.btn-group:not(:last-child)>.btn{border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn:not(:first-child),.btn-group-vertical>.btn-group:not(:first-child)>.btn{border-top-left-radius:0;border-top-right-radius:0}.btn-group-toggle>.btn,.btn-group-toggle>.btn-group>.btn{margin-bottom:0}.btn-group-toggle>.btn input[type=radio],.btn-group-toggle>.btn input[type=checkbox],.btn-group-toggle>.btn-group>.btn input[type=radio],.btn-group-toggle>.btn-group>.btn input[type=checkbox]{position:absolute;clip:rect(0,0,0,0);pointer-events:none}.input-group{position:relative;display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;-webkit-box-align:stretch;-webkit-align-items:stretch;-ms-flex-align:stretch;align-items:stretch;width:100%}.input-group>.form-control,.input-group>.custom-select,.input-group>.custom-file{position:relative;-webkit-box-flex:1;-webkit-flex:1 1 auto;-ms-flex:1 1 auto;flex:auto;width:1%;margin-bottom:0}.input-group>.form-control+.form-control,.input-group>.form-control+.custom-select,.input-group>.form-control+.custom-file,.input-group>.custom-select+.form-control,.input-group>.custom-select+.custom-select,.input-group>.custom-select+.custom-file,.input-group>.custom-file+.form-control,.input-group>.custom-file+.custom-select,.input-group>.custom-file+.custom-file{margin-left:-1px}.input-group>.form-control:focus,.input-group>.custom-select:focus,.input-group>.custom-file .custom-file-input:focus~.custom-file-label{z-index:3}.input-group>.custom-file .custom-file-input:focus{z-index:4}.input-group>.form-control:not(:last-child),.input-group>.custom-select:not(:last-child){border-top-right-radius:0;border-bottom-right-radius:0}.input-group>.form-control:not(:first-child),.input-group>.custom-select:not(:first-child){border-top-left-radius:0;border-bottom-left-radius:0}.input-group>.custom-file{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center}.input-group>.custom-file:not(:last-child) .custom-file-label,.input-group>.custom-file:not(:last-child) .custom-file-label::after{border-top-right-radius:0;border-bottom-right-radius:0}.input-group>.custom-file:not(:first-child) .custom-file-label{border-top-left-radius:0;border-bottom-left-radius:0}.input-group-prepend,.input-group-append{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex}.input-group-prepend .btn,.input-group-append .btn{position:relative;z-index:2}.input-group-prepend .btn+.btn,.input-group-prepend .btn+.input-group-text,.input-group-prepend .input-group-text+.input-group-text,.input-group-prepend .input-group-text+.btn,.input-group-append .btn+.btn,.input-group-append .btn+.input-group-text,.input-group-append .input-group-text+.input-group-text,.input-group-append .input-group-text+.btn{margin-left:-1px}.input-group-prepend{margin-right:-1px}.input-group-append{margin-left:-1px}.input-group-text{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;padding:.375rem 
.75rem;margin-bottom:0;font-size:1rem;font-weight:400;line-height:1.5;color:#495057;text-align:center;white-space:nowrap;background-color:#eee;border:1px solid #ccc;border-radius:.25rem}.input-group-text input[type=radio],.input-group-text input[type=checkbox]{margin-top:0}.input-group-lg>.form-control,.input-group-lg>.input-group-prepend>.input-group-text,.input-group-lg>.input-group-append>.input-group-text,.input-group-lg>.input-group-prepend>.btn,.input-group-lg>.input-group-append>.btn{height:-webkit-calc(2.875rem + 2px);height:calc(2.875rem + 2px);padding:.5rem 1rem;font-size:1.25rem;line-height:1.5;border-radius:.3rem}.input-group-sm>.form-control,.input-group-sm>.input-group-prepend>.input-group-text,.input-group-sm>.input-group-append>.input-group-text,.input-group-sm>.input-group-prepend>.btn,.input-group-sm>.input-group-append>.btn{height:-webkit-calc(1.8125rem + 2px);height:calc(1.8125rem + 2px);padding:.25rem .5rem;font-size:.875rem;line-height:1.5;border-radius:.2rem}.input-group>.input-group-prepend>.btn,.input-group>.input-group-prepend>.input-group-text,.input-group>.input-group-append:not(:last-child)>.btn,.input-group>.input-group-append:not(:last-child)>.input-group-text,.input-group>.input-group-append:last-child>.btn:not(:last-child):not(.dropdown-toggle),.input-group>.input-group-append:last-child>.input-group-text:not(:last-child){border-top-right-radius:0;border-bottom-right-radius:0}.input-group>.input-group-append>.btn,.input-group>.input-group-append>.input-group-text,.input-group>.input-group-prepend:not(:first-child)>.btn,.input-group>.input-group-prepend:not(:first-child)>.input-group-text,.input-group>.input-group-prepend:first-child>.btn:not(:first-child),.input-group>.input-group-prepend:first-child>.input-group-text:not(:first-child){border-top-left-radius:0;border-bottom-left-radius:0}.custom-control{position:relative;display:block;min-height:1.5rem;padding-left:1.5rem}.custom-control-inline{display:-webkit-inline-box;display:-webkit-inline-flex;display:-ms-inline-flexbox;display:inline-flex;margin-right:1rem}.custom-control-input{position:absolute;z-index:-1;opacity:0}.custom-control-input:checked~.custom-control-label::before{color:#fff;background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x;background:#30638e -webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e -o-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x;-webkit-box-shadow:none;box-shadow:none}.custom-control-input:focus~.custom-control-label::before{-webkit-box-shadow:0 0 0 1px #fff,0 0 0 .2rem rgba(48,99,142,.25);box-shadow:0 0 0 1px #fff,0 0 0 .2rem rgba(48,99,142,.25)}.custom-control-input:active~.custom-control-label::before{color:#fff;background-color:#95bbdb;-webkit-box-shadow:none;box-shadow:none}.custom-control-input:disabled~.custom-control-label{color:#888}.custom-control-input:disabled~.custom-control-label::before{background-color:#eee}.custom-control-label{position:relative;margin-bottom:0}.custom-control-label::before{position:absolute;top:.25rem;left:-1.5rem;display:block;width:1rem;height:1rem;pointer-events:none;content:"";-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;background-color:#dee2e6;-webkit-box-shadow:inset 0 .25rem .25rem rgba(0,0,0,.1);box-shadow:inset 0 .25rem .25rem 
rgba(0,0,0,.1)}.custom-control-label::after{position:absolute;top:.25rem;left:-1.5rem;display:block;width:1rem;height:1rem;content:"";background-repeat:no-repeat;background-position:50%;-webkit-background-size:50% 50%;background-size:50% 50%}.custom-checkbox .custom-control-label::before{border-radius:.25rem}.custom-checkbox .custom-control-input:checked~.custom-control-label::before{background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x;background:#30638e -webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e -o-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x}.custom-checkbox .custom-control-input:checked~.custom-control-label::after{background-image:url("data:image/svg+xml;charset=utf8,%3Csvg xmlns=%22http://www.w3.org/2000/svg%22 viewBox=%220 0 8 8%22%3E%3Cpath fill=%22%23fff%22 d=%22M6.564.75l-3.59 3.612-1.538-1.55L0 4.26 2.974 7.25 8 2.193z%22/%3E%3C/svg%3E")}.custom-checkbox .custom-control-input:indeterminate~.custom-control-label::before{background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x;background:#30638e -webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e -o-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x;-webkit-box-shadow:none;box-shadow:none}.custom-checkbox .custom-control-input:indeterminate~.custom-control-label::after{background-image:url("data:image/svg+xml;charset=utf8,%3Csvg xmlns=%22http://www.w3.org/2000/svg%22 viewBox=%220 0 4 4%22%3E%3Cpath stroke=%22%23fff%22 d=%22M0 2h4%22/%3E%3C/svg%3E")}.custom-checkbox .custom-control-input:disabled:checked~.custom-control-label::before{background-color:rgba(48,99,142,.5)}.custom-checkbox .custom-control-input:disabled:indeterminate~.custom-control-label::before{background-color:rgba(48,99,142,.5)}.custom-radio .custom-control-label::before{border-radius:50%}.custom-radio .custom-control-input:checked~.custom-control-label::before{background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x;background:#30638e -webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e -o-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x}.custom-radio .custom-control-input:checked~.custom-control-label::after{background-image:url("data:image/svg+xml;charset=utf8,%3Csvg xmlns=%22http://www.w3.org/2000/svg%22 viewBox=%22-4 -4 8 8%22%3E%3Ccircle r=%223%22 fill=%22%23fff%22/%3E%3C/svg%3E")}.custom-radio .custom-control-input:disabled:checked~.custom-control-label::before{background-color:rgba(48,99,142,.5)}.custom-select{display:inline-block;width:100%;height:-webkit-calc(2.25rem + 2px);height:calc(2.25rem + 2px);padding:.375rem 1.75rem .375rem .75rem;line-height:1.5;color:#495057;vertical-align:middle;background:#fff url("data:image/svg+xml;charset=utf8,%3Csvg xmlns=%22http://www.w3.org/2000/svg%22 viewBox=%220 0 4 5%22%3E%3Cpath fill=%22%23333%22 d=%22M2 0 0 2h4zm0 5L0 3h4z%22/%3E%3C/svg%3E")no-repeat right .75rem center;-webkit-background-size:8px 10px;background-size:8px 10px;border:1px solid #ccc;border-radius:.25rem;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,.075);box-shadow:inset 0 1px 2px rgba(0,0,0,.075);-webkit-appearance:none;-moz-appearance:none;appearance:none}.custom-select:focus{border-color:#6fa3ce;outline:0;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,.075),0 0 0 
.2rem rgba(111,163,206,.5);box-shadow:inset 0 1px 2px rgba(0,0,0,.075),0 0 0 .2rem rgba(111,163,206,.5)}.custom-select:focus::-ms-value{color:#495057;background-color:#fff}.custom-select[multiple],.custom-select[size]:not([size="1"]){height:auto;padding-right:.75rem;background-image:none}.custom-select:disabled{color:#888;background-color:#eee}.custom-select::-ms-expand{opacity:0}.custom-select-sm{height:-webkit-calc(1.8125rem + 2px);height:calc(1.8125rem + 2px);padding-top:.375rem;padding-bottom:.375rem;font-size:75%}.custom-select-lg{height:-webkit-calc(2.875rem + 2px);height:calc(2.875rem + 2px);padding-top:.375rem;padding-bottom:.375rem;font-size:125%}.custom-file{position:relative;display:inline-block;width:100%;height:-webkit-calc(2.25rem + 2px);height:calc(2.25rem + 2px);margin-bottom:0}.custom-file-input{position:relative;z-index:2;width:100%;height:-webkit-calc(2.25rem + 2px);height:calc(2.25rem + 2px);margin:0;opacity:0}.custom-file-input:focus~.custom-file-label{border-color:#6fa3ce;-webkit-box-shadow:0 0 0 .2rem rgba(48,99,142,.25);box-shadow:0 0 0 .2rem rgba(48,99,142,.25)}.custom-file-input:focus~.custom-file-label::after{border-color:#6fa3ce}.custom-file-input:disabled~.custom-file-label{background-color:#eee}.custom-file-input:lang(en)~.custom-file-label::after{content:"Browse"}.custom-file-label{position:absolute;top:0;right:0;left:0;z-index:1;height:-webkit-calc(2.25rem + 2px);height:calc(2.25rem + 2px);padding:.375rem .75rem;line-height:1.5;color:#495057;background-color:#fff;border:1px solid #ccc;border-radius:.25rem;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.custom-file-label::after{position:absolute;top:0;right:0;bottom:0;z-index:3;display:block;height:2.25rem;padding:.375rem .75rem;line-height:1.5;color:#495057;content:"Browse";background:#eee -webkit-gradient(linear,left top,left bottom,from(#f1f1f1),to(#eee))repeat-x;background:#eee -webkit-linear-gradient(top,#f1f1f1,#eee)repeat-x;background:#eee -o-linear-gradient(top,#f1f1f1,#eee)repeat-x;background:#eee linear-gradient(180deg,#f1f1f1,#eee)repeat-x;border-left:1px solid #ccc;border-radius:0 .25rem .25rem 0}.custom-range{width:100%;padding-left:0;background-color:transparent;-webkit-appearance:none;-moz-appearance:none;appearance:none}.custom-range:focus{outline:none}.custom-range:focus::-webkit-slider-thumb{-webkit-box-shadow:0 0 0 1px #fff,0 0 0 .2rem rgba(48,99,142,.25);box-shadow:0 0 0 1px #fff,0 0 0 .2rem rgba(48,99,142,.25)}.custom-range:focus::-moz-range-thumb{box-shadow:0 0 0 1px #fff,0 0 0 .2rem rgba(48,99,142,.25)}.custom-range:focus::-ms-thumb{box-shadow:0 0 0 1px #fff,0 0 0 .2rem rgba(48,99,142,.25)}.custom-range::-moz-focus-outer{border:0}.custom-range::-webkit-slider-thumb{width:1rem;height:1rem;margin-top:-.25rem;background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x;background:#30638e -webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x;border:0;border-radius:1rem;-webkit-box-shadow:0 .1rem .25rem rgba(0,0,0,.1);box-shadow:0 .1rem .25rem rgba(0,0,0,.1);-webkit-transition:background-color .15s ease-in-out,border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;-o-transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:background-color .15s 
ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;-webkit-appearance:none;appearance:none}@media screen and (prefers-reduced-motion:reduce){.custom-range::-webkit-slider-thumb{-webkit-transition:none;-o-transition:none;transition:none}}.custom-range::-webkit-slider-thumb:active{background:#95bbdb -webkit-gradient(linear,left top,left bottom,from(#a5c5e1),to(#95bbdb))repeat-x;background:#95bbdb -webkit-linear-gradient(top,#a5c5e1,#95bbdb)repeat-x;background:#95bbdb linear-gradient(180deg,#a5c5e1,#95bbdb)repeat-x}.custom-range::-webkit-slider-runnable-track{width:100%;height:.5rem;color:transparent;cursor:pointer;background-color:#dee2e6;border-color:transparent;border-radius:1rem;-webkit-box-shadow:inset 0 .25rem .25rem rgba(0,0,0,.1);box-shadow:inset 0 .25rem .25rem rgba(0,0,0,.1)}.custom-range::-moz-range-thumb{width:1rem;height:1rem;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x;border:0;border-radius:1rem;box-shadow:0 .1rem .25rem rgba(0,0,0,.1);-webkit-transition:background-color .15s ease-in-out,border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;-o-transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;-moz-appearance:none;appearance:none}@media screen and (prefers-reduced-motion:reduce){.custom-range::-moz-range-thumb{-webkit-transition:none;-o-transition:none;transition:none}}.custom-range::-moz-range-thumb:active{background:#95bbdb linear-gradient(180deg,#a5c5e1,#95bbdb)repeat-x}.custom-range::-moz-range-track{width:100%;height:.5rem;color:transparent;cursor:pointer;background-color:#dee2e6;border-color:transparent;border-radius:1rem;box-shadow:inset 0 .25rem .25rem rgba(0,0,0,.1)}.custom-range::-ms-thumb{width:1rem;height:1rem;margin-top:0;margin-right:.2rem;margin-left:.2rem;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x;border:0;border-radius:1rem;box-shadow:0 .1rem .25rem rgba(0,0,0,.1);-webkit-transition:background-color .15s ease-in-out,border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;-o-transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;appearance:none}@media screen and (prefers-reduced-motion:reduce){.custom-range::-ms-thumb{-webkit-transition:none;-o-transition:none;transition:none}}.custom-range::-ms-thumb:active{background:#95bbdb linear-gradient(180deg,#a5c5e1,#95bbdb)repeat-x}.custom-range::-ms-track{width:100%;height:.5rem;color:transparent;cursor:pointer;background-color:transparent;border-color:transparent;border-width:.5rem;box-shadow:inset 0 .25rem .25rem 
rgba(0,0,0,.1)}.custom-range::-ms-fill-lower{background-color:#dee2e6;border-radius:1rem}.custom-range::-ms-fill-upper{margin-right:15px;background-color:#dee2e6;border-radius:1rem}.custom-control-label::before,.custom-file-label,.custom-select{-webkit-transition:background-color .15s ease-in-out,border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;-o-transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out,-webkit-box-shadow .15s ease-in-out}@media screen and (prefers-reduced-motion:reduce){.custom-control-label::before,.custom-file-label,.custom-select{-webkit-transition:none;-o-transition:none;transition:none}}.nav{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;padding-left:0;margin-bottom:0;list-style:none}.nav-link{display:block;padding:.5rem 1rem}.nav-link:hover,.nav-link:focus{text-decoration:none}.nav-link.disabled{color:#888}.nav-tabs{border-bottom:1px solid #dee2e6}.nav-tabs .nav-item{margin-bottom:-1px}.nav-tabs .nav-link{border:1px solid transparent;border-top-left-radius:.25rem;border-top-right-radius:.25rem}.nav-tabs .nav-link:hover,.nav-tabs .nav-link:focus{border-color:#eee #eee #dee2e6}.nav-tabs .nav-link.disabled{color:#888;background-color:transparent;border-color:transparent}.nav-tabs .nav-link.active,.nav-tabs .nav-item.show .nav-link{color:#495057;background-color:#fff;border-color:#dee2e6 #dee2e6 #fff}.nav-tabs .dropdown-menu{margin-top:-1px;border-top-left-radius:0;border-top-right-radius:0}.nav-pills .nav-link{border-radius:.25rem}.nav-pills .nav-link.active,.nav-pills .show>.nav-link{color:#fff;background-color:#30638e}.nav-fill .nav-item{-webkit-box-flex:1;-webkit-flex:1 1 auto;-ms-flex:1 1 auto;flex:auto;text-align:center}.nav-justified .nav-item{-webkit-flex-basis:0;-ms-flex-preferred-size:0;flex-basis:0;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;text-align:center}.tab-content>.tab-pane{display:none}.tab-content>.active{display:block}.navbar{position:relative;display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between;padding:.5rem 
1rem}.navbar>.container,.navbar>.container-fluid{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between}.navbar-brand{display:inline-block;padding-top:.3125rem;padding-bottom:.3125rem;margin-right:1rem;font-size:1.25rem;line-height:inherit;white-space:nowrap}.navbar-brand:hover,.navbar-brand:focus{text-decoration:none}.navbar-nav{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;padding-left:0;margin-bottom:0;list-style:none}.navbar-nav .nav-link{padding-right:0;padding-left:0}.navbar-nav .dropdown-menu{position:static;float:none}.navbar-text{display:inline-block;padding-top:.5rem;padding-bottom:.5rem}.navbar-collapse{-webkit-flex-basis:100%;-ms-flex-preferred-size:100%;flex-basis:100%;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center}.navbar-toggler{padding:.25rem .75rem;font-size:1.25rem;line-height:1;background-color:transparent;border:1px solid transparent;border-radius:.25rem}.navbar-toggler:hover,.navbar-toggler:focus{text-decoration:none}.navbar-toggler:not(:disabled):not(.disabled){cursor:pointer}.navbar-toggler-icon{display:inline-block;width:1.5em;height:1.5em;vertical-align:middle;content:"";background:no-repeat 50%;-webkit-background-size:100% 100%;background-size:100% 100%}@media(max-width:575.98px){.navbar-expand-sm>.container,.navbar-expand-sm>.container-fluid{padding-right:0;padding-left:0}}@media(min-width:576px){.navbar-expand-sm{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-flow:row nowrap;-ms-flex-flow:row nowrap;flex-flow:row nowrap;-webkit-box-pack:start;-webkit-justify-content:flex-start;-ms-flex-pack:start;justify-content:flex-start}.navbar-expand-sm .navbar-nav{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row}.navbar-expand-sm .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-sm .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-sm>.container,.navbar-expand-sm>.container-fluid{-webkit-flex-wrap:nowrap;-ms-flex-wrap:nowrap;flex-wrap:nowrap}.navbar-expand-sm .navbar-collapse{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important;-webkit-flex-basis:auto;-ms-flex-preferred-size:auto;flex-basis:auto}.navbar-expand-sm .navbar-toggler{display:none}}@media(max-width:767.98px){.navbar-expand-md>.container,.navbar-expand-md>.container-fluid{padding-right:0;padding-left:0}}@media(min-width:768px){.navbar-expand-md{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-flow:row nowrap;-ms-flex-flow:row nowrap;flex-flow:row nowrap;-webkit-box-pack:start;-webkit-justify-content:flex-start;-ms-flex-pack:start;justify-content:flex-start}.navbar-expand-md .navbar-nav{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row}.navbar-expand-md .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-md .navbar-nav 
.nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-md>.container,.navbar-expand-md>.container-fluid{-webkit-flex-wrap:nowrap;-ms-flex-wrap:nowrap;flex-wrap:nowrap}.navbar-expand-md .navbar-collapse{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important;-webkit-flex-basis:auto;-ms-flex-preferred-size:auto;flex-basis:auto}.navbar-expand-md .navbar-toggler{display:none}}@media(max-width:991.98px){.navbar-expand-lg>.container,.navbar-expand-lg>.container-fluid{padding-right:0;padding-left:0}}@media(min-width:992px){.navbar-expand-lg{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-flow:row nowrap;-ms-flex-flow:row nowrap;flex-flow:row nowrap;-webkit-box-pack:start;-webkit-justify-content:flex-start;-ms-flex-pack:start;justify-content:flex-start}.navbar-expand-lg .navbar-nav{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row}.navbar-expand-lg .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-lg .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-lg>.container,.navbar-expand-lg>.container-fluid{-webkit-flex-wrap:nowrap;-ms-flex-wrap:nowrap;flex-wrap:nowrap}.navbar-expand-lg .navbar-collapse{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important;-webkit-flex-basis:auto;-ms-flex-preferred-size:auto;flex-basis:auto}.navbar-expand-lg .navbar-toggler{display:none}}@media(max-width:1199.98px){.navbar-expand-xl>.container,.navbar-expand-xl>.container-fluid{padding-right:0;padding-left:0}}@media(min-width:1200px){.navbar-expand-xl{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-flow:row nowrap;-ms-flex-flow:row nowrap;flex-flow:row nowrap;-webkit-box-pack:start;-webkit-justify-content:flex-start;-ms-flex-pack:start;justify-content:flex-start}.navbar-expand-xl .navbar-nav{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row}.navbar-expand-xl .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-xl .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-xl>.container,.navbar-expand-xl>.container-fluid{-webkit-flex-wrap:nowrap;-ms-flex-wrap:nowrap;flex-wrap:nowrap}.navbar-expand-xl .navbar-collapse{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important;-webkit-flex-basis:auto;-ms-flex-preferred-size:auto;flex-basis:auto}.navbar-expand-xl .navbar-toggler{display:none}}.navbar-expand{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-flow:row nowrap;-ms-flex-flow:row nowrap;flex-flow:row nowrap;-webkit-box-pack:start;-webkit-justify-content:flex-start;-ms-flex-pack:start;justify-content:flex-start}.navbar-expand>.container,.navbar-expand>.container-fluid{padding-right:0;padding-left:0}.navbar-expand .navbar-nav{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row}.navbar-expand .navbar-nav .dropdown-menu{position:absolute}.navbar-expand .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand>.container,.navbar-expand>.container-fluid{-webkit-flex-wrap:nowrap;-ms-flex-wrap:nowrap;flex-wrap:nowrap}.navbar-expand 
.navbar-collapse{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important;-webkit-flex-basis:auto;-ms-flex-preferred-size:auto;flex-basis:auto}.navbar-expand .navbar-toggler{display:none}.navbar-light .navbar-brand{color:rgba(0,0,0,.9)}.navbar-light .navbar-brand:hover,.navbar-light .navbar-brand:focus{color:rgba(0,0,0,.9)}.navbar-light .navbar-nav .nav-link{color:rgba(0,0,0,.5)}.navbar-light .navbar-nav .nav-link:hover,.navbar-light .navbar-nav .nav-link:focus{color:rgba(0,0,0,.7)}.navbar-light .navbar-nav .nav-link.disabled{color:rgba(0,0,0,.3)}.navbar-light .navbar-nav .show>.nav-link,.navbar-light .navbar-nav .active>.nav-link,.navbar-light .navbar-nav .nav-link.show,.navbar-light .navbar-nav .nav-link.active{color:rgba(0,0,0,.9)}.navbar-light .navbar-toggler{color:rgba(0,0,0,.5);border-color:rgba(0,0,0,.1)}.navbar-light .navbar-toggler-icon{background-image:url("data:image/svg+xml;charset=utf8,%3Csvg viewBox=%220 0 30 30%22 xmlns=%22http://www.w3.org/2000/svg%22%3E%3Cpath stroke=%22rgba(0, 0, 0, 0.5)%22 stroke-width=%222%22 stroke-linecap=%22round%22 stroke-miterlimit=%2210%22 d=%22M4 7h22M4 15h22M4 23h22%22/%3E%3C/svg%3E")}.navbar-light .navbar-text{color:rgba(0,0,0,.5)}.navbar-light .navbar-text a{color:rgba(0,0,0,.9)}.navbar-light .navbar-text a:hover,.navbar-light .navbar-text a:focus{color:rgba(0,0,0,.9)}.navbar-dark .navbar-brand{color:#fff}.navbar-dark .navbar-brand:hover,.navbar-dark .navbar-brand:focus{color:#fff}.navbar-dark .navbar-nav .nav-link{color:rgba(255,255,255,.75)}.navbar-dark .navbar-nav .nav-link:hover,.navbar-dark .navbar-nav .nav-link:focus{color:rgba(255,255,255,.5)}.navbar-dark .navbar-nav .nav-link.disabled{color:rgba(255,255,255,.25)}.navbar-dark .navbar-nav .show>.nav-link,.navbar-dark .navbar-nav .active>.nav-link,.navbar-dark .navbar-nav .nav-link.show,.navbar-dark .navbar-nav .nav-link.active{color:#fff}.navbar-dark .navbar-toggler{color:rgba(255,255,255,.75);border-color:rgba(255,255,255,.1)}.navbar-dark .navbar-toggler-icon{background-image:url("data:image/svg+xml;charset=utf8,%3Csvg viewBox=%220 0 30 30%22 xmlns=%22http://www.w3.org/2000/svg%22%3E%3Cpath stroke=%22rgba(255, 255, 255, 0.75)%22 stroke-width=%222%22 stroke-linecap=%22round%22 stroke-miterlimit=%2210%22 d=%22M4 7h22M4 15h22M4 23h22%22/%3E%3C/svg%3E")}.navbar-dark .navbar-text{color:rgba(255,255,255,.75)}.navbar-dark .navbar-text a{color:#fff}.navbar-dark .navbar-text a:hover,.navbar-dark .navbar-text a:focus{color:#fff}.card,.td-content .highlight{position:relative;display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;min-width:0;word-wrap:break-word;background-color:#fff;background-clip:border-box;border:1px solid rgba(0,0,0,.125);border-radius:.25rem}.card>hr,.td-content .highlight>hr{margin-right:0;margin-left:0}.card>.list-group:first-child .list-group-item:first-child,.td-content .highlight>.list-group:first-child .list-group-item:first-child{border-top-left-radius:.25rem;border-top-right-radius:.25rem}.card>.list-group:last-child .list-group-item:last-child,.td-content .highlight>.list-group:last-child .list-group-item:last-child{border-bottom-right-radius:.25rem;border-bottom-left-radius:.25rem}.card-body{-webkit-box-flex:1;-webkit-flex:1 1 auto;-ms-flex:1 1 
auto;flex:auto;padding:1.25rem}.card-title{margin-bottom:.75rem}.card-subtitle{margin-top:-.375rem;margin-bottom:0}.card-text:last-child{margin-bottom:0}.card-link:hover{text-decoration:none}.card-link+.card-link{margin-left:1.25rem}.card-header{padding:.75rem 1.25rem;margin-bottom:0;background-color:rgba(0,0,0,3%);border-bottom:1px solid rgba(0,0,0,.125)}.card-header:first-child{border-radius:-webkit-calc(.25rem - 1px)-webkit-calc(.25rem - 1px)0 0;border-radius:calc(.25rem - 1px)calc(.25rem - 1px)0 0}.card-header+.list-group .list-group-item:first-child{border-top:0}.card-footer{padding:.75rem 1.25rem;background-color:rgba(0,0,0,3%);border-top:1px solid rgba(0,0,0,.125)}.card-footer:last-child{border-radius:0 0 -webkit-calc(.25rem - 1px)-webkit-calc(.25rem - 1px);border-radius:0 0 calc(.25rem - 1px)calc(.25rem - 1px)}.card-header-tabs{margin-right:-.625rem;margin-bottom:-.75rem;margin-left:-.625rem;border-bottom:0}.card-header-pills{margin-right:-.625rem;margin-left:-.625rem}.card-img-overlay{position:absolute;top:0;right:0;bottom:0;left:0;padding:1.25rem}.card-img{width:100%;border-radius:-webkit-calc(.25rem - 1px);border-radius:calc(.25rem - 1px)}.card-img-top{width:100%;border-top-left-radius:-webkit-calc(.25rem - 1px);border-top-left-radius:calc(.25rem - 1px);border-top-right-radius:-webkit-calc(.25rem - 1px);border-top-right-radius:calc(.25rem - 1px)}.card-img-bottom{width:100%;border-bottom-right-radius:-webkit-calc(.25rem - 1px);border-bottom-right-radius:calc(.25rem - 1px);border-bottom-left-radius:-webkit-calc(.25rem - 1px);border-bottom-left-radius:calc(.25rem - 1px)}.card-deck{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column}.card-deck .card,.card-deck .td-content .highlight,.td-content .card-deck .highlight{margin-bottom:15px}@media(min-width:576px){.card-deck{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-flow:row wrap;-ms-flex-flow:row wrap;flex-flow:row wrap;margin-right:-15px;margin-left:-15px}.card-deck .card,.card-deck .td-content .highlight,.td-content .card-deck .highlight{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-flex:1;-webkit-flex:1 0 0%;-ms-flex:1 0 0%;flex:1 0;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;margin-right:15px;margin-bottom:0;margin-left:15px}}.card-group{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column}.card-group>.card,.td-content .card-group>.highlight{margin-bottom:15px}@media(min-width:576px){.card-group{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-flow:row wrap;-ms-flex-flow:row wrap;flex-flow:row wrap}.card-group>.card,.td-content .card-group>.highlight{-webkit-box-flex:1;-webkit-flex:1 0 0%;-ms-flex:1 0 0%;flex:1 0;margin-bottom:0}.card-group>.card+.card,.td-content .card-group>.highlight+.card,.td-content .card-group>.card+.highlight,.td-content .card-group>.highlight+.highlight{margin-left:0;border-left:0}.card-group>.card:first-child,.td-content .card-group>.highlight:first-child{border-top-right-radius:0;border-bottom-right-radius:0}.card-group>.card:first-child .card-img-top,.td-content .card-group>.highlight:first-child 
.card-img-top,.card-group>.card:first-child .card-header,.td-content .card-group>.highlight:first-child .card-header{border-top-right-radius:0}.card-group>.card:first-child .card-img-bottom,.td-content .card-group>.highlight:first-child .card-img-bottom,.card-group>.card:first-child .card-footer,.td-content .card-group>.highlight:first-child .card-footer{border-bottom-right-radius:0}.card-group>.card:last-child,.td-content .card-group>.highlight:last-child{border-top-left-radius:0;border-bottom-left-radius:0}.card-group>.card:last-child .card-img-top,.td-content .card-group>.highlight:last-child .card-img-top,.card-group>.card:last-child .card-header,.td-content .card-group>.highlight:last-child .card-header{border-top-left-radius:0}.card-group>.card:last-child .card-img-bottom,.td-content .card-group>.highlight:last-child .card-img-bottom,.card-group>.card:last-child .card-footer,.td-content .card-group>.highlight:last-child .card-footer{border-bottom-left-radius:0}.card-group>.card:only-child,.td-content .card-group>.highlight:only-child{border-radius:.25rem}.card-group>.card:only-child .card-img-top,.td-content .card-group>.highlight:only-child .card-img-top,.card-group>.card:only-child .card-header,.td-content .card-group>.highlight:only-child .card-header{border-top-left-radius:.25rem;border-top-right-radius:.25rem}.card-group>.card:only-child .card-img-bottom,.td-content .card-group>.highlight:only-child .card-img-bottom,.card-group>.card:only-child .card-footer,.td-content .card-group>.highlight:only-child .card-footer{border-bottom-right-radius:.25rem;border-bottom-left-radius:.25rem}.card-group>.card:not(:first-child):not(:last-child):not(:only-child),.td-content .card-group>.highlight:not(:first-child):not(:last-child):not(:only-child){border-radius:0}.card-group>.card:not(:first-child):not(:last-child):not(:only-child) .card-img-top,.td-content .card-group>.highlight:not(:first-child):not(:last-child):not(:only-child) .card-img-top,.card-group>.card:not(:first-child):not(:last-child):not(:only-child) .card-img-bottom,.td-content .card-group>.highlight:not(:first-child):not(:last-child):not(:only-child) .card-img-bottom,.card-group>.card:not(:first-child):not(:last-child):not(:only-child) .card-header,.td-content .card-group>.highlight:not(:first-child):not(:last-child):not(:only-child) .card-header,.card-group>.card:not(:first-child):not(:last-child):not(:only-child) .card-footer,.td-content .card-group>.highlight:not(:first-child):not(:last-child):not(:only-child) .card-footer{border-radius:0}}.card-columns .card,.card-columns .td-content .highlight,.td-content .card-columns .highlight{margin-bottom:.75rem}@media(min-width:576px){.card-columns{-webkit-column-count:3;-moz-column-count:3;column-count:3;-webkit-column-gap:1.25rem;-moz-column-gap:1.25rem;column-gap:1.25rem;orphans:1;widows:1}.card-columns .card,.card-columns .td-content .highlight,.td-content .card-columns .highlight{display:inline-block;width:100%}}.accordion .card:not(:first-of-type):not(:last-of-type),.accordion .td-content .highlight:not(:first-of-type):not(:last-of-type),.td-content .accordion .highlight:not(:first-of-type):not(:last-of-type){border-bottom:0;border-radius:0}.accordion .card:not(:first-of-type) .card-header:first-child,.accordion .td-content .highlight:not(:first-of-type) .card-header:first-child,.td-content .accordion .highlight:not(:first-of-type) .card-header:first-child{border-radius:0}.accordion .card:first-of-type,.accordion .td-content .highlight:first-of-type,.td-content .accordion 
.highlight:first-of-type{border-bottom:0;border-bottom-right-radius:0;border-bottom-left-radius:0}.accordion .card:last-of-type,.accordion .td-content .highlight:last-of-type,.td-content .accordion .highlight:last-of-type{border-top-left-radius:0;border-top-right-radius:0}.breadcrumb{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;padding:.75rem 1rem;margin-bottom:1rem;list-style:none;background-color:#eee;border-radius:.25rem}.breadcrumb-item+.breadcrumb-item{padding-left:.5rem}.breadcrumb-item+.breadcrumb-item::before{display:inline-block;padding-right:.5rem;color:#888;content:"/"}.breadcrumb-item+.breadcrumb-item:hover::before{text-decoration:underline}.breadcrumb-item+.breadcrumb-item:hover::before{text-decoration:none}.breadcrumb-item.active{color:#888}.pagination{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;padding-left:0;list-style:none;border-radius:.25rem}.page-link{position:relative;display:block;padding:.5rem .75rem;margin-left:-1px;line-height:1.25;color:#888;background-color:#fff;border:1px solid rgba(0,0,0,.1)}.page-link:hover{z-index:2;color:#1e53a0;text-decoration:none;background-color:#eee;border-color:#dee2e6}.page-link:focus{z-index:2;outline:0;-webkit-box-shadow:0 0 0 .2rem rgba(48,99,142,.25);box-shadow:0 0 0 .2rem rgba(48,99,142,.25)}.page-link:not(:disabled):not(.disabled){cursor:pointer}.page-item:first-child .page-link{margin-left:0;border-top-left-radius:.25rem;border-bottom-left-radius:.25rem}.page-item:last-child .page-link{border-top-right-radius:.25rem;border-bottom-right-radius:.25rem}.page-item.active .page-link{z-index:1;color:#fff;background-color:#30638e;border-color:#2a567b}.page-item.disabled .page-link{color:#dee2e6;pointer-events:none;cursor:auto;background-color:#fff;border-color:#dee2e6}.pagination-lg .page-link{padding:.75rem 1.5rem;font-size:1.25rem;line-height:1.5}.pagination-lg .page-item:first-child .page-link{border-top-left-radius:.3rem;border-bottom-left-radius:.3rem}.pagination-lg .page-item:last-child .page-link{border-top-right-radius:.3rem;border-bottom-right-radius:.3rem}.pagination-sm .page-link{padding:.25rem .5rem;font-size:.875rem;line-height:1.5}.pagination-sm .page-item:first-child .page-link{border-top-left-radius:.2rem;border-bottom-left-radius:.2rem}.pagination-sm .page-item:last-child .page-link{border-top-right-radius:.2rem;border-bottom-right-radius:.2rem}.badge{display:inline-block;padding:.25em .4em;font-size:75%;font-weight:700;line-height:1;text-align:center;white-space:nowrap;vertical-align:baseline;border-radius:.25rem}.badge:empty{display:none}.btn 
.badge{position:relative;top:-1px}.badge-pill{padding-right:.6em;padding-left:.6em;border-radius:10rem}.badge-primary{color:#fff;background-color:#30638e}.badge-primary[href]:hover,.badge-primary[href]:focus{color:#fff;text-decoration:none;background-color:#234868}.badge-secondary{color:#fff;background-color:#ffa630}.badge-secondary[href]:hover,.badge-secondary[href]:focus{color:#fff;text-decoration:none;background-color:#fc9000}.badge-success{color:#fff;background-color:#3772ff}.badge-success[href]:hover,.badge-success[href]:focus{color:#fff;text-decoration:none;background-color:#044eff}.badge-info{color:#222;background-color:#c0e0de}.badge-info[href]:hover,.badge-info[href]:focus{color:#222;text-decoration:none;background-color:#9ecfcc}.badge-warning{color:#fff;background-color:#ed6a5a}.badge-warning[href]:hover,.badge-warning[href]:focus{color:#fff;text-decoration:none;background-color:#e8402c}.badge-danger{color:#fff;background-color:#ed6a5a}.badge-danger[href]:hover,.badge-danger[href]:focus{color:#fff;text-decoration:none;background-color:#e8402c}.badge-light{color:#222;background-color:#d3f3ee}.badge-light[href]:hover,.badge-light[href]:focus{color:#222;text-decoration:none;background-color:#abe8df}.badge-dark{color:#fff;background-color:#403f4c}.badge-dark[href]:hover,.badge-dark[href]:focus{color:#fff;text-decoration:none;background-color:#292830}.jumbotron{padding:2rem 1rem;margin-bottom:2rem;background-color:#eee;border-radius:.3rem}@media(min-width:576px){.jumbotron{padding:4rem 2rem}}.jumbotron-fluid{padding-right:0;padding-left:0;border-radius:0}.alert{position:relative;padding:.75rem 1.25rem;margin-bottom:1rem;border:1px solid transparent;border-radius:.25rem}.alert-heading{color:inherit}.alert-link{font-weight:700}.alert-dismissible{padding-right:4rem}.alert-dismissible .close{position:absolute;top:0;right:0;padding:.75rem 1.25rem;color:inherit}.alert-primary{color:#19334a;background:#d6e0e8 -webkit-gradient(linear,left top,left bottom,from(#dce5eb),to(#d6e0e8))repeat-x;background:#d6e0e8 -webkit-linear-gradient(top,#dce5eb,#d6e0e8)repeat-x;background:#d6e0e8 -o-linear-gradient(top,#dce5eb,#d6e0e8)repeat-x;background:#d6e0e8 linear-gradient(180deg,#dce5eb,#d6e0e8)repeat-x;border-color:#c5d3df}.alert-primary hr{border-top-color:#b5c7d6}.alert-primary .alert-link{color:#0c1924}.alert-secondary{color:#855619;background:#ffedd6 -webkit-gradient(linear,left top,left bottom,from(#fff0dc),to(#ffedd6))repeat-x;background:#ffedd6 -webkit-linear-gradient(top,#fff0dc,#ffedd6)repeat-x;background:#ffedd6 -o-linear-gradient(top,#fff0dc,#ffedd6)repeat-x;background:#ffedd6 linear-gradient(180deg,#fff0dc,#ffedd6)repeat-x;border-color:#ffe6c5}.alert-secondary hr{border-top-color:#ffdbac}.alert-secondary .alert-link{color:#5a3a11}.alert-success{color:#1d3b85;background:#d7e3ff -webkit-gradient(linear,left top,left bottom,from(#dde7ff),to(#d7e3ff))repeat-x;background:#d7e3ff -webkit-linear-gradient(top,#dde7ff,#d7e3ff)repeat-x;background:#d7e3ff -o-linear-gradient(top,#dde7ff,#d7e3ff)repeat-x;background:#d7e3ff linear-gradient(180deg,#dde7ff,#d7e3ff)repeat-x;border-color:#c7d8ff}.alert-success hr{border-top-color:#aec6ff}.alert-success .alert-link{color:#14285b}.alert-info{color:#647473;background:#f2f9f8 -webkit-gradient(linear,left top,left bottom,from(#f4faf9),to(#f2f9f8))repeat-x;background:#f2f9f8 -webkit-linear-gradient(top,#f4faf9,#f2f9f8)repeat-x;background:#f2f9f8 -o-linear-gradient(top,#f4faf9,#f2f9f8)repeat-x;background:#f2f9f8 
linear-gradient(180deg,#f4faf9,#f2f9f8)repeat-x;border-color:#edf6f6}.alert-info hr{border-top-color:#dceeee}.alert-info .alert-link{color:#4c5958}.alert-warning{color:#7b372f;background:#fbe1de -webkit-gradient(linear,left top,left bottom,from(#fce6e3),to(#fbe1de))repeat-x;background:#fbe1de -webkit-linear-gradient(top,#fce6e3,#fbe1de)repeat-x;background:#fbe1de -o-linear-gradient(top,#fce6e3,#fbe1de)repeat-x;background:#fbe1de linear-gradient(180deg,#fce6e3,#fbe1de)repeat-x;border-color:#fad5d1}.alert-warning hr{border-top-color:#f8c0ba}.alert-warning .alert-link{color:#562721}.alert-danger{color:#7b372f;background:#fbe1de -webkit-gradient(linear,left top,left bottom,from(#fce6e3),to(#fbe1de))repeat-x;background:#fbe1de -webkit-linear-gradient(top,#fce6e3,#fbe1de)repeat-x;background:#fbe1de -o-linear-gradient(top,#fce6e3,#fbe1de)repeat-x;background:#fbe1de linear-gradient(180deg,#fce6e3,#fbe1de)repeat-x;border-color:#fad5d1}.alert-danger hr{border-top-color:#f8c0ba}.alert-danger .alert-link{color:#562721}.alert-light{color:#6e7e7c;background:#f6fdfc -webkit-gradient(linear,left top,left bottom,from(#f7fdfc),to(#f6fdfc))repeat-x;background:#f6fdfc -webkit-linear-gradient(top,#f7fdfc,#f6fdfc)repeat-x;background:#f6fdfc -o-linear-gradient(top,#f7fdfc,#f6fdfc)repeat-x;background:#f6fdfc linear-gradient(180deg,#f7fdfc,#f6fdfc)repeat-x;border-color:#f3fcfa}.alert-light hr{border-top-color:#dff7f2}.alert-light .alert-link{color:#566361}.alert-dark{color:#212128;background:#d9d9db -webkit-gradient(linear,left top,left bottom,from(#dfdfe0),to(#d9d9db))repeat-x;background:#d9d9db -webkit-linear-gradient(top,#dfdfe0,#d9d9db)repeat-x;background:#d9d9db -o-linear-gradient(top,#dfdfe0,#d9d9db)repeat-x;background:#d9d9db linear-gradient(180deg,#dfdfe0,#d9d9db)repeat-x;border-color:#cac9cd}.alert-dark hr{border-top-color:#bdbcc1}.alert-dark .alert-link{color:#0a0a0c}@-webkit-keyframes progress-bar-stripes{from{background-position:1rem 0}to{background-position:0 0}}@-o-keyframes progress-bar-stripes{from{background-position:1rem 0}to{background-position:0 0}}@keyframes progress-bar-stripes{from{background-position:1rem 0}to{background-position:0 0}}.progress{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;height:1rem;overflow:hidden;font-size:.75rem;background-color:#eee;border-radius:.25rem;-webkit-box-shadow:inset 0 .1rem .1rem rgba(0,0,0,.1);box-shadow:inset 0 .1rem .1rem rgba(0,0,0,.1)}.progress-bar{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;color:#fff;text-align:center;white-space:nowrap;background-color:#30638e;-webkit-transition:width .6s ease;-o-transition:width .6s ease;transition:width .6s ease}@media screen and (prefers-reduced-motion:reduce){.progress-bar{-webkit-transition:none;-o-transition:none;transition:none}}.progress-bar-striped{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 
50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);-webkit-background-size:1rem 1rem;background-size:1rem 1rem}.progress-bar-animated{-webkit-animation:progress-bar-stripes 1s linear infinite;-o-animation:progress-bar-stripes 1s linear infinite;animation:progress-bar-stripes 1s linear infinite}.media{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:start;-webkit-align-items:flex-start;-ms-flex-align:start;align-items:flex-start}.media-body{-webkit-box-flex:1;-webkit-flex:1;-ms-flex:1;flex:1}.list-group{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;padding-left:0;margin-bottom:0}.list-group-item-action{width:100%;color:#495057;text-align:inherit}.list-group-item-action:hover,.list-group-item-action:focus{color:#495057;text-decoration:none;background-color:#f8f9fa}.list-group-item-action:active{color:#222;background-color:#eee}.list-group-item{position:relative;display:block;padding:.75rem 1.25rem;margin-bottom:-1px;background-color:#fff;border:1px solid rgba(0,0,0,.125)}.list-group-item:first-child{border-top-left-radius:.25rem;border-top-right-radius:.25rem}.list-group-item:last-child{margin-bottom:0;border-bottom-right-radius:.25rem;border-bottom-left-radius:.25rem}.list-group-item:hover,.list-group-item:focus{z-index:1;text-decoration:none}.list-group-item.disabled,.list-group-item:disabled{color:#888;background-color:#fff}.list-group-item.active{z-index:2;color:#fff;background-color:#30638e;border-color:#30638e}.list-group-flush .list-group-item{border-right:0;border-left:0;border-radius:0}.list-group-flush:first-child .list-group-item:first-child{border-top:0}.list-group-flush:last-child 
.list-group-item:last-child{border-bottom:0}.list-group-item-primary{color:#19334a;background-color:#c5d3df}.list-group-item-primary.list-group-item-action:hover,.list-group-item-primary.list-group-item-action:focus{color:#19334a;background-color:#b5c7d6}.list-group-item-primary.list-group-item-action.active{color:#fff;background-color:#19334a;border-color:#19334a}.list-group-item-secondary{color:#855619;background-color:#ffe6c5}.list-group-item-secondary.list-group-item-action:hover,.list-group-item-secondary.list-group-item-action:focus{color:#855619;background-color:#ffdbac}.list-group-item-secondary.list-group-item-action.active{color:#fff;background-color:#855619;border-color:#855619}.list-group-item-success{color:#1d3b85;background-color:#c7d8ff}.list-group-item-success.list-group-item-action:hover,.list-group-item-success.list-group-item-action:focus{color:#1d3b85;background-color:#aec6ff}.list-group-item-success.list-group-item-action.active{color:#fff;background-color:#1d3b85;border-color:#1d3b85}.list-group-item-info{color:#647473;background-color:#edf6f6}.list-group-item-info.list-group-item-action:hover,.list-group-item-info.list-group-item-action:focus{color:#647473;background-color:#dceeee}.list-group-item-info.list-group-item-action.active{color:#fff;background-color:#647473;border-color:#647473}.list-group-item-warning{color:#7b372f;background-color:#fad5d1}.list-group-item-warning.list-group-item-action:hover,.list-group-item-warning.list-group-item-action:focus{color:#7b372f;background-color:#f8c0ba}.list-group-item-warning.list-group-item-action.active{color:#fff;background-color:#7b372f;border-color:#7b372f}.list-group-item-danger{color:#7b372f;background-color:#fad5d1}.list-group-item-danger.list-group-item-action:hover,.list-group-item-danger.list-group-item-action:focus{color:#7b372f;background-color:#f8c0ba}.list-group-item-danger.list-group-item-action.active{color:#fff;background-color:#7b372f;border-color:#7b372f}.list-group-item-light{color:#6e7e7c;background-color:#f3fcfa}.list-group-item-light.list-group-item-action:hover,.list-group-item-light.list-group-item-action:focus{color:#6e7e7c;background-color:#dff7f2}.list-group-item-light.list-group-item-action.active{color:#fff;background-color:#6e7e7c;border-color:#6e7e7c}.list-group-item-dark{color:#212128;background-color:#cac9cd}.list-group-item-dark.list-group-item-action:hover,.list-group-item-dark.list-group-item-action:focus{color:#212128;background-color:#bdbcc1}.list-group-item-dark.list-group-item-action.active{color:#fff;background-color:#212128;border-color:#212128}.close{float:right;font-size:1.5rem;font-weight:700;line-height:1;color:#000;text-shadow:0 1px 0 #fff;opacity:.5}.close:not(:disabled):not(.disabled){cursor:pointer}.close:not(:disabled):not(.disabled):hover,.close:not(:disabled):not(.disabled):focus{color:#000;text-decoration:none;opacity:.75}button.close{padding:0;background-color:transparent;border:0;-webkit-appearance:none}.modal-open{overflow:hidden}.modal-open .modal{overflow-x:hidden;overflow-y:auto}.modal{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1050;display:none;overflow:hidden;outline:0}.modal-dialog{position:relative;width:auto;margin:.5rem;pointer-events:none}.modal.fade .modal-dialog{-webkit-transition:-webkit-transform .3s ease-out;transition:-webkit-transform .3s ease-out;-o-transition:-o-transform .3s ease-out;transition:transform .3s ease-out;transition:transform .3s ease-out,-webkit-transform .3s ease-out,-o-transform .3s 
ease-out;-webkit-transform:translate(0,-25%);-ms-transform:translate(0,-25%);-o-transform:translate(0,-25%);transform:translate(0,-25%)}@media screen and (prefers-reduced-motion:reduce){.modal.fade .modal-dialog{-webkit-transition:none;-o-transition:none;transition:none}}.modal.show .modal-dialog{-webkit-transform:translate(0,0);-ms-transform:translate(0,0);-o-transform:translate(0,0);transform:translate(0,0)}.modal-dialog-centered{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;min-height:-webkit-calc(100% - (.5rem * 2));min-height:calc(100% - (.5rem * 2))}.modal-dialog-centered::before{display:block;height:-webkit-calc(100vh - (.5rem * 2));height:calc(100vh - (.5rem * 2));content:""}.modal-content{position:relative;display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;width:100%;pointer-events:auto;background-color:#fff;background-clip:padding-box;border:1px solid rgba(0,0,0,.2);border-radius:.3rem;-webkit-box-shadow:0 .25rem .5rem rgba(0,0,0,.5);box-shadow:0 .25rem .5rem rgba(0,0,0,.5);outline:0}.modal-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040;background-color:#000}.modal-backdrop.fade{opacity:0}.modal-backdrop.show{opacity:.5}.modal-header{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:start;-webkit-align-items:flex-start;-ms-flex-align:start;align-items:flex-start;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between;padding:1rem;border-bottom:1px solid #eee;border-top-left-radius:.3rem;border-top-right-radius:.3rem}.modal-header .close{padding:1rem;margin:-1rem -1rem -1rem auto}.modal-title{margin-bottom:0;line-height:1.5}.modal-body{position:relative;-webkit-box-flex:1;-webkit-flex:1 1 auto;-ms-flex:1 1 auto;flex:auto;padding:1rem}.modal-footer{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;-webkit-box-pack:end;-webkit-justify-content:flex-end;-ms-flex-pack:end;justify-content:flex-end;padding:1rem;border-top:1px solid #eee}.modal-footer>:not(:first-child){margin-left:.25rem}.modal-footer>:not(:last-child){margin-right:.25rem}.modal-scrollbar-measure{position:absolute;top:-9999px;width:50px;height:50px;overflow:scroll}@media(min-width:576px){.modal-dialog{max-width:500px;margin:1.75rem auto}.modal-dialog-centered{min-height:-webkit-calc(100% - (1.75rem * 2));min-height:calc(100% - (1.75rem * 2))}.modal-dialog-centered::before{height:-webkit-calc(100vh - (1.75rem * 2));height:calc(100vh - (1.75rem * 2))}.modal-content{-webkit-box-shadow:0 .5rem 1rem rgba(0,0,0,.5);box-shadow:0 .5rem 1rem rgba(0,0,0,.5)}.modal-sm{max-width:300px}}@media(min-width:992px){.modal-lg{max-width:800px}}.tooltip{position:absolute;z-index:1070;display:block;margin:0;font-family:open sans,-apple-system,BlinkMacSystemFont,segoe ui,Roboto,helvetica neue,Arial,sans-serif,apple color emoji,segoe ui emoji,segoe ui 
symbol;font-style:normal;font-weight:400;line-height:1.5;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;white-space:normal;line-break:auto;font-size:.875rem;word-wrap:break-word;opacity:0}.tooltip.show{opacity:.9}.tooltip .arrow{position:absolute;display:block;width:.8rem;height:.4rem}.tooltip .arrow::before{position:absolute;content:"";border-color:transparent;border-style:solid}.bs-tooltip-top,.bs-tooltip-auto[x-placement^=top]{padding:.4rem 0}.bs-tooltip-top .arrow,.bs-tooltip-auto[x-placement^=top] .arrow{bottom:0}.bs-tooltip-top .arrow::before,.bs-tooltip-auto[x-placement^=top] .arrow::before{top:0;border-width:.4rem .4rem 0;border-top-color:#000}.bs-tooltip-right,.bs-tooltip-auto[x-placement^=right]{padding:0 .4rem}.bs-tooltip-right .arrow,.bs-tooltip-auto[x-placement^=right] .arrow{left:0;width:.4rem;height:.8rem}.bs-tooltip-right .arrow::before,.bs-tooltip-auto[x-placement^=right] .arrow::before{right:0;border-width:.4rem .4rem .4rem 0;border-right-color:#000}.bs-tooltip-bottom,.bs-tooltip-auto[x-placement^=bottom]{padding:.4rem 0}.bs-tooltip-bottom .arrow,.bs-tooltip-auto[x-placement^=bottom] .arrow{top:0}.bs-tooltip-bottom .arrow::before,.bs-tooltip-auto[x-placement^=bottom] .arrow::before{bottom:0;border-width:0 .4rem .4rem;border-bottom-color:#000}.bs-tooltip-left,.bs-tooltip-auto[x-placement^=left]{padding:0 .4rem}.bs-tooltip-left .arrow,.bs-tooltip-auto[x-placement^=left] .arrow{right:0;width:.4rem;height:.8rem}.bs-tooltip-left .arrow::before,.bs-tooltip-auto[x-placement^=left] .arrow::before{left:0;border-width:.4rem 0 .4rem .4rem;border-left-color:#000}.tooltip-inner{max-width:200px;padding:.25rem .5rem;color:#fff;text-align:center;background-color:#000;border-radius:.25rem}.popover{position:absolute;top:0;left:0;z-index:1060;display:block;max-width:276px;font-family:open sans,-apple-system,BlinkMacSystemFont,segoe ui,Roboto,helvetica neue,Arial,sans-serif,apple color emoji,segoe ui emoji,segoe ui symbol;font-style:normal;font-weight:400;line-height:1.5;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;white-space:normal;line-break:auto;font-size:.875rem;word-wrap:break-word;background-color:#fff;background-clip:padding-box;border:1px solid rgba(0,0,0,.2);border-radius:.3rem;-webkit-box-shadow:0 .25rem .5rem rgba(0,0,0,.2);box-shadow:0 .25rem .5rem rgba(0,0,0,.2)}.popover .arrow{position:absolute;display:block;width:1rem;height:.5rem;margin:0 .3rem}.popover .arrow::before,.popover .arrow::after{position:absolute;display:block;content:"";border-color:transparent;border-style:solid}.bs-popover-top,.bs-popover-auto[x-placement^=top]{margin-bottom:.5rem}.bs-popover-top .arrow,.bs-popover-auto[x-placement^=top] .arrow{bottom:-webkit-calc((.5rem + 1px) * -1);bottom:calc((.5rem + 1px) * -1)}.bs-popover-top .arrow::before,.bs-popover-auto[x-placement^=top] .arrow::before,.bs-popover-top .arrow::after,.bs-popover-auto[x-placement^=top] .arrow::after{border-width:.5rem .5rem 0}.bs-popover-top .arrow::before,.bs-popover-auto[x-placement^=top] .arrow::before{bottom:0;border-top-color:rgba(0,0,0,.25)}.bs-popover-top .arrow::after,.bs-popover-auto[x-placement^=top] .arrow::after{bottom:1px;border-top-color:#fff}.bs-popover-right,.bs-popover-auto[x-placement^=right]{margin-left:.5rem}.bs-popover-right .arrow,.bs-popover-auto[x-placement^=right] .arrow{left:-webkit-calc((.5rem + 1px) * 
-1);left:calc((.5rem + 1px) * -1);width:.5rem;height:1rem;margin:.3rem 0}.bs-popover-right .arrow::before,.bs-popover-auto[x-placement^=right] .arrow::before,.bs-popover-right .arrow::after,.bs-popover-auto[x-placement^=right] .arrow::after{border-width:.5rem .5rem .5rem 0}.bs-popover-right .arrow::before,.bs-popover-auto[x-placement^=right] .arrow::before{left:0;border-right-color:rgba(0,0,0,.25)}.bs-popover-right .arrow::after,.bs-popover-auto[x-placement^=right] .arrow::after{left:1px;border-right-color:#fff}.bs-popover-bottom,.bs-popover-auto[x-placement^=bottom]{margin-top:.5rem}.bs-popover-bottom .arrow,.bs-popover-auto[x-placement^=bottom] .arrow{top:-webkit-calc((.5rem + 1px) * -1);top:calc((.5rem + 1px) * -1)}.bs-popover-bottom .arrow::before,.bs-popover-auto[x-placement^=bottom] .arrow::before,.bs-popover-bottom .arrow::after,.bs-popover-auto[x-placement^=bottom] .arrow::after{border-width:0 .5rem .5rem}.bs-popover-bottom .arrow::before,.bs-popover-auto[x-placement^=bottom] .arrow::before{top:0;border-bottom-color:rgba(0,0,0,.25)}.bs-popover-bottom .arrow::after,.bs-popover-auto[x-placement^=bottom] .arrow::after{top:1px;border-bottom-color:#fff}.bs-popover-bottom .popover-header::before,.bs-popover-auto[x-placement^=bottom] .popover-header::before{position:absolute;top:0;left:50%;display:block;width:1rem;margin-left:-.5rem;content:"";border-bottom:1px solid #f7f7f7}.bs-popover-left,.bs-popover-auto[x-placement^=left]{margin-right:.5rem}.bs-popover-left .arrow,.bs-popover-auto[x-placement^=left] .arrow{right:-webkit-calc((.5rem + 1px) * -1);right:calc((.5rem + 1px) * -1);width:.5rem;height:1rem;margin:.3rem 0}.bs-popover-left .arrow::before,.bs-popover-auto[x-placement^=left] .arrow::before,.bs-popover-left .arrow::after,.bs-popover-auto[x-placement^=left] .arrow::after{border-width:.5rem 0 .5rem .5rem}.bs-popover-left .arrow::before,.bs-popover-auto[x-placement^=left] .arrow::before{right:0;border-left-color:rgba(0,0,0,.25)}.bs-popover-left .arrow::after,.bs-popover-auto[x-placement^=left] .arrow::after{right:1px;border-left-color:#fff}.popover-header{padding:.5rem .75rem;margin-bottom:0;font-size:1rem;color:inherit;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;border-top-left-radius:-webkit-calc(.3rem - 1px);border-top-left-radius:calc(.3rem - 1px);border-top-right-radius:-webkit-calc(.3rem - 1px);border-top-right-radius:calc(.3rem - 1px)}.popover-header:empty{display:none}.popover-body{padding:.5rem .75rem;color:#222}.carousel{position:relative}.carousel-inner{position:relative;width:100%;overflow:hidden}.carousel-item{position:relative;display:none;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;width:100%;-webkit-backface-visibility:hidden;backface-visibility:hidden;-webkit-perspective:1000px;perspective:1000px}.carousel-item.active,.carousel-item-next,.carousel-item-prev{display:block;-webkit-transition:-webkit-transform .6s ease;transition:-webkit-transform .6s ease;-o-transition:-o-transform .6s ease;transition:transform .6s ease;transition:transform .6s ease,-webkit-transform .6s ease,-o-transform .6s ease}@media screen and 
(prefers-reduced-motion:reduce){.carousel-item.active,.carousel-item-next,.carousel-item-prev{-webkit-transition:none;-o-transition:none;transition:none}}.carousel-item-next,.carousel-item-prev{position:absolute;top:0}.carousel-item-next.carousel-item-left,.carousel-item-prev.carousel-item-right{-webkit-transform:translateX(0);-ms-transform:translateX(0);-o-transform:translateX(0);transform:translateX(0)}@supports((-webkit-transform-style:preserve-3d) or (transform-style:preserve-3d)){.carousel-item-next.carousel-item-left,.carousel-item-prev.carousel-item-right{-webkit-transform:translate3d(0,0,0);transform:translate3d(0,0,0)}}.carousel-item-next,.active.carousel-item-right{-webkit-transform:translateX(100%);-ms-transform:translateX(100%);-o-transform:translateX(100%);transform:translateX(100%)}@supports((-webkit-transform-style:preserve-3d) or (transform-style:preserve-3d)){.carousel-item-next,.active.carousel-item-right{-webkit-transform:translate3d(100%,0,0);transform:translate3d(100%,0,0)}}.carousel-item-prev,.active.carousel-item-left{-webkit-transform:translateX(-100%);-ms-transform:translateX(-100%);-o-transform:translateX(-100%);transform:translateX(-100%)}@supports((-webkit-transform-style:preserve-3d) or (transform-style:preserve-3d)){.carousel-item-prev,.active.carousel-item-left{-webkit-transform:translate3d(-100%,0,0);transform:translate3d(-100%,0,0)}}.carousel-fade .carousel-item{opacity:0;-webkit-transition-duration:.6s;-o-transition-duration:.6s;transition-duration:.6s;-webkit-transition-property:opacity;-o-transition-property:opacity;transition-property:opacity}.carousel-fade .carousel-item.active,.carousel-fade .carousel-item-next.carousel-item-left,.carousel-fade .carousel-item-prev.carousel-item-right{opacity:1}.carousel-fade .active.carousel-item-left,.carousel-fade .active.carousel-item-right{opacity:0}.carousel-fade .carousel-item-next,.carousel-fade .carousel-item-prev,.carousel-fade .carousel-item.active,.carousel-fade .active.carousel-item-left,.carousel-fade .active.carousel-item-prev{-webkit-transform:translateX(0);-ms-transform:translateX(0);-o-transform:translateX(0);transform:translateX(0)}@supports((-webkit-transform-style:preserve-3d) or (transform-style:preserve-3d)){.carousel-fade .carousel-item-next,.carousel-fade .carousel-item-prev,.carousel-fade .carousel-item.active,.carousel-fade .active.carousel-item-left,.carousel-fade .active.carousel-item-prev{-webkit-transform:translate3d(0,0,0);transform:translate3d(0,0,0)}}.carousel-control-prev,.carousel-control-next{position:absolute;top:0;bottom:0;display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;width:15%;color:#fff;text-align:center;opacity:.5}.carousel-control-prev:hover,.carousel-control-prev:focus,.carousel-control-next:hover,.carousel-control-next:focus{color:#fff;text-decoration:none;outline:0;opacity:.9}.carousel-control-prev{left:0;background:-webkit-gradient(linear,left top,right top,from(rgba(0,0,0,.25)),to(rgba(0,0,0,.1%)));background:-webkit-linear-gradient(left,rgba(0,0,0,.25),rgba(0,0,0,.1%));background:-o-linear-gradient(left,rgba(0,0,0,.25),rgba(0,0,0,.1%));background:linear-gradient(90deg,rgba(0,0,0,.25),rgba(0,0,0,.1%))}.carousel-control-next{right:0;background:-webkit-gradient(linear,right top,left 
top,from(rgba(0,0,0,.25)),to(rgba(0,0,0,.1%)));background:-webkit-linear-gradient(right,rgba(0,0,0,.25),rgba(0,0,0,.1%));background:-o-linear-gradient(right,rgba(0,0,0,.25),rgba(0,0,0,.1%));background:linear-gradient(270deg,rgba(0,0,0,.25),rgba(0,0,0,.1%))}.carousel-control-prev-icon,.carousel-control-next-icon{display:inline-block;width:20px;height:20px;background:no-repeat 50%;-webkit-background-size:100% 100%;background-size:100% 100%}.carousel-control-prev-icon{background-image:url("data:image/svg+xml;charset=utf8,%3Csvg xmlns=%22http://www.w3.org/2000/svg%22 fill=%22%23fff%22 viewBox=%220 0 8 8%22%3E%3Cpath d=%22M5.25.0l-4 4 4 4 1.5-1.5L4.25 4l2.5-2.5L5.25.0z%22/%3E%3C/svg%3E")}.carousel-control-next-icon{background-image:url("data:image/svg+xml;charset=utf8,%3Csvg xmlns=%22http://www.w3.org/2000/svg%22 fill=%22%23fff%22 viewBox=%220 0 8 8%22%3E%3Cpath d=%22M2.75.0l-1.5 1.5L3.75 4l-2.5 2.5L2.75 8l4-4-4-4z%22/%3E%3C/svg%3E")}.carousel-indicators{position:absolute;right:0;bottom:10px;left:0;z-index:15;display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;padding-left:0;margin-right:15%;margin-left:15%;list-style:none}.carousel-indicators li{position:relative;-webkit-box-flex:0;-webkit-flex:0 1 auto;-ms-flex:0 1 auto;flex:initial;width:30px;height:3px;margin-right:3px;margin-left:3px;text-indent:-999px;cursor:pointer;background-color:rgba(255,255,255,.5)}.carousel-indicators li::before{position:absolute;top:-10px;left:0;display:inline-block;width:100%;height:10px;content:""}.carousel-indicators li::after{position:absolute;bottom:-10px;left:0;display:inline-block;width:100%;height:10px;content:""}.carousel-indicators .active{background-color:#fff}.carousel-caption{position:absolute;right:15%;bottom:20px;left:15%;z-index:10;padding-top:20px;padding-bottom:20px;color:#fff;text-align:center}.align-baseline{vertical-align:baseline!important}.align-top{vertical-align:top!important}.align-middle{vertical-align:middle!important}.align-bottom{vertical-align:bottom!important}.align-text-bottom{vertical-align:text-bottom!important}.align-text-top{vertical-align:text-top!important}.bg-primary{background-color:#30638e!important}a.bg-primary:hover,a.bg-primary:focus,button.bg-primary:hover,button.bg-primary:focus{background-color:#234868!important}.bg-secondary{background-color:#ffa630!important}a.bg-secondary:hover,a.bg-secondary:focus,button.bg-secondary:hover,button.bg-secondary:focus{background-color:#fc9000!important}.bg-success{background-color:#3772ff!important}a.bg-success:hover,a.bg-success:focus,button.bg-success:hover,button.bg-success:focus{background-color:#044eff!important}.bg-info{background-color:#c0e0de!important}a.bg-info:hover,a.bg-info:focus,button.bg-info:hover,button.bg-info:focus{background-color:#9ecfcc!important}.bg-warning{background-color:#ed6a5a!important}a.bg-warning:hover,a.bg-warning:focus,button.bg-warning:hover,button.bg-warning:focus{background-color:#e8402c!important}.bg-danger{background-color:#ed6a5a!important}a.bg-danger:hover,a.bg-danger:focus,button.bg-danger:hover,button.bg-danger:focus{background-color:#e8402c!important}.bg-light{background-color:#d3f3ee!important}a.bg-light:hover,a.bg-light:focus,button.bg-light:hover,button.bg-light:focus{background-color:#abe8df!important}.bg-dark{background-color:#403f4c!important}a.bg-dark:hover,a.bg-dark:focus,button.bg-dark:hover,button.bg-dark:focus{background-color:#292830!important}.bg-gradient-primar
y{background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x!important;background:#30638e -webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x!important;background:#30638e -o-linear-gradient(top,#4f7a9f,#30638E)repeat-x!important;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x!important}.bg-gradient-secondary{background:#ffa630 -webkit-gradient(linear,left top,left bottom,from(#ffb34f),to(#FFA630))repeat-x!important;background:#ffa630 -webkit-linear-gradient(top,#ffb34f,#FFA630)repeat-x!important;background:#ffa630 -o-linear-gradient(top,#ffb34f,#FFA630)repeat-x!important;background:#ffa630 linear-gradient(180deg,#ffb34f,#FFA630)repeat-x!important}.bg-gradient-success{background:#3772ff -webkit-gradient(linear,left top,left bottom,from(#5587ff),to(#3772FF))repeat-x!important;background:#3772ff -webkit-linear-gradient(top,#5587ff,#3772FF)repeat-x!important;background:#3772ff -o-linear-gradient(top,#5587ff,#3772FF)repeat-x!important;background:#3772ff linear-gradient(180deg,#5587ff,#3772FF)repeat-x!important}.bg-gradient-info{background:#c0e0de -webkit-gradient(linear,left top,left bottom,from(#c9e5e3),to(#C0E0DE))repeat-x!important;background:#c0e0de -webkit-linear-gradient(top,#c9e5e3,#C0E0DE)repeat-x!important;background:#c0e0de -o-linear-gradient(top,#c9e5e3,#C0E0DE)repeat-x!important;background:#c0e0de linear-gradient(180deg,#c9e5e3,#C0E0DE)repeat-x!important}.bg-gradient-warning{background:#ed6a5a -webkit-gradient(linear,left top,left bottom,from(#f08073),to(#ED6A5A))repeat-x!important;background:#ed6a5a -webkit-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a -o-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a linear-gradient(180deg,#f08073,#ED6A5A)repeat-x!important}.bg-gradient-danger{background:#ed6a5a -webkit-gradient(linear,left top,left bottom,from(#f08073),to(#ED6A5A))repeat-x!important;background:#ed6a5a -webkit-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a -o-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a linear-gradient(180deg,#f08073,#ED6A5A)repeat-x!important}.bg-gradient-light{background:#d3f3ee -webkit-gradient(linear,left top,left bottom,from(#daf5f1),to(#D3F3EE))repeat-x!important;background:#d3f3ee -webkit-linear-gradient(top,#daf5f1,#D3F3EE)repeat-x!important;background:#d3f3ee -o-linear-gradient(top,#daf5f1,#D3F3EE)repeat-x!important;background:#d3f3ee linear-gradient(180deg,#daf5f1,#D3F3EE)repeat-x!important}.bg-gradient-dark{background:#403f4c -webkit-gradient(linear,left top,left bottom,from(#5d5c67),to(#403F4C))repeat-x!important;background:#403f4c -webkit-linear-gradient(top,#5d5c67,#403F4C)repeat-x!important;background:#403f4c -o-linear-gradient(top,#5d5c67,#403F4C)repeat-x!important;background:#403f4c linear-gradient(180deg,#5d5c67,#403F4C)repeat-x!important}.bg-white{background-color:#fff!important}.bg-transparent{background-color:transparent!important}.border{border:1px solid #dee2e6!important}.border-top{border-top:1px solid #dee2e6!important}.border-right{border-right:1px solid #dee2e6!important}.border-bottom{border-bottom:1px solid #dee2e6!important}.border-left{border-left:1px solid 
#dee2e6!important}.border-0{border:0!important}.border-top-0{border-top:0!important}.border-right-0{border-right:0!important}.border-bottom-0{border-bottom:0!important}.border-left-0{border-left:0!important}.border-primary{border-color:#30638e!important}.border-secondary{border-color:#ffa630!important}.border-success{border-color:#3772ff!important}.border-info{border-color:#c0e0de!important}.border-warning{border-color:#ed6a5a!important}.border-danger{border-color:#ed6a5a!important}.border-light{border-color:#d3f3ee!important}.border-dark{border-color:#403f4c!important}.border-white{border-color:#fff!important}.rounded{border-radius:.25rem!important}.rounded-top{border-top-left-radius:.25rem!important;border-top-right-radius:.25rem!important}.rounded-right{border-top-right-radius:.25rem!important;border-bottom-right-radius:.25rem!important}.rounded-bottom{border-bottom-right-radius:.25rem!important;border-bottom-left-radius:.25rem!important}.rounded-left{border-top-left-radius:.25rem!important;border-bottom-left-radius:.25rem!important}.rounded-circle{border-radius:50%!important}.rounded-0{border-radius:0!important}.clearfix::after{display:block;clear:both;content:""}.d-none{display:none!important}.d-inline{display:inline!important}.d-inline-block{display:inline-block!important}.d-block{display:block!important}.d-table{display:table!important}.d-table-row{display:table-row!important}.d-table-cell{display:table-cell!important}.d-flex{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important}.d-inline-flex{display:-webkit-inline-box!important;display:-webkit-inline-flex!important;display:-ms-inline-flexbox!important;display:inline-flex!important}@media(min-width:576px){.d-sm-none{display:none!important}.d-sm-inline{display:inline!important}.d-sm-inline-block{display:inline-block!important}.d-sm-block{display:block!important}.d-sm-table{display:table!important}.d-sm-table-row{display:table-row!important}.d-sm-table-cell{display:table-cell!important}.d-sm-flex{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important}.d-sm-inline-flex{display:-webkit-inline-box!important;display:-webkit-inline-flex!important;display:-ms-inline-flexbox!important;display:inline-flex!important}}@media(min-width:768px){.d-md-none{display:none!important}.d-md-inline{display:inline!important}.d-md-inline-block{display:inline-block!important}.d-md-block{display:block!important}.d-md-table{display:table!important}.d-md-table-row{display:table-row!important}.d-md-table-cell{display:table-cell!important}.d-md-flex{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important}.d-md-inline-flex{display:-webkit-inline-box!important;display:-webkit-inline-flex!important;display:-ms-inline-flexbox!important;display:inline-flex!important}}@media(min-width:992px){.d-lg-none{display:none!important}.d-lg-inline{display:inline!important}.d-lg-inline-block{display:inline-block!important}.d-lg-block{display:block!important}.d-lg-table{display:table!important}.d-lg-table-row{display:table-row!important}.d-lg-table-cell{display:table-cell!important}.d-lg-flex{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important}.d-lg-inline-flex{display:-webkit-inline-box!important;display:-webkit-inline-flex!important;display:-ms-inline-flexbox!important;display:inline-flex!important}}@media(min-width:1200px){.d-xl-none{display:n
one!important}.d-xl-inline{display:inline!important}.d-xl-inline-block{display:inline-block!important}.d-xl-block{display:block!important}.d-xl-table{display:table!important}.d-xl-table-row{display:table-row!important}.d-xl-table-cell{display:table-cell!important}.d-xl-flex{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important}.d-xl-inline-flex{display:-webkit-inline-box!important;display:-webkit-inline-flex!important;display:-ms-inline-flexbox!important;display:inline-flex!important}}@media print{.d-print-none{display:none!important}.d-print-inline{display:inline!important}.d-print-inline-block{display:inline-block!important}.d-print-block{display:block!important}.d-print-table{display:table!important}.d-print-table-row{display:table-row!important}.d-print-table-cell{display:table-cell!important}.d-print-flex{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important}.d-print-inline-flex{display:-webkit-inline-box!important;display:-webkit-inline-flex!important;display:-ms-inline-flexbox!important;display:inline-flex!important}}.embed-responsive{position:relative;display:block;width:100%;padding:0;overflow:hidden}.embed-responsive::before{display:block;content:""}.embed-responsive .embed-responsive-item,.embed-responsive iframe,.embed-responsive embed,.embed-responsive object,.embed-responsive video{position:absolute;top:0;bottom:0;left:0;width:100%;height:100%;border:0}.embed-responsive-21by9::before{padding-top:42.85714286%}.embed-responsive-16by9::before{padding-top:56.25%}.embed-responsive-4by3::before{padding-top:75%}.embed-responsive-1by1::before{padding-top:100%}.flex-row{-webkit-box-orient:horizontal!important;-webkit-box-direction:normal!important;-webkit-flex-direction:row!important;-ms-flex-direction:row!important;flex-direction:row!important}.flex-column{-webkit-box-orient:vertical!important;-webkit-box-direction:normal!important;-webkit-flex-direction:column!important;-ms-flex-direction:column!important;flex-direction:column!important}.flex-row-reverse{-webkit-box-orient:horizontal!important;-webkit-box-direction:reverse!important;-webkit-flex-direction:row-reverse!important;-ms-flex-direction:row-reverse!important;flex-direction:row-reverse!important}.flex-column-reverse{-webkit-box-orient:vertical!important;-webkit-box-direction:reverse!important;-webkit-flex-direction:column-reverse!important;-ms-flex-direction:column-reverse!important;flex-direction:column-reverse!important}.flex-wrap{-webkit-flex-wrap:wrap!important;-ms-flex-wrap:wrap!important;flex-wrap:wrap!important}.flex-nowrap{-webkit-flex-wrap:nowrap!important;-ms-flex-wrap:nowrap!important;flex-wrap:nowrap!important}.flex-wrap-reverse{-webkit-flex-wrap:wrap-reverse!important;-ms-flex-wrap:wrap-reverse!important;flex-wrap:wrap-reverse!important}.flex-fill{-webkit-box-flex:1!important;-webkit-flex:1 1 auto!important;-ms-flex:1 1 
auto!important;flex:auto!important}.flex-grow-0{-webkit-box-flex:0!important;-webkit-flex-grow:0!important;-ms-flex-positive:0!important;flex-grow:0!important}.flex-grow-1{-webkit-box-flex:1!important;-webkit-flex-grow:1!important;-ms-flex-positive:1!important;flex-grow:1!important}.flex-shrink-0{-webkit-flex-shrink:0!important;-ms-flex-negative:0!important;flex-shrink:0!important}.flex-shrink-1{-webkit-flex-shrink:1!important;-ms-flex-negative:1!important;flex-shrink:1!important}.justify-content-start{-webkit-box-pack:start!important;-webkit-justify-content:flex-start!important;-ms-flex-pack:start!important;justify-content:flex-start!important}.justify-content-end{-webkit-box-pack:end!important;-webkit-justify-content:flex-end!important;-ms-flex-pack:end!important;justify-content:flex-end!important}.justify-content-center{-webkit-box-pack:center!important;-webkit-justify-content:center!important;-ms-flex-pack:center!important;justify-content:center!important}.justify-content-between{-webkit-box-pack:justify!important;-webkit-justify-content:space-between!important;-ms-flex-pack:justify!important;justify-content:space-between!important}.justify-content-around{-webkit-justify-content:space-around!important;-ms-flex-pack:distribute!important;justify-content:space-around!important}.align-items-start{-webkit-box-align:start!important;-webkit-align-items:flex-start!important;-ms-flex-align:start!important;align-items:flex-start!important}.align-items-end{-webkit-box-align:end!important;-webkit-align-items:flex-end!important;-ms-flex-align:end!important;align-items:flex-end!important}.align-items-center{-webkit-box-align:center!important;-webkit-align-items:center!important;-ms-flex-align:center!important;align-items:center!important}.align-items-baseline{-webkit-box-align:baseline!important;-webkit-align-items:baseline!important;-ms-flex-align:baseline!important;align-items:baseline!important}.align-items-stretch{-webkit-box-align:stretch!important;-webkit-align-items:stretch!important;-ms-flex-align:stretch!important;align-items:stretch!important}.align-content-start{-webkit-align-content:flex-start!important;-ms-flex-line-pack:start!important;align-content:flex-start!important}.align-content-end{-webkit-align-content:flex-end!important;-ms-flex-line-pack:end!important;align-content:flex-end!important}.align-content-center{-webkit-align-content:center!important;-ms-flex-line-pack:center!important;align-content:center!important}.align-content-between{-webkit-align-content:space-between!important;-ms-flex-line-pack:justify!important;align-content:space-between!important}.align-content-around{-webkit-align-content:space-around!important;-ms-flex-line-pack:distribute!important;align-content:space-around!important}.align-content-stretch{-webkit-align-content:stretch!important;-ms-flex-line-pack:stretch!important;align-content:stretch!important}.align-self-auto{-webkit-align-self:auto!important;-ms-flex-item-align:auto!important;align-self:auto!important}.align-self-start{-webkit-align-self:flex-start!important;-ms-flex-item-align:start!important;align-self:flex-start!important}.align-self-end{-webkit-align-self:flex-end!important;-ms-flex-item-align:end!important;align-self:flex-end!important}.align-self-center{-webkit-align-self:center!important;-ms-flex-item-align:center!important;align-self:center!important}.align-self-baseline{-webkit-align-self:baseline!important;-ms-flex-item-align:baseline!important;align-self:baseline!important}.align-self-stretch{-webkit-align-self:stretch!important;-ms-fle
x-item-align:stretch!important;align-self:stretch!important}@media(min-width:576px){.flex-sm-row{-webkit-box-orient:horizontal!important;-webkit-box-direction:normal!important;-webkit-flex-direction:row!important;-ms-flex-direction:row!important;flex-direction:row!important}.flex-sm-column{-webkit-box-orient:vertical!important;-webkit-box-direction:normal!important;-webkit-flex-direction:column!important;-ms-flex-direction:column!important;flex-direction:column!important}.flex-sm-row-reverse{-webkit-box-orient:horizontal!important;-webkit-box-direction:reverse!important;-webkit-flex-direction:row-reverse!important;-ms-flex-direction:row-reverse!important;flex-direction:row-reverse!important}.flex-sm-column-reverse{-webkit-box-orient:vertical!important;-webkit-box-direction:reverse!important;-webkit-flex-direction:column-reverse!important;-ms-flex-direction:column-reverse!important;flex-direction:column-reverse!important}.flex-sm-wrap{-webkit-flex-wrap:wrap!important;-ms-flex-wrap:wrap!important;flex-wrap:wrap!important}.flex-sm-nowrap{-webkit-flex-wrap:nowrap!important;-ms-flex-wrap:nowrap!important;flex-wrap:nowrap!important}.flex-sm-wrap-reverse{-webkit-flex-wrap:wrap-reverse!important;-ms-flex-wrap:wrap-reverse!important;flex-wrap:wrap-reverse!important}.flex-sm-fill{-webkit-box-flex:1!important;-webkit-flex:1 1 auto!important;-ms-flex:1 1 auto!important;flex:auto!important}.flex-sm-grow-0{-webkit-box-flex:0!important;-webkit-flex-grow:0!important;-ms-flex-positive:0!important;flex-grow:0!important}.flex-sm-grow-1{-webkit-box-flex:1!important;-webkit-flex-grow:1!important;-ms-flex-positive:1!important;flex-grow:1!important}.flex-sm-shrink-0{-webkit-flex-shrink:0!important;-ms-flex-negative:0!important;flex-shrink:0!important}.flex-sm-shrink-1{-webkit-flex-shrink:1!important;-ms-flex-negative:1!important;flex-shrink:1!important}.justify-content-sm-start{-webkit-box-pack:start!important;-webkit-justify-content:flex-start!important;-ms-flex-pack:start!important;justify-content:flex-start!important}.justify-content-sm-end{-webkit-box-pack:end!important;-webkit-justify-content:flex-end!important;-ms-flex-pack:end!important;justify-content:flex-end!important}.justify-content-sm-center{-webkit-box-pack:center!important;-webkit-justify-content:center!important;-ms-flex-pack:center!important;justify-content:center!important}.justify-content-sm-between{-webkit-box-pack:justify!important;-webkit-justify-content:space-between!important;-ms-flex-pack:justify!important;justify-content:space-between!important}.justify-content-sm-around{-webkit-justify-content:space-around!important;-ms-flex-pack:distribute!important;justify-content:space-around!important}.align-items-sm-start{-webkit-box-align:start!important;-webkit-align-items:flex-start!important;-ms-flex-align:start!important;align-items:flex-start!important}.align-items-sm-end{-webkit-box-align:end!important;-webkit-align-items:flex-end!important;-ms-flex-align:end!important;align-items:flex-end!important}.align-items-sm-center{-webkit-box-align:center!important;-webkit-align-items:center!important;-ms-flex-align:center!important;align-items:center!important}.align-items-sm-baseline{-webkit-box-align:baseline!important;-webkit-align-items:baseline!important;-ms-flex-align:baseline!important;align-items:baseline!important}.align-items-sm-stretch{-webkit-box-align:stretch!important;-webkit-align-items:stretch!important;-ms-flex-align:stretch!important;align-items:stretch!important}.align-content-sm-start{-webkit-align-content:flex-start!important;-m
s-flex-line-pack:start!important;align-content:flex-start!important}.align-content-sm-end{-webkit-align-content:flex-end!important;-ms-flex-line-pack:end!important;align-content:flex-end!important}.align-content-sm-center{-webkit-align-content:center!important;-ms-flex-line-pack:center!important;align-content:center!important}.align-content-sm-between{-webkit-align-content:space-between!important;-ms-flex-line-pack:justify!important;align-content:space-between!important}.align-content-sm-around{-webkit-align-content:space-around!important;-ms-flex-line-pack:distribute!important;align-content:space-around!important}.align-content-sm-stretch{-webkit-align-content:stretch!important;-ms-flex-line-pack:stretch!important;align-content:stretch!important}.align-self-sm-auto{-webkit-align-self:auto!important;-ms-flex-item-align:auto!important;align-self:auto!important}.align-self-sm-start{-webkit-align-self:flex-start!important;-ms-flex-item-align:start!important;align-self:flex-start!important}.align-self-sm-end{-webkit-align-self:flex-end!important;-ms-flex-item-align:end!important;align-self:flex-end!important}.align-self-sm-center{-webkit-align-self:center!important;-ms-flex-item-align:center!important;align-self:center!important}.align-self-sm-baseline{-webkit-align-self:baseline!important;-ms-flex-item-align:baseline!important;align-self:baseline!important}.align-self-sm-stretch{-webkit-align-self:stretch!important;-ms-flex-item-align:stretch!important;align-self:stretch!important}}@media(min-width:768px){.flex-md-row{-webkit-box-orient:horizontal!important;-webkit-box-direction:normal!important;-webkit-flex-direction:row!important;-ms-flex-direction:row!important;flex-direction:row!important}.flex-md-column{-webkit-box-orient:vertical!important;-webkit-box-direction:normal!important;-webkit-flex-direction:column!important;-ms-flex-direction:column!important;flex-direction:column!important}.flex-md-row-reverse{-webkit-box-orient:horizontal!important;-webkit-box-direction:reverse!important;-webkit-flex-direction:row-reverse!important;-ms-flex-direction:row-reverse!important;flex-direction:row-reverse!important}.flex-md-column-reverse{-webkit-box-orient:vertical!important;-webkit-box-direction:reverse!important;-webkit-flex-direction:column-reverse!important;-ms-flex-direction:column-reverse!important;flex-direction:column-reverse!important}.flex-md-wrap{-webkit-flex-wrap:wrap!important;-ms-flex-wrap:wrap!important;flex-wrap:wrap!important}.flex-md-nowrap{-webkit-flex-wrap:nowrap!important;-ms-flex-wrap:nowrap!important;flex-wrap:nowrap!important}.flex-md-wrap-reverse{-webkit-flex-wrap:wrap-reverse!important;-ms-flex-wrap:wrap-reverse!important;flex-wrap:wrap-reverse!important}.flex-md-fill{-webkit-box-flex:1!important;-webkit-flex:1 1 auto!important;-ms-flex:1 1 
auto!important;flex:auto!important}.flex-md-grow-0{-webkit-box-flex:0!important;-webkit-flex-grow:0!important;-ms-flex-positive:0!important;flex-grow:0!important}.flex-md-grow-1{-webkit-box-flex:1!important;-webkit-flex-grow:1!important;-ms-flex-positive:1!important;flex-grow:1!important}.flex-md-shrink-0{-webkit-flex-shrink:0!important;-ms-flex-negative:0!important;flex-shrink:0!important}.flex-md-shrink-1{-webkit-flex-shrink:1!important;-ms-flex-negative:1!important;flex-shrink:1!important}.justify-content-md-start{-webkit-box-pack:start!important;-webkit-justify-content:flex-start!important;-ms-flex-pack:start!important;justify-content:flex-start!important}.justify-content-md-end{-webkit-box-pack:end!important;-webkit-justify-content:flex-end!important;-ms-flex-pack:end!important;justify-content:flex-end!important}.justify-content-md-center{-webkit-box-pack:center!important;-webkit-justify-content:center!important;-ms-flex-pack:center!important;justify-content:center!important}.justify-content-md-between{-webkit-box-pack:justify!important;-webkit-justify-content:space-between!important;-ms-flex-pack:justify!important;justify-content:space-between!important}.justify-content-md-around{-webkit-justify-content:space-around!important;-ms-flex-pack:distribute!important;justify-content:space-around!important}.align-items-md-start{-webkit-box-align:start!important;-webkit-align-items:flex-start!important;-ms-flex-align:start!important;align-items:flex-start!important}.align-items-md-end{-webkit-box-align:end!important;-webkit-align-items:flex-end!important;-ms-flex-align:end!important;align-items:flex-end!important}.align-items-md-center{-webkit-box-align:center!important;-webkit-align-items:center!important;-ms-flex-align:center!important;align-items:center!important}.align-items-md-baseline{-webkit-box-align:baseline!important;-webkit-align-items:baseline!important;-ms-flex-align:baseline!important;align-items:baseline!important}.align-items-md-stretch{-webkit-box-align:stretch!important;-webkit-align-items:stretch!important;-ms-flex-align:stretch!important;align-items:stretch!important}.align-content-md-start{-webkit-align-content:flex-start!important;-ms-flex-line-pack:start!important;align-content:flex-start!important}.align-content-md-end{-webkit-align-content:flex-end!important;-ms-flex-line-pack:end!important;align-content:flex-end!important}.align-content-md-center{-webkit-align-content:center!important;-ms-flex-line-pack:center!important;align-content:center!important}.align-content-md-between{-webkit-align-content:space-between!important;-ms-flex-line-pack:justify!important;align-content:space-between!important}.align-content-md-around{-webkit-align-content:space-around!important;-ms-flex-line-pack:distribute!important;align-content:space-around!important}.align-content-md-stretch{-webkit-align-content:stretch!important;-ms-flex-line-pack:stretch!important;align-content:stretch!important}.align-self-md-auto{-webkit-align-self:auto!important;-ms-flex-item-align:auto!important;align-self:auto!important}.align-self-md-start{-webkit-align-self:flex-start!important;-ms-flex-item-align:start!important;align-self:flex-start!important}.align-self-md-end{-webkit-align-self:flex-end!important;-ms-flex-item-align:end!important;align-self:flex-end!important}.align-self-md-center{-webkit-align-self:center!important;-ms-flex-item-align:center!important;align-self:center!important}.align-self-md-baseline{-webkit-align-self:baseline!important;-ms-flex-item-align:baseline!important;align-self:baseline
!important}.align-self-md-stretch{-webkit-align-self:stretch!important;-ms-flex-item-align:stretch!important;align-self:stretch!important}}@media(min-width:992px){.flex-lg-row{-webkit-box-orient:horizontal!important;-webkit-box-direction:normal!important;-webkit-flex-direction:row!important;-ms-flex-direction:row!important;flex-direction:row!important}.flex-lg-column{-webkit-box-orient:vertical!important;-webkit-box-direction:normal!important;-webkit-flex-direction:column!important;-ms-flex-direction:column!important;flex-direction:column!important}.flex-lg-row-reverse{-webkit-box-orient:horizontal!important;-webkit-box-direction:reverse!important;-webkit-flex-direction:row-reverse!important;-ms-flex-direction:row-reverse!important;flex-direction:row-reverse!important}.flex-lg-column-reverse{-webkit-box-orient:vertical!important;-webkit-box-direction:reverse!important;-webkit-flex-direction:column-reverse!important;-ms-flex-direction:column-reverse!important;flex-direction:column-reverse!important}.flex-lg-wrap{-webkit-flex-wrap:wrap!important;-ms-flex-wrap:wrap!important;flex-wrap:wrap!important}.flex-lg-nowrap{-webkit-flex-wrap:nowrap!important;-ms-flex-wrap:nowrap!important;flex-wrap:nowrap!important}.flex-lg-wrap-reverse{-webkit-flex-wrap:wrap-reverse!important;-ms-flex-wrap:wrap-reverse!important;flex-wrap:wrap-reverse!important}.flex-lg-fill{-webkit-box-flex:1!important;-webkit-flex:1 1 auto!important;-ms-flex:1 1 auto!important;flex:auto!important}.flex-lg-grow-0{-webkit-box-flex:0!important;-webkit-flex-grow:0!important;-ms-flex-positive:0!important;flex-grow:0!important}.flex-lg-grow-1{-webkit-box-flex:1!important;-webkit-flex-grow:1!important;-ms-flex-positive:1!important;flex-grow:1!important}.flex-lg-shrink-0{-webkit-flex-shrink:0!important;-ms-flex-negative:0!important;flex-shrink:0!important}.flex-lg-shrink-1{-webkit-flex-shrink:1!important;-ms-flex-negative:1!important;flex-shrink:1!important}.justify-content-lg-start{-webkit-box-pack:start!important;-webkit-justify-content:flex-start!important;-ms-flex-pack:start!important;justify-content:flex-start!important}.justify-content-lg-end{-webkit-box-pack:end!important;-webkit-justify-content:flex-end!important;-ms-flex-pack:end!important;justify-content:flex-end!important}.justify-content-lg-center{-webkit-box-pack:center!important;-webkit-justify-content:center!important;-ms-flex-pack:center!important;justify-content:center!important}.justify-content-lg-between{-webkit-box-pack:justify!important;-webkit-justify-content:space-between!important;-ms-flex-pack:justify!important;justify-content:space-between!important}.justify-content-lg-around{-webkit-justify-content:space-around!important;-ms-flex-pack:distribute!important;justify-content:space-around!important}.align-items-lg-start{-webkit-box-align:start!important;-webkit-align-items:flex-start!important;-ms-flex-align:start!important;align-items:flex-start!important}.align-items-lg-end{-webkit-box-align:end!important;-webkit-align-items:flex-end!important;-ms-flex-align:end!important;align-items:flex-end!important}.align-items-lg-center{-webkit-box-align:center!important;-webkit-align-items:center!important;-ms-flex-align:center!important;align-items:center!important}.align-items-lg-baseline{-webkit-box-align:baseline!important;-webkit-align-items:baseline!important;-ms-flex-align:baseline!important;align-items:baseline!important}.align-items-lg-stretch{-webkit-box-align:stretch!important;-webkit-align-items:stretch!important;-ms-flex-align:stretch!important;align-items:stretch!
important}.align-content-lg-start{-webkit-align-content:flex-start!important;-ms-flex-line-pack:start!important;align-content:flex-start!important}.align-content-lg-end{-webkit-align-content:flex-end!important;-ms-flex-line-pack:end!important;align-content:flex-end!important}.align-content-lg-center{-webkit-align-content:center!important;-ms-flex-line-pack:center!important;align-content:center!important}.align-content-lg-between{-webkit-align-content:space-between!important;-ms-flex-line-pack:justify!important;align-content:space-between!important}.align-content-lg-around{-webkit-align-content:space-around!important;-ms-flex-line-pack:distribute!important;align-content:space-around!important}.align-content-lg-stretch{-webkit-align-content:stretch!important;-ms-flex-line-pack:stretch!important;align-content:stretch!important}.align-self-lg-auto{-webkit-align-self:auto!important;-ms-flex-item-align:auto!important;align-self:auto!important}.align-self-lg-start{-webkit-align-self:flex-start!important;-ms-flex-item-align:start!important;align-self:flex-start!important}.align-self-lg-end{-webkit-align-self:flex-end!important;-ms-flex-item-align:end!important;align-self:flex-end!important}.align-self-lg-center{-webkit-align-self:center!important;-ms-flex-item-align:center!important;align-self:center!important}.align-self-lg-baseline{-webkit-align-self:baseline!important;-ms-flex-item-align:baseline!important;align-self:baseline!important}.align-self-lg-stretch{-webkit-align-self:stretch!important;-ms-flex-item-align:stretch!important;align-self:stretch!important}}@media(min-width:1200px){.flex-xl-row{-webkit-box-orient:horizontal!important;-webkit-box-direction:normal!important;-webkit-flex-direction:row!important;-ms-flex-direction:row!important;flex-direction:row!important}.flex-xl-column{-webkit-box-orient:vertical!important;-webkit-box-direction:normal!important;-webkit-flex-direction:column!important;-ms-flex-direction:column!important;flex-direction:column!important}.flex-xl-row-reverse{-webkit-box-orient:horizontal!important;-webkit-box-direction:reverse!important;-webkit-flex-direction:row-reverse!important;-ms-flex-direction:row-reverse!important;flex-direction:row-reverse!important}.flex-xl-column-reverse{-webkit-box-orient:vertical!important;-webkit-box-direction:reverse!important;-webkit-flex-direction:column-reverse!important;-ms-flex-direction:column-reverse!important;flex-direction:column-reverse!important}.flex-xl-wrap{-webkit-flex-wrap:wrap!important;-ms-flex-wrap:wrap!important;flex-wrap:wrap!important}.flex-xl-nowrap{-webkit-flex-wrap:nowrap!important;-ms-flex-wrap:nowrap!important;flex-wrap:nowrap!important}.flex-xl-wrap-reverse{-webkit-flex-wrap:wrap-reverse!important;-ms-flex-wrap:wrap-reverse!important;flex-wrap:wrap-reverse!important}.flex-xl-fill{-webkit-box-flex:1!important;-webkit-flex:1 1 auto!important;-ms-flex:1 1 
auto!important;flex:auto!important}.flex-xl-grow-0{-webkit-box-flex:0!important;-webkit-flex-grow:0!important;-ms-flex-positive:0!important;flex-grow:0!important}.flex-xl-grow-1{-webkit-box-flex:1!important;-webkit-flex-grow:1!important;-ms-flex-positive:1!important;flex-grow:1!important}.flex-xl-shrink-0{-webkit-flex-shrink:0!important;-ms-flex-negative:0!important;flex-shrink:0!important}.flex-xl-shrink-1{-webkit-flex-shrink:1!important;-ms-flex-negative:1!important;flex-shrink:1!important}.justify-content-xl-start{-webkit-box-pack:start!important;-webkit-justify-content:flex-start!important;-ms-flex-pack:start!important;justify-content:flex-start!important}.justify-content-xl-end{-webkit-box-pack:end!important;-webkit-justify-content:flex-end!important;-ms-flex-pack:end!important;justify-content:flex-end!important}.justify-content-xl-center{-webkit-box-pack:center!important;-webkit-justify-content:center!important;-ms-flex-pack:center!important;justify-content:center!important}.justify-content-xl-between{-webkit-box-pack:justify!important;-webkit-justify-content:space-between!important;-ms-flex-pack:justify!important;justify-content:space-between!important}.justify-content-xl-around{-webkit-justify-content:space-around!important;-ms-flex-pack:distribute!important;justify-content:space-around!important}.align-items-xl-start{-webkit-box-align:start!important;-webkit-align-items:flex-start!important;-ms-flex-align:start!important;align-items:flex-start!important}.align-items-xl-end{-webkit-box-align:end!important;-webkit-align-items:flex-end!important;-ms-flex-align:end!important;align-items:flex-end!important}.align-items-xl-center{-webkit-box-align:center!important;-webkit-align-items:center!important;-ms-flex-align:center!important;align-items:center!important}.align-items-xl-baseline{-webkit-box-align:baseline!important;-webkit-align-items:baseline!important;-ms-flex-align:baseline!important;align-items:baseline!important}.align-items-xl-stretch{-webkit-box-align:stretch!important;-webkit-align-items:stretch!important;-ms-flex-align:stretch!important;align-items:stretch!important}.align-content-xl-start{-webkit-align-content:flex-start!important;-ms-flex-line-pack:start!important;align-content:flex-start!important}.align-content-xl-end{-webkit-align-content:flex-end!important;-ms-flex-line-pack:end!important;align-content:flex-end!important}.align-content-xl-center{-webkit-align-content:center!important;-ms-flex-line-pack:center!important;align-content:center!important}.align-content-xl-between{-webkit-align-content:space-between!important;-ms-flex-line-pack:justify!important;align-content:space-between!important}.align-content-xl-around{-webkit-align-content:space-around!important;-ms-flex-line-pack:distribute!important;align-content:space-around!important}.align-content-xl-stretch{-webkit-align-content:stretch!important;-ms-flex-line-pack:stretch!important;align-content:stretch!important}.align-self-xl-auto{-webkit-align-self:auto!important;-ms-flex-item-align:auto!important;align-self:auto!important}.align-self-xl-start{-webkit-align-self:flex-start!important;-ms-flex-item-align:start!important;align-self:flex-start!important}.align-self-xl-end{-webkit-align-self:flex-end!important;-ms-flex-item-align:end!important;align-self:flex-end!important}.align-self-xl-center{-webkit-align-self:center!important;-ms-flex-item-align:center!important;align-self:center!important}.align-self-xl-baseline{-webkit-align-self:baseline!important;-ms-flex-item-align:baseline!important;align-self:baseline
!important}.align-self-xl-stretch{-webkit-align-self:stretch!important;-ms-flex-item-align:stretch!important;align-self:stretch!important}}.float-left{float:left!important}.float-right{float:right!important}.float-none{float:none!important}@media(min-width:576px){.float-sm-left{float:left!important}.float-sm-right{float:right!important}.float-sm-none{float:none!important}}@media(min-width:768px){.float-md-left{float:left!important}.float-md-right{float:right!important}.float-md-none{float:none!important}}@media(min-width:992px){.float-lg-left{float:left!important}.float-lg-right{float:right!important}.float-lg-none{float:none!important}}@media(min-width:1200px){.float-xl-left{float:left!important}.float-xl-right{float:right!important}.float-xl-none{float:none!important}}.position-static{position:static!important}.position-relative{position:relative!important}.position-absolute{position:absolute!important}.position-fixed{position:fixed!important}.position-sticky{position:-webkit-sticky!important;position:sticky!important}.fixed-top{position:fixed;top:0;right:0;left:0;z-index:1030}.fixed-bottom{position:fixed;right:0;bottom:0;left:0;z-index:1030}@supports((position:-webkit-sticky) or (position:sticky)){.sticky-top{position:-webkit-sticky;position:sticky;top:0;z-index:1020}}.sr-only{position:absolute;width:1px;height:1px;padding:0;overflow:hidden;clip:rect(0,0,0,0);white-space:nowrap;border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;overflow:visible;clip:auto;white-space:normal}.shadow-sm{-webkit-box-shadow:0 .125rem .25rem rgba(0,0,0,.075)!important;box-shadow:0 .125rem .25rem rgba(0,0,0,.075)!important}.shadow{-webkit-box-shadow:0 .5rem 1rem rgba(0,0,0,.15)!important;box-shadow:0 .5rem 1rem rgba(0,0,0,.15)!important}.shadow-lg{-webkit-box-shadow:0 1rem 3rem rgba(0,0,0,.175)!important;box-shadow:0 1rem 3rem 
rgba(0,0,0,.175)!important}.shadow-none{-webkit-box-shadow:none!important;box-shadow:none!important}.w-25{width:25%!important}.w-50{width:50%!important}.w-75{width:75%!important}.w-100{width:100%!important}.w-auto{width:auto!important}.h-25{height:25%!important}.h-50{height:50%!important}.h-75{height:75%!important}.h-100{height:100%!important}.h-auto{height:auto!important}.mw-100{max-width:100%!important}.mh-100{max-height:100%!important}.m-0{margin:0!important}.mt-0,.my-0{margin-top:0!important}.mr-0,.mx-0{margin-right:0!important}.mb-0,.my-0{margin-bottom:0!important}.ml-0,.mx-0{margin-left:0!important}.m-1{margin:.25rem!important}.mt-1,.my-1{margin-top:.25rem!important}.mr-1,.mx-1{margin-right:.25rem!important}.mb-1,.my-1{margin-bottom:.25rem!important}.ml-1,.mx-1{margin-left:.25rem!important}.m-2{margin:.5rem!important}.mt-2,.my-2{margin-top:.5rem!important}.mr-2,.mx-2{margin-right:.5rem!important}.mb-2,.my-2{margin-bottom:.5rem!important}.ml-2,.mx-2{margin-left:.5rem!important}.m-3{margin:1rem!important}.mt-3,.my-3{margin-top:1rem!important}.mr-3,.mx-3{margin-right:1rem!important}.mb-3,.my-3{margin-bottom:1rem!important}.ml-3,.mx-3{margin-left:1rem!important}.m-4{margin:1.5rem!important}.mt-4,.my-4{margin-top:1.5rem!important}.mr-4,.mx-4{margin-right:1.5rem!important}.mb-4,.my-4{margin-bottom:1.5rem!important}.ml-4,.mx-4{margin-left:1.5rem!important}.m-5{margin:3rem!important}.mt-5,.my-5{margin-top:3rem!important}.mr-5,.mx-5{margin-right:3rem!important}.mb-5,.my-5{margin-bottom:3rem!important}.ml-5,.mx-5{margin-left:3rem!important}.p-0{padding:0!important}.pt-0,.py-0{padding-top:0!important}.pr-0,.px-0{padding-right:0!important}.pb-0,.py-0{padding-bottom:0!important}.pl-0,.px-0{padding-left:0!important}.p-1{padding:.25rem!important}.pt-1,.py-1{padding-top:.25rem!important}.pr-1,.px-1{padding-right:.25rem!important}.pb-1,.py-1{padding-bottom:.25rem!important}.pl-1,.px-1{padding-left:.25rem!important}.p-2{padding:.5rem!important}.pt-2,.py-2{padding-top:.5rem!important}.pr-2,.px-2{padding-right:.5rem!important}.pb-2,.py-2{padding-bottom:.5rem!important}.pl-2,.px-2{padding-left:.5rem!important}.p-3{padding:1rem!important}.pt-3,.py-3{padding-top:1rem!important}.pr-3,.px-3{padding-right:1rem!important}.pb-3,.py-3{padding-bottom:1rem!important}.pl-3,.px-3{padding-left:1rem!important}.p-4{padding:1.5rem!important}.pt-4,.py-4{padding-top:1.5rem!important}.pr-4,.px-4{padding-right:1.5rem!important}.pb-4,.py-4{padding-bottom:1.5rem!important}.pl-4,.px-4{padding-left:1.5rem!important}.p-5{padding:3rem!important}.pt-5,.py-5{padding-top:3rem!important}.pr-5,.px-5{padding-right:3rem!important}.pb-5,.py-5{padding-bottom:3rem!important}.pl-5,.px-5{padding-left:3rem!important}.m-auto{margin:auto!important}.mt-auto,.my-auto{margin-top:auto!important}.mr-auto,.mx-auto{margin-right:auto!important}.mb-auto,.my-auto{margin-bottom:auto!important}.ml-auto,.mx-auto{margin-left:auto!important}@media(min-width:576px){.m-sm-0{margin:0!important}.mt-sm-0,.my-sm-0{margin-top:0!important}.mr-sm-0,.mx-sm-0{margin-right:0!important}.mb-sm-0,.my-sm-0{margin-bottom:0!important}.ml-sm-0,.mx-sm-0{margin-left:0!important}.m-sm-1{margin:.25rem!important}.mt-sm-1,.my-sm-1{margin-top:.25rem!important}.mr-sm-1,.mx-sm-1{margin-right:.25rem!important}.mb-sm-1,.my-sm-1{margin-bottom:.25rem!important}.ml-sm-1,.mx-sm-1{margin-left:.25rem!important}.m-sm-2{margin:.5rem!important}.mt-sm-2,.my-sm-2{margin-top:.5rem!important}.mr-sm-2,.mx-sm-2{margin-right:.5rem!important}.mb-sm-2,.my-sm-2{margin-bottom:.5rem!important}.ml-sm-2,.mx-sm-2
{margin-left:.5rem!important}.m-sm-3{margin:1rem!important}.mt-sm-3,.my-sm-3{margin-top:1rem!important}.mr-sm-3,.mx-sm-3{margin-right:1rem!important}.mb-sm-3,.my-sm-3{margin-bottom:1rem!important}.ml-sm-3,.mx-sm-3{margin-left:1rem!important}.m-sm-4{margin:1.5rem!important}.mt-sm-4,.my-sm-4{margin-top:1.5rem!important}.mr-sm-4,.mx-sm-4{margin-right:1.5rem!important}.mb-sm-4,.my-sm-4{margin-bottom:1.5rem!important}.ml-sm-4,.mx-sm-4{margin-left:1.5rem!important}.m-sm-5{margin:3rem!important}.mt-sm-5,.my-sm-5{margin-top:3rem!important}.mr-sm-5,.mx-sm-5{margin-right:3rem!important}.mb-sm-5,.my-sm-5{margin-bottom:3rem!important}.ml-sm-5,.mx-sm-5{margin-left:3rem!important}.p-sm-0{padding:0!important}.pt-sm-0,.py-sm-0{padding-top:0!important}.pr-sm-0,.px-sm-0{padding-right:0!important}.pb-sm-0,.py-sm-0{padding-bottom:0!important}.pl-sm-0,.px-sm-0{padding-left:0!important}.p-sm-1{padding:.25rem!important}.pt-sm-1,.py-sm-1{padding-top:.25rem!important}.pr-sm-1,.px-sm-1{padding-right:.25rem!important}.pb-sm-1,.py-sm-1{padding-bottom:.25rem!important}.pl-sm-1,.px-sm-1{padding-left:.25rem!important}.p-sm-2{padding:.5rem!important}.pt-sm-2,.py-sm-2{padding-top:.5rem!important}.pr-sm-2,.px-sm-2{padding-right:.5rem!important}.pb-sm-2,.py-sm-2{padding-bottom:.5rem!important}.pl-sm-2,.px-sm-2{padding-left:.5rem!important}.p-sm-3{padding:1rem!important}.pt-sm-3,.py-sm-3{padding-top:1rem!important}.pr-sm-3,.px-sm-3{padding-right:1rem!important}.pb-sm-3,.py-sm-3{padding-bottom:1rem!important}.pl-sm-3,.px-sm-3{padding-left:1rem!important}.p-sm-4{padding:1.5rem!important}.pt-sm-4,.py-sm-4{padding-top:1.5rem!important}.pr-sm-4,.px-sm-4{padding-right:1.5rem!important}.pb-sm-4,.py-sm-4{padding-bottom:1.5rem!important}.pl-sm-4,.px-sm-4{padding-left:1.5rem!important}.p-sm-5{padding:3rem!important}.pt-sm-5,.py-sm-5{padding-top:3rem!important}.pr-sm-5,.px-sm-5{padding-right:3rem!important}.pb-sm-5,.py-sm-5{padding-bottom:3rem!important}.pl-sm-5,.px-sm-5{padding-left:3rem!important}.m-sm-auto{margin:auto!important}.mt-sm-auto,.my-sm-auto{margin-top:auto!important}.mr-sm-auto,.mx-sm-auto{margin-right:auto!important}.mb-sm-auto,.my-sm-auto{margin-bottom:auto!important}.ml-sm-auto,.mx-sm-auto{margin-left:auto!important}}@media(min-width:768px){.m-md-0{margin:0!important}.mt-md-0,.my-md-0{margin-top:0!important}.mr-md-0,.mx-md-0{margin-right:0!important}.mb-md-0,.my-md-0{margin-bottom:0!important}.ml-md-0,.mx-md-0{margin-left:0!important}.m-md-1{margin:.25rem!important}.mt-md-1,.my-md-1{margin-top:.25rem!important}.mr-md-1,.mx-md-1{margin-right:.25rem!important}.mb-md-1,.my-md-1{margin-bottom:.25rem!important}.ml-md-1,.mx-md-1{margin-left:.25rem!important}.m-md-2{margin:.5rem!important}.mt-md-2,.my-md-2{margin-top:.5rem!important}.mr-md-2,.mx-md-2{margin-right:.5rem!important}.mb-md-2,.my-md-2{margin-bottom:.5rem!important}.ml-md-2,.mx-md-2{margin-left:.5rem!important}.m-md-3{margin:1rem!important}.mt-md-3,.my-md-3{margin-top:1rem!important}.mr-md-3,.mx-md-3{margin-right:1rem!important}.mb-md-3,.my-md-3{margin-bottom:1rem!important}.ml-md-3,.mx-md-3{margin-left:1rem!important}.m-md-4{margin:1.5rem!important}.mt-md-4,.my-md-4{margin-top:1.5rem!important}.mr-md-4,.mx-md-4{margin-right:1.5rem!important}.mb-md-4,.my-md-4{margin-bottom:1.5rem!important}.ml-md-4,.mx-md-4{margin-left:1.5rem!important}.m-md-5{margin:3rem!important}.mt-md-5,.my-md-5{margin-top:3rem!important}.mr-md-5,.mx-md-5{margin-right:3rem!important}.mb-md-5,.my-md-5{margin-bottom:3rem!important}.ml-md-5,.mx-md-5{margin-left:3rem!important}.p-md-0{padding:0!impo
rtant}.pt-md-0,.py-md-0{padding-top:0!important}.pr-md-0,.px-md-0{padding-right:0!important}.pb-md-0,.py-md-0{padding-bottom:0!important}.pl-md-0,.px-md-0{padding-left:0!important}.p-md-1{padding:.25rem!important}.pt-md-1,.py-md-1{padding-top:.25rem!important}.pr-md-1,.px-md-1{padding-right:.25rem!important}.pb-md-1,.py-md-1{padding-bottom:.25rem!important}.pl-md-1,.px-md-1{padding-left:.25rem!important}.p-md-2{padding:.5rem!important}.pt-md-2,.py-md-2{padding-top:.5rem!important}.pr-md-2,.px-md-2{padding-right:.5rem!important}.pb-md-2,.py-md-2{padding-bottom:.5rem!important}.pl-md-2,.px-md-2{padding-left:.5rem!important}.p-md-3{padding:1rem!important}.pt-md-3,.py-md-3{padding-top:1rem!important}.pr-md-3,.px-md-3{padding-right:1rem!important}.pb-md-3,.py-md-3{padding-bottom:1rem!important}.pl-md-3,.px-md-3{padding-left:1rem!important}.p-md-4{padding:1.5rem!important}.pt-md-4,.py-md-4{padding-top:1.5rem!important}.pr-md-4,.px-md-4{padding-right:1.5rem!important}.pb-md-4,.py-md-4{padding-bottom:1.5rem!important}.pl-md-4,.px-md-4{padding-left:1.5rem!important}.p-md-5{padding:3rem!important}.pt-md-5,.py-md-5{padding-top:3rem!important}.pr-md-5,.px-md-5{padding-right:3rem!important}.pb-md-5,.py-md-5{padding-bottom:3rem!important}.pl-md-5,.px-md-5{padding-left:3rem!important}.m-md-auto{margin:auto!important}.mt-md-auto,.my-md-auto{margin-top:auto!important}.mr-md-auto,.mx-md-auto{margin-right:auto!important}.mb-md-auto,.my-md-auto{margin-bottom:auto!important}.ml-md-auto,.mx-md-auto{margin-left:auto!important}}@media(min-width:992px){.m-lg-0{margin:0!important}.mt-lg-0,.my-lg-0{margin-top:0!important}.mr-lg-0,.mx-lg-0{margin-right:0!important}.mb-lg-0,.my-lg-0{margin-bottom:0!important}.ml-lg-0,.mx-lg-0{margin-left:0!important}.m-lg-1{margin:.25rem!important}.mt-lg-1,.my-lg-1{margin-top:.25rem!important}.mr-lg-1,.mx-lg-1{margin-right:.25rem!important}.mb-lg-1,.my-lg-1{margin-bottom:.25rem!important}.ml-lg-1,.mx-lg-1{margin-left:.25rem!important}.m-lg-2{margin:.5rem!important}.mt-lg-2,.my-lg-2{margin-top:.5rem!important}.mr-lg-2,.mx-lg-2{margin-right:.5rem!important}.mb-lg-2,.my-lg-2{margin-bottom:.5rem!important}.ml-lg-2,.mx-lg-2{margin-left:.5rem!important}.m-lg-3{margin:1rem!important}.mt-lg-3,.my-lg-3{margin-top:1rem!important}.mr-lg-3,.mx-lg-3{margin-right:1rem!important}.mb-lg-3,.my-lg-3{margin-bottom:1rem!important}.ml-lg-3,.mx-lg-3{margin-left:1rem!important}.m-lg-4{margin:1.5rem!important}.mt-lg-4,.my-lg-4{margin-top:1.5rem!important}.mr-lg-4,.mx-lg-4{margin-right:1.5rem!important}.mb-lg-4,.my-lg-4{margin-bottom:1.5rem!important}.ml-lg-4,.mx-lg-4{margin-left:1.5rem!important}.m-lg-5{margin:3rem!important}.mt-lg-5,.my-lg-5{margin-top:3rem!important}.mr-lg-5,.mx-lg-5{margin-right:3rem!important}.mb-lg-5,.my-lg-5{margin-bottom:3rem!important}.ml-lg-5,.mx-lg-5{margin-left:3rem!important}.p-lg-0{padding:0!important}.pt-lg-0,.py-lg-0{padding-top:0!important}.pr-lg-0,.px-lg-0{padding-right:0!important}.pb-lg-0,.py-lg-0{padding-bottom:0!important}.pl-lg-0,.px-lg-0{padding-left:0!important}.p-lg-1{padding:.25rem!important}.pt-lg-1,.py-lg-1{padding-top:.25rem!important}.pr-lg-1,.px-lg-1{padding-right:.25rem!important}.pb-lg-1,.py-lg-1{padding-bottom:.25rem!important}.pl-lg-1,.px-lg-1{padding-left:.25rem!important}.p-lg-2{padding:.5rem!important}.pt-lg-2,.py-lg-2{padding-top:.5rem!important}.pr-lg-2,.px-lg-2{padding-right:.5rem!important}.pb-lg-2,.py-lg-2{padding-bottom:.5rem!important}.pl-lg-2,.px-lg-2{padding-left:.5rem!important}.p-lg-3{padding:1rem!important}.pt-lg-3,.py-lg-3{padding-top:1rem!im
portant}.pr-lg-3,.px-lg-3{padding-right:1rem!important}.pb-lg-3,.py-lg-3{padding-bottom:1rem!important}.pl-lg-3,.px-lg-3{padding-left:1rem!important}.p-lg-4{padding:1.5rem!important}.pt-lg-4,.py-lg-4{padding-top:1.5rem!important}.pr-lg-4,.px-lg-4{padding-right:1.5rem!important}.pb-lg-4,.py-lg-4{padding-bottom:1.5rem!important}.pl-lg-4,.px-lg-4{padding-left:1.5rem!important}.p-lg-5{padding:3rem!important}.pt-lg-5,.py-lg-5{padding-top:3rem!important}.pr-lg-5,.px-lg-5{padding-right:3rem!important}.pb-lg-5,.py-lg-5{padding-bottom:3rem!important}.pl-lg-5,.px-lg-5{padding-left:3rem!important}.m-lg-auto{margin:auto!important}.mt-lg-auto,.my-lg-auto{margin-top:auto!important}.mr-lg-auto,.mx-lg-auto{margin-right:auto!important}.mb-lg-auto,.my-lg-auto{margin-bottom:auto!important}.ml-lg-auto,.mx-lg-auto{margin-left:auto!important}}@media(min-width:1200px){.m-xl-0{margin:0!important}.mt-xl-0,.my-xl-0{margin-top:0!important}.mr-xl-0,.mx-xl-0{margin-right:0!important}.mb-xl-0,.my-xl-0{margin-bottom:0!important}.ml-xl-0,.mx-xl-0{margin-left:0!important}.m-xl-1{margin:.25rem!important}.mt-xl-1,.my-xl-1{margin-top:.25rem!important}.mr-xl-1,.mx-xl-1{margin-right:.25rem!important}.mb-xl-1,.my-xl-1{margin-bottom:.25rem!important}.ml-xl-1,.mx-xl-1{margin-left:.25rem!important}.m-xl-2{margin:.5rem!important}.mt-xl-2,.my-xl-2{margin-top:.5rem!important}.mr-xl-2,.mx-xl-2{margin-right:.5rem!important}.mb-xl-2,.my-xl-2{margin-bottom:.5rem!important}.ml-xl-2,.mx-xl-2{margin-left:.5rem!important}.m-xl-3{margin:1rem!important}.mt-xl-3,.my-xl-3{margin-top:1rem!important}.mr-xl-3,.mx-xl-3{margin-right:1rem!important}.mb-xl-3,.my-xl-3{margin-bottom:1rem!important}.ml-xl-3,.mx-xl-3{margin-left:1rem!important}.m-xl-4{margin:1.5rem!important}.mt-xl-4,.my-xl-4{margin-top:1.5rem!important}.mr-xl-4,.mx-xl-4{margin-right:1.5rem!important}.mb-xl-4,.my-xl-4{margin-bottom:1.5rem!important}.ml-xl-4,.mx-xl-4{margin-left:1.5rem!important}.m-xl-5{margin:3rem!important}.mt-xl-5,.my-xl-5{margin-top:3rem!important}.mr-xl-5,.mx-xl-5{margin-right:3rem!important}.mb-xl-5,.my-xl-5{margin-bottom:3rem!important}.ml-xl-5,.mx-xl-5{margin-left:3rem!important}.p-xl-0{padding:0!important}.pt-xl-0,.py-xl-0{padding-top:0!important}.pr-xl-0,.px-xl-0{padding-right:0!important}.pb-xl-0,.py-xl-0{padding-bottom:0!important}.pl-xl-0,.px-xl-0{padding-left:0!important}.p-xl-1{padding:.25rem!important}.pt-xl-1,.py-xl-1{padding-top:.25rem!important}.pr-xl-1,.px-xl-1{padding-right:.25rem!important}.pb-xl-1,.py-xl-1{padding-bottom:.25rem!important}.pl-xl-1,.px-xl-1{padding-left:.25rem!important}.p-xl-2{padding:.5rem!important}.pt-xl-2,.py-xl-2{padding-top:.5rem!important}.pr-xl-2,.px-xl-2{padding-right:.5rem!important}.pb-xl-2,.py-xl-2{padding-bottom:.5rem!important}.pl-xl-2,.px-xl-2{padding-left:.5rem!important}.p-xl-3{padding:1rem!important}.pt-xl-3,.py-xl-3{padding-top:1rem!important}.pr-xl-3,.px-xl-3{padding-right:1rem!important}.pb-xl-3,.py-xl-3{padding-bottom:1rem!important}.pl-xl-3,.px-xl-3{padding-left:1rem!important}.p-xl-4{padding:1.5rem!important}.pt-xl-4,.py-xl-4{padding-top:1.5rem!important}.pr-xl-4,.px-xl-4{padding-right:1.5rem!important}.pb-xl-4,.py-xl-4{padding-bottom:1.5rem!important}.pl-xl-4,.px-xl-4{padding-left:1.5rem!important}.p-xl-5{padding:3rem!important}.pt-xl-5,.py-xl-5{padding-top:3rem!important}.pr-xl-5,.px-xl-5{padding-right:3rem!important}.pb-xl-5,.py-xl-5{padding-bottom:3rem!important}.pl-xl-5,.px-xl-5{padding-left:3rem!important}.m-xl-auto{margin:auto!important}.mt-xl-auto,.my-xl-auto{margin-top:auto!important}.mr-xl-auto,.mx-xl-a
uto{margin-right:auto!important}.mb-xl-auto,.my-xl-auto{margin-bottom:auto!important}.ml-xl-auto,.mx-xl-auto{margin-left:auto!important}}.text-monospace{font-family:SFMono-Regular,Menlo,Monaco,Consolas,liberation mono,courier new,monospace}.text-justify{text-align:justify!important}.text-nowrap{white-space:nowrap!important}.text-truncate{overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.text-left{text-align:left!important}.text-right{text-align:right!important}.text-center{text-align:center!important}@media(min-width:576px){.text-sm-left{text-align:left!important}.text-sm-right{text-align:right!important}.text-sm-center{text-align:center!important}}@media(min-width:768px){.text-md-left{text-align:left!important}.text-md-right{text-align:right!important}.text-md-center{text-align:center!important}}@media(min-width:992px){.text-lg-left{text-align:left!important}.text-lg-right{text-align:right!important}.text-lg-center{text-align:center!important}}@media(min-width:1200px){.text-xl-left{text-align:left!important}.text-xl-right{text-align:right!important}.text-xl-center{text-align:center!important}}.text-lowercase{text-transform:lowercase!important}.text-uppercase{text-transform:uppercase!important}.text-capitalize{text-transform:capitalize!important}.font-weight-light{font-weight:300!important}.font-weight-normal{font-weight:400!important}.font-weight-bold{font-weight:700!important}.font-italic{font-style:italic!important}.text-white{color:#fff!important}.text-primary{color:#30638e!important}a.text-primary:hover,a.text-primary:focus{color:#234868!important}.text-secondary{color:#ffa630!important}a.text-secondary:hover,a.text-secondary:focus{color:#fc9000!important}.text-success{color:#3772ff!important}a.text-success:hover,a.text-success:focus{color:#044eff!important}.text-info{color:#c0e0de!important}a.text-info:hover,a.text-info:focus{color:#9ecfcc!important}.text-warning{color:#ed6a5a!important}a.text-warning:hover,a.text-warning:focus{color:#e8402c!important}.text-danger{color:#ed6a5a!important}a.text-danger:hover,a.text-danger:focus{color:#e8402c!important}.text-light{color:#d3f3ee!important}a.text-light:hover,a.text-light:focus{color:#abe8df!important}.text-dark{color:#403f4c!important}a.text-dark:hover,a.text-dark:focus{color:#292830!important}.text-body{color:#222!important}.text-muted{color:#888!important}.text-black-50{color:rgba(0,0,0,.5)!important}.text-white-50{color:rgba(255,255,255,.5)!important}.text-hide{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.visible{visibility:visible!important}.invisible{visibility:hidden!important}@media print{*,*::before,*::after{text-shadow:none!important;-webkit-box-shadow:none!important;box-shadow:none!important}a:not(.btn){text-decoration:underline}abbr[title]::after{content:" (" attr(title)")"}pre{white-space:pre-wrap!important}pre,blockquote{border:1px solid #adb5bd;page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}p,h2,h3{orphans:3;widows:3}h2,h3{page-break-after:avoid}@page{size:a3}body{min-width:992px!important}.container{min-width:992px!important}.navbar{display:none}.badge{border:1px solid #000}.table,.td-content>table,.td-box .row.section>table{border-collapse:collapse!important}.table td,.td-content>table td,.td-box .row.section>table td,.table th,.td-content>table th,.td-box .row.section>table th{background-color:#fff!important}.table-bordered th,.table-bordered td{border:1px solid #dee2e6!important}.table-dark{color:inherit}.table-dark th,.table-dark 
td,.table-dark thead th,.table-dark tbody+tbody{border-color:#dee2e6}.table .thead-dark th,.td-content>table .thead-dark th,.td-box .row.section>table .thead-dark th{color:inherit;border-color:#dee2e6}}/*!* Font Awesome Free 5.10.1 by @fontawesome - https://fontawesome.com +* License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License)*/.fa,.fas,.far,.fal,.fad,.fab{-moz-osx-font-smoothing:grayscale;-webkit-font-smoothing:antialiased;display:inline-block;font-style:normal;font-variant:normal;text-rendering:auto;line-height:1}.fa-lg{font-size:1.33333333em;line-height:.75em;vertical-align:-.0667em}.fa-xs{font-size:.75em}.fa-sm{font-size:.875em}.fa-1x{font-size:1em}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-6x{font-size:6em}.fa-7x{font-size:7em}.fa-8x{font-size:8em}.fa-9x{font-size:9em}.fa-10x{font-size:10em}.fa-fw{text-align:center;width:1.25em}.fa-ul{list-style-type:none;margin-left:2.5em;padding-left:0}.fa-ul>li{position:relative}.fa-li{left:-2em;position:absolute;text-align:center;width:2em;line-height:inherit}.fa-border{border:solid .08em #eee;border-radius:.1em;padding:.2em .25em .15em}.fa-pull-left{float:left}.fa-pull-right{float:right}.fa.fa-pull-left,.fas.fa-pull-left,.far.fa-pull-left,.fal.fa-pull-left,.fab.fa-pull-left{margin-right:.3em}.fa.fa-pull-right,.fas.fa-pull-right,.far.fa-pull-right,.fal.fa-pull-right,.fab.fa-pull-right{margin-left:.3em}.fa-spin{-webkit-animation:fa-spin 2s infinite linear;-o-animation:fa-spin 2s infinite linear;animation:fa-spin 2s infinite linear}.fa-pulse{-webkit-animation:fa-spin 1s infinite steps(8);-o-animation:fa-spin 1s infinite steps(8);animation:fa-spin 1s infinite steps(8)}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0);transform:rotate(0)}100%{-webkit-transform:rotate(360deg);transform:rotate(360deg)}}@-o-keyframes fa-spin{0%{-o-transform:rotate(0);transform:rotate(0)}100%{-o-transform:rotate(360deg);transform:rotate(360deg)}}@keyframes fa-spin{0%{-webkit-transform:rotate(0);-o-transform:rotate(0);transform:rotate(0)}100%{-webkit-transform:rotate(360deg);-o-transform:rotate(360deg);transform:rotate(360deg)}}.fa-rotate-90{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";-webkit-transform:rotate(90deg);-ms-transform:rotate(90deg);-o-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";-webkit-transform:rotate(180deg);-ms-transform:rotate(180deg);-o-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";-webkit-transform:rotate(270deg);-ms-transform:rotate(270deg);-o-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";-webkit-transform:scale(-1,1);-ms-transform:scale(-1,1);-o-transform:scale(-1,1);transform:scale(-1,1)}.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)";-webkit-transform:scale(1,-1);-ms-transform:scale(1,-1);-o-transform:scale(1,-1);transform:scale(1,-1)}.fa-flip-both,.fa-flip-horizontal.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)";-webkit-transform:scale(-1,-1);-ms-transform:scale(-1,-1);-o-transform:scale(-1,-1);transform:scale(-1,-1)}:root .fa-rotate-90,:root .fa-rotate-180,:root .fa-rotate-270,:root .fa-flip-horizontal,:root .fa-flip-vertical,:root 
.fa-flip-both{-webkit-filter:none;filter:none}.fa-stack{display:inline-block;height:2em;line-height:2em;position:relative;vertical-align:middle;width:2.5em}.fa-stack-1x,.fa-stack-2x{left:0;position:absolute;text-align:center;width:100%}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-500px:before{content:"\f26e"}.fa-accessible-icon:before{content:"\f368"}.fa-accusoft:before{content:"\f369"}.fa-acquisitions-incorporated:before{content:"\f6af"}.fa-ad:before{content:"\f641"}.fa-address-book:before{content:"\f2b9"}.fa-address-card:before{content:"\f2bb"}.fa-adjust:before{content:"\f042"}.fa-adn:before{content:"\f170"}.fa-adobe:before{content:"\f778"}.fa-adversal:before{content:"\f36a"}.fa-affiliatetheme:before{content:"\f36b"}.fa-air-freshener:before{content:"\f5d0"}.fa-airbnb:before{content:"\f834"}.fa-algolia:before{content:"\f36c"}.fa-align-center:before{content:"\f037"}.fa-align-justify:before{content:"\f039"}.fa-align-left:before{content:"\f036"}.fa-align-right:before{content:"\f038"}.fa-alipay:before{content:"\f642"}.fa-allergies:before{content:"\f461"}.fa-amazon:before{content:"\f270"}.fa-amazon-pay:before{content:"\f42c"}.fa-ambulance:before{content:"\f0f9"}.fa-american-sign-language-interpreting:before{content:"\f2a3"}.fa-amilia:before{content:"\f36d"}.fa-anchor:before{content:"\f13d"}.fa-android:before{content:"\f17b"}.fa-angellist:before{content:"\f209"}.fa-angle-double-down:before{content:"\f103"}.fa-angle-double-left:before{content:"\f100"}.fa-angle-double-right:before{content:"\f101"}.fa-angle-double-up:before{content:"\f102"}.fa-angle-down:before{content:"\f107"}.fa-angle-left:before{content:"\f104"}.fa-angle-right:before{content:"\f105"}.fa-angle-up:before{content:"\f106"}.fa-angry:before{content:"\f556"}.fa-angrycreative:before{content:"\f36e"}.fa-angular:before{content:"\f420"}.fa-ankh:before{content:"\f644"}.fa-app-store:before{content:"\f36f"}.fa-app-store-ios:before{content:"\f370"}.fa-apper:before{content:"\f371"}.fa-apple:before{content:"\f179"}.fa-apple-alt:before{content:"\f5d1"}.fa-apple-pay:before{content:"\f415"}.fa-archive:before{content:"\f187"}.fa-archway:before{content:"\f557"}.fa-arrow-alt-circle-down:before{content:"\f358"}.fa-arrow-alt-circle-left:before{content:"\f359"}.fa-arrow-alt-circle-right:before{content:"\f35a"}.fa-arrow-alt-circle-up:before{content:"\f35b"}.fa-arrow-circle-down:before{content:"\f0ab"}.fa-arrow-circle-left:before{content:"\f0a8"}.fa-arrow-circle-right:before{content:"\f0a9"}.fa-arrow-circle-up:before{content:"\f0aa"}.fa-arrow-down:before{content:"\f063"}.fa-arrow-left:before{content:"\f060"}.fa-arrow-right:before{content:"\f061"}.fa-arrow-up:before{content:"\f062"}.fa-arrows-alt:before{content:"\f0b2"}.fa-arrows-alt-h:before{content:"\f337"}.fa-arrows-alt-v:before{content:"\f338"}.fa-artstation:before{content:"\f77a"}.fa-assistive-listening-systems:before{content:"\f2a2"}.fa-asterisk:before{content:"\f069"}.fa-asymmetrik:before{content:"\f372"}.fa-at:before{content:"\f1fa"}.fa-atlas:before{content:"\f558"}.fa-atlassian:before{content:"\f77b"}.fa-atom:before{content:"\f5d2"}.fa-audible:before{content:"\f373"}.fa-audio-description:before{content:"\f29e"}.fa-autoprefixer:before{content:"\f41c"}.fa-avianex:before{content:"\f374"}.fa-aviato:before{content:"\f421"}.fa-award:before{content:"\f559"}.fa-aws:before{content:"\f375"}.fa-baby:before{content:"\f77c"}.fa-baby-carriage:before{content:"\f77d"}.fa-backspace:before{content:"\f55a"}.fa-backward:before{content:"\f04a"}.fa-bacon:before{content:"\f7e5"}.
fa-balance-scale:before{content:"\f24e"}.fa-balance-scale-left:before{content:"\f515"}.fa-balance-scale-right:before{content:"\f516"}.fa-ban:before{content:"\f05e"}.fa-band-aid:before{content:"\f462"}.fa-bandcamp:before{content:"\f2d5"}.fa-barcode:before{content:"\f02a"}.fa-bars:before{content:"\f0c9"}.fa-baseball-ball:before{content:"\f433"}.fa-basketball-ball:before{content:"\f434"}.fa-bath:before{content:"\f2cd"}.fa-battery-empty:before{content:"\f244"}.fa-battery-full:before{content:"\f240"}.fa-battery-half:before{content:"\f242"}.fa-battery-quarter:before{content:"\f243"}.fa-battery-three-quarters:before{content:"\f241"}.fa-battle-net:before{content:"\f835"}.fa-bed:before{content:"\f236"}.fa-beer:before{content:"\f0fc"}.fa-behance:before{content:"\f1b4"}.fa-behance-square:before{content:"\f1b5"}.fa-bell:before{content:"\f0f3"}.fa-bell-slash:before{content:"\f1f6"}.fa-bezier-curve:before{content:"\f55b"}.fa-bible:before{content:"\f647"}.fa-bicycle:before{content:"\f206"}.fa-biking:before{content:"\f84a"}.fa-bimobject:before{content:"\f378"}.fa-binoculars:before{content:"\f1e5"}.fa-biohazard:before{content:"\f780"}.fa-birthday-cake:before{content:"\f1fd"}.fa-bitbucket:before{content:"\f171"}.fa-bitcoin:before{content:"\f379"}.fa-bity:before{content:"\f37a"}.fa-black-tie:before{content:"\f27e"}.fa-blackberry:before{content:"\f37b"}.fa-blender:before{content:"\f517"}.fa-blender-phone:before{content:"\f6b6"}.fa-blind:before{content:"\f29d"}.fa-blog:before{content:"\f781"}.fa-blogger:before{content:"\f37c"}.fa-blogger-b:before{content:"\f37d"}.fa-bluetooth:before{content:"\f293"}.fa-bluetooth-b:before{content:"\f294"}.fa-bold:before{content:"\f032"}.fa-bolt:before{content:"\f0e7"}.fa-bomb:before{content:"\f1e2"}.fa-bone:before{content:"\f5d7"}.fa-bong:before{content:"\f55c"}.fa-book:before{content:"\f02d"}.fa-book-dead:before{content:"\f6b7"}.fa-book-medical:before{content:"\f7e6"}.fa-book-open:before{content:"\f518"}.fa-book-reader:before{content:"\f5da"}.fa-bookmark:before{content:"\f02e"}.fa-bootstrap:before{content:"\f836"}.fa-border-all:before{content:"\f84c"}.fa-border-none:before{content:"\f850"}.fa-border-style:before{content:"\f853"}.fa-bowling-ball:before{content:"\f436"}.fa-box:before{content:"\f466"}.fa-box-open:before{content:"\f49e"}.fa-boxes:before{content:"\f468"}.fa-braille:before{content:"\f2a1"}.fa-brain:before{content:"\f5dc"}.fa-bread-slice:before{content:"\f7ec"}.fa-briefcase:before{content:"\f0b1"}.fa-briefcase-medical:before{content:"\f469"}.fa-broadcast-tower:before{content:"\f519"}.fa-broom:before{content:"\f51a"}.fa-brush:before{content:"\f55d"}.fa-btc:before{content:"\f15a"}.fa-buffer:before{content:"\f837"}.fa-bug:before{content:"\f188"}.fa-building:before{content:"\f1ad"}.fa-bullhorn:before{content:"\f0a1"}.fa-bullseye:before{content:"\f140"}.fa-burn:before{content:"\f46a"}.fa-buromobelexperte:before{content:"\f37f"}.fa-bus:before{content:"\f207"}.fa-bus-alt:before{content:"\f55e"}.fa-business-time:before{content:"\f64a"}.fa-buysellads:before{content:"\f20d"}.fa-calculator:before{content:"\f1ec"}.fa-calendar:before{content:"\f133"}.fa-calendar-alt:before{content:"\f073"}.fa-calendar-check:before{content:"\f274"}.fa-calendar-day:before{content:"\f783"}.fa-calendar-minus:before{content:"\f272"}.fa-calendar-plus:before{content:"\f271"}.fa-calendar-times:before{content:"\f273"}.fa-calendar-week:before{content:"\f784"}.fa-camera:before{content:"\f030"}.fa-camera-retro:before{content:"\f083"}.fa-campground:before{content:"\f6bb"}.fa-canadian-maple-leaf:before{content:
"\f785"}.fa-candy-cane:before{content:"\f786"}.fa-cannabis:before{content:"\f55f"}.fa-capsules:before{content:"\f46b"}.fa-car:before{content:"\f1b9"}.fa-car-alt:before{content:"\f5de"}.fa-car-battery:before{content:"\f5df"}.fa-car-crash:before{content:"\f5e1"}.fa-car-side:before{content:"\f5e4"}.fa-caret-down:before{content:"\f0d7"}.fa-caret-left:before{content:"\f0d9"}.fa-caret-right:before{content:"\f0da"}.fa-caret-square-down:before{content:"\f150"}.fa-caret-square-left:before{content:"\f191"}.fa-caret-square-right:before{content:"\f152"}.fa-caret-square-up:before{content:"\f151"}.fa-caret-up:before{content:"\f0d8"}.fa-carrot:before{content:"\f787"}.fa-cart-arrow-down:before{content:"\f218"}.fa-cart-plus:before{content:"\f217"}.fa-cash-register:before{content:"\f788"}.fa-cat:before{content:"\f6be"}.fa-cc-amazon-pay:before{content:"\f42d"}.fa-cc-amex:before{content:"\f1f3"}.fa-cc-apple-pay:before{content:"\f416"}.fa-cc-diners-club:before{content:"\f24c"}.fa-cc-discover:before{content:"\f1f2"}.fa-cc-jcb:before{content:"\f24b"}.fa-cc-mastercard:before{content:"\f1f1"}.fa-cc-paypal:before{content:"\f1f4"}.fa-cc-stripe:before{content:"\f1f5"}.fa-cc-visa:before{content:"\f1f0"}.fa-centercode:before{content:"\f380"}.fa-centos:before{content:"\f789"}.fa-certificate:before{content:"\f0a3"}.fa-chair:before{content:"\f6c0"}.fa-chalkboard:before{content:"\f51b"}.fa-chalkboard-teacher:before{content:"\f51c"}.fa-charging-station:before{content:"\f5e7"}.fa-chart-area:before{content:"\f1fe"}.fa-chart-bar:before{content:"\f080"}.fa-chart-line:before{content:"\f201"}.fa-chart-pie:before{content:"\f200"}.fa-check:before{content:"\f00c"}.fa-check-circle:before{content:"\f058"}.fa-check-double:before{content:"\f560"}.fa-check-square:before{content:"\f14a"}.fa-cheese:before{content:"\f7ef"}.fa-chess:before{content:"\f439"}.fa-chess-bishop:before{content:"\f43a"}.fa-chess-board:before{content:"\f43c"}.fa-chess-king:before{content:"\f43f"}.fa-chess-knight:before{content:"\f441"}.fa-chess-pawn:before{content:"\f443"}.fa-chess-queen:before{content:"\f445"}.fa-chess-rook:before{content:"\f447"}.fa-chevron-circle-down:before{content:"\f13a"}.fa-chevron-circle-left:before{content:"\f137"}.fa-chevron-circle-right:before{content:"\f138"}.fa-chevron-circle-up:before{content:"\f139"}.fa-chevron-down:before{content:"\f078"}.fa-chevron-left:before{content:"\f053"}.fa-chevron-right:before{content:"\f054"}.fa-chevron-up:before{content:"\f077"}.fa-child:before{content:"\f1ae"}.fa-chrome:before{content:"\f268"}.fa-chromecast:before{content:"\f838"}.fa-church:before{content:"\f51d"}.fa-circle:before{content:"\f111"}.fa-circle-notch:before{content:"\f1ce"}.fa-city:before{content:"\f64f"}.fa-clinic-medical:before{content:"\f7f2"}.fa-clipboard:before{content:"\f328"}.fa-clipboard-check:before{content:"\f46c"}.fa-clipboard-list:before{content:"\f46d"}.fa-clock:before{content:"\f017"}.fa-clone:before{content:"\f24d"}.fa-closed-captioning:before{content:"\f20a"}.fa-cloud:before{content:"\f0c2"}.fa-cloud-download-alt:before{content:"\f381"}.fa-cloud-meatball:before{content:"\f73b"}.fa-cloud-moon:before{content:"\f6c3"}.fa-cloud-moon-rain:before{content:"\f73c"}.fa-cloud-rain:before{content:"\f73d"}.fa-cloud-showers-heavy:before{content:"\f740"}.fa-cloud-sun:before{content:"\f6c4"}.fa-cloud-sun-rain:before{content:"\f743"}.fa-cloud-upload-alt:before{content:"\f382"}.fa-cloudscale:before{content:"\f383"}.fa-cloudsmith:before{content:"\f384"}.fa-cloudversify:before{content:"\f385"}.fa-cocktail:before{content:"\f561"}.fa-code:before{cont
ent:"\f121"}.fa-code-branch:before{content:"\f126"}.fa-codepen:before{content:"\f1cb"}.fa-codiepie:before{content:"\f284"}.fa-coffee:before{content:"\f0f4"}.fa-cog:before{content:"\f013"}.fa-cogs:before{content:"\f085"}.fa-coins:before{content:"\f51e"}.fa-columns:before{content:"\f0db"}.fa-comment:before{content:"\f075"}.fa-comment-alt:before{content:"\f27a"}.fa-comment-dollar:before{content:"\f651"}.fa-comment-dots:before{content:"\f4ad"}.fa-comment-medical:before{content:"\f7f5"}.fa-comment-slash:before{content:"\f4b3"}.fa-comments:before{content:"\f086"}.fa-comments-dollar:before{content:"\f653"}.fa-compact-disc:before{content:"\f51f"}.fa-compass:before{content:"\f14e"}.fa-compress:before{content:"\f066"}.fa-compress-arrows-alt:before{content:"\f78c"}.fa-concierge-bell:before{content:"\f562"}.fa-confluence:before{content:"\f78d"}.fa-connectdevelop:before{content:"\f20e"}.fa-contao:before{content:"\f26d"}.fa-cookie:before{content:"\f563"}.fa-cookie-bite:before{content:"\f564"}.fa-copy:before{content:"\f0c5"}.fa-copyright:before{content:"\f1f9"}.fa-cotton-bureau:before{content:"\f89e"}.fa-couch:before{content:"\f4b8"}.fa-cpanel:before{content:"\f388"}.fa-creative-commons:before{content:"\f25e"}.fa-creative-commons-by:before{content:"\f4e7"}.fa-creative-commons-nc:before{content:"\f4e8"}.fa-creative-commons-nc-eu:before{content:"\f4e9"}.fa-creative-commons-nc-jp:before{content:"\f4ea"}.fa-creative-commons-nd:before{content:"\f4eb"}.fa-creative-commons-pd:before{content:"\f4ec"}.fa-creative-commons-pd-alt:before{content:"\f4ed"}.fa-creative-commons-remix:before{content:"\f4ee"}.fa-creative-commons-sa:before{content:"\f4ef"}.fa-creative-commons-sampling:before{content:"\f4f0"}.fa-creative-commons-sampling-plus:before{content:"\f4f1"}.fa-creative-commons-share:before{content:"\f4f2"}.fa-creative-commons-zero:before{content:"\f4f3"}.fa-credit-card:before{content:"\f09d"}.fa-critical-role:before{content:"\f6c9"}.fa-crop:before{content:"\f125"}.fa-crop-alt:before{content:"\f565"}.fa-cross:before{content:"\f654"}.fa-crosshairs:before{content:"\f05b"}.fa-crow:before{content:"\f520"}.fa-crown:before{content:"\f521"}.fa-crutch:before{content:"\f7f7"}.fa-css3:before{content:"\f13c"}.fa-css3-alt:before{content:"\f38b"}.fa-cube:before{content:"\f1b2"}.fa-cubes:before{content:"\f1b3"}.fa-cut:before{content:"\f0c4"}.fa-cuttlefish:before{content:"\f38c"}.fa-d-and-d:before{content:"\f38d"}.fa-d-and-d-beyond:before{content:"\f6ca"}.fa-dashcube:before{content:"\f210"}.fa-database:before{content:"\f1c0"}.fa-deaf:before{content:"\f2a4"}.fa-delicious:before{content:"\f1a5"}.fa-democrat:before{content:"\f747"}.fa-deploydog:before{content:"\f38e"}.fa-deskpro:before{content:"\f38f"}.fa-desktop:before{content:"\f108"}.fa-dev:before{content:"\f6cc"}.fa-deviantart:before{content:"\f1bd"}.fa-dharmachakra:before{content:"\f655"}.fa-dhl:before{content:"\f790"}.fa-diagnoses:before{content:"\f470"}.fa-diaspora:before{content:"\f791"}.fa-dice:before{content:"\f522"}.fa-dice-d20:before{content:"\f6cf"}.fa-dice-d6:before{content:"\f6d1"}.fa-dice-five:before{content:"\f523"}.fa-dice-four:before{content:"\f524"}.fa-dice-one:before{content:"\f525"}.fa-dice-six:before{content:"\f526"}.fa-dice-three:before{content:"\f527"}.fa-dice-two:before{content:"\f528"}.fa-digg:before{content:"\f1a6"}.fa-digital-ocean:before{content:"\f391"}.fa-digital-tachograph:before{content:"\f566"}.fa-directions:before{content:"\f5eb"}.fa-discord:before{content:"\f392"}.fa-discourse:before{content:"\f393"}.fa-divide:before{content:"\f529"}.fa-dizzy:befor
e{content:"\f567"}.fa-dna:before{content:"\f471"}.fa-dochub:before{content:"\f394"}.fa-docker:before{content:"\f395"}.fa-dog:before{content:"\f6d3"}.fa-dollar-sign:before{content:"\f155"}.fa-dolly:before{content:"\f472"}.fa-dolly-flatbed:before{content:"\f474"}.fa-donate:before{content:"\f4b9"}.fa-door-closed:before{content:"\f52a"}.fa-door-open:before{content:"\f52b"}.fa-dot-circle:before{content:"\f192"}.fa-dove:before{content:"\f4ba"}.fa-download:before{content:"\f019"}.fa-draft2digital:before{content:"\f396"}.fa-drafting-compass:before{content:"\f568"}.fa-dragon:before{content:"\f6d5"}.fa-draw-polygon:before{content:"\f5ee"}.fa-dribbble:before{content:"\f17d"}.fa-dribbble-square:before{content:"\f397"}.fa-dropbox:before{content:"\f16b"}.fa-drum:before{content:"\f569"}.fa-drum-steelpan:before{content:"\f56a"}.fa-drumstick-bite:before{content:"\f6d7"}.fa-drupal:before{content:"\f1a9"}.fa-dumbbell:before{content:"\f44b"}.fa-dumpster:before{content:"\f793"}.fa-dumpster-fire:before{content:"\f794"}.fa-dungeon:before{content:"\f6d9"}.fa-dyalog:before{content:"\f399"}.fa-earlybirds:before{content:"\f39a"}.fa-ebay:before{content:"\f4f4"}.fa-edge:before{content:"\f282"}.fa-edit:before{content:"\f044"}.fa-egg:before{content:"\f7fb"}.fa-eject:before{content:"\f052"}.fa-elementor:before{content:"\f430"}.fa-ellipsis-h:before{content:"\f141"}.fa-ellipsis-v:before{content:"\f142"}.fa-ello:before{content:"\f5f1"}.fa-ember:before{content:"\f423"}.fa-empire:before{content:"\f1d1"}.fa-envelope:before{content:"\f0e0"}.fa-envelope-open:before{content:"\f2b6"}.fa-envelope-open-text:before{content:"\f658"}.fa-envelope-square:before{content:"\f199"}.fa-envira:before{content:"\f299"}.fa-equals:before{content:"\f52c"}.fa-eraser:before{content:"\f12d"}.fa-erlang:before{content:"\f39d"}.fa-ethereum:before{content:"\f42e"}.fa-ethernet:before{content:"\f796"}.fa-etsy:before{content:"\f2d7"}.fa-euro-sign:before{content:"\f153"}.fa-evernote:before{content:"\f839"}.fa-exchange-alt:before{content:"\f362"}.fa-exclamation:before{content:"\f12a"}.fa-exclamation-circle:before{content:"\f06a"}.fa-exclamation-triangle:before{content:"\f071"}.fa-expand:before{content:"\f065"}.fa-expand-arrows-alt:before{content:"\f31e"}.fa-expeditedssl:before{content:"\f23e"}.fa-external-link-alt:before{content:"\f35d"}.fa-external-link-square-alt:before{content:"\f360"}.fa-eye:before{content:"\f06e"}.fa-eye-dropper:before{content:"\f1fb"}.fa-eye-slash:before{content:"\f070"}.fa-facebook:before{content:"\f09a"}.fa-facebook-f:before{content:"\f39e"}.fa-facebook-messenger:before{content:"\f39f"}.fa-facebook-square:before{content:"\f082"}.fa-fan:before{content:"\f863"}.fa-fantasy-flight-games:before{content:"\f6dc"}.fa-fast-backward:before{content:"\f049"}.fa-fast-forward:before{content:"\f050"}.fa-fax:before{content:"\f1ac"}.fa-feather:before{content:"\f52d"}.fa-feather-alt:before{content:"\f56b"}.fa-fedex:before{content:"\f797"}.fa-fedora:before{content:"\f798"}.fa-female:before{content:"\f182"}.fa-fighter-jet:before{content:"\f0fb"}.fa-figma:before{content:"\f799"}.fa-file:before{content:"\f15b"}.fa-file-alt:before{content:"\f15c"}.fa-file-archive:before{content:"\f1c6"}.fa-file-audio:before{content:"\f1c7"}.fa-file-code:before{content:"\f1c9"}.fa-file-contract:before{content:"\f56c"}.fa-file-csv:before{content:"\f6dd"}.fa-file-download:before{content:"\f56d"}.fa-file-excel:before{content:"\f1c3"}.fa-file-export:before{content:"\f56e"}.fa-file-image:before{content:"\f1c5"}.fa-file-import:before{content:"\f56f"}.fa-file-invoice:before{content:"
\f570"}.fa-file-invoice-dollar:before{content:"\f571"}.fa-file-medical:before{content:"\f477"}.fa-file-medical-alt:before{content:"\f478"}.fa-file-pdf:before{content:"\f1c1"}.fa-file-powerpoint:before{content:"\f1c4"}.fa-file-prescription:before{content:"\f572"}.fa-file-signature:before{content:"\f573"}.fa-file-upload:before{content:"\f574"}.fa-file-video:before{content:"\f1c8"}.fa-file-word:before{content:"\f1c2"}.fa-fill:before{content:"\f575"}.fa-fill-drip:before{content:"\f576"}.fa-film:before{content:"\f008"}.fa-filter:before{content:"\f0b0"}.fa-fingerprint:before{content:"\f577"}.fa-fire:before{content:"\f06d"}.fa-fire-alt:before{content:"\f7e4"}.fa-fire-extinguisher:before{content:"\f134"}.fa-firefox:before{content:"\f269"}.fa-first-aid:before{content:"\f479"}.fa-first-order:before{content:"\f2b0"}.fa-first-order-alt:before{content:"\f50a"}.fa-firstdraft:before{content:"\f3a1"}.fa-fish:before{content:"\f578"}.fa-fist-raised:before{content:"\f6de"}.fa-flag:before{content:"\f024"}.fa-flag-checkered:before{content:"\f11e"}.fa-flag-usa:before{content:"\f74d"}.fa-flask:before{content:"\f0c3"}.fa-flickr:before{content:"\f16e"}.fa-flipboard:before{content:"\f44d"}.fa-flushed:before{content:"\f579"}.fa-fly:before{content:"\f417"}.fa-folder:before{content:"\f07b"}.fa-folder-minus:before{content:"\f65d"}.fa-folder-open:before{content:"\f07c"}.fa-folder-plus:before{content:"\f65e"}.fa-font:before{content:"\f031"}.fa-font-awesome:before{content:"\f2b4"}.fa-font-awesome-alt:before{content:"\f35c"}.fa-font-awesome-flag:before{content:"\f425"}.fa-font-awesome-logo-full:before{content:"\f4e6"}.fa-fonticons:before{content:"\f280"}.fa-fonticons-fi:before{content:"\f3a2"}.fa-football-ball:before{content:"\f44e"}.fa-fort-awesome:before{content:"\f286"}.fa-fort-awesome-alt:before{content:"\f3a3"}.fa-forumbee:before{content:"\f211"}.fa-forward:before{content:"\f04e"}.fa-foursquare:before{content:"\f180"}.fa-free-code-camp:before{content:"\f2c5"}.fa-freebsd:before{content:"\f3a4"}.fa-frog:before{content:"\f52e"}.fa-frown:before{content:"\f119"}.fa-frown-open:before{content:"\f57a"}.fa-fulcrum:before{content:"\f50b"}.fa-funnel-dollar:before{content:"\f662"}.fa-futbol:before{content:"\f1e3"}.fa-galactic-republic:before{content:"\f50c"}.fa-galactic-senate:before{content:"\f50d"}.fa-gamepad:before{content:"\f11b"}.fa-gas-pump:before{content:"\f52f"}.fa-gavel:before{content:"\f0e3"}.fa-gem:before{content:"\f3a5"}.fa-genderless:before{content:"\f22d"}.fa-get-pocket:before{content:"\f265"}.fa-gg:before{content:"\f260"}.fa-gg-circle:before{content:"\f261"}.fa-ghost:before{content:"\f6e2"}.fa-gift:before{content:"\f06b"}.fa-gifts:before{content:"\f79c"}.fa-git:before{content:"\f1d3"}.fa-git-alt:before{content:"\f841"}.fa-git-square:before{content:"\f1d2"}.fa-github:before{content:"\f09b"}.fa-github-alt:before{content:"\f113"}.fa-github-square:before{content:"\f092"}.fa-gitkraken:before{content:"\f3a6"}.fa-gitlab:before{content:"\f296"}.fa-gitter:before{content:"\f426"}.fa-glass-cheers:before{content:"\f79f"}.fa-glass-martini:before{content:"\f000"}.fa-glass-martini-alt:before{content:"\f57b"}.fa-glass-whiskey:before{content:"\f7a0"}.fa-glasses:before{content:"\f530"}.fa-glide:before{content:"\f2a5"}.fa-glide-g:before{content:"\f2a6"}.fa-globe:before{content:"\f0ac"}.fa-globe-africa:before{content:"\f57c"}.fa-globe-americas:before{content:"\f57d"}.fa-globe-asia:before{content:"\f57e"}.fa-globe-europe:before{content:"\f7a2"}.fa-gofore:before{content:"\f3a7"}.fa-golf-ball:before{content:"\f450"}.fa-goodreads:before{co
ntent:"\f3a8"}.fa-goodreads-g:before{content:"\f3a9"}.fa-google:before{content:"\f1a0"}.fa-google-drive:before{content:"\f3aa"}.fa-google-play:before{content:"\f3ab"}.fa-google-plus:before{content:"\f2b3"}.fa-google-plus-g:before{content:"\f0d5"}.fa-google-plus-square:before{content:"\f0d4"}.fa-google-wallet:before{content:"\f1ee"}.fa-gopuram:before{content:"\f664"}.fa-graduation-cap:before{content:"\f19d"}.fa-gratipay:before{content:"\f184"}.fa-grav:before{content:"\f2d6"}.fa-greater-than:before{content:"\f531"}.fa-greater-than-equal:before{content:"\f532"}.fa-grimace:before{content:"\f57f"}.fa-grin:before{content:"\f580"}.fa-grin-alt:before{content:"\f581"}.fa-grin-beam:before{content:"\f582"}.fa-grin-beam-sweat:before{content:"\f583"}.fa-grin-hearts:before{content:"\f584"}.fa-grin-squint:before{content:"\f585"}.fa-grin-squint-tears:before{content:"\f586"}.fa-grin-stars:before{content:"\f587"}.fa-grin-tears:before{content:"\f588"}.fa-grin-tongue:before{content:"\f589"}.fa-grin-tongue-squint:before{content:"\f58a"}.fa-grin-tongue-wink:before{content:"\f58b"}.fa-grin-wink:before{content:"\f58c"}.fa-grip-horizontal:before{content:"\f58d"}.fa-grip-lines:before{content:"\f7a4"}.fa-grip-lines-vertical:before{content:"\f7a5"}.fa-grip-vertical:before{content:"\f58e"}.fa-gripfire:before{content:"\f3ac"}.fa-grunt:before{content:"\f3ad"}.fa-guitar:before{content:"\f7a6"}.fa-gulp:before{content:"\f3ae"}.fa-h-square:before{content:"\f0fd"}.fa-hacker-news:before{content:"\f1d4"}.fa-hacker-news-square:before{content:"\f3af"}.fa-hackerrank:before{content:"\f5f7"}.fa-hamburger:before{content:"\f805"}.fa-hammer:before{content:"\f6e3"}.fa-hamsa:before{content:"\f665"}.fa-hand-holding:before{content:"\f4bd"}.fa-hand-holding-heart:before{content:"\f4be"}.fa-hand-holding-usd:before{content:"\f4c0"}.fa-hand-lizard:before{content:"\f258"}.fa-hand-middle-finger:before{content:"\f806"}.fa-hand-paper:before{content:"\f256"}.fa-hand-peace:before{content:"\f25b"}.fa-hand-point-down:before{content:"\f0a7"}.fa-hand-point-left:before{content:"\f0a5"}.fa-hand-point-right:before{content:"\f0a4"}.fa-hand-point-up:before{content:"\f0a6"}.fa-hand-pointer:before{content:"\f25a"}.fa-hand-rock:before{content:"\f255"}.fa-hand-scissors:before{content:"\f257"}.fa-hand-spock:before{content:"\f259"}.fa-hands:before{content:"\f4c2"}.fa-hands-helping:before{content:"\f4c4"}.fa-handshake:before{content:"\f2b5"}.fa-hanukiah:before{content:"\f6e6"}.fa-hard-hat:before{content:"\f807"}.fa-hashtag:before{content:"\f292"}.fa-hat-wizard:before{content:"\f6e8"}.fa-haykal:before{content:"\f666"}.fa-hdd:before{content:"\f0a0"}.fa-heading:before{content:"\f1dc"}.fa-headphones:before{content:"\f025"}.fa-headphones-alt:before{content:"\f58f"}.fa-headset:before{content:"\f590"}.fa-heart:before{content:"\f004"}.fa-heart-broken:before{content:"\f7a9"}.fa-heartbeat:before{content:"\f21e"}.fa-helicopter:before{content:"\f533"}.fa-highlighter:before{content:"\f591"}.fa-hiking:before{content:"\f6ec"}.fa-hippo:before{content:"\f6ed"}.fa-hips:before{content:"\f452"}.fa-hire-a-helper:before{content:"\f3b0"}.fa-history:before{content:"\f1da"}.fa-hockey-puck:before{content:"\f453"}.fa-holly-berry:before{content:"\f7aa"}.fa-home:before{content:"\f015"}.fa-hooli:before{content:"\f427"}.fa-hornbill:before{content:"\f592"}.fa-horse:before{content:"\f6f0"}.fa-horse-head:before{content:"\f7ab"}.fa-hospital:before{content:"\f0f8"}.fa-hospital-alt:before{content:"\f47d"}.fa-hospital-symbol:before{content:"\f47e"}.fa-hot-tub:before{content:"\f593"}.fa-hotdog:before{con
tent:"\f80f"}.fa-hotel:before{content:"\f594"}.fa-hotjar:before{content:"\f3b1"}.fa-hourglass:before{content:"\f254"}.fa-hourglass-end:before{content:"\f253"}.fa-hourglass-half:before{content:"\f252"}.fa-hourglass-start:before{content:"\f251"}.fa-house-damage:before{content:"\f6f1"}.fa-houzz:before{content:"\f27c"}.fa-hryvnia:before{content:"\f6f2"}.fa-html5:before{content:"\f13b"}.fa-hubspot:before{content:"\f3b2"}.fa-i-cursor:before{content:"\f246"}.fa-ice-cream:before{content:"\f810"}.fa-icicles:before{content:"\f7ad"}.fa-icons:before{content:"\f86d"}.fa-id-badge:before{content:"\f2c1"}.fa-id-card:before{content:"\f2c2"}.fa-id-card-alt:before{content:"\f47f"}.fa-igloo:before{content:"\f7ae"}.fa-image:before{content:"\f03e"}.fa-images:before{content:"\f302"}.fa-imdb:before{content:"\f2d8"}.fa-inbox:before{content:"\f01c"}.fa-indent:before{content:"\f03c"}.fa-industry:before{content:"\f275"}.fa-infinity:before{content:"\f534"}.fa-info:before{content:"\f129"}.fa-info-circle:before{content:"\f05a"}.fa-instagram:before{content:"\f16d"}.fa-intercom:before{content:"\f7af"}.fa-internet-explorer:before{content:"\f26b"}.fa-invision:before{content:"\f7b0"}.fa-ioxhost:before{content:"\f208"}.fa-italic:before{content:"\f033"}.fa-itch-io:before{content:"\f83a"}.fa-itunes:before{content:"\f3b4"}.fa-itunes-note:before{content:"\f3b5"}.fa-java:before{content:"\f4e4"}.fa-jedi:before{content:"\f669"}.fa-jedi-order:before{content:"\f50e"}.fa-jenkins:before{content:"\f3b6"}.fa-jira:before{content:"\f7b1"}.fa-joget:before{content:"\f3b7"}.fa-joint:before{content:"\f595"}.fa-joomla:before{content:"\f1aa"}.fa-journal-whills:before{content:"\f66a"}.fa-js:before{content:"\f3b8"}.fa-js-square:before{content:"\f3b9"}.fa-jsfiddle:before{content:"\f1cc"}.fa-kaaba:before{content:"\f66b"}.fa-kaggle:before{content:"\f5fa"}.fa-key:before{content:"\f084"}.fa-keybase:before{content:"\f4f5"}.fa-keyboard:before{content:"\f11c"}.fa-keycdn:before{content:"\f3ba"}.fa-khanda:before{content:"\f66d"}.fa-kickstarter:before{content:"\f3bb"}.fa-kickstarter-k:before{content:"\f3bc"}.fa-kiss:before{content:"\f596"}.fa-kiss-beam:before{content:"\f597"}.fa-kiss-wink-heart:before{content:"\f598"}.fa-kiwi-bird:before{content:"\f535"}.fa-korvue:before{content:"\f42f"}.fa-landmark:before{content:"\f66f"}.fa-language:before{content:"\f1ab"}.fa-laptop:before{content:"\f109"}.fa-laptop-code:before{content:"\f5fc"}.fa-laptop-medical:before{content:"\f812"}.fa-laravel:before{content:"\f3bd"}.fa-lastfm:before{content:"\f202"}.fa-lastfm-square:before{content:"\f203"}.fa-laugh:before{content:"\f599"}.fa-laugh-beam:before{content:"\f59a"}.fa-laugh-squint:before{content:"\f59b"}.fa-laugh-wink:before{content:"\f59c"}.fa-layer-group:before{content:"\f5fd"}.fa-leaf:before{content:"\f06c"}.fa-leanpub:before{content:"\f212"}.fa-lemon:before{content:"\f094"}.fa-less:before{content:"\f41d"}.fa-less-than:before{content:"\f536"}.fa-less-than-equal:before{content:"\f537"}.fa-level-down-alt:before{content:"\f3be"}.fa-level-up-alt:before{content:"\f3bf"}.fa-life-ring:before{content:"\f1cd"}.fa-lightbulb:before{content:"\f0eb"}.fa-line:before{content:"\f3c0"}.fa-link:before{content:"\f0c1"}.fa-linkedin:before{content:"\f08c"}.fa-linkedin-in:before{content:"\f0e1"}.fa-linode:before{content:"\f2b8"}.fa-linux:before{content:"\f17c"}.fa-lira-sign:before{content:"\f195"}.fa-list:before{content:"\f03a"}.fa-list-alt:before{content:"\f022"}.fa-list-ol:before{content:"\f0cb"}.fa-list-ul:before{content:"\f0ca"}.fa-location-arrow:before{content:"\f124"}.fa-lock:before{conten
t:"\f023"}.fa-lock-open:before{content:"\f3c1"}.fa-long-arrow-alt-down:before{content:"\f309"}.fa-long-arrow-alt-left:before{content:"\f30a"}.fa-long-arrow-alt-right:before{content:"\f30b"}.fa-long-arrow-alt-up:before{content:"\f30c"}.fa-low-vision:before{content:"\f2a8"}.fa-luggage-cart:before{content:"\f59d"}.fa-lyft:before{content:"\f3c3"}.fa-magento:before{content:"\f3c4"}.fa-magic:before{content:"\f0d0"}.fa-magnet:before{content:"\f076"}.fa-mail-bulk:before{content:"\f674"}.fa-mailchimp:before{content:"\f59e"}.fa-male:before{content:"\f183"}.fa-mandalorian:before{content:"\f50f"}.fa-map:before{content:"\f279"}.fa-map-marked:before{content:"\f59f"}.fa-map-marked-alt:before{content:"\f5a0"}.fa-map-marker:before{content:"\f041"}.fa-map-marker-alt:before{content:"\f3c5"}.fa-map-pin:before{content:"\f276"}.fa-map-signs:before{content:"\f277"}.fa-markdown:before{content:"\f60f"}.fa-marker:before{content:"\f5a1"}.fa-mars:before{content:"\f222"}.fa-mars-double:before{content:"\f227"}.fa-mars-stroke:before{content:"\f229"}.fa-mars-stroke-h:before{content:"\f22b"}.fa-mars-stroke-v:before{content:"\f22a"}.fa-mask:before{content:"\f6fa"}.fa-mastodon:before{content:"\f4f6"}.fa-maxcdn:before{content:"\f136"}.fa-medal:before{content:"\f5a2"}.fa-medapps:before{content:"\f3c6"}.fa-medium:before{content:"\f23a"}.fa-medium-m:before{content:"\f3c7"}.fa-medkit:before{content:"\f0fa"}.fa-medrt:before{content:"\f3c8"}.fa-meetup:before{content:"\f2e0"}.fa-megaport:before{content:"\f5a3"}.fa-meh:before{content:"\f11a"}.fa-meh-blank:before{content:"\f5a4"}.fa-meh-rolling-eyes:before{content:"\f5a5"}.fa-memory:before{content:"\f538"}.fa-mendeley:before{content:"\f7b3"}.fa-menorah:before{content:"\f676"}.fa-mercury:before{content:"\f223"}.fa-meteor:before{content:"\f753"}.fa-microchip:before{content:"\f2db"}.fa-microphone:before{content:"\f130"}.fa-microphone-alt:before{content:"\f3c9"}.fa-microphone-alt-slash:before{content:"\f539"}.fa-microphone-slash:before{content:"\f131"}.fa-microscope:before{content:"\f610"}.fa-microsoft:before{content:"\f3ca"}.fa-minus:before{content:"\f068"}.fa-minus-circle:before{content:"\f056"}.fa-minus-square:before{content:"\f146"}.fa-mitten:before{content:"\f7b5"}.fa-mix:before{content:"\f3cb"}.fa-mixcloud:before{content:"\f289"}.fa-mizuni:before{content:"\f3cc"}.fa-mobile:before{content:"\f10b"}.fa-mobile-alt:before{content:"\f3cd"}.fa-modx:before{content:"\f285"}.fa-monero:before{content:"\f3d0"}.fa-money-bill:before{content:"\f0d6"}.fa-money-bill-alt:before{content:"\f3d1"}.fa-money-bill-wave:before{content:"\f53a"}.fa-money-bill-wave-alt:before{content:"\f53b"}.fa-money-check:before{content:"\f53c"}.fa-money-check-alt:before{content:"\f53d"}.fa-monument:before{content:"\f5a6"}.fa-moon:before{content:"\f186"}.fa-mortar-pestle:before{content:"\f5a7"}.fa-mosque:before{content:"\f678"}.fa-motorcycle:before{content:"\f21c"}.fa-mountain:before{content:"\f6fc"}.fa-mouse-pointer:before{content:"\f245"}.fa-mug-hot:before{content:"\f7b6"}.fa-music:before{content:"\f001"}.fa-napster:before{content:"\f3d2"}.fa-neos:before{content:"\f612"}.fa-network-wired:before{content:"\f6ff"}.fa-neuter:before{content:"\f22c"}.fa-newspaper:before{content:"\f1ea"}.fa-nimblr:before{content:"\f5a8"}.fa-node:before{content:"\f419"}.fa-node-js:before{content:"\f3d3"}.fa-not-equal:before{content:"\f53e"}.fa-notes-medical:before{content:"\f481"}.fa-npm:before{content:"\f3d4"}.fa-ns8:before{content:"\f3d5"}.fa-nutritionix:before{content:"\f3d6"}.fa-object-group:before{content:"\f247"}.fa-object-ungroup:before{con
tent:"\f248"}.fa-odnoklassniki:before{content:"\f263"}.fa-odnoklassniki-square:before{content:"\f264"}.fa-oil-can:before{content:"\f613"}.fa-old-republic:before{content:"\f510"}.fa-om:before{content:"\f679"}.fa-opencart:before{content:"\f23d"}.fa-openid:before{content:"\f19b"}.fa-opera:before{content:"\f26a"}.fa-optin-monster:before{content:"\f23c"}.fa-osi:before{content:"\f41a"}.fa-otter:before{content:"\f700"}.fa-outdent:before{content:"\f03b"}.fa-page4:before{content:"\f3d7"}.fa-pagelines:before{content:"\f18c"}.fa-pager:before{content:"\f815"}.fa-paint-brush:before{content:"\f1fc"}.fa-paint-roller:before{content:"\f5aa"}.fa-palette:before{content:"\f53f"}.fa-palfed:before{content:"\f3d8"}.fa-pallet:before{content:"\f482"}.fa-paper-plane:before{content:"\f1d8"}.fa-paperclip:before{content:"\f0c6"}.fa-parachute-box:before{content:"\f4cd"}.fa-paragraph:before{content:"\f1dd"}.fa-parking:before{content:"\f540"}.fa-passport:before{content:"\f5ab"}.fa-pastafarianism:before{content:"\f67b"}.fa-paste:before{content:"\f0ea"}.fa-patreon:before{content:"\f3d9"}.fa-pause:before{content:"\f04c"}.fa-pause-circle:before{content:"\f28b"}.fa-paw:before{content:"\f1b0"}.fa-paypal:before{content:"\f1ed"}.fa-peace:before{content:"\f67c"}.fa-pen:before{content:"\f304"}.fa-pen-alt:before{content:"\f305"}.fa-pen-fancy:before{content:"\f5ac"}.fa-pen-nib:before{content:"\f5ad"}.fa-pen-square:before{content:"\f14b"}.fa-pencil-alt:before{content:"\f303"}.fa-pencil-ruler:before{content:"\f5ae"}.fa-penny-arcade:before{content:"\f704"}.fa-people-carry:before{content:"\f4ce"}.fa-pepper-hot:before{content:"\f816"}.fa-percent:before{content:"\f295"}.fa-percentage:before{content:"\f541"}.fa-periscope:before{content:"\f3da"}.fa-person-booth:before{content:"\f756"}.fa-phabricator:before{content:"\f3db"}.fa-phoenix-framework:before{content:"\f3dc"}.fa-phoenix-squadron:before{content:"\f511"}.fa-phone:before{content:"\f095"}.fa-phone-alt:before{content:"\f879"}.fa-phone-slash:before{content:"\f3dd"}.fa-phone-square:before{content:"\f098"}.fa-phone-square-alt:before{content:"\f87b"}.fa-phone-volume:before{content:"\f2a0"}.fa-photo-video:before{content:"\f87c"}.fa-php:before{content:"\f457"}.fa-pied-piper:before{content:"\f2ae"}.fa-pied-piper-alt:before{content:"\f1a8"}.fa-pied-piper-hat:before{content:"\f4e5"}.fa-pied-piper-pp:before{content:"\f1a7"}.fa-piggy-bank:before{content:"\f4d3"}.fa-pills:before{content:"\f484"}.fa-pinterest:before{content:"\f0d2"}.fa-pinterest-p:before{content:"\f231"}.fa-pinterest-square:before{content:"\f0d3"}.fa-pizza-slice:before{content:"\f818"}.fa-place-of-worship:before{content:"\f67f"}.fa-plane:before{content:"\f072"}.fa-plane-arrival:before{content:"\f5af"}.fa-plane-departure:before{content:"\f5b0"}.fa-play:before{content:"\f04b"}.fa-play-circle:before{content:"\f144"}.fa-playstation:before{content:"\f3df"}.fa-plug:before{content:"\f1e6"}.fa-plus:before{content:"\f067"}.fa-plus-circle:before{content:"\f055"}.fa-plus-square:before{content:"\f0fe"}.fa-podcast:before{content:"\f2ce"}.fa-poll:before{content:"\f681"}.fa-poll-h:before{content:"\f682"}.fa-poo:before{content:"\f2fe"}.fa-poo-storm:before{content:"\f75a"}.fa-poop:before{content:"\f619"}.fa-portrait:before{content:"\f3e0"}.fa-pound-sign:before{content:"\f154"}.fa-power-off:before{content:"\f011"}.fa-pray:before{content:"\f683"}.fa-praying-hands:before{content:"\f684"}.fa-prescription:before{content:"\f5b1"}.fa-prescription-bottle:before{content:"\f485"}.fa-prescription-bottle-alt:before{content:"\f486"}.fa-print:before{content:"\f02f"
}.fa-procedures:before{content:"\f487"}.fa-product-hunt:before{content:"\f288"}.fa-project-diagram:before{content:"\f542"}.fa-pushed:before{content:"\f3e1"}.fa-puzzle-piece:before{content:"\f12e"}.fa-python:before{content:"\f3e2"}.fa-qq:before{content:"\f1d6"}.fa-qrcode:before{content:"\f029"}.fa-question:before{content:"\f128"}.fa-question-circle:before{content:"\f059"}.fa-quidditch:before{content:"\f458"}.fa-quinscape:before{content:"\f459"}.fa-quora:before{content:"\f2c4"}.fa-quote-left:before{content:"\f10d"}.fa-quote-right:before{content:"\f10e"}.fa-quran:before{content:"\f687"}.fa-r-project:before{content:"\f4f7"}.fa-radiation:before{content:"\f7b9"}.fa-radiation-alt:before{content:"\f7ba"}.fa-rainbow:before{content:"\f75b"}.fa-random:before{content:"\f074"}.fa-raspberry-pi:before{content:"\f7bb"}.fa-ravelry:before{content:"\f2d9"}.fa-react:before{content:"\f41b"}.fa-reacteurope:before{content:"\f75d"}.fa-readme:before{content:"\f4d5"}.fa-rebel:before{content:"\f1d0"}.fa-receipt:before{content:"\f543"}.fa-recycle:before{content:"\f1b8"}.fa-red-river:before{content:"\f3e3"}.fa-reddit:before{content:"\f1a1"}.fa-reddit-alien:before{content:"\f281"}.fa-reddit-square:before{content:"\f1a2"}.fa-redhat:before{content:"\f7bc"}.fa-redo:before{content:"\f01e"}.fa-redo-alt:before{content:"\f2f9"}.fa-registered:before{content:"\f25d"}.fa-remove-format:before{content:"\f87d"}.fa-renren:before{content:"\f18b"}.fa-reply:before{content:"\f3e5"}.fa-reply-all:before{content:"\f122"}.fa-replyd:before{content:"\f3e6"}.fa-republican:before{content:"\f75e"}.fa-researchgate:before{content:"\f4f8"}.fa-resolving:before{content:"\f3e7"}.fa-restroom:before{content:"\f7bd"}.fa-retweet:before{content:"\f079"}.fa-rev:before{content:"\f5b2"}.fa-ribbon:before{content:"\f4d6"}.fa-ring:before{content:"\f70b"}.fa-road:before{content:"\f018"}.fa-robot:before{content:"\f544"}.fa-rocket:before{content:"\f135"}.fa-rocketchat:before{content:"\f3e8"}.fa-rockrms:before{content:"\f3e9"}.fa-route:before{content:"\f4d7"}.fa-rss:before{content:"\f09e"}.fa-rss-square:before{content:"\f143"}.fa-ruble-sign:before{content:"\f158"}.fa-ruler:before{content:"\f545"}.fa-ruler-combined:before{content:"\f546"}.fa-ruler-horizontal:before{content:"\f547"}.fa-ruler-vertical:before{content:"\f548"}.fa-running:before{content:"\f70c"}.fa-rupee-sign:before{content:"\f156"}.fa-sad-cry:before{content:"\f5b3"}.fa-sad-tear:before{content:"\f5b4"}.fa-safari:before{content:"\f267"}.fa-salesforce:before{content:"\f83b"}.fa-sass:before{content:"\f41e"}.fa-satellite:before{content:"\f7bf"}.fa-satellite-dish:before{content:"\f7c0"}.fa-save:before{content:"\f0c7"}.fa-schlix:before{content:"\f3ea"}.fa-school:before{content:"\f549"}.fa-screwdriver:before{content:"\f54a"}.fa-scribd:before{content:"\f28a"}.fa-scroll:before{content:"\f70e"}.fa-sd-card:before{content:"\f7c2"}.fa-search:before{content:"\f002"}.fa-search-dollar:before{content:"\f688"}.fa-search-location:before{content:"\f689"}.fa-search-minus:before{content:"\f010"}.fa-search-plus:before{content:"\f00e"}.fa-searchengin:before{content:"\f3eb"}.fa-seedling:before{content:"\f4d8"}.fa-sellcast:before{content:"\f2da"}.fa-sellsy:before{content:"\f213"}.fa-server:before{content:"\f233"}.fa-servicestack:before{content:"\f3ec"}.fa-shapes:before{content:"\f61f"}.fa-share:before{content:"\f064"}.fa-share-alt:before{content:"\f1e0"}.fa-share-alt-square:before{content:"\f1e1"}.fa-share-square:before{content:"\f14d"}.fa-shekel-sign:before{content:"\f20b"}.fa-shield-alt:before{content:"\f3ed"}.fa-ship:before{cont
ent:"\f21a"}.fa-shipping-fast:before{content:"\f48b"}.fa-shirtsinbulk:before{content:"\f214"}.fa-shoe-prints:before{content:"\f54b"}.fa-shopping-bag:before{content:"\f290"}.fa-shopping-basket:before{content:"\f291"}.fa-shopping-cart:before{content:"\f07a"}.fa-shopware:before{content:"\f5b5"}.fa-shower:before{content:"\f2cc"}.fa-shuttle-van:before{content:"\f5b6"}.fa-sign:before{content:"\f4d9"}.fa-sign-in-alt:before{content:"\f2f6"}.fa-sign-language:before{content:"\f2a7"}.fa-sign-out-alt:before{content:"\f2f5"}.fa-signal:before{content:"\f012"}.fa-signature:before{content:"\f5b7"}.fa-sim-card:before{content:"\f7c4"}.fa-simplybuilt:before{content:"\f215"}.fa-sistrix:before{content:"\f3ee"}.fa-sitemap:before{content:"\f0e8"}.fa-sith:before{content:"\f512"}.fa-skating:before{content:"\f7c5"}.fa-sketch:before{content:"\f7c6"}.fa-skiing:before{content:"\f7c9"}.fa-skiing-nordic:before{content:"\f7ca"}.fa-skull:before{content:"\f54c"}.fa-skull-crossbones:before{content:"\f714"}.fa-skyatlas:before{content:"\f216"}.fa-skype:before{content:"\f17e"}.fa-slack:before{content:"\f198"}.fa-slack-hash:before{content:"\f3ef"}.fa-slash:before{content:"\f715"}.fa-sleigh:before{content:"\f7cc"}.fa-sliders-h:before{content:"\f1de"}.fa-slideshare:before{content:"\f1e7"}.fa-smile:before{content:"\f118"}.fa-smile-beam:before{content:"\f5b8"}.fa-smile-wink:before{content:"\f4da"}.fa-smog:before{content:"\f75f"}.fa-smoking:before{content:"\f48d"}.fa-smoking-ban:before{content:"\f54d"}.fa-sms:before{content:"\f7cd"}.fa-snapchat:before{content:"\f2ab"}.fa-snapchat-ghost:before{content:"\f2ac"}.fa-snapchat-square:before{content:"\f2ad"}.fa-snowboarding:before{content:"\f7ce"}.fa-snowflake:before{content:"\f2dc"}.fa-snowman:before{content:"\f7d0"}.fa-snowplow:before{content:"\f7d2"}.fa-socks:before{content:"\f696"}.fa-solar-panel:before{content:"\f5ba"}.fa-sort:before{content:"\f0dc"}.fa-sort-alpha-down:before{content:"\f15d"}.fa-sort-alpha-down-alt:before{content:"\f881"}.fa-sort-alpha-up:before{content:"\f15e"}.fa-sort-alpha-up-alt:before{content:"\f882"}.fa-sort-amount-down:before{content:"\f160"}.fa-sort-amount-down-alt:before{content:"\f884"}.fa-sort-amount-up:before{content:"\f161"}.fa-sort-amount-up-alt:before{content:"\f885"}.fa-sort-down:before{content:"\f0dd"}.fa-sort-numeric-down:before{content:"\f162"}.fa-sort-numeric-down-alt:before{content:"\f886"}.fa-sort-numeric-up:before{content:"\f163"}.fa-sort-numeric-up-alt:before{content:"\f887"}.fa-sort-up:before{content:"\f0de"}.fa-soundcloud:before{content:"\f1be"}.fa-sourcetree:before{content:"\f7d3"}.fa-spa:before{content:"\f5bb"}.fa-space-shuttle:before{content:"\f197"}.fa-speakap:before{content:"\f3f3"}.fa-speaker-deck:before{content:"\f83c"}.fa-spell-check:before{content:"\f891"}.fa-spider:before{content:"\f717"}.fa-spinner:before{content:"\f110"}.fa-splotch:before{content:"\f5bc"}.fa-spotify:before{content:"\f1bc"}.fa-spray-can:before{content:"\f5bd"}.fa-square:before{content:"\f0c8"}.fa-square-full:before{content:"\f45c"}.fa-square-root-alt:before{content:"\f698"}.fa-squarespace:before{content:"\f5be"}.fa-stack-exchange:before{content:"\f18d"}.fa-stack-overflow:before{content:"\f16c"}.fa-stackpath:before{content:"\f842"}.fa-stamp:before{content:"\f5bf"}.fa-star:before{content:"\f005"}.fa-star-and-crescent:before{content:"\f699"}.fa-star-half:before{content:"\f089"}.fa-star-half-alt:before{content:"\f5c0"}.fa-star-of-david:before{content:"\f69a"}.fa-star-of-life:before{content:"\f621"}.fa-staylinked:before{content:"\f3f5"}.fa-steam:before{content:"\f1b6"}.f
a-steam-square:before{content:"\f1b7"}.fa-steam-symbol:before{content:"\f3f6"}.fa-step-backward:before{content:"\f048"}.fa-step-forward:before{content:"\f051"}.fa-stethoscope:before{content:"\f0f1"}.fa-sticker-mule:before{content:"\f3f7"}.fa-sticky-note:before{content:"\f249"}.fa-stop:before{content:"\f04d"}.fa-stop-circle:before{content:"\f28d"}.fa-stopwatch:before{content:"\f2f2"}.fa-store:before{content:"\f54e"}.fa-store-alt:before{content:"\f54f"}.fa-strava:before{content:"\f428"}.fa-stream:before{content:"\f550"}.fa-street-view:before{content:"\f21d"}.fa-strikethrough:before{content:"\f0cc"}.fa-stripe:before{content:"\f429"}.fa-stripe-s:before{content:"\f42a"}.fa-stroopwafel:before{content:"\f551"}.fa-studiovinari:before{content:"\f3f8"}.fa-stumbleupon:before{content:"\f1a4"}.fa-stumbleupon-circle:before{content:"\f1a3"}.fa-subscript:before{content:"\f12c"}.fa-subway:before{content:"\f239"}.fa-suitcase:before{content:"\f0f2"}.fa-suitcase-rolling:before{content:"\f5c1"}.fa-sun:before{content:"\f185"}.fa-superpowers:before{content:"\f2dd"}.fa-superscript:before{content:"\f12b"}.fa-supple:before{content:"\f3f9"}.fa-surprise:before{content:"\f5c2"}.fa-suse:before{content:"\f7d6"}.fa-swatchbook:before{content:"\f5c3"}.fa-swimmer:before{content:"\f5c4"}.fa-swimming-pool:before{content:"\f5c5"}.fa-symfony:before{content:"\f83d"}.fa-synagogue:before{content:"\f69b"}.fa-sync:before{content:"\f021"}.fa-sync-alt:before{content:"\f2f1"}.fa-syringe:before{content:"\f48e"}.fa-table:before{content:"\f0ce"}.fa-table-tennis:before{content:"\f45d"}.fa-tablet:before{content:"\f10a"}.fa-tablet-alt:before{content:"\f3fa"}.fa-tablets:before{content:"\f490"}.fa-tachometer-alt:before{content:"\f3fd"}.fa-tag:before{content:"\f02b"}.fa-tags:before{content:"\f02c"}.fa-tape:before{content:"\f4db"}.fa-tasks:before{content:"\f0ae"}.fa-taxi:before{content:"\f1ba"}.fa-teamspeak:before{content:"\f4f9"}.fa-teeth:before{content:"\f62e"}.fa-teeth-open:before{content:"\f62f"}.fa-telegram:before{content:"\f2c6"}.fa-telegram-plane:before{content:"\f3fe"}.fa-temperature-high:before{content:"\f769"}.fa-temperature-low:before{content:"\f76b"}.fa-tencent-weibo:before{content:"\f1d5"}.fa-tenge:before{content:"\f7d7"}.fa-terminal:before{content:"\f120"}.fa-text-height:before{content:"\f034"}.fa-text-width:before{content:"\f035"}.fa-th:before{content:"\f00a"}.fa-th-large:before{content:"\f009"}.fa-th-list:before{content:"\f00b"}.fa-the-red-yeti:before{content:"\f69d"}.fa-theater-masks:before{content:"\f630"}.fa-themeco:before{content:"\f5c6"}.fa-themeisle:before{content:"\f2b2"}.fa-thermometer:before{content:"\f491"}.fa-thermometer-empty:before{content:"\f2cb"}.fa-thermometer-full:before{content:"\f2c7"}.fa-thermometer-half:before{content:"\f2c9"}.fa-thermometer-quarter:before{content:"\f2ca"}.fa-thermometer-three-quarters:before{content:"\f2c8"}.fa-think-peaks:before{content:"\f731"}.fa-thumbs-down:before{content:"\f165"}.fa-thumbs-up:before{content:"\f164"}.fa-thumbtack:before{content:"\f08d"}.fa-ticket-alt:before{content:"\f3ff"}.fa-times:before{content:"\f00d"}.fa-times-circle:before{content:"\f057"}.fa-tint:before{content:"\f043"}.fa-tint-slash:before{content:"\f5c7"}.fa-tired:before{content:"\f5c8"}.fa-toggle-off:before{content:"\f204"}.fa-toggle-on:before{content:"\f205"}.fa-toilet:before{content:"\f7d8"}.fa-toilet-paper:before{content:"\f71e"}.fa-toolbox:before{content:"\f552"}.fa-tools:before{content:"\f7d9"}.fa-tooth:before{content:"\f5c9"}.fa-torah:before{content:"\f6a0"}.fa-torii-gate:before{content:"\f6a1"}.fa-tractor
:before{content:"\f722"}.fa-trade-federation:before{content:"\f513"}.fa-trademark:before{content:"\f25c"}.fa-traffic-light:before{content:"\f637"}.fa-train:before{content:"\f238"}.fa-tram:before{content:"\f7da"}.fa-transgender:before{content:"\f224"}.fa-transgender-alt:before{content:"\f225"}.fa-trash:before{content:"\f1f8"}.fa-trash-alt:before{content:"\f2ed"}.fa-trash-restore:before{content:"\f829"}.fa-trash-restore-alt:before{content:"\f82a"}.fa-tree:before{content:"\f1bb"}.fa-trello:before{content:"\f181"}.fa-tripadvisor:before{content:"\f262"}.fa-trophy:before{content:"\f091"}.fa-truck:before{content:"\f0d1"}.fa-truck-loading:before{content:"\f4de"}.fa-truck-monster:before{content:"\f63b"}.fa-truck-moving:before{content:"\f4df"}.fa-truck-pickup:before{content:"\f63c"}.fa-tshirt:before{content:"\f553"}.fa-tty:before{content:"\f1e4"}.fa-tumblr:before{content:"\f173"}.fa-tumblr-square:before{content:"\f174"}.fa-tv:before{content:"\f26c"}.fa-twitch:before{content:"\f1e8"}.fa-twitter:before{content:"\f099"}.fa-twitter-square:before{content:"\f081"}.fa-typo3:before{content:"\f42b"}.fa-uber:before{content:"\f402"}.fa-ubuntu:before{content:"\f7df"}.fa-uikit:before{content:"\f403"}.fa-umbrella:before{content:"\f0e9"}.fa-umbrella-beach:before{content:"\f5ca"}.fa-underline:before{content:"\f0cd"}.fa-undo:before{content:"\f0e2"}.fa-undo-alt:before{content:"\f2ea"}.fa-uniregistry:before{content:"\f404"}.fa-universal-access:before{content:"\f29a"}.fa-university:before{content:"\f19c"}.fa-unlink:before{content:"\f127"}.fa-unlock:before{content:"\f09c"}.fa-unlock-alt:before{content:"\f13e"}.fa-untappd:before{content:"\f405"}.fa-upload:before{content:"\f093"}.fa-ups:before{content:"\f7e0"}.fa-usb:before{content:"\f287"}.fa-user:before{content:"\f007"}.fa-user-alt:before{content:"\f406"}.fa-user-alt-slash:before{content:"\f4fa"}.fa-user-astronaut:before{content:"\f4fb"}.fa-user-check:before{content:"\f4fc"}.fa-user-circle:before{content:"\f2bd"}.fa-user-clock:before{content:"\f4fd"}.fa-user-cog:before{content:"\f4fe"}.fa-user-edit:before{content:"\f4ff"}.fa-user-friends:before{content:"\f500"}.fa-user-graduate:before{content:"\f501"}.fa-user-injured:before{content:"\f728"}.fa-user-lock:before{content:"\f502"}.fa-user-md:before{content:"\f0f0"}.fa-user-minus:before{content:"\f503"}.fa-user-ninja:before{content:"\f504"}.fa-user-nurse:before{content:"\f82f"}.fa-user-plus:before{content:"\f234"}.fa-user-secret:before{content:"\f21b"}.fa-user-shield:before{content:"\f505"}.fa-user-slash:before{content:"\f506"}.fa-user-tag:before{content:"\f507"}.fa-user-tie:before{content:"\f508"}.fa-user-times:before{content:"\f235"}.fa-users:before{content:"\f0c0"}.fa-users-cog:before{content:"\f509"}.fa-usps:before{content:"\f7e1"}.fa-ussunnah:before{content:"\f407"}.fa-utensil-spoon:before{content:"\f2e5"}.fa-utensils:before{content:"\f2e7"}.fa-vaadin:before{content:"\f408"}.fa-vector-square:before{content:"\f5cb"}.fa-venus:before{content:"\f221"}.fa-venus-double:before{content:"\f226"}.fa-venus-mars:before{content:"\f228"}.fa-viacoin:before{content:"\f237"}.fa-viadeo:before{content:"\f2a9"}.fa-viadeo-square:before{content:"\f2aa"}.fa-vial:before{content:"\f492"}.fa-vials:before{content:"\f493"}.fa-viber:before{content:"\f409"}.fa-video:before{content:"\f03d"}.fa-video-slash:before{content:"\f4e2"}.fa-vihara:before{content:"\f6a7"}.fa-vimeo:before{content:"\f40a"}.fa-vimeo-square:before{content:"\f194"}.fa-vimeo-v:before{content:"\f27d"}.fa-vine:before{content:"\f1ca"}.fa-vk:before{content:"\f189"}.fa-vnv:before{content:
"\f40b"}.fa-voicemail:before{content:"\f897"}.fa-volleyball-ball:before{content:"\f45f"}.fa-volume-down:before{content:"\f027"}.fa-volume-mute:before{content:"\f6a9"}.fa-volume-off:before{content:"\f026"}.fa-volume-up:before{content:"\f028"}.fa-vote-yea:before{content:"\f772"}.fa-vr-cardboard:before{content:"\f729"}.fa-vuejs:before{content:"\f41f"}.fa-walking:before{content:"\f554"}.fa-wallet:before{content:"\f555"}.fa-warehouse:before{content:"\f494"}.fa-water:before{content:"\f773"}.fa-wave-square:before{content:"\f83e"}.fa-waze:before{content:"\f83f"}.fa-weebly:before{content:"\f5cc"}.fa-weibo:before{content:"\f18a"}.fa-weight:before{content:"\f496"}.fa-weight-hanging:before{content:"\f5cd"}.fa-weixin:before{content:"\f1d7"}.fa-whatsapp:before{content:"\f232"}.fa-whatsapp-square:before{content:"\f40c"}.fa-wheelchair:before{content:"\f193"}.fa-whmcs:before{content:"\f40d"}.fa-wifi:before{content:"\f1eb"}.fa-wikipedia-w:before{content:"\f266"}.fa-wind:before{content:"\f72e"}.fa-window-close:before{content:"\f410"}.fa-window-maximize:before{content:"\f2d0"}.fa-window-minimize:before{content:"\f2d1"}.fa-window-restore:before{content:"\f2d2"}.fa-windows:before{content:"\f17a"}.fa-wine-bottle:before{content:"\f72f"}.fa-wine-glass:before{content:"\f4e3"}.fa-wine-glass-alt:before{content:"\f5ce"}.fa-wix:before{content:"\f5cf"}.fa-wizards-of-the-coast:before{content:"\f730"}.fa-wolf-pack-battalion:before{content:"\f514"}.fa-won-sign:before{content:"\f159"}.fa-wordpress:before{content:"\f19a"}.fa-wordpress-simple:before{content:"\f411"}.fa-wpbeginner:before{content:"\f297"}.fa-wpexplorer:before{content:"\f2de"}.fa-wpforms:before{content:"\f298"}.fa-wpressr:before{content:"\f3e4"}.fa-wrench:before{content:"\f0ad"}.fa-x-ray:before{content:"\f497"}.fa-xbox:before{content:"\f412"}.fa-xing:before{content:"\f168"}.fa-xing-square:before{content:"\f169"}.fa-y-combinator:before{content:"\f23b"}.fa-yahoo:before{content:"\f19e"}.fa-yammer:before{content:"\f840"}.fa-yandex:before{content:"\f413"}.fa-yandex-international:before{content:"\f414"}.fa-yarn:before{content:"\f7e3"}.fa-yelp:before{content:"\f1e9"}.fa-yen-sign:before{content:"\f157"}.fa-yin-yang:before{content:"\f6ad"}.fa-yoast:before{content:"\f2b1"}.fa-youtube:before{content:"\f167"}.fa-youtube-square:before{content:"\f431"}.fa-zhihu:before{content:"\f63f"}.sr-only{border:0;clip:rect(0,0,0,0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.sr-only-focusable:active,.sr-only-focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}/*!* Font Awesome Free 5.10.1 by @fontawesome - https://fontawesome.com +* License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License)*/@font-face{font-family:'font awesome 5 free';font-style:normal;font-weight:900;font-display:auto;src:url(../webfonts/fa-solid-900.eot);src:url(../webfonts/fa-solid-900.eot?#iefix)format("embedded-opentype"),url(../webfonts/fa-solid-900.woff2)format("woff2"),url(../webfonts/fa-solid-900.woff)format("woff"),url(../webfonts/fa-solid-900.ttf)format("truetype"),url(../webfonts/fa-solid-900.svg#fontawesome)format("svg")}.fa,.fas{font-family:'font awesome 5 free';font-weight:900}/*!* Font Awesome Free 5.10.1 by @fontawesome - https://fontawesome.com +* License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License)*/@font-face{font-family:'font awesome 5 
brands';font-style:normal;font-weight:400;font-display:auto;src:url(../webfonts/fa-brands-400.eot);src:url(../webfonts/fa-brands-400.eot?#iefix)format("embedded-opentype"),url(../webfonts/fa-brands-400.woff2)format("woff2"),url(../webfonts/fa-brands-400.woff)format("woff"),url(../webfonts/fa-brands-400.ttf)format("truetype"),url(../webfonts/fa-brands-400.svg#fontawesome)format("svg")}.fab{font-family:'font awesome 5 brands'}.td-border-top{border:none;border-top:1px solid #eee}.td-border-none{border:none}.td-block-padding,.td-default main section{padding-top:4rem;padding-bottom:4rem}@media(min-width:768px){.td-block-padding,.td-default main section{padding-top:5rem;padding-bottom:5rem}}.td-overlay{position:relative}.td-overlay::after{content:"";position:absolute;top:0;right:0;bottom:0;left:0}.td-overlay--dark::after{background-color:rgba(64,63,76,.3)}.td-overlay--light::after{background-color:rgba(211,243,238,.3)}.td-overlay__inner{position:relative;z-index:1}@media(min-width:992px){.td-max-width-on-larger-screens,.td-content>pre,.td-content>.highlight,.td-content>.lead,.td-content>h1,.td-content>h2,.td-content>ul,.td-content>ol,.td-content>p,.td-content>blockquote,.td-content>dl dd,.td-content .footnotes,.td-content>.alert{max-width:80%}}.td-box--height-min{min-height:300px}.td-box--height-med{min-height:400px}.td-box--height-max{min-height:500px}.td-box--height-full{min-height:100vh}@media(min-width:768px){.td-box--height-min{min-height:450px}.td-box--height-med{min-height:500px}.td-box--height-max{min-height:650px}}.td-box .row.section{padding-left:5rem;padding-right:5rem;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column}.td-box .row{padding-left:5rem;padding-right:5rem;-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row}.td-box.linkbox{padding:5rem}.td-box--0{color:#fff;background-color:#403f4c}.td-box--0 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#403f4c transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--0 p>a{color:#d9e5f8}.td-box--10.td-box--gradient{background:#403f4c -webkit-gradient(linear,left top,left bottom,from(#5d5c67),to(#403F4C))repeat-x!important;background:#403f4c -webkit-linear-gradient(top,#5d5c67,#403F4C)repeat-x!important;background:#403f4c -o-linear-gradient(top,#5d5c67,#403F4C)repeat-x!important;background:#403f4c linear-gradient(180deg,#5d5c67,#403F4C)repeat-x!important}.td-box--1{color:#fff;background-color:#30638e}.td-box--1 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#30638e transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--1 p>a{color:#cadcf5}.td-box--11.td-box--gradient{background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x!important;background:#30638e -webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x!important;background:#30638e -o-linear-gradient(top,#4f7a9f,#30638E)repeat-x!important;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x!important}.td-box--2{color:#fff;background-color:#ffa630}.td-box--2 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#ffa630 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--2 
p>a{color:#abc7f0}.td-box--12.td-box--gradient{background:#ffa630 -webkit-gradient(linear,left top,left bottom,from(#ffb34f),to(#FFA630))repeat-x!important;background:#ffa630 -webkit-linear-gradient(top,#ffb34f,#FFA630)repeat-x!important;background:#ffa630 -o-linear-gradient(top,#ffb34f,#FFA630)repeat-x!important;background:#ffa630 linear-gradient(180deg,#ffb34f,#FFA630)repeat-x!important}.td-box--3{color:#222;background-color:#c0e0de}.td-box--3 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#c0e0de transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--3 p>a{color:#638ac1}.td-box--13.td-box--gradient{background:#c0e0de -webkit-gradient(linear,left top,left bottom,from(#c9e5e3),to(#C0E0DE))repeat-x!important;background:#c0e0de -webkit-linear-gradient(top,#c9e5e3,#C0E0DE)repeat-x!important;background:#c0e0de -o-linear-gradient(top,#c9e5e3,#C0E0DE)repeat-x!important;background:#c0e0de linear-gradient(180deg,#c9e5e3,#C0E0DE)repeat-x!important}.td-box--4{color:#222;background-color:#fff}.td-box--4 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#fff transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--4 p>a{color:#72a1e5}.td-box--14.td-box--gradient{background:#fff -webkit-gradient(linear,left top,left bottom,from(white),to(white))repeat-x!important;background:#fff -webkit-linear-gradient(top,white,white)repeat-x!important;background:#fff -o-linear-gradient(top,white,white)repeat-x!important;background:#fff linear-gradient(180deg,white,white)repeat-x!important}.td-box--5{color:#fff;background-color:#888}.td-box--5 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#888 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--5 p>a{color:#b4cdf1}.td-box--15.td-box--gradient{background:#888 -webkit-gradient(linear,left top,left bottom,from(#9a9a9a),to(#888))repeat-x!important;background:#888 -webkit-linear-gradient(top,#9a9a9a,#888)repeat-x!important;background:#888 -o-linear-gradient(top,#9a9a9a,#888)repeat-x!important;background:#888 linear-gradient(180deg,#9a9a9a,#888)repeat-x!important}.td-box--6{color:#fff;background-color:#3772ff}.td-box--6 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#3772ff transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--6 p>a{color:#a9c6ef}.td-box--16.td-box--gradient{background:#3772ff -webkit-gradient(linear,left top,left bottom,from(#5587ff),to(#3772FF))repeat-x!important;background:#3772ff -webkit-linear-gradient(top,#5587ff,#3772FF)repeat-x!important;background:#3772ff -o-linear-gradient(top,#5587ff,#3772FF)repeat-x!important;background:#3772ff linear-gradient(180deg,#5587ff,#3772FF)repeat-x!important}.td-box--7{color:#fff;background-color:#ed6a5a}.td-box--7 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#ed6a5a transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--7 p>a{color:#a5c3ee}.td-box--17.td-box--gradient{background:#ed6a5a -webkit-gradient(linear,left top,left bottom,from(#f08073),to(#ED6A5A))repeat-x!important;background:#ed6a5a -webkit-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a 
-o-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a linear-gradient(180deg,#f08073,#ED6A5A)repeat-x!important}.td-box--8{color:#fff;background-color:#403f4c}.td-box--8 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#403f4c transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--8 p>a{color:#d9e5f8}.td-box--18.td-box--gradient{background:#403f4c -webkit-gradient(linear,left top,left bottom,from(#5d5c67),to(#403F4C))repeat-x!important;background:#403f4c -webkit-linear-gradient(top,#5d5c67,#403F4C)repeat-x!important;background:#403f4c -o-linear-gradient(top,#5d5c67,#403F4C)repeat-x!important;background:#403f4c linear-gradient(180deg,#5d5c67,#403F4C)repeat-x!important}.td-box--9{color:#fff;background-color:#ed6a5a}.td-box--9 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#ed6a5a transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--9 p>a{color:#a5c3ee}.td-box--19.td-box--gradient{background:#ed6a5a -webkit-gradient(linear,left top,left bottom,from(#f08073),to(#ED6A5A))repeat-x!important;background:#ed6a5a -webkit-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a -o-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a linear-gradient(180deg,#f08073,#ED6A5A)repeat-x!important}.td-box--10{color:#fff;background-color:#30638e}.td-box--10 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#30638e transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--10 p>a{color:#cadcf5}.td-box--110.td-box--gradient{background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x!important;background:#30638e -webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x!important;background:#30638e -o-linear-gradient(top,#4f7a9f,#30638E)repeat-x!important;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x!important}.td-box--11{color:#fff;background-color:#ffa630}.td-box--11 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#ffa630 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--11 p>a{color:#abc7f0}.td-box--111.td-box--gradient{background:#ffa630 -webkit-gradient(linear,left top,left bottom,from(#ffb34f),to(#FFA630))repeat-x!important;background:#ffa630 -webkit-linear-gradient(top,#ffb34f,#FFA630)repeat-x!important;background:#ffa630 -o-linear-gradient(top,#ffb34f,#FFA630)repeat-x!important;background:#ffa630 linear-gradient(180deg,#ffb34f,#FFA630)repeat-x!important}.td-box--12{color:#222;background-color:#fff}.td-box--12 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#fff transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--12 p>a{color:#72a1e5}.td-box--112.td-box--gradient{background:#fff -webkit-gradient(linear,left top,left bottom,from(white),to(white))repeat-x!important;background:#fff -webkit-linear-gradient(top,white,white)repeat-x!important;background:#fff -o-linear-gradient(top,white,white)repeat-x!important;background:#fff linear-gradient(180deg,white,white)repeat-x!important}.td-box--13{color:#222;background-color:#c0e0de}.td-box--13 
.td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#c0e0de transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--13 p>a{color:#638ac1}.td-box--113.td-box--gradient{background:#c0e0de -webkit-gradient(linear,left top,left bottom,from(#c9e5e3),to(#C0E0DE))repeat-x!important;background:#c0e0de -webkit-linear-gradient(top,#c9e5e3,#C0E0DE)repeat-x!important;background:#c0e0de -o-linear-gradient(top,#c9e5e3,#C0E0DE)repeat-x!important;background:#c0e0de linear-gradient(180deg,#c9e5e3,#C0E0DE)repeat-x!important}.td-box--cerulean-blue{color:#fff;background-color:#017cee}.td-box--cerulean-blue .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#017cee transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--cerulean-blue p>a{color:#bdd3f3}.td-box--1cerulean-blue.td-box--gradient{background:#017cee -webkit-gradient(linear,left top,left bottom,from(#2790f1),to(#017cee))repeat-x!important;background:#017cee -webkit-linear-gradient(top,#2790f1,#017cee)repeat-x!important;background:#017cee -o-linear-gradient(top,#2790f1,#017cee)repeat-x!important;background:#017cee linear-gradient(180deg,#2790f1,#017cee)repeat-x!important}.td-box--shamrock{color:#fff;background-color:#00ad46}.td-box--shamrock .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#00ad46 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--shamrock p>a{color:#cfdff6}.td-box--1shamrock.td-box--gradient{background:#00ad46 -webkit-gradient(linear,left top,left bottom,from(#26b962),to(#00ad46))repeat-x!important;background:#00ad46 -webkit-linear-gradient(top,#26b962,#00ad46)repeat-x!important;background:#00ad46 -o-linear-gradient(top,#26b962,#00ad46)repeat-x!important;background:#00ad46 linear-gradient(180deg,#26b962,#00ad46)repeat-x!important}.td-box--bright-sky-blue{color:#fff;background-color:#0cb6ff}.td-box--bright-sky-blue .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#0cb6ff transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--bright-sky-blue p>a{color:#b5cef1}.td-box--1bright-sky-blue.td-box--gradient{background:#0cb6ff -webkit-gradient(linear,left top,left bottom,from(#30c1ff),to(#0cb6ff))repeat-x!important;background:#0cb6ff -webkit-linear-gradient(top,#30c1ff,#0cb6ff)repeat-x!important;background:#0cb6ff -o-linear-gradient(top,#30c1ff,#0cb6ff)repeat-x!important;background:#0cb6ff linear-gradient(180deg,#30c1ff,#0cb6ff)repeat-x!important}.td-box--melon{color:#fff;background-color:#ff7557}.td-box--melon .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#ff7557 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--melon p>a{color:#a0c0ee}.td-box--1melon.td-box--gradient{background:#ff7557 -webkit-gradient(linear,left top,left bottom,from(#ff8a70),to(#ff7557))repeat-x!important;background:#ff7557 -webkit-linear-gradient(top,#ff8a70,#ff7557)repeat-x!important;background:#ff7557 -o-linear-gradient(top,#ff8a70,#ff7557)repeat-x!important;background:#ff7557 linear-gradient(180deg,#ff8a70,#ff7557)repeat-x!important}.td-box--vermillion{color:#fff;background-color:#e43921}.td-box--vermillion 
.td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#e43921 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--vermillion p>a{color:#b7cff2}.td-box--1vermillion.td-box--gradient{background:#e43921 -webkit-gradient(linear,left top,left bottom,from(#e85742),to(#e43921))repeat-x!important;background:#e43921 -webkit-linear-gradient(top,#e85742,#e43921)repeat-x!important;background:#e43921 -o-linear-gradient(top,#e85742,#e43921)repeat-x!important;background:#e43921 linear-gradient(180deg,#e85742,#e43921)repeat-x!important}.td-box--aqua{color:#fff;background-color:#11e1ee}.td-box--aqua .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#11e1ee transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--aqua p>a{color:#b9d0f2}.td-box--1aqua.td-box--gradient{background:#11e1ee -webkit-gradient(linear,left top,left bottom,from(#35e6f1),to(#11e1ee))repeat-x!important;background:#11e1ee -webkit-linear-gradient(top,#35e6f1,#11e1ee)repeat-x!important;background:#11e1ee -o-linear-gradient(top,#35e6f1,#11e1ee)repeat-x!important;background:#11e1ee linear-gradient(180deg,#35e6f1,#11e1ee)repeat-x!important}.td-box--shamrock-green{color:#fff;background-color:#04d659}.td-box--shamrock-green .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#04d659 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--shamrock-green p>a{color:#c3d7f4}.td-box--1shamrock-green.td-box--gradient{background:#04d659 -webkit-gradient(linear,left top,left bottom,from(#2adc72),to(#04d659))repeat-x!important;background:#04d659 -webkit-linear-gradient(top,#2adc72,#04d659)repeat-x!important;background:#04d659 -o-linear-gradient(top,#2adc72,#04d659)repeat-x!important;background:#04d659 linear-gradient(180deg,#2adc72,#04d659)repeat-x!important}.td-box--aqua-blue{color:#fff;background-color:#00c7d4}.td-box--aqua-blue .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#00c7d4 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--aqua-blue p>a{color:#c4d8f4}.td-box--1aqua-blue.td-box--gradient{background:#00c7d4 -webkit-gradient(linear,left top,left bottom,from(#26cfda),to(#00c7d4))repeat-x!important;background:#00c7d4 -webkit-linear-gradient(top,#26cfda,#00c7d4)repeat-x!important;background:#00c7d4 -o-linear-gradient(top,#26cfda,#00c7d4)repeat-x!important;background:#00c7d4 linear-gradient(180deg,#26cfda,#00c7d4)repeat-x!important}.td-box--white{color:#222;background-color:#fff}.td-box--white .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#fff transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--white p>a{color:#72a1e5}.td-box--1white.td-box--gradient{background:#fff -webkit-gradient(linear,left top,left bottom,from(white),to(#ffffff))repeat-x!important;background:#fff -webkit-linear-gradient(top,white,#ffffff)repeat-x!important;background:#fff -o-linear-gradient(top,white,#ffffff)repeat-x!important;background:#fff linear-gradient(180deg,white,#ffffff)repeat-x!important}.td-box--brownish-grey{color:#fff;background-color:#707070}.td-box--brownish-grey .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 
0;border-color:#707070 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--brownish-grey p>a{color:#c1d6f4}.td-box--1brownish-grey.td-box--gradient{background:#707070 -webkit-gradient(linear,left top,left bottom,from(#858585),to(#707070))repeat-x!important;background:#707070 -webkit-linear-gradient(top,#858585,#707070)repeat-x!important;background:#707070 -o-linear-gradient(top,#858585,#707070)repeat-x!important;background:#707070 linear-gradient(180deg,#858585,#707070)repeat-x!important}.td-box--very-light-pink{color:#222;background-color:#cbcbcb}.td-box--very-light-pink .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#cbcbcb transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--very-light-pink p>a{color:#6287bd}.td-box--1very-light-pink.td-box--gradient{background:#cbcbcb -webkit-gradient(linear,left top,left bottom,from(lightgray),to(#cbcbcb))repeat-x!important;background:#cbcbcb -webkit-linear-gradient(top,lightgray,#cbcbcb)repeat-x!important;background:#cbcbcb -o-linear-gradient(top,lightgray,#cbcbcb)repeat-x!important;background:#cbcbcb linear-gradient(180deg,lightgray,#cbcbcb)repeat-x!important}.td-box--slate-grey{color:#fff;background-color:#636365}.td-box--slate-grey .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#636365 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--slate-grey p>a{color:#c8daf5}.td-box--1slate-grey.td-box--gradient{background:#636365 -webkit-gradient(linear,left top,left bottom,from(#7a7a7c),to(#636365))repeat-x!important;background:#636365 -webkit-linear-gradient(top,#7a7a7c,#636365)repeat-x!important;background:#636365 -o-linear-gradient(top,#7a7a7c,#636365)repeat-x!important;background:#636365 linear-gradient(180deg,#7a7a7c,#636365)repeat-x!important}.td-box--greyish-brown{color:#fff;background-color:#51504f}.td-box--greyish-brown .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#51504f transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--greyish-brown p>a{color:#d3e2f7}.td-box--1greyish-brown.td-box--gradient{background:#51504f -webkit-gradient(linear,left top,left bottom,from(#6b6a69),to(#51504f))repeat-x!important;background:#51504f -webkit-linear-gradient(top,#6b6a69,#51504f)repeat-x!important;background:#51504f -o-linear-gradient(top,#6b6a69,#51504f)repeat-x!important;background:#51504f linear-gradient(180deg,#6b6a69,#51504f)repeat-x!important}.td-box--primary{color:#fff;background-color:#30638e}.td-box--primary .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#30638e transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--primary p>a{color:#cadcf5}.td-box--1primary.td-box--gradient{background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x!important;background:#30638e -webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x!important;background:#30638e -o-linear-gradient(top,#4f7a9f,#30638E)repeat-x!important;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x!important}.td-box--secondary{color:#fff;background-color:#ffa630}.td-box--secondary .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#ffa630 
transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--secondary p>a{color:#abc7f0}.td-box--1secondary.td-box--gradient{background:#ffa630 -webkit-gradient(linear,left top,left bottom,from(#ffb34f),to(#FFA630))repeat-x!important;background:#ffa630 -webkit-linear-gradient(top,#ffb34f,#FFA630)repeat-x!important;background:#ffa630 -o-linear-gradient(top,#ffb34f,#FFA630)repeat-x!important;background:#ffa630 linear-gradient(180deg,#ffb34f,#FFA630)repeat-x!important}.td-box--success{color:#fff;background-color:#3772ff}.td-box--success .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#3772ff transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--success p>a{color:#a9c6ef}.td-box--1success.td-box--gradient{background:#3772ff -webkit-gradient(linear,left top,left bottom,from(#5587ff),to(#3772FF))repeat-x!important;background:#3772ff -webkit-linear-gradient(top,#5587ff,#3772FF)repeat-x!important;background:#3772ff -o-linear-gradient(top,#5587ff,#3772FF)repeat-x!important;background:#3772ff linear-gradient(180deg,#5587ff,#3772FF)repeat-x!important}.td-box--info{color:#222;background-color:#c0e0de}.td-box--info .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#c0e0de transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--info p>a{color:#638ac1}.td-box--1info.td-box--gradient{background:#c0e0de -webkit-gradient(linear,left top,left bottom,from(#c9e5e3),to(#C0E0DE))repeat-x!important;background:#c0e0de -webkit-linear-gradient(top,#c9e5e3,#C0E0DE)repeat-x!important;background:#c0e0de -o-linear-gradient(top,#c9e5e3,#C0E0DE)repeat-x!important;background:#c0e0de linear-gradient(180deg,#c9e5e3,#C0E0DE)repeat-x!important}.td-box--warning{color:#fff;background-color:#ed6a5a}.td-box--warning .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#ed6a5a transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--warning p>a{color:#a5c3ee}.td-box--1warning.td-box--gradient{background:#ed6a5a -webkit-gradient(linear,left top,left bottom,from(#f08073),to(#ED6A5A))repeat-x!important;background:#ed6a5a -webkit-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a -o-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a linear-gradient(180deg,#f08073,#ED6A5A)repeat-x!important}.td-box--danger{color:#fff;background-color:#ed6a5a}.td-box--danger .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#ed6a5a transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--danger p>a{color:#a5c3ee}.td-box--1danger.td-box--gradient{background:#ed6a5a -webkit-gradient(linear,left top,left bottom,from(#f08073),to(#ED6A5A))repeat-x!important;background:#ed6a5a -webkit-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a -o-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a linear-gradient(180deg,#f08073,#ED6A5A)repeat-x!important}.td-box--light{color:#222;background-color:#d3f3ee}.td-box--light .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#d3f3ee transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--light 
p>a{color:#6993d0}.td-box--1light.td-box--gradient{background:#d3f3ee -webkit-gradient(linear,left top,left bottom,from(#daf5f1),to(#D3F3EE))repeat-x!important;background:#d3f3ee -webkit-linear-gradient(top,#daf5f1,#D3F3EE)repeat-x!important;background:#d3f3ee -o-linear-gradient(top,#daf5f1,#D3F3EE)repeat-x!important;background:#d3f3ee linear-gradient(180deg,#daf5f1,#D3F3EE)repeat-x!important}.td-box--dark{color:#fff;background-color:#403f4c}.td-box--dark .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#403f4c transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--dark p>a{color:#d9e5f8}.td-box--1dark.td-box--gradient{background:#403f4c -webkit-gradient(linear,left top,left bottom,from(#5d5c67),to(#403F4C))repeat-x!important;background:#403f4c -webkit-linear-gradient(top,#5d5c67,#403F4C)repeat-x!important;background:#403f4c -o-linear-gradient(top,#5d5c67,#403F4C)repeat-x!important;background:#403f4c linear-gradient(180deg,#5d5c67,#403F4C)repeat-x!important}.td-box--100{color:#222;background-color:#f8f9fa}.td-box--100 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#f8f9fa transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--100 p>a{color:#709ee0}.td-box--1100.td-box--gradient{background:#f8f9fa -webkit-gradient(linear,left top,left bottom,from(#f9fafb),to(#f8f9fa))repeat-x!important;background:#f8f9fa -webkit-linear-gradient(top,#f9fafb,#f8f9fa)repeat-x!important;background:#f8f9fa -o-linear-gradient(top,#f9fafb,#f8f9fa)repeat-x!important;background:#f8f9fa linear-gradient(180deg,#f9fafb,#f8f9fa)repeat-x!important}.td-box--200{color:#222;background-color:#eee}.td-box--200 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#eee transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--200 p>a{color:#6d99d8}.td-box--1200.td-box--gradient{background:#eee -webkit-gradient(linear,left top,left bottom,from(#f1f1f1),to(#eee))repeat-x!important;background:#eee -webkit-linear-gradient(top,#f1f1f1,#eee)repeat-x!important;background:#eee -o-linear-gradient(top,#f1f1f1,#eee)repeat-x!important;background:#eee linear-gradient(180deg,#f1f1f1,#eee)repeat-x!important}.td-box--300{color:#222;background-color:#dee2e6}.td-box--300 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#dee2e6 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--300 p>a{color:#6993cf}.td-box--1300.td-box--gradient{background:#dee2e6 -webkit-gradient(linear,left top,left bottom,from(#e3e6ea),to(#dee2e6))repeat-x!important;background:#dee2e6 -webkit-linear-gradient(top,#e3e6ea,#dee2e6)repeat-x!important;background:#dee2e6 -o-linear-gradient(top,#e3e6ea,#dee2e6)repeat-x!important;background:#dee2e6 linear-gradient(180deg,#e3e6ea,#dee2e6)repeat-x!important}.td-box--400{color:#222;background-color:#ccc}.td-box--400 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#ccc transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--400 p>a{color:#6288be}.td-box--1400.td-box--gradient{background:#ccc -webkit-gradient(linear,left top,left bottom,from(#d4d4d4),to(#ccc))repeat-x!important;background:#ccc 
-webkit-linear-gradient(top,#d4d4d4,#ccc)repeat-x!important;background:#ccc -o-linear-gradient(top,#d4d4d4,#ccc)repeat-x!important;background:#ccc linear-gradient(180deg,#d4d4d4,#ccc)repeat-x!important}.td-box--500{color:#fff;background-color:#adb5bd}.td-box--500 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#adb5bd transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--500 p>a{color:#9bbced}.td-box--1500.td-box--gradient{background:#adb5bd -webkit-gradient(linear,left top,left bottom,from(#b9c0c7),to(#adb5bd))repeat-x!important;background:#adb5bd -webkit-linear-gradient(top,#b9c0c7,#adb5bd)repeat-x!important;background:#adb5bd -o-linear-gradient(top,#b9c0c7,#adb5bd)repeat-x!important;background:#adb5bd linear-gradient(180deg,#b9c0c7,#adb5bd)repeat-x!important}.td-box--600{color:#fff;background-color:#888}.td-box--600 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#888 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--600 p>a{color:#b4cdf1}.td-box--1600.td-box--gradient{background:#888 -webkit-gradient(linear,left top,left bottom,from(#9a9a9a),to(#888))repeat-x!important;background:#888 -webkit-linear-gradient(top,#9a9a9a,#888)repeat-x!important;background:#888 -o-linear-gradient(top,#9a9a9a,#888)repeat-x!important;background:#888 linear-gradient(180deg,#9a9a9a,#888)repeat-x!important}.td-box--700{color:#fff;background-color:#495057}.td-box--700 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#495057 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--700 p>a{color:#d3e2f7}.td-box--1700.td-box--gradient{background:#495057 -webkit-gradient(linear,left top,left bottom,from(#646a70),to(#495057))repeat-x!important;background:#495057 -webkit-linear-gradient(top,#646a70,#495057)repeat-x!important;background:#495057 -o-linear-gradient(top,#646a70,#495057)repeat-x!important;background:#495057 linear-gradient(180deg,#646a70,#495057)repeat-x!important}.td-box--800{color:#fff;background-color:#333}.td-box--800 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#333 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--800 p>a{color:#e3ecfa}.td-box--1800.td-box--gradient{background:#333 -webkit-gradient(linear,left top,left bottom,from(#525252),to(#333))repeat-x!important;background:#333 -webkit-linear-gradient(top,#525252,#333)repeat-x!important;background:#333 -o-linear-gradient(top,#525252,#333)repeat-x!important;background:#333 linear-gradient(180deg,#525252,#333)repeat-x!important}.td-box--900{color:#fff;background-color:#222}.td-box--900 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#222 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--900 p>a{color:#ecf2fc}.td-box--1900.td-box--gradient{background:#222 -webkit-gradient(linear,left top,left bottom,from(#434343),to(#222))repeat-x!important;background:#222 -webkit-linear-gradient(top,#434343,#222)repeat-x!important;background:#222 -o-linear-gradient(top,#434343,#222)repeat-x!important;background:#222 linear-gradient(180deg,#434343,#222)repeat-x!important}.td-blog 
.td-rss-button{position:absolute;top:5.5rem;right:1rem;z-index:22}.td-content .highlight{margin:2rem 0;padding:1rem;background-color:#f8f9fa}.td-content .highlight pre,.td-content .highlight div{background-color:inherit!important}.td-content .highlight pre{margin:0;padding:0}.td-content p code,.td-content li>code,.td-content table code{color:inherit;padding:.2em .4em;margin:0;font-size:85%;word-break:normal;background-color:rgba(0,0,0,5%);border-radius:.25rem}.td-content p code br,.td-content li>code br,.td-content table code br{display:none}.td-content pre{word-wrap:normal;background-color:#f8f9fa;padding:1rem}.td-content pre>code{padding:0;margin:0;font-size:100%;word-break:normal;white-space:pre;border:0}.td-navbar-cover{background:#30638e}@media(min-width:768px){.td-navbar-cover{background:0 0!important}.td-navbar-cover .nav-link{text-shadow:1px 1px 2px #403f4c}}.td-navbar-cover.navbar-bg-onscroll .nav-link{text-shadow:none}.navbar-bg-onscroll{background:#30638e!important;opacity:inherit}.td-navbar{background:#30638e;min-height:4rem;margin:0;z-index:32}@media(min-width:768px){.td-navbar{position:fixed;top:0;width:100%}}.td-navbar .navbar-brand{text-transform:none;text-align:middle}.td-navbar .navbar-brand .nav-link{display:inline-block;margin-right:-30px}.td-navbar .navbar-brand svg{display:inline-block;margin:0 10px;height:30px}.td-navbar .nav-link{text-transform:none;font-weight:700}.td-navbar .td-search-input{border:none}.td-navbar .td-search-input::-webkit-input-placeholder{color:rgba(255,255,255,.75)}.td-navbar .td-search-input:-moz-placeholder{color:rgba(255,255,255,.75)}.td-navbar .td-search-input::-moz-placeholder{color:rgba(255,255,255,.75)}.td-navbar .td-search-input:-ms-input-placeholder{color:rgba(255,255,255,.75)}.td-navbar .dropdown{min-width:100px}@media(max-width:991.98px){.td-navbar{padding-right:.5rem;padding-left:.75rem}.td-navbar .td-navbar-nav-scroll{max-width:100%;height:2.5rem;margin-top:.25rem;overflow:hidden;font-size:.875rem}.td-navbar .td-navbar-nav-scroll .nav-link{padding-right:.25rem;padding-left:0}.td-navbar .td-navbar-nav-scroll .navbar-nav{padding-bottom:2rem;overflow-x:auto;white-space:nowrap;-webkit-overflow-scrolling:touch}}.td-sidebar-nav{padding-right:.5rem;margin-right:-15px;margin-left:-15px}@media(min-width:768px){@supports((position:-webkit-sticky) or (position:sticky)){.td-sidebar-nav{max-height:-webkit-calc(100vh - 10rem);max-height:calc(100vh - 10rem);overflow-y:auto}}}@media(min-width:768px){.td-sidebar-nav{display:block!important}}.td-sidebar-nav__section{padding-left:0}.td-sidebar-nav__section li{list-style:none}.td-sidebar-nav__section ul{padding:0;margin:0}@media(min-width:768px){.td-sidebar-nav__section>ul{padding-left:.5rem}}.td-sidebar-nav__section-title{display:block;font-weight:500}.td-sidebar-nav__section-title .active{font-weight:700}.td-sidebar-nav__section-title a{color:#222}.td-sidebar-nav .td-sidebar-link{display:block;padding-bottom:.375rem}.td-sidebar-nav .td-sidebar-link__page{color:#495057;font-weight:300}.td-sidebar-nav a:hover{color:#72a1e5;text-decoration:none}.td-sidebar-nav a.active{font-weight:700}.td-sidebar-nav .dropdown a{color:#495057}.td-sidebar-nav .dropdown .nav-link{padding:0 0 1rem}.td-sidebar{padding-bottom:1rem}@media(min-width:768px){.td-sidebar{padding-top:4rem;background-color:rgba(48,99,142,3%);padding-right:1rem;border-right:1px solid #dee2e6}}.td-sidebar__toggle{line-height:1;color:#222;margin:1rem}.td-sidebar__search{padding:1rem 
15px;margin-right:-15px;margin-left:-15px}.td-sidebar__inner{-webkit-box-ordinal-group:1;-webkit-order:0;-ms-flex-order:0;order:0}@media(min-width:768px){@supports((position:-webkit-sticky) or (position:sticky)){.td-sidebar__inner{position:-webkit-sticky;position:sticky;top:4rem;z-index:10;height:-webkit-calc(100vh - 6rem);height:calc(100vh - 6rem)}}}@media(min-width:1200px){.td-sidebar__inner{-webkit-box-flex:0;-webkit-flex:0 1 320px;-ms-flex:0 1 320px;flex:0 1 320px}}.td-sidebar__inner .td-search-box{width:100%}.td-toc{border-left:1px solid #dee2e6;-webkit-box-ordinal-group:3;-webkit-order:2;-ms-flex-order:2;order:2;padding-top:.75rem;padding-bottom:1.5rem;vertical-align:top}@supports((position:-webkit-sticky) or (position:sticky)){.td-toc{position:-webkit-sticky;position:sticky;top:4rem;height:-webkit-calc(100vh - 10rem);height:calc(100vh - 10rem);overflow-y:auto}}.td-toc a{display:block;font-weight:300;padding-bottom:.25rem}.td-toc li{list-style:none;display:block}.td-toc li li{margin-left:.5rem}.td-toc .td-page-meta a{font-weight:500}.td-toc #TableOfContents a{color:#888}.td-toc #TableOfContents a:hover{color:#72a1e5;text-decoration:none}.td-toc ul{padding-left:0}button{cursor:pointer;border:1px solid;border-radius:5px;padding:9px 29px;-webkit-transition:all ease-out .2s;-o-transition:all ease-out .2s;transition:all ease-out .2s}button:disabled{cursor:not-allowed}button.btn-filled{border-color:#017cee;background-color:#017cee}button.btn-filled:hover{border-color:#0cb6ff;background-color:#0cb6ff}button.btn-with-icon{padding:14px 20px}button.btn-with-icon svg{height:30px;width:auto;padding-right:15px}button.btn-with-icon span{display:inline-block;line-height:30px;vertical-align:middle}button.btn-hollow{background-color:#fff}button.btn-hollow.btn-blue{color:#017cee;border-color:#017cee}button.btn-hollow.btn-blue:disabled{color:#cbcbcb;border-color:#cbcbcb}button.btn-hollow.btn-blue:hover:enabled{color:#fff;background-color:#017cee}button.btn-hollow.btn-brown{border-color:#cbcbcb}button.btn-hollow.btn-brown:hover{background-color:#51504f;border-color:#51504f}button.btn-hollow.btn-brown:hover span{color:#fff}button.btn-hollow.btn-brown:hover svg path{fill:#fff}button.with-box-shadow{-webkit-box-shadow:0 2px 6px 0 rgba(0,0,0,.12);box-shadow:0 2px 6px rgba(0,0,0,.12)}@media(max-width:1280px){button{padding:4px 17px}}.breadcrumb{background:0 0;padding-left:0;padding-top:0}.alert{font-weight:500;background:#fff;color:inherit;border-radius:0}.alert-primary{border-style:solid;border-color:#30638e;border-width:0 0 0 4px}.alert-primary .alert-heading{color:#30638e}.alert-secondary{border-style:solid;border-color:#ffa630;border-width:0 0 0 4px}.alert-secondary .alert-heading{color:#ffa630}.alert-success{border-style:solid;border-color:#3772ff;border-width:0 0 0 4px}.alert-success .alert-heading{color:#3772ff}.alert-info{border-style:solid;border-color:#c0e0de;border-width:0 0 0 4px}.alert-info .alert-heading{color:#c0e0de}.alert-warning{border-style:solid;border-color:#ed6a5a;border-width:0 0 0 4px}.alert-warning .alert-heading{color:#ed6a5a}.alert-danger{border-style:solid;border-color:#ed6a5a;border-width:0 0 0 4px}.alert-danger .alert-heading{color:#ed6a5a}.alert-light{border-style:solid;border-color:#d3f3ee;border-width:0 0 0 4px}.alert-light .alert-heading{color:#d3f3ee}.alert-dark{border-style:solid;border-color:#403f4c;border-width:0 0 0 4px}.alert-dark .alert-heading{color:#403f4c}.td-content{-webkit-box-ordinal-group:2;-webkit-order:1;-ms-flex-order:1;order:1}.td-content p,.td-content 
li,.td-content td{font-weight:400}.td-content>h1{font-weight:700;margin-bottom:1rem}.td-content>h2{margin-bottom:1rem}.td-content>h2:not(:first-child){margin-top:3rem}.td-content>h2+h3{margin-top:1rem}.td-content>h3,.td-content>h4,.td-content>h5,.td-content>h6{margin-bottom:1rem;margin-top:2rem}.td-content>blockquote{padding:0 0 0 1rem;margin-bottom:1rem;color:#888;border-left:6px solid #ffa630}.td-content>ul li,.td-content>ol li{margin-bottom:.25rem}.td-content strong{font-weight:700}.td-content .alert:not(:first-child){margin-top:2rem;margin-bottom:2rem}.td-content .lead{margin-bottom:1.5rem}.td-title{margin-top:1rem;margin-bottom:.5rem}@media(min-width:576px){.td-title{font-size:3rem}}.search-form{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;width:344px;padding:8px 20px;border:solid 1px #cbcbcb;border-radius:5px;margin:60px auto 0}.search-form__input{font-family:roboto,sans-serif;font-size:16px;color:#707070;line-height:1.63;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;padding-right:10px;border:none;background:0 0;outline:none;float:left}.search-form__button{border:none;background-color:transparent;padding:0}@media(max-width:1280px){.search-form{width:270px;padding:3px 20px;margin-top:30px}}.td-outer{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;height:100vh}@media(min-width:768px){.td-default main section:first-of-type{padding-top:8rem}}.td-main{-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1}.td-main main{padding-bottom:2rem}@media(min-width:768px){.td-main main{padding-top:5.5rem}}.td-cover-block--height-min{min-height:300px}.td-cover-block--height-med{min-height:400px}.td-cover-block--height-max{min-height:500px}.td-cover-block--height-full{min-height:100vh}@media(min-width:768px){.td-cover-block--height-min{min-height:450px}.td-cover-block--height-med{min-height:500px}.td-cover-block--height-max{min-height:650px}}.td-cover-logo{margin-right:.5em}.td-cover-block{position:relative;padding-top:5rem;padding-bottom:5rem;background-repeat:no-repeat;background-position:50% 0;-webkit-background-size:cover;background-size:cover}.td-bg-arrow-wrapper{position:relative}.section-index .entry{padding:.75rem}.section-index h5{margin-bottom:0}.section-index h5 a{font-weight:700}.section-index p{margin-top:0}.pageinfo{font-weight:500;background:#f8f9fa;color:inherit;border-radius:0;margin:2rem;padding:1.5rem;padding-bottom:.5rem}.pageinfo-primary{border-style:solid;border-color:#30638e}.pageinfo-secondary{border-style:solid;border-color:#ffa630}.pageinfo-success{border-style:solid;border-color:#3772ff}.pageinfo-info{border-style:solid;border-color:#c0e0de}.pageinfo-warning{border-style:solid;border-color:#ed6a5a}.pageinfo-danger{border-style:solid;border-color:#ed6a5a}.pageinfo-light{border-style:solid;border-color:#d3f3ee}.pageinfo-dark{border-style:solid;border-color:#403f4c}footer{min-height:150px}@media(max-width:991.98px){footer{min-height:200px}}@media(min-width:768px){.td-offset-anchor:target{display:block;position:relative;top:-4rem;visibility:hidden}h2[id]:before,h3[id]:before,h4[id]:before,h5[id]:before{display:block;content:" ";margin-top:-5rem;height:5rem;visibility:hidden}} \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/_gen/js/docs.js 
b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/_gen/js/docs.js new file mode 100644 index 00000000000..2029b9f8147 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/_gen/js/docs.js @@ -0,0 +1 @@ +!function(r){var n={};function o(t){if(n[t])return n[t].exports;var e=n[t]={i:t,l:!1,exports:{}};return r[t].call(e.exports,e,e.exports,o),e.l=!0,e.exports}o.m=r,o.c=n,o.d=function(t,e,r){o.o(t,e)||Object.defineProperty(t,e,{enumerable:!0,get:r})},o.r=function(t){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})},o.t=function(e,t){if(1&t&&(e=o(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(o.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var n in e)o.d(r,n,function(t){return e[t]}.bind(null,n));return r},o.n=function(t){var e=t&&t.__esModule?function(){return t.default}:function(){return t};return o.d(e,"a",e),e},o.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},o.p="/",o(o.s=56)}([function(t,e,r){var m=r(1),w=r(14),g=r(15),E=Math.max,x=Math.min;t.exports=function(n,r,t){var o,i,a,s,u,c,f=0,l=!1,d=!1,e=!0;if("function"!=typeof n)throw new TypeError("Expected a function");function h(t){var e=o,r=i;return o=i=void 0,f=t,s=n.apply(r,e)}function p(t){var e=t-c;return void 0===c||r<=e||e<0||d&&a<=t-f}function y(){var t=w();if(p(t))return b(t);u=setTimeout(y,function(t){var e=r-(t-c);return d?x(e,a-(t-f)):e}(t))}function b(t){return u=void 0,e&&o?h(t):(o=i=void 0,s)}function v(){var t=w(),e=p(t);if(o=arguments,i=this,c=t,e){if(void 0===u)return function(t){return f=t,u=setTimeout(y,r),l?h(t):s}(c);if(d)return clearTimeout(u),u=setTimeout(y,r),h(c)}return void 0===u&&(u=setTimeout(y,r)),s}return r=g(r)||0,m(t)&&(l=!!t.leading,a=(d="maxWait"in t)?E(g(t.maxWait)||0,r):a,e="trailing"in t?!!t.trailing:e),v.cancel=function(){void 0!==u&&clearTimeout(u),o=c=i=u=void(f=0)},v.flush=function(){return void 0===u?s:b(w())},v}},function(t,e){t.exports=function(t){var e=typeof t;return null!=t&&("object"==e||"function"==e)}},function(t,e,r){var n=r(8),o="object"==typeof self&&self&&self.Object===Object&&self,i=n||o||Function("return this")();t.exports=i},function(t,e,r){var n=r(6),o=r(19),i=r(20),a=n?n.toStringTag:void 0;t.exports=function(t){return null==t?void 0===t?"[object Undefined]":"[object Null]":a&&a in Object(t)?o(t):i(t)}},function(t,e){t.exports=function(t){return null!=t&&"object"==typeof t}},function(t,e,r){!function(s){"use strict";var u={searchParams:"URLSearchParams"in self,iterable:"Symbol"in self&&"iterator"in Symbol,blob:"FileReader"in self&&"Blob"in self&&function(){try{return new Blob,!0}catch(t){return!1}}(),formData:"FormData"in self,arrayBuffer:"ArrayBuffer"in self};if(u.arrayBuffer)var e=["[object Int8Array]","[object Uint8Array]","[object Uint8ClampedArray]","[object Int16Array]","[object Uint16Array]","[object Int32Array]","[object Uint32Array]","[object Float32Array]","[object Float64Array]"],r=ArrayBuffer.isView||function(t){return t&&-1 nav").offsetHeight,n=function(){var t;e.sort(function(t,e){return t.targetElement.offsetTop-e.targetElement.offsetTop}),t=e[0].targetElement.offsetTop+r>window.scrollY?0:e[e.length-1].targetElement.offsetTop+rwindow.scrollY})-1,e.forEach(function(t){return 
t.navElement.classList.remove("current")}),e[t].navElement.classList.add("current")};window.addEventListener("scroll",o()(n,10)),window.addEventListener("resize",o()(n,10)),n()}}()},function(t,e,r){var n=r(2);t.exports=function(){return n.Date.now()}},function(t,e,r){var n=r(16),o=r(1),i=r(18),a=/^[-+]0x[0-9a-f]+$/i,s=/^0b[01]+$/i,u=/^0o[0-7]+$/i,c=parseInt;t.exports=function(t){if("number"==typeof t)return t;if(i(t))return NaN;if(o(t)){var e="function"==typeof t.valueOf?t.valueOf():t;t=o(e)?e+"":e}if("string"!=typeof t)return 0===t?t:+t;t=n(t);var r=s.test(t);return r||u.test(t)?c(t.slice(2),r?2:8):a.test(t)?NaN:+t}},function(t,e,r){var n=r(17),o=/^\s+/;t.exports=function(t){return t?t.slice(0,n(t)+1).replace(o,""):t}},function(t,e){var r=/\s/;t.exports=function(t){for(var e=t.length;e--&&r.test(t.charAt(e)););return e}},function(t,e,r){var n=r(3),o=r(4);t.exports=function(t){return"symbol"==typeof t||o(t)&&"[object Symbol]"==n(t)}},function(t,e,r){var n=r(6),o=Object.prototype,i=o.hasOwnProperty,a=o.toString,s=n?n.toStringTag:void 0;t.exports=function(t){var e=i.call(t,s),r=t[s];try{var n=!(t[s]=void 0)}catch(t){}var o=a.call(t);return n&&(e?t[s]=r:delete t[s]),o}},function(t,e){var r=Object.prototype.toString;t.exports=function(t){return r.call(t)}},function(t,e){!function(){var e=window.document.querySelector(".rating");if(e){function t(t){e.querySelector("#rate-star-".concat(t)).addEventListener("click",function(){!function(t){window._paq.push(["trackEvent","Docs","Rating",window.location.pathname,t])}(t),e.innerHTML="
Thank you!
"})}for(var r=1;r<=5;r++)t(r)}}()},function(t,e){var r=window.document.querySelector(".rst-content");!function(){if(r){var t=r.querySelectorAll("table");t&&0!==t.length&&t.forEach(function(t){if(!t.parentNode.classList.contains("wy-table-responsive")){var e=document.createElement("div");e.classList.add("wy-table-responsive"),t.parentNode.insertBefore(e,t),e.appendChild(t)}})}}()},function(t,e,n){"use strict";(function(t){var i=n(7);function r(t){return function(t){if(Array.isArray(t))return t}(t)||function(t){if(Symbol.iterator in Object(t)||"[object Arguments]"===Object.prototype.toString.call(t))return Array.from(t)}(t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance")}()}function f(){var t=r(document.location.pathname.split("/")),e=t[2];return{currentVersion:t[3],currentPackageName:e,pagePath:t.slice(4).join("/")}}var a;(a=window.document.querySelectorAll(".docs-version-selector"))&&0!==a.length&&t("/_gen/packages-metadata.json").then(function(t){return t.json()}).then(function(t){var e=f().currentPackageName,r=t.find(function(t){return t["package-name"]===e});if(r){var n=r["all-versions"].sort(i.a).reverse(),o=r["stable-version"];a.forEach(function(t){return function(t,e,r){var n=t.querySelector("#version-item-template").innerText,o=document.createElement("div");function i(t,e){var r=o.cloneNode(!0),n="/docs/".concat(u,"/").concat(t,"/").concat(c);r.setAttribute("href",n),r.innerText=e,a.appendChild(r)}o.innerHTML=n,o=o.firstElementChild;var a=t.querySelector(".dropdown-menu"),s=f(),u=s.currentPackageName,c=s.pagePath;i("stable","Stable (".concat(r,")")),e.forEach(function(t){return i(t,t)})}(t,n,o)})}})}).call(this,n(5))},,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,function(t,e,r){"use strict";r.r(e);r(10),r(11),r(12),r(13),r(21),r(22),r(57),r(23)},function(t,e){Array.from(document.querySelectorAll(".toctree ul")).forEach(function(t){Array.from(t.parentNode.children).filter(function(t){return"A"===t.tagName}).forEach(function(t){var e=document.createElement("span");e.classList.add("toctree-expand"),t.insertBefore(e,t.firstChild)})})}]); \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/_sphinx_javascript_frameworks_compat.js b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/_sphinx_javascript_frameworks_compat.js new file mode 100644 index 00000000000..8549469dc29 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/_sphinx_javascript_frameworks_compat.js @@ -0,0 +1,134 @@ +/* + * _sphinx_javascript_frameworks_compat.js + * ~~~~~~~~~~ + * + * Compatability shim for jQuery and underscores.js. + * + * WILL BE REMOVED IN Sphinx 6.0 + * xref RemovedInSphinx60Warning + * + */ + +/** + * select a different prefix for underscore + */ +$u = _.noConflict(); + + +/** + * small helper function to urldecode strings + * + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent#Decoding_query_parameters_from_a_URL + */ +jQuery.urldecode = function(x) { + if (!x) { + return x + } + return decodeURIComponent(x.replace(/\+/g, ' ')); +}; + +/** + * small helper function to urlencode strings + */ +jQuery.urlencode = encodeURIComponent; + +/** + * This function returns the parsed url parameters of the + * current request. Multiple values per key are supported, + * it will always return arrays of strings for the value parts. 
+ */ +jQuery.getQueryParameters = function(s) { + if (typeof s === 'undefined') + s = document.location.search; + var parts = s.substr(s.indexOf('?') + 1).split('&'); + var result = {}; + for (var i = 0; i < parts.length; i++) { + var tmp = parts[i].split('=', 2); + var key = jQuery.urldecode(tmp[0]); + var value = jQuery.urldecode(tmp[1]); + if (key in result) + result[key].push(value); + else + result[key] = [value]; + } + return result; +}; + +/** + * highlight a given string on a jquery object by wrapping it in + * span elements with the given class name. + */ +jQuery.fn.highlightText = function(text, className) { + function highlight(node, addItems) { + if (node.nodeType === 3) { + var val = node.nodeValue; + var pos = val.toLowerCase().indexOf(text); + if (pos >= 0 && + !jQuery(node.parentNode).hasClass(className) && + !jQuery(node.parentNode).hasClass("nohighlight")) { + var span; + var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.className = className; + } + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + node.parentNode.insertBefore(span, node.parentNode.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling)); + node.nodeValue = val.substr(0, pos); + if (isInSVG) { + var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); + var bbox = node.parentElement.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute('class', className); + addItems.push({ + "parent": node.parentNode, + "target": rect}); + } + } + } + else if (!jQuery(node).is("button, select, textarea")) { + jQuery.each(node.childNodes, function() { + highlight(this, addItems); + }); + } + } + var addItems = []; + var result = this.each(function() { + highlight(this, addItems); + }); + for (var i = 0; i < addItems.length; ++i) { + jQuery(addItems[i].parent).before(addItems[i].target); + } + return result; +}; + +/* + * backward compatibility for jQuery.browser + * This will be supported until firefox bug is fixed. + */ +if (!jQuery.browser) { + jQuery.uaMatch = function(ua) { + ua = ua.toLowerCase(); + + var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || + /(webkit)[ \/]([\w.]+)/.exec(ua) || + /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || + /(msie) ([\w.]+)/.exec(ua) || + ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || + []; + + return { + browser: match[ 1 ] || "", + version: match[ 2 ] || "0" + }; + }; + jQuery.browser = {}; + jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; +} diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/basic.css b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/basic.css new file mode 100644 index 00000000000..eeb0519a69b --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/basic.css @@ -0,0 +1,899 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. 
+ * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +div.section::after { + display: block; + content: ''; + clear: left; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox form.search { + overflow: hidden; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li p.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + 
+div.body { + min-width: 360px; + max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} +a.brackets:before, +span.brackets > a:before{ + content: "["; +} + +a.brackets:after, +span.brackets > a:after { + content: "]"; +} + + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, figure.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, figure.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, figure.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +img.align-default, figure.align-default, .figure.align-default { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-default { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar, +aside.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px; + background-color: #ffe; + width: 40%; + float: right; + clear: right; + overflow-x: auto; +} + +p.sidebar-title { + font-weight: bold; +} +div.admonition, div.topic, blockquote { + clear: left; +} + +/* -- topics ---------------------------------------------------------------- */ +div.topic { + border: 1px solid #ccc; + padding: 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +aside.sidebar > :last-child, +div.topic > :last-child, +div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +aside.sidebar::after, +div.topic::after, +div.admonition::after, +blockquote::after { + display: block; + content: ''; + clear: both; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; +} + +table.align-center { + margin-left: auto; + margin-right: auto; +} + +table.align-default { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 
0; + border-bottom: 1px solid #aaa; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +th > :first-child, +td > :first-child { + margin-top: 0px; +} + +th > :last-child, +td > :last-child { + margin-bottom: 0px; +} + +/* -- figures --------------------------------------------------------------- */ + +div.figure, figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption, figcaption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number, +figcaption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text, +figcaption span.caption-text { +} + +/* -- field list styles ----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- hlist styles ---------------------------------------------------------- */ + +table.hlist { + margin: 1em 0; +} + +table.hlist td { + vertical-align: top; +} + +/* -- object description styles --------------------------------------------- */ + +.sig { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; +} + +.sig-name, code.descname { + background-color: transparent; + font-weight: bold; +} + +.sig-name { + font-size: 1.1em; +} + +code.descname { + font-size: 1.2em; +} + +.sig-prename, code.descclassname { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.sig-param.n { + font-style: italic; +} + +/* C++ specific styling */ + +.sig-inline.c-texpr, +.sig-inline.cpp-texpr { + font-family: unset; +} + +.sig.c .k, .sig.c .kt, +.sig.cpp .k, .sig.cpp .kt { + color: #0033B3; +} + +.sig.c .m, +.sig.cpp .m { + color: #1750EB; +} + +.sig.c .s, .sig.c .sc, +.sig.cpp .s, .sig.cpp .sc { + color: #067D17; +} + + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { + margin-top: 0px; +} + +:not(li) > ol > li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { + margin-bottom: 0px; +} + +ol.simple ol p, +ol.simple ul p, +ul.simple ol p, +ul.simple ul p { + margin-top: 0; +} + +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; +} + +ol.simple p, +ul.simple p { + margin-bottom: 0; +} +dl.footnote > dt, +dl.citation > dt { + float: left; + margin-right: 0.5em; +} + +dl.footnote > dd, +dl.citation > dd { + margin-bottom: 0em; +} + +dl.footnote > dd:after, +dl.citation > dd:after { + content: ""; + clear: both; +} + +dl.field-list { + display: grid; + grid-template-columns: fit-content(30%) auto; +} + +dl.field-list > dt { + font-weight: bold; + word-break: break-word; + padding-left: 0.5em; + padding-right: 5px; +} +dl.field-list > dt:after { + content: ":"; +} + + +dl.field-list > dd { + padding-left: 0.5em; + margin-top: 0em; + margin-left: 0em; + margin-bottom: 0em; +} + +dl { + margin-bottom: 
15px; +} + +dd > :first-child { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +.classifier:before { + font-style: normal; + margin: 0 0.5em; + content: ":"; + display: inline-block; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +pre, div[class*="highlight-"] { + clear: both; +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; + white-space: nowrap; +} + +div[class*="highlight-"] { + margin: 1em 0; +} + +td.linenos pre { + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + display: block; +} + +table.highlighttable tbody { + display: block; +} + +table.highlighttable tr { + display: flex; +} + +table.highlighttable td { + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + +table.highlighttable td.code { + flex: 1; + overflow: hidden; +} + +.highlight .hll { + display: block; +} + +div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; +} + +div.code-block-caption { + margin-top: 1em; + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +table.highlighttable td.linenos, +span.linenos, +div.highlight span.gp { /* gp: Generic.Prompt */ + user-select: none; + -webkit-user-select: text; /* Safari fallback only */ + -webkit-user-select: none; /* Chrome/Safari */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* IE10+ */ +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + margin: 1em 0; +} + +code.xref, a code { + background-color: transparent; + font-weight: bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: absolute; + z-index: 1; +} + +div.math:hover a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + 
+@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/check-solid.svg b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/check-solid.svg new file mode 100644 index 00000000000..92fad4b5c0b --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/check-solid.svg @@ -0,0 +1,4 @@ + + + + diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/clipboard.min.js b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/clipboard.min.js new file mode 100644 index 00000000000..54b3c463811 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/clipboard.min.js @@ -0,0 +1,7 @@ +/*! + * clipboard.js v2.0.8 + * https://clipboardjs.com/ + * + * Licensed MIT © Zeno Rocha + */ +!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.ClipboardJS=e():t.ClipboardJS=e()}(this,function(){return n={686:function(t,e,n){"use strict";n.d(e,{default:function(){return o}});var e=n(279),i=n.n(e),e=n(370),u=n.n(e),e=n(817),c=n.n(e);function a(t){try{return document.execCommand(t)}catch(t){return}}var f=function(t){t=c()(t);return a("cut"),t};var l=function(t){var e,n,o,r=1 + + + + diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/copybutton.css b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/copybutton.css new file mode 100644 index 00000000000..f1916ec7d1b --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/copybutton.css @@ -0,0 +1,94 @@ +/* Copy buttons */ +button.copybtn { + position: absolute; + display: flex; + top: .3em; + right: .3em; + width: 1.7em; + height: 1.7em; + opacity: 0; + transition: opacity 0.3s, border .3s, background-color .3s; + user-select: none; + padding: 0; + border: none; + outline: none; + border-radius: 0.4em; + /* The colors that GitHub uses */ + border: #1b1f2426 1px solid; + background-color: #f6f8fa; + color: #57606a; +} + +button.copybtn.success { + border-color: #22863a; + color: #22863a; +} + +button.copybtn svg { + stroke: currentColor; + width: 1.5em; + height: 1.5em; + padding: 0.1em; +} + +div.highlight { + position: relative; +} + +/* Show the copybutton */ +.highlight:hover button.copybtn, button.copybtn.success { + opacity: 1; +} + +.highlight button.copybtn:hover { + background-color: rgb(235, 235, 235); +} + +.highlight button.copybtn:active { + background-color: rgb(187, 187, 187); +} + +/** + * A minimal CSS-only tooltip copied from: + * https://codepen.io/mildrenben/pen/rVBrpK + * + * To use, write HTML like the following: + * + *
+ * <p class="o-tooltip--left" data-tooltip="Hey">Short</p>
+ */ + .o-tooltip--left { + position: relative; + } + + .o-tooltip--left:after { + opacity: 0; + visibility: hidden; + position: absolute; + content: attr(data-tooltip); + padding: .2em; + font-size: .8em; + left: -.2em; + background: grey; + color: white; + white-space: nowrap; + z-index: 2; + border-radius: 2px; + transform: translateX(-102%) translateY(0); + transition: opacity 0.2s cubic-bezier(0.64, 0.09, 0.08, 1), transform 0.2s cubic-bezier(0.64, 0.09, 0.08, 1); +} + +.o-tooltip--left:hover:after { + display: block; + opacity: 1; + visibility: visible; + transform: translateX(-100%) translateY(0); + transition: opacity 0.2s cubic-bezier(0.64, 0.09, 0.08, 1), transform 0.2s cubic-bezier(0.64, 0.09, 0.08, 1); + transition-delay: .5s; +} + +/* By default the copy button shouldn't show up when printing a page */ +@media print { + button.copybtn { + display: none; + } +} diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/copybutton.js b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/copybutton.js new file mode 100644 index 00000000000..02c5c82d9d5 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/copybutton.js @@ -0,0 +1,248 @@ +// Localization support +const messages = { + 'en': { + 'copy': 'Copy', + 'copy_to_clipboard': 'Copy to clipboard', + 'copy_success': 'Copied!', + 'copy_failure': 'Failed to copy', + }, + 'es' : { + 'copy': 'Copiar', + 'copy_to_clipboard': 'Copiar al portapapeles', + 'copy_success': '¡Copiado!', + 'copy_failure': 'Error al copiar', + }, + 'de' : { + 'copy': 'Kopieren', + 'copy_to_clipboard': 'In die Zwischenablage kopieren', + 'copy_success': 'Kopiert!', + 'copy_failure': 'Fehler beim Kopieren', + }, + 'fr' : { + 'copy': 'Copier', + 'copy_to_clipboard': 'Copié dans le presse-papier', + 'copy_success': 'Copié !', + 'copy_failure': 'Échec de la copie', + }, + 'ru': { + 'copy': 'Скопировать', + 'copy_to_clipboard': 'Скопировать в буфер', + 'copy_success': 'Скопировано!', + 'copy_failure': 'Не удалось скопировать', + }, + 'zh-CN': { + 'copy': '复制', + 'copy_to_clipboard': '复制到剪贴板', + 'copy_success': '复制成功!', + 'copy_failure': '复制失败', + }, + 'it' : { + 'copy': 'Copiare', + 'copy_to_clipboard': 'Copiato negli appunti', + 'copy_success': 'Copiato!', + 'copy_failure': 'Errore durante la copia', + } +} + +let locale = 'en' +if( document.documentElement.lang !== undefined + && messages[document.documentElement.lang] !== undefined ) { + locale = document.documentElement.lang +} + +let doc_url_root = DOCUMENTATION_OPTIONS.URL_ROOT; +if (doc_url_root == '#') { + doc_url_root = ''; +} + +/** + * SVG files for our copy buttons + */ +let iconCheck = ` + ${messages[locale]['copy_success']} + + +` + +// If the user specified their own SVG use that, otherwise use the default +let iconCopy = ``; +if (!iconCopy) { + iconCopy = ` + ${messages[locale]['copy_to_clipboard']} + + + +` +} + +/** + * Set up copy/paste for code blocks + */ + +const runWhenDOMLoaded = cb => { + if (document.readyState != 'loading') { + cb() + } else if (document.addEventListener) { + document.addEventListener('DOMContentLoaded', cb) + } else { + document.attachEvent('onreadystatechange', function() { + if (document.readyState == 'complete') cb() + }) + } +} + +const codeCellId = index => `codecell${index}` + +// Clears selected text since ClipboardJS will select the text when copying +const clearSelection = () => { + if (window.getSelection) { + window.getSelection().removeAllRanges() + } else if (document.selection) { + 
document.selection.empty() + } +} + +// Changes tooltip text for a moment, then changes it back +// We want the timeout of our `success` class to be a bit shorter than the +// tooltip and icon change, so that we can hide the icon before changing back. +var timeoutIcon = 2000; +var timeoutSuccessClass = 1500; + +const temporarilyChangeTooltip = (el, oldText, newText) => { + el.setAttribute('data-tooltip', newText) + el.classList.add('success') + // Remove success a little bit sooner than we change the tooltip + // So that we can use CSS to hide the copybutton first + setTimeout(() => el.classList.remove('success'), timeoutSuccessClass) + setTimeout(() => el.setAttribute('data-tooltip', oldText), timeoutIcon) +} + +// Changes the copy button icon for two seconds, then changes it back +const temporarilyChangeIcon = (el) => { + el.innerHTML = iconCheck; + setTimeout(() => {el.innerHTML = iconCopy}, timeoutIcon) +} + +const addCopyButtonToCodeCells = () => { + // If ClipboardJS hasn't loaded, wait a bit and try again. This + // happens because we load ClipboardJS asynchronously. + if (window.ClipboardJS === undefined) { + setTimeout(addCopyButtonToCodeCells, 250) + return + } + + // Add copybuttons to all of our code cells + const COPYBUTTON_SELECTOR = 'div.highlight pre'; + const codeCells = document.querySelectorAll(COPYBUTTON_SELECTOR) + codeCells.forEach((codeCell, index) => { + const id = codeCellId(index) + codeCell.setAttribute('id', id) + + const clipboardButton = id => + `` + codeCell.insertAdjacentHTML('afterend', clipboardButton(id)) + }) + +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +} + +/** + * Removes excluded text from a Node. + * + * @param {Node} target Node to filter. + * @param {string} exclude CSS selector of nodes to exclude. + * @returns {DOMString} Text from `target` with text removed. + */ +function filterText(target, exclude) { + const clone = target.cloneNode(true); // clone as to not modify the live DOM + if (exclude) { + // remove excluded nodes + clone.querySelectorAll(exclude).forEach(node => node.remove()); + } + return clone.innerText; +} + +// Callback when a copy button is clicked. Will be passed the node that was clicked +// should then grab the text and replace pieces of text that shouldn't be used in output +function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") { + var regexp; + var match; + + // Do we check for line continuation characters and "HERE-documents"? 
+ var useLineCont = !!lineContinuationChar + var useHereDoc = !!hereDocDelim + + // create regexp to capture prompt and remaining line + if (isRegexp) { + regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)') + } else { + regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)') + } + + const outputLines = []; + var promptFound = false; + var gotLineCont = false; + var gotHereDoc = false; + const lineGotPrompt = []; + for (const line of textContent.split('\n')) { + match = line.match(regexp) + if (match || gotLineCont || gotHereDoc) { + promptFound = regexp.test(line) + lineGotPrompt.push(promptFound) + if (removePrompts && promptFound) { + outputLines.push(match[2]) + } else { + outputLines.push(line) + } + gotLineCont = line.endsWith(lineContinuationChar) & useLineCont + if (line.includes(hereDocDelim) & useHereDoc) + gotHereDoc = !gotHereDoc + } else if (!onlyCopyPromptLines) { + outputLines.push(line) + } else if (copyEmptyLines && line.trim() === '') { + outputLines.push(line) + } + } + + // If no lines with the prompt were found then just use original lines + if (lineGotPrompt.some(v => v === true)) { + textContent = outputLines.join('\n'); + } + + // Remove a trailing newline to avoid auto-running when pasting + if (textContent.endsWith("\n")) { + textContent = textContent.slice(0, -1) + } + return textContent +} + + +var copyTargetText = (trigger) => { + var target = document.querySelector(trigger.attributes['data-clipboard-target'].value); + + // get filtered text + let exclude = '.linenos, .gp'; + + let text = filterText(target, exclude); + return formatCopyText(text, '', false, true, true, true, '', '') +} + + // Initialize with a callback so we can modify the text before copy + const clipboard = new ClipboardJS('.copybtn', {text: copyTargetText}) + + // Update UI with error/success messages + clipboard.on('success', event => { + clearSelection() + temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_success']) + temporarilyChangeIcon(event.trigger) + }) + + clipboard.on('error', event => { + temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_failure']) + }) +} + +runWhenDOMLoaded(addCopyButtonToCodeCells) \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/copybutton_funcs.js b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/copybutton_funcs.js new file mode 100644 index 00000000000..dbe1aaad79c --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/copybutton_funcs.js @@ -0,0 +1,73 @@ +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +} + +/** + * Removes excluded text from a Node. + * + * @param {Node} target Node to filter. + * @param {string} exclude CSS selector of nodes to exclude. + * @returns {DOMString} Text from `target` with text removed. + */ +export function filterText(target, exclude) { + const clone = target.cloneNode(true); // clone as to not modify the live DOM + if (exclude) { + // remove excluded nodes + clone.querySelectorAll(exclude).forEach(node => node.remove()); + } + return clone.innerText; +} + +// Callback when a copy button is clicked. 
Will be passed the node that was clicked +// should then grab the text and replace pieces of text that shouldn't be used in output +export function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") { + var regexp; + var match; + + // Do we check for line continuation characters and "HERE-documents"? + var useLineCont = !!lineContinuationChar + var useHereDoc = !!hereDocDelim + + // create regexp to capture prompt and remaining line + if (isRegexp) { + regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)') + } else { + regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)') + } + + const outputLines = []; + var promptFound = false; + var gotLineCont = false; + var gotHereDoc = false; + const lineGotPrompt = []; + for (const line of textContent.split('\n')) { + match = line.match(regexp) + if (match || gotLineCont || gotHereDoc) { + promptFound = regexp.test(line) + lineGotPrompt.push(promptFound) + if (removePrompts && promptFound) { + outputLines.push(match[2]) + } else { + outputLines.push(line) + } + gotLineCont = line.endsWith(lineContinuationChar) & useLineCont + if (line.includes(hereDocDelim) & useHereDoc) + gotHereDoc = !gotHereDoc + } else if (!onlyCopyPromptLines) { + outputLines.push(line) + } else if (copyEmptyLines && line.trim() === '') { + outputLines.push(line) + } + } + + // If no lines with the prompt were found then just use original lines + if (lineGotPrompt.some(v => v === true)) { + textContent = outputLines.join('\n'); + } + + // Remove a trailing newline to avoid auto-running when pasting + if (textContent.endsWith("\n")) { + textContent = textContent.slice(0, -1) + } + return textContent +} diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/doctools.js b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/doctools.js new file mode 100644 index 00000000000..527b876ca63 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/doctools.js @@ -0,0 +1,156 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Base JavaScript utilities for all Sphinx HTML documentation. + * + * :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", +]); + +const _ready = (callback) => { + if (document.readyState !== "loading") { + callback(); + } else { + document.addEventListener("DOMContentLoaded", callback); + } +}; + +/** + * Small JavaScript module for the documentation. + */ +const Documentation = { + init: () => { + Documentation.initDomainIndexTable(); + Documentation.initOnKeyListeners(); + }, + + /** + * i18n support + */ + TRANSLATIONS: {}, + PLURAL_EXPR: (n) => (n === 1 ? 
0 : 1), + LOCALE: "unknown", + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext: (string) => { + const translated = Documentation.TRANSLATIONS[string]; + switch (typeof translated) { + case "undefined": + return string; // no translation + case "string": + return translated; // translation exists + default: + return translated[0]; // (singular, plural) translation tuple exists + } + }, + + ngettext: (singular, plural, n) => { + const translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated !== "undefined") + return translated[Documentation.PLURAL_EXPR(n)]; + return n === 1 ? singular : plural; + }, + + addTranslations: (catalog) => { + Object.assign(Documentation.TRANSLATIONS, catalog.messages); + Documentation.PLURAL_EXPR = new Function( + "n", + `return (${catalog.plural_expr})` + ); + Documentation.LOCALE = catalog.locale; + }, + + /** + * helper function to focus on search bar + */ + focusSearchBar: () => { + document.querySelectorAll("input[name=q]")[0]?.focus(); + }, + + /** + * Initialise the domain index toggle buttons + */ + initDomainIndexTable: () => { + const toggler = (el) => { + const idNumber = el.id.substr(7); + const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); + if (el.src.substr(-9) === "minus.png") { + el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; + toggledRows.forEach((el) => (el.style.display = "none")); + } else { + el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; + toggledRows.forEach((el) => (el.style.display = "")); + } + }; + + const togglerElements = document.querySelectorAll("img.toggler"); + togglerElements.forEach((el) => + el.addEventListener("click", (event) => toggler(event.currentTarget)) + ); + togglerElements.forEach((el) => (el.style.display = "")); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); + }, + + initOnKeyListeners: () => { + // only install a listener if it is really needed + if ( + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + ) + return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.altKey || event.ctrlKey || event.metaKey) return; + + if (!event.shiftKey) { + switch (event.key) { + case "ArrowLeft": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const prevLink = document.querySelector('link[rel="prev"]'); + if (prevLink && prevLink.href) { + window.location.href = prevLink.href; + event.preventDefault(); + } + break; + case "ArrowRight": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const nextLink = document.querySelector('link[rel="next"]'); + if (nextLink && nextLink.href) { + window.location.href = nextLink.href; + event.preventDefault(); + } + break; + } + } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case "/": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.focusSearchBar(); + event.preventDefault(); + } + }); + }, +}; + +// quick alias for translations +const _ = Documentation.gettext; + +_ready(Documentation.init); diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/documentation_options.js b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/documentation_options.js new file mode 100644 index 
00000000000..65ce0e65566 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/documentation_options.js @@ -0,0 +1,14 @@ +var DOCUMENTATION_OPTIONS = { + URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'), + VERSION: '5.0.0', + LANGUAGE: 'en', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt', + NAVIGATION_WITH_KEYS: false, + SHOW_SEARCH_SUMMARY: true, + ENABLE_SEARCH_SHORTCUTS: true, +}; \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/file.png b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/file.png new file mode 100644 index 0000000000000000000000000000000000000000..a858a410e4faa62ce324d814e4b816fff83a6fb3 GIT binary patch literal 286 zcmV+(0pb3MP)s`hMrGg#P~ix$^RISR_I47Y|r1 z_CyJOe}D1){SET-^Amu_i71Lt6eYfZjRyw@I6OQAIXXHDfiX^GbOlHe=Ae4>0m)d(f|Me07*qoM6N<$f}vM^LjV8( literal 0 HcmV?d00001 diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/graphviz.css b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/graphviz.css new file mode 100644 index 00000000000..19e7afd385b --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/graphviz.css @@ -0,0 +1,19 @@ +/* + * graphviz.css + * ~~~~~~~~~~~~ + * + * Sphinx stylesheet -- graphviz extension. + * + * :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +img.graphviz { + border: 0; + max-width: 100%; +} + +object.graphviz { + max-width: 100%; +} diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/jquery-3.6.0.js b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/jquery-3.6.0.js new file mode 100644 index 00000000000..fc6c299b73e --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/_static/jquery-3.6.0.js @@ -0,0 +1,10881 @@ +/*! + * jQuery JavaScript Library v3.6.0 + * https://jquery.com/ + * + * Includes Sizzle.js + * https://sizzlejs.com/ + * + * Copyright OpenJS Foundation and other contributors + * Released under the MIT license + * https://jquery.org/license + * + * Date: 2021-03-02T17:08Z + */ +( function( global, factory ) { + + "use strict"; + + if ( typeof module === "object" && typeof module.exports === "object" ) { + + // For CommonJS and CommonJS-like environments where a proper `window` + // is present, execute the factory and get jQuery. + // For environments that do not have a `window` with a `document` + // (such as Node.js), expose a factory as module.exports. + // This accentuates the need for the creation of a real `window`. + // e.g. var jQuery = require("jquery")(window); + // See ticket #14549 for more info. + module.exports = global.document ? + factory( global, true ) : + function( w ) { + if ( !w.document ) { + throw new Error( "jQuery requires a window with a document" ); + } + return factory( w ); + }; + } else { + factory( global ); + } + +// Pass this if window is not defined yet +} )( typeof window !== "undefined" ? window : this, function( window, noGlobal ) { + +// Edge <= 12 - 13+, Firefox <=18 - 45+, IE 10 - 11, Safari 5.1 - 9+, iOS 6 - 9.1 +// throw exceptions when non-strict code (e.g., ASP.NET 4.5) accesses strict mode +// arguments.callee.caller (trac-13335). But as of jQuery 3.0 (2016), strict mode should be common +// enough that all such attempts are guarded in a try block. 
+"use strict"; + +var arr = []; + +var getProto = Object.getPrototypeOf; + +var slice = arr.slice; + +var flat = arr.flat ? function( array ) { + return arr.flat.call( array ); +} : function( array ) { + return arr.concat.apply( [], array ); +}; + + +var push = arr.push; + +var indexOf = arr.indexOf; + +var class2type = {}; + +var toString = class2type.toString; + +var hasOwn = class2type.hasOwnProperty; + +var fnToString = hasOwn.toString; + +var ObjectFunctionString = fnToString.call( Object ); + +var support = {}; + +var isFunction = function isFunction( obj ) { + + // Support: Chrome <=57, Firefox <=52 + // In some browsers, typeof returns "function" for HTML elements + // (i.e., `typeof document.createElement( "object" ) === "function"`). + // We don't want to classify *any* DOM node as a function. + // Support: QtWeb <=3.8.5, WebKit <=534.34, wkhtmltopdf tool <=0.12.5 + // Plus for old WebKit, typeof returns "function" for HTML collections + // (e.g., `typeof document.getElementsByTagName("div") === "function"`). (gh-4756) + return typeof obj === "function" && typeof obj.nodeType !== "number" && + typeof obj.item !== "function"; + }; + + +var isWindow = function isWindow( obj ) { + return obj != null && obj === obj.window; + }; + + +var document = window.document; + + + + var preservedScriptAttributes = { + type: true, + src: true, + nonce: true, + noModule: true + }; + + function DOMEval( code, node, doc ) { + doc = doc || document; + + var i, val, + script = doc.createElement( "script" ); + + script.text = code; + if ( node ) { + for ( i in preservedScriptAttributes ) { + + // Support: Firefox 64+, Edge 18+ + // Some browsers don't support the "nonce" property on scripts. + // On the other hand, just using `getAttribute` is not enough as + // the `nonce` attribute is reset to an empty string whenever it + // becomes browsing-context connected. + // See https://github.com/whatwg/html/issues/2369 + // See https://html.spec.whatwg.org/#nonce-attributes + // The `node.getAttribute` check was added for the sake of + // `jQuery.globalEval` so that it can fake a nonce-containing node + // via an object. + val = node[ i ] || node.getAttribute && node.getAttribute( i ); + if ( val ) { + script.setAttribute( i, val ); + } + } + } + doc.head.appendChild( script ).parentNode.removeChild( script ); + } + + +function toType( obj ) { + if ( obj == null ) { + return obj + ""; + } + + // Support: Android <=2.3 only (functionish RegExp) + return typeof obj === "object" || typeof obj === "function" ? 
+ class2type[ toString.call( obj ) ] || "object" : + typeof obj; +} +/* global Symbol */ +// Defining this global in .eslintrc.json would create a danger of using the global +// unguarded in another place, it seems safer to define global only for this module + + + +var + version = "3.6.0", + + // Define a local copy of jQuery + jQuery = function( selector, context ) { + + // The jQuery object is actually just the init constructor 'enhanced' + // Need init if jQuery is called (just allow error to be thrown if not included) + return new jQuery.fn.init( selector, context ); + }; + +jQuery.fn = jQuery.prototype = { + + // The current version of jQuery being used + jquery: version, + + constructor: jQuery, + + // The default length of a jQuery object is 0 + length: 0, + + toArray: function() { + return slice.call( this ); + }, + + // Get the Nth element in the matched element set OR + // Get the whole matched element set as a clean array + get: function( num ) { + + // Return all the elements in a clean array + if ( num == null ) { + return slice.call( this ); + } + + // Return just the one element from the set + return num < 0 ? this[ num + this.length ] : this[ num ]; + }, + + // Take an array of elements and push it onto the stack + // (returning the new matched element set) + pushStack: function( elems ) { + + // Build a new jQuery matched element set + var ret = jQuery.merge( this.constructor(), elems ); + + // Add the old object onto the stack (as a reference) + ret.prevObject = this; + + // Return the newly-formed element set + return ret; + }, + + // Execute a callback for every element in the matched set. + each: function( callback ) { + return jQuery.each( this, callback ); + }, + + map: function( callback ) { + return this.pushStack( jQuery.map( this, function( elem, i ) { + return callback.call( elem, i, elem ); + } ) ); + }, + + slice: function() { + return this.pushStack( slice.apply( this, arguments ) ); + }, + + first: function() { + return this.eq( 0 ); + }, + + last: function() { + return this.eq( -1 ); + }, + + even: function() { + return this.pushStack( jQuery.grep( this, function( _elem, i ) { + return ( i + 1 ) % 2; + } ) ); + }, + + odd: function() { + return this.pushStack( jQuery.grep( this, function( _elem, i ) { + return i % 2; + } ) ); + }, + + eq: function( i ) { + var len = this.length, + j = +i + ( i < 0 ? len : 0 ); + return this.pushStack( j >= 0 && j < len ? [ this[ j ] ] : [] ); + }, + + end: function() { + return this.prevObject || this.constructor(); + }, + + // For internal use only. + // Behaves like an Array's method, not like a jQuery method. 
+ push: push, + sort: arr.sort, + splice: arr.splice +}; + +jQuery.extend = jQuery.fn.extend = function() { + var options, name, src, copy, copyIsArray, clone, + target = arguments[ 0 ] || {}, + i = 1, + length = arguments.length, + deep = false; + + // Handle a deep copy situation + if ( typeof target === "boolean" ) { + deep = target; + + // Skip the boolean and the target + target = arguments[ i ] || {}; + i++; + } + + // Handle case when target is a string or something (possible in deep copy) + if ( typeof target !== "object" && !isFunction( target ) ) { + target = {}; + } + + // Extend jQuery itself if only one argument is passed + if ( i === length ) { + target = this; + i--; + } + + for ( ; i < length; i++ ) { + + // Only deal with non-null/undefined values + if ( ( options = arguments[ i ] ) != null ) { + + // Extend the base object + for ( name in options ) { + copy = options[ name ]; + + // Prevent Object.prototype pollution + // Prevent never-ending loop + if ( name === "__proto__" || target === copy ) { + continue; + } + + // Recurse if we're merging plain objects or arrays + if ( deep && copy && ( jQuery.isPlainObject( copy ) || + ( copyIsArray = Array.isArray( copy ) ) ) ) { + src = target[ name ]; + + // Ensure proper type for the source value + if ( copyIsArray && !Array.isArray( src ) ) { + clone = []; + } else if ( !copyIsArray && !jQuery.isPlainObject( src ) ) { + clone = {}; + } else { + clone = src; + } + copyIsArray = false; + + // Never move original objects, clone them + target[ name ] = jQuery.extend( deep, clone, copy ); + + // Don't bring in undefined values + } else if ( copy !== undefined ) { + target[ name ] = copy; + } + } + } + } + + // Return the modified object + return target; +}; + +jQuery.extend( { + + // Unique for each copy of jQuery on the page + expando: "jQuery" + ( version + Math.random() ).replace( /\D/g, "" ), + + // Assume jQuery is ready without the ready module + isReady: true, + + error: function( msg ) { + throw new Error( msg ); + }, + + noop: function() {}, + + isPlainObject: function( obj ) { + var proto, Ctor; + + // Detect obvious negatives + // Use toString instead of jQuery.type to catch host objects + if ( !obj || toString.call( obj ) !== "[object Object]" ) { + return false; + } + + proto = getProto( obj ); + + // Objects with no prototype (e.g., `Object.create( null )`) are plain + if ( !proto ) { + return true; + } + + // Objects with prototype are plain iff they were constructed by a global Object function + Ctor = hasOwn.call( proto, "constructor" ) && proto.constructor; + return typeof Ctor === "function" && fnToString.call( Ctor ) === ObjectFunctionString; + }, + + isEmptyObject: function( obj ) { + var name; + + for ( name in obj ) { + return false; + } + return true; + }, + + // Evaluates a script in a provided context; falls back to the global one + // if not specified. 
+ globalEval: function( code, options, doc ) { + DOMEval( code, { nonce: options && options.nonce }, doc ); + }, + + each: function( obj, callback ) { + var length, i = 0; + + if ( isArrayLike( obj ) ) { + length = obj.length; + for ( ; i < length; i++ ) { + if ( callback.call( obj[ i ], i, obj[ i ] ) === false ) { + break; + } + } + } else { + for ( i in obj ) { + if ( callback.call( obj[ i ], i, obj[ i ] ) === false ) { + break; + } + } + } + + return obj; + }, + + // results is for internal usage only + makeArray: function( arr, results ) { + var ret = results || []; + + if ( arr != null ) { + if ( isArrayLike( Object( arr ) ) ) { + jQuery.merge( ret, + typeof arr === "string" ? + [ arr ] : arr + ); + } else { + push.call( ret, arr ); + } + } + + return ret; + }, + + inArray: function( elem, arr, i ) { + return arr == null ? -1 : indexOf.call( arr, elem, i ); + }, + + // Support: Android <=4.0 only, PhantomJS 1 only + // push.apply(_, arraylike) throws on ancient WebKit + merge: function( first, second ) { + var len = +second.length, + j = 0, + i = first.length; + + for ( ; j < len; j++ ) { + first[ i++ ] = second[ j ]; + } + + first.length = i; + + return first; + }, + + grep: function( elems, callback, invert ) { + var callbackInverse, + matches = [], + i = 0, + length = elems.length, + callbackExpect = !invert; + + // Go through the array, only saving the items + // that pass the validator function + for ( ; i < length; i++ ) { + callbackInverse = !callback( elems[ i ], i ); + if ( callbackInverse !== callbackExpect ) { + matches.push( elems[ i ] ); + } + } + + return matches; + }, + + // arg is for internal usage only + map: function( elems, callback, arg ) { + var length, value, + i = 0, + ret = []; + + // Go through the array, translating each of the items to their new values + if ( isArrayLike( elems ) ) { + length = elems.length; + for ( ; i < length; i++ ) { + value = callback( elems[ i ], i, arg ); + + if ( value != null ) { + ret.push( value ); + } + } + + // Go through every key on the object, + } else { + for ( i in elems ) { + value = callback( elems[ i ], i, arg ); + + if ( value != null ) { + ret.push( value ); + } + } + } + + // Flatten any nested arrays + return flat( ret ); + }, + + // A global GUID counter for objects + guid: 1, + + // jQuery.support is not used in Core but other projects attach their + // properties to it so it needs to exist. + support: support +} ); + +if ( typeof Symbol === "function" ) { + jQuery.fn[ Symbol.iterator ] = arr[ Symbol.iterator ]; +} + +// Populate the class2type map +jQuery.each( "Boolean Number String Function Array Date RegExp Object Error Symbol".split( " " ), + function( _i, name ) { + class2type[ "[object " + name + "]" ] = name.toLowerCase(); + } ); + +function isArrayLike( obj ) { + + // Support: real iOS 8.2 only (not reproducible in simulator) + // `in` check used to prevent JIT error (gh-2145) + // hasOwn isn't used here due to false negatives + // regarding Nodelist length in IE + var length = !!obj && "length" in obj && obj.length, + type = toType( obj ); + + if ( isFunction( obj ) || isWindow( obj ) ) { + return false; + } + + return type === "array" || length === 0 || + typeof length === "number" && length > 0 && ( length - 1 ) in obj; +} +var Sizzle = +/*! 
+ * Sizzle CSS Selector Engine v2.3.6 + * https://sizzlejs.com/ + * + * Copyright JS Foundation and other contributors + * Released under the MIT license + * https://js.foundation/ + * + * Date: 2021-02-16 + */ +( function( window ) { +var i, + support, + Expr, + getText, + isXML, + tokenize, + compile, + select, + outermostContext, + sortInput, + hasDuplicate, + + // Local document vars + setDocument, + document, + docElem, + documentIsHTML, + rbuggyQSA, + rbuggyMatches, + matches, + contains, + + // Instance-specific data + expando = "sizzle" + 1 * new Date(), + preferredDoc = window.document, + dirruns = 0, + done = 0, + classCache = createCache(), + tokenCache = createCache(), + compilerCache = createCache(), + nonnativeSelectorCache = createCache(), + sortOrder = function( a, b ) { + if ( a === b ) { + hasDuplicate = true; + } + return 0; + }, + + // Instance methods + hasOwn = ( {} ).hasOwnProperty, + arr = [], + pop = arr.pop, + pushNative = arr.push, + push = arr.push, + slice = arr.slice, + + // Use a stripped-down indexOf as it's faster than native + // https://jsperf.com/thor-indexof-vs-for/5 + indexOf = function( list, elem ) { + var i = 0, + len = list.length; + for ( ; i < len; i++ ) { + if ( list[ i ] === elem ) { + return i; + } + } + return -1; + }, + + booleans = "checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|" + + "ismap|loop|multiple|open|readonly|required|scoped", + + // Regular expressions + + // http://www.w3.org/TR/css3-selectors/#whitespace + whitespace = "[\\x20\\t\\r\\n\\f]", + + // https://www.w3.org/TR/css-syntax-3/#ident-token-diagram + identifier = "(?:\\\\[\\da-fA-F]{1,6}" + whitespace + + "?|\\\\[^\\r\\n\\f]|[\\w-]|[^\0-\\x7f])+", + + // Attribute selectors: http://www.w3.org/TR/selectors/#attribute-selectors + attributes = "\\[" + whitespace + "*(" + identifier + ")(?:" + whitespace + + + // Operator (capture 2) + "*([*^$|!~]?=)" + whitespace + + + // "Attribute values must be CSS identifiers [capture 5] + // or strings [capture 3 or capture 4]" + "*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|(" + identifier + "))|)" + + whitespace + "*\\]", + + pseudos = ":(" + identifier + ")(?:\\((" + + + // To reduce the number of selectors needing tokenize in the preFilter, prefer arguments: + // 1. quoted (capture 3; capture 4 or capture 5) + "('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|" + + + // 2. simple (capture 6) + "((?:\\\\.|[^\\\\()[\\]]|" + attributes + ")*)|" + + + // 3. 
anything else (capture 2) + ".*" + + ")\\)|)", + + // Leading and non-escaped trailing whitespace, capturing some non-whitespace characters preceding the latter + rwhitespace = new RegExp( whitespace + "+", "g" ), + rtrim = new RegExp( "^" + whitespace + "+|((?:^|[^\\\\])(?:\\\\.)*)" + + whitespace + "+$", "g" ), + + rcomma = new RegExp( "^" + whitespace + "*," + whitespace + "*" ), + rcombinators = new RegExp( "^" + whitespace + "*([>+~]|" + whitespace + ")" + whitespace + + "*" ), + rdescend = new RegExp( whitespace + "|>" ), + + rpseudo = new RegExp( pseudos ), + ridentifier = new RegExp( "^" + identifier + "$" ), + + matchExpr = { + "ID": new RegExp( "^#(" + identifier + ")" ), + "CLASS": new RegExp( "^\\.(" + identifier + ")" ), + "TAG": new RegExp( "^(" + identifier + "|[*])" ), + "ATTR": new RegExp( "^" + attributes ), + "PSEUDO": new RegExp( "^" + pseudos ), + "CHILD": new RegExp( "^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\(" + + whitespace + "*(even|odd|(([+-]|)(\\d*)n|)" + whitespace + "*(?:([+-]|)" + + whitespace + "*(\\d+)|))" + whitespace + "*\\)|)", "i" ), + "bool": new RegExp( "^(?:" + booleans + ")$", "i" ), + + // For use in libraries implementing .is() + // We use this for POS matching in `select` + "needsContext": new RegExp( "^" + whitespace + + "*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\(" + whitespace + + "*((?:-\\d)?\\d*)" + whitespace + "*\\)|)(?=[^-]|$)", "i" ) + }, + + rhtml = /HTML$/i, + rinputs = /^(?:input|select|textarea|button)$/i, + rheader = /^h\d$/i, + + rnative = /^[^{]+\{\s*\[native \w/, + + // Easily-parseable/retrievable ID or TAG or CLASS selectors + rquickExpr = /^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/, + + rsibling = /[+~]/, + + // CSS escapes + // http://www.w3.org/TR/CSS21/syndata.html#escaped-characters + runescape = new RegExp( "\\\\[\\da-fA-F]{1,6}" + whitespace + "?|\\\\([^\\r\\n\\f])", "g" ), + funescape = function( escape, nonHex ) { + var high = "0x" + escape.slice( 1 ) - 0x10000; + + return nonHex ? + + // Strip the backslash prefix from a non-hex escape sequence + nonHex : + + // Replace a hexadecimal escape sequence with the encoded Unicode code point + // Support: IE <=11+ + // For values outside the Basic Multilingual Plane (BMP), manually construct a + // surrogate pair + high < 0 ? 
+ String.fromCharCode( high + 0x10000 ) : + String.fromCharCode( high >> 10 | 0xD800, high & 0x3FF | 0xDC00 ); + }, + + // CSS string/identifier serialization + // https://drafts.csswg.org/cssom/#common-serializing-idioms + rcssescape = /([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g, + fcssescape = function( ch, asCodePoint ) { + if ( asCodePoint ) { + + // U+0000 NULL becomes U+FFFD REPLACEMENT CHARACTER + if ( ch === "\0" ) { + return "\uFFFD"; + } + + // Control characters and (dependent upon position) numbers get escaped as code points + return ch.slice( 0, -1 ) + "\\" + + ch.charCodeAt( ch.length - 1 ).toString( 16 ) + " "; + } + + // Other potentially-special ASCII characters get backslash-escaped + return "\\" + ch; + }, + + // Used for iframes + // See setDocument() + // Removing the function wrapper causes a "Permission Denied" + // error in IE + unloadHandler = function() { + setDocument(); + }, + + inDisabledFieldset = addCombinator( + function( elem ) { + return elem.disabled === true && elem.nodeName.toLowerCase() === "fieldset"; + }, + { dir: "parentNode", next: "legend" } + ); + +// Optimize for push.apply( _, NodeList ) +try { + push.apply( + ( arr = slice.call( preferredDoc.childNodes ) ), + preferredDoc.childNodes + ); + + // Support: Android<4.0 + // Detect silently failing push.apply + // eslint-disable-next-line no-unused-expressions + arr[ preferredDoc.childNodes.length ].nodeType; +} catch ( e ) { + push = { apply: arr.length ? + + // Leverage slice if possible + function( target, els ) { + pushNative.apply( target, slice.call( els ) ); + } : + + // Support: IE<9 + // Otherwise append directly + function( target, els ) { + var j = target.length, + i = 0; + + // Can't trust NodeList.length + while ( ( target[ j++ ] = els[ i++ ] ) ) {} + target.length = j - 1; + } + }; +} + +function Sizzle( selector, context, results, seed ) { + var m, i, elem, nid, match, groups, newSelector, + newContext = context && context.ownerDocument, + + // nodeType defaults to 9, since context defaults to document + nodeType = context ? 
context.nodeType : 9; + + results = results || []; + + // Return early from calls with invalid selector or context + if ( typeof selector !== "string" || !selector || + nodeType !== 1 && nodeType !== 9 && nodeType !== 11 ) { + + return results; + } + + // Try to shortcut find operations (as opposed to filters) in HTML documents + if ( !seed ) { + setDocument( context ); + context = context || document; + + if ( documentIsHTML ) { + + // If the selector is sufficiently simple, try using a "get*By*" DOM method + // (excepting DocumentFragment context, where the methods don't exist) + if ( nodeType !== 11 && ( match = rquickExpr.exec( selector ) ) ) { + + // ID selector + if ( ( m = match[ 1 ] ) ) { + + // Document context + if ( nodeType === 9 ) { + if ( ( elem = context.getElementById( m ) ) ) { + + // Support: IE, Opera, Webkit + // TODO: identify versions + // getElementById can match elements by name instead of ID + if ( elem.id === m ) { + results.push( elem ); + return results; + } + } else { + return results; + } + + // Element context + } else { + + // Support: IE, Opera, Webkit + // TODO: identify versions + // getElementById can match elements by name instead of ID + if ( newContext && ( elem = newContext.getElementById( m ) ) && + contains( context, elem ) && + elem.id === m ) { + + results.push( elem ); + return results; + } + } + + // Type selector + } else if ( match[ 2 ] ) { + push.apply( results, context.getElementsByTagName( selector ) ); + return results; + + // Class selector + } else if ( ( m = match[ 3 ] ) && support.getElementsByClassName && + context.getElementsByClassName ) { + + push.apply( results, context.getElementsByClassName( m ) ); + return results; + } + } + + // Take advantage of querySelectorAll + if ( support.qsa && + !nonnativeSelectorCache[ selector + " " ] && + ( !rbuggyQSA || !rbuggyQSA.test( selector ) ) && + + // Support: IE 8 only + // Exclude object elements + ( nodeType !== 1 || context.nodeName.toLowerCase() !== "object" ) ) { + + newSelector = selector; + newContext = context; + + // qSA considers elements outside a scoping root when evaluating child or + // descendant combinators, which is not what we want. + // In such cases, we work around the behavior by prefixing every selector in the + // list with an ID selector referencing the scope context. + // The technique has to be used as well when a leading combinator is used + // as such selectors are not recognized by querySelectorAll. + // Thanks to Andrew Dupont for this technique. + if ( nodeType === 1 && + ( rdescend.test( selector ) || rcombinators.test( selector ) ) ) { + + // Expand context for sibling selectors + newContext = rsibling.test( selector ) && testContext( context.parentNode ) || + context; + + // We can use :scope instead of the ID hack if the browser + // supports it & if we're not changing the context. + if ( newContext !== context || !support.scope ) { + + // Capture the context ID, setting it first if necessary + if ( ( nid = context.getAttribute( "id" ) ) ) { + nid = nid.replace( rcssescape, fcssescape ); + } else { + context.setAttribute( "id", ( nid = expando ) ); + } + } + + // Prefix every selector in the list + groups = tokenize( selector ); + i = groups.length; + while ( i-- ) { + groups[ i ] = ( nid ? 
"#" + nid : ":scope" ) + " " + + toSelector( groups[ i ] ); + } + newSelector = groups.join( "," ); + } + + try { + push.apply( results, + newContext.querySelectorAll( newSelector ) + ); + return results; + } catch ( qsaError ) { + nonnativeSelectorCache( selector, true ); + } finally { + if ( nid === expando ) { + context.removeAttribute( "id" ); + } + } + } + } + } + + // All others + return select( selector.replace( rtrim, "$1" ), context, results, seed ); +} + +/** + * Create key-value caches of limited size + * @returns {function(string, object)} Returns the Object data after storing it on itself with + * property name the (space-suffixed) string and (if the cache is larger than Expr.cacheLength) + * deleting the oldest entry + */ +function createCache() { + var keys = []; + + function cache( key, value ) { + + // Use (key + " ") to avoid collision with native prototype properties (see Issue #157) + if ( keys.push( key + " " ) > Expr.cacheLength ) { + + // Only keep the most recent entries + delete cache[ keys.shift() ]; + } + return ( cache[ key + " " ] = value ); + } + return cache; +} + +/** + * Mark a function for special use by Sizzle + * @param {Function} fn The function to mark + */ +function markFunction( fn ) { + fn[ expando ] = true; + return fn; +} + +/** + * Support testing using an element + * @param {Function} fn Passed the created element and returns a boolean result + */ +function assert( fn ) { + var el = document.createElement( "fieldset" ); + + try { + return !!fn( el ); + } catch ( e ) { + return false; + } finally { + + // Remove from its parent by default + if ( el.parentNode ) { + el.parentNode.removeChild( el ); + } + + // release memory in IE + el = null; + } +} + +/** + * Adds the same handler for all of the specified attrs + * @param {String} attrs Pipe-separated list of attributes + * @param {Function} handler The method that will be applied + */ +function addHandle( attrs, handler ) { + var arr = attrs.split( "|" ), + i = arr.length; + + while ( i-- ) { + Expr.attrHandle[ arr[ i ] ] = handler; + } +} + +/** + * Checks document order of two siblings + * @param {Element} a + * @param {Element} b + * @returns {Number} Returns less than 0 if a precedes b, greater than 0 if a follows b + */ +function siblingCheck( a, b ) { + var cur = b && a, + diff = cur && a.nodeType === 1 && b.nodeType === 1 && + a.sourceIndex - b.sourceIndex; + + // Use IE sourceIndex if available on both nodes + if ( diff ) { + return diff; + } + + // Check if b follows a + if ( cur ) { + while ( ( cur = cur.nextSibling ) ) { + if ( cur === b ) { + return -1; + } + } + } + + return a ? 
1 : -1; +} + +/** + * Returns a function to use in pseudos for input types + * @param {String} type + */ +function createInputPseudo( type ) { + return function( elem ) { + var name = elem.nodeName.toLowerCase(); + return name === "input" && elem.type === type; + }; +} + +/** + * Returns a function to use in pseudos for buttons + * @param {String} type + */ +function createButtonPseudo( type ) { + return function( elem ) { + var name = elem.nodeName.toLowerCase(); + return ( name === "input" || name === "button" ) && elem.type === type; + }; +} + +/** + * Returns a function to use in pseudos for :enabled/:disabled + * @param {Boolean} disabled true for :disabled; false for :enabled + */ +function createDisabledPseudo( disabled ) { + + // Known :disabled false positives: fieldset[disabled] > legend:nth-of-type(n+2) :can-disable + return function( elem ) { + + // Only certain elements can match :enabled or :disabled + // https://html.spec.whatwg.org/multipage/scripting.html#selector-enabled + // https://html.spec.whatwg.org/multipage/scripting.html#selector-disabled + if ( "form" in elem ) { + + // Check for inherited disabledness on relevant non-disabled elements: + // * listed form-associated elements in a disabled fieldset + // https://html.spec.whatwg.org/multipage/forms.html#category-listed + // https://html.spec.whatwg.org/multipage/forms.html#concept-fe-disabled + // * option elements in a disabled optgroup + // https://html.spec.whatwg.org/multipage/forms.html#concept-option-disabled + // All such elements have a "form" property. + if ( elem.parentNode && elem.disabled === false ) { + + // Option elements defer to a parent optgroup if present + if ( "label" in elem ) { + if ( "label" in elem.parentNode ) { + return elem.parentNode.disabled === disabled; + } else { + return elem.disabled === disabled; + } + } + + // Support: IE 6 - 11 + // Use the isDisabled shortcut property to check for disabled fieldset ancestors + return elem.isDisabled === disabled || + + // Where there is no isDisabled, check manually + /* jshint -W018 */ + elem.isDisabled !== !disabled && + inDisabledFieldset( elem ) === disabled; + } + + return elem.disabled === disabled; + + // Try to winnow out elements that can't be disabled before trusting the disabled property. + // Some victims get caught in our net (label, legend, menu, track), but it shouldn't + // even exist on them, let alone have a boolean value. 
+ } else if ( "label" in elem ) { + return elem.disabled === disabled; + } + + // Remaining elements are neither :enabled nor :disabled + return false; + }; +} + +/** + * Returns a function to use in pseudos for positionals + * @param {Function} fn + */ +function createPositionalPseudo( fn ) { + return markFunction( function( argument ) { + argument = +argument; + return markFunction( function( seed, matches ) { + var j, + matchIndexes = fn( [], seed.length, argument ), + i = matchIndexes.length; + + // Match elements found at the specified indexes + while ( i-- ) { + if ( seed[ ( j = matchIndexes[ i ] ) ] ) { + seed[ j ] = !( matches[ j ] = seed[ j ] ); + } + } + } ); + } ); +} + +/** + * Checks a node for validity as a Sizzle context + * @param {Element|Object=} context + * @returns {Element|Object|Boolean} The input node if acceptable, otherwise a falsy value + */ +function testContext( context ) { + return context && typeof context.getElementsByTagName !== "undefined" && context; +} + +// Expose support vars for convenience +support = Sizzle.support = {}; + +/** + * Detects XML nodes + * @param {Element|Object} elem An element or a document + * @returns {Boolean} True iff elem is a non-HTML XML node + */ +isXML = Sizzle.isXML = function( elem ) { + var namespace = elem && elem.namespaceURI, + docElem = elem && ( elem.ownerDocument || elem ).documentElement; + + // Support: IE <=8 + // Assume HTML when documentElement doesn't yet exist, such as inside loading iframes + // https://bugs.jquery.com/ticket/4833 + return !rhtml.test( namespace || docElem && docElem.nodeName || "HTML" ); +}; + +/** + * Sets document-related variables once based on the current document + * @param {Element|Object} [doc] An element or document object to use to set the document + * @returns {Object} Returns the current document + */ +setDocument = Sizzle.setDocument = function( node ) { + var hasCompare, subWindow, + doc = node ? node.ownerDocument || node : preferredDoc; + + // Return early if doc is invalid or already selected + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( doc == document || doc.nodeType !== 9 || !doc.documentElement ) { + return document; + } + + // Update global variables + document = doc; + docElem = document.documentElement; + documentIsHTML = !isXML( document ); + + // Support: IE 9 - 11+, Edge 12 - 18+ + // Accessing iframe documents after unload throws "permission denied" errors (jQuery #13936) + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( preferredDoc != document && + ( subWindow = document.defaultView ) && subWindow.top !== subWindow ) { + + // Support: IE 11, Edge + if ( subWindow.addEventListener ) { + subWindow.addEventListener( "unload", unloadHandler, false ); + + // Support: IE 9 - 10 only + } else if ( subWindow.attachEvent ) { + subWindow.attachEvent( "onunload", unloadHandler ); + } + } + + // Support: IE 8 - 11+, Edge 12 - 18+, Chrome <=16 - 25 only, Firefox <=3.6 - 31 only, + // Safari 4 - 5 only, Opera <=11.6 - 12.x only + // IE/Edge & older browsers don't support the :scope pseudo-class. + // Support: Safari 6.0 only + // Safari 6.0 supports :scope but it's an alias of :root there. 
+ support.scope = assert( function( el ) { + docElem.appendChild( el ).appendChild( document.createElement( "div" ) ); + return typeof el.querySelectorAll !== "undefined" && + !el.querySelectorAll( ":scope fieldset div" ).length; + } ); + + /* Attributes + ---------------------------------------------------------------------- */ + + // Support: IE<8 + // Verify that getAttribute really returns attributes and not properties + // (excepting IE8 booleans) + support.attributes = assert( function( el ) { + el.className = "i"; + return !el.getAttribute( "className" ); + } ); + + /* getElement(s)By* + ---------------------------------------------------------------------- */ + + // Check if getElementsByTagName("*") returns only elements + support.getElementsByTagName = assert( function( el ) { + el.appendChild( document.createComment( "" ) ); + return !el.getElementsByTagName( "*" ).length; + } ); + + // Support: IE<9 + support.getElementsByClassName = rnative.test( document.getElementsByClassName ); + + // Support: IE<10 + // Check if getElementById returns elements by name + // The broken getElementById methods don't pick up programmatically-set names, + // so use a roundabout getElementsByName test + support.getById = assert( function( el ) { + docElem.appendChild( el ).id = expando; + return !document.getElementsByName || !document.getElementsByName( expando ).length; + } ); + + // ID filter and find + if ( support.getById ) { + Expr.filter[ "ID" ] = function( id ) { + var attrId = id.replace( runescape, funescape ); + return function( elem ) { + return elem.getAttribute( "id" ) === attrId; + }; + }; + Expr.find[ "ID" ] = function( id, context ) { + if ( typeof context.getElementById !== "undefined" && documentIsHTML ) { + var elem = context.getElementById( id ); + return elem ? [ elem ] : []; + } + }; + } else { + Expr.filter[ "ID" ] = function( id ) { + var attrId = id.replace( runescape, funescape ); + return function( elem ) { + var node = typeof elem.getAttributeNode !== "undefined" && + elem.getAttributeNode( "id" ); + return node && node.value === attrId; + }; + }; + + // Support: IE 6 - 7 only + // getElementById is not reliable as a find shortcut + Expr.find[ "ID" ] = function( id, context ) { + if ( typeof context.getElementById !== "undefined" && documentIsHTML ) { + var node, i, elems, + elem = context.getElementById( id ); + + if ( elem ) { + + // Verify the id attribute + node = elem.getAttributeNode( "id" ); + if ( node && node.value === id ) { + return [ elem ]; + } + + // Fall back on getElementsByName + elems = context.getElementsByName( id ); + i = 0; + while ( ( elem = elems[ i++ ] ) ) { + node = elem.getAttributeNode( "id" ); + if ( node && node.value === id ) { + return [ elem ]; + } + } + } + + return []; + } + }; + } + + // Tag + Expr.find[ "TAG" ] = support.getElementsByTagName ? 
+ function( tag, context ) {
+ if ( typeof context.getElementsByTagName !== "undefined" ) {
+ return context.getElementsByTagName( tag );
+
+ // DocumentFragment nodes don't have gEBTN
+ } else if ( support.qsa ) {
+ return context.querySelectorAll( tag );
+ }
+ } :
+
+ function( tag, context ) {
+ var elem,
+ tmp = [],
+ i = 0,
+
+ // By happy coincidence, a (broken) gEBTN appears on DocumentFragment nodes too
+ results = context.getElementsByTagName( tag );
+
+ // Filter out possible comments
+ if ( tag === "*" ) {
+ while ( ( elem = results[ i++ ] ) ) {
+ if ( elem.nodeType === 1 ) {
+ tmp.push( elem );
+ }
+ }
+
+ return tmp;
+ }
+ return results;
+ };
+
+ // Class
+ Expr.find[ "CLASS" ] = support.getElementsByClassName && function( className, context ) {
+ if ( typeof context.getElementsByClassName !== "undefined" && documentIsHTML ) {
+ return context.getElementsByClassName( className );
+ }
+ };
+
+ /* QSA/matchesSelector
+ ---------------------------------------------------------------------- */
+
+ // QSA and matchesSelector support
+
+ // matchesSelector(:active) reports false when true (IE9/Opera 11.5)
+ rbuggyMatches = [];
+
+ // qSa(:focus) reports false when true (Chrome 21)
+ // We allow this because of a bug in IE8/9 that throws an error
+ // whenever `document.activeElement` is accessed on an iframe
+ // So, we allow :focus to pass through QSA all the time to avoid the IE error
+ // See https://bugs.jquery.com/ticket/13378
+ rbuggyQSA = [];
+
+ if ( ( support.qsa = rnative.test( document.querySelectorAll ) ) ) {
+
+ // Build QSA regex
+ // Regex strategy adopted from Diego Perini
+ assert( function( el ) {
+
+ var input;
+
+ // Select is set to empty string on purpose
+ // This is to test IE's treatment of not explicitly
+ // setting a boolean content attribute,
+ // since its presence should be enough
+ // https://bugs.jquery.com/ticket/12359
+ docElem.appendChild( el ).innerHTML = "<a id='" + expando + "'></a>" +
+ "<select id='" + expando + "-\r\\' msallowcapture=''>" +
+ "<option selected=''></option></select>";
+
+ // Support: IE8, Opera 11-12.16
+ // Nothing should be selected when empty strings follow ^= or $= or *=
+ // The test attribute must be unknown in Opera but "safe" for WinRT
+ // https://msdn.microsoft.com/en-us/library/ie/hh465388.aspx#attribute_section
+ if ( el.querySelectorAll( "[msallowcapture^='']" ).length ) {
+ rbuggyQSA.push( "[*^$]=" + whitespace + "*(?:''|\"\")" );
+ }
+
+ // Support: IE8
+ // Boolean attributes and "value" are not treated correctly
+ if ( !el.querySelectorAll( "[selected]" ).length ) {
+ rbuggyQSA.push( "\\[" + whitespace + "*(?:value|" + booleans + ")" );
+ }
+
+ // Support: Chrome<29, Android<4.4, Safari<7.0+, iOS<7.0+, PhantomJS<1.9.8+
+ if ( !el.querySelectorAll( "[id~=" + expando + "-]" ).length ) {
+ rbuggyQSA.push( "~=" );
+ }
+
+ // Support: IE 11+, Edge 15 - 18+
+ // IE 11/Edge don't find elements on a `[name='']` query in some cases.
+ // Adding a temporary attribute to the document before the selection works
+ // around the issue.
+ // Interestingly, IE 10 & older don't seem to have the issue.
+ input = document.createElement( "input" );
+ input.setAttribute( "name", "" );
+ el.appendChild( input );
+ if ( !el.querySelectorAll( "[name='']" ).length ) {
+ rbuggyQSA.push( "\\[" + whitespace + "*name" + whitespace + "*=" +
+ whitespace + "*(?:''|\"\")" );
+ }
+
+ // Webkit/Opera - :checked should return selected option elements
+ // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked
+ // IE8 throws error here and will not see later tests
+ if ( !el.querySelectorAll( ":checked" ).length ) {
+ rbuggyQSA.push( ":checked" );
+ }
+
+ // Support: Safari 8+, iOS 8+
+ // https://bugs.webkit.org/show_bug.cgi?id=136851
+ // In-page `selector#id sibling-combinator selector` fails
+ if ( !el.querySelectorAll( "a#" + expando + "+*" ).length ) {
+ rbuggyQSA.push( ".#.+[+~]" );
+ }
+
+ // Support: Firefox <=3.6 - 5 only
+ // Old Firefox doesn't throw on a badly-escaped identifier.
+ el.querySelectorAll( "\\\f" );
+ rbuggyQSA.push( "[\\r\\n\\f]" );
+ } );
+
+ assert( function( el ) {
+ el.innerHTML = "<a href='' disabled='disabled'></a>" +
+ "<select disabled='disabled'><option/></select>";
+
+ // Support: Windows 8 Native Apps
+ // The type and name attributes are restricted during .innerHTML assignment
+ var input = document.createElement( "input" );
+ input.setAttribute( "type", "hidden" );
+ el.appendChild( input ).setAttribute( "name", "D" );
+
+ // Support: IE8
+ // Enforce case-sensitivity of name attribute
+ if ( el.querySelectorAll( "[name=d]" ).length ) {
+ rbuggyQSA.push( "name" + whitespace + "*[*^$|!~]?=" );
+ }
+
+ // FF 3.5 - :enabled/:disabled and hidden elements (hidden elements are still enabled)
+ // IE8 throws error here and will not see later tests
+ if ( el.querySelectorAll( ":enabled" ).length !== 2 ) {
+ rbuggyQSA.push( ":enabled", ":disabled" );
+ }
+
+ // Support: IE9-11+
+ // IE's :disabled selector does not pick up the children of disabled fieldsets
+ docElem.appendChild( el ).disabled = true;
+ if ( el.querySelectorAll( ":disabled" ).length !== 2 ) {
+ rbuggyQSA.push( ":enabled", ":disabled" );
+ }
+
+ // Support: Opera 10 - 11 only
+ // Opera 10-11 does not throw on post-comma invalid pseudos
+ el.querySelectorAll( "*,:x" );
+ rbuggyQSA.push( ",.*:" );
+ } );
+ }
+
+ if ( ( support.matchesSelector = rnative.test( ( matches = docElem.matches ||
+ docElem.webkitMatchesSelector ||
+ docElem.mozMatchesSelector ||
+ docElem.oMatchesSelector ||
+ docElem.msMatchesSelector ) ) ) ) {
+
+ assert( function( el ) {
+
+ // Check to see if it's possible to do matchesSelector
+ // on a disconnected node (IE 9)
+ support.disconnectedMatch = matches.call( el, "*" );
+
+ // This should fail with an exception
+ // Gecko does not error, returns false instead
+ matches.call( el, "[s!='']:x" );
+ rbuggyMatches.push( "!=", pseudos );
+ } );
+ }
+
+ rbuggyQSA = rbuggyQSA.length && new RegExp( rbuggyQSA.join( "|" ) );
+ rbuggyMatches = rbuggyMatches.length && new RegExp( rbuggyMatches.join( "|" ) );
+
+ /* Contains
+ ---------------------------------------------------------------------- */
+ hasCompare = rnative.test( docElem.compareDocumentPosition );
+
+ // Element contains another
+ // Purposefully self-exclusive
+ // As in, an element does not contain itself
+ contains = hasCompare || rnative.test( docElem.contains ) ?
+ function( a, b ) {
+ var adown = a.nodeType === 9 ? a.documentElement : a,
+ bup = b && b.parentNode;
+ return a === bup || !!( bup && bup.nodeType === 1 && (
+ adown.contains ?
+ adown.contains( bup ) : + a.compareDocumentPosition && a.compareDocumentPosition( bup ) & 16 + ) ); + } : + function( a, b ) { + if ( b ) { + while ( ( b = b.parentNode ) ) { + if ( b === a ) { + return true; + } + } + } + return false; + }; + + /* Sorting + ---------------------------------------------------------------------- */ + + // Document order sorting + sortOrder = hasCompare ? + function( a, b ) { + + // Flag for duplicate removal + if ( a === b ) { + hasDuplicate = true; + return 0; + } + + // Sort on method existence if only one input has compareDocumentPosition + var compare = !a.compareDocumentPosition - !b.compareDocumentPosition; + if ( compare ) { + return compare; + } + + // Calculate position if both inputs belong to the same document + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + compare = ( a.ownerDocument || a ) == ( b.ownerDocument || b ) ? + a.compareDocumentPosition( b ) : + + // Otherwise we know they are disconnected + 1; + + // Disconnected nodes + if ( compare & 1 || + ( !support.sortDetached && b.compareDocumentPosition( a ) === compare ) ) { + + // Choose the first element that is related to our preferred document + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( a == document || a.ownerDocument == preferredDoc && + contains( preferredDoc, a ) ) { + return -1; + } + + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( b == document || b.ownerDocument == preferredDoc && + contains( preferredDoc, b ) ) { + return 1; + } + + // Maintain original order + return sortInput ? + ( indexOf( sortInput, a ) - indexOf( sortInput, b ) ) : + 0; + } + + return compare & 4 ? -1 : 1; + } : + function( a, b ) { + + // Exit early if the nodes are identical + if ( a === b ) { + hasDuplicate = true; + return 0; + } + + var cur, + i = 0, + aup = a.parentNode, + bup = b.parentNode, + ap = [ a ], + bp = [ b ]; + + // Parentless nodes are either documents or disconnected + if ( !aup || !bup ) { + + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + /* eslint-disable eqeqeq */ + return a == document ? -1 : + b == document ? 1 : + /* eslint-enable eqeqeq */ + aup ? -1 : + bup ? 1 : + sortInput ? + ( indexOf( sortInput, a ) - indexOf( sortInput, b ) ) : + 0; + + // If the nodes are siblings, we can do a quick check + } else if ( aup === bup ) { + return siblingCheck( a, b ); + } + + // Otherwise we need full lists of their ancestors for comparison + cur = a; + while ( ( cur = cur.parentNode ) ) { + ap.unshift( cur ); + } + cur = b; + while ( ( cur = cur.parentNode ) ) { + bp.unshift( cur ); + } + + // Walk down the tree looking for a discrepancy + while ( ap[ i ] === bp[ i ] ) { + i++; + } + + return i ? + + // Do a sibling check if the nodes have a common ancestor + siblingCheck( ap[ i ], bp[ i ] ) : + + // Otherwise nodes in our document sort first + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. 
+ /* eslint-disable eqeqeq */ + ap[ i ] == preferredDoc ? -1 : + bp[ i ] == preferredDoc ? 1 : + /* eslint-enable eqeqeq */ + 0; + }; + + return document; +}; + +Sizzle.matches = function( expr, elements ) { + return Sizzle( expr, null, null, elements ); +}; + +Sizzle.matchesSelector = function( elem, expr ) { + setDocument( elem ); + + if ( support.matchesSelector && documentIsHTML && + !nonnativeSelectorCache[ expr + " " ] && + ( !rbuggyMatches || !rbuggyMatches.test( expr ) ) && + ( !rbuggyQSA || !rbuggyQSA.test( expr ) ) ) { + + try { + var ret = matches.call( elem, expr ); + + // IE 9's matchesSelector returns false on disconnected nodes + if ( ret || support.disconnectedMatch || + + // As well, disconnected nodes are said to be in a document + // fragment in IE 9 + elem.document && elem.document.nodeType !== 11 ) { + return ret; + } + } catch ( e ) { + nonnativeSelectorCache( expr, true ); + } + } + + return Sizzle( expr, document, null, [ elem ] ).length > 0; +}; + +Sizzle.contains = function( context, elem ) { + + // Set document vars if needed + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( ( context.ownerDocument || context ) != document ) { + setDocument( context ); + } + return contains( context, elem ); +}; + +Sizzle.attr = function( elem, name ) { + + // Set document vars if needed + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( ( elem.ownerDocument || elem ) != document ) { + setDocument( elem ); + } + + var fn = Expr.attrHandle[ name.toLowerCase() ], + + // Don't get fooled by Object.prototype properties (jQuery #13807) + val = fn && hasOwn.call( Expr.attrHandle, name.toLowerCase() ) ? + fn( elem, name, !documentIsHTML ) : + undefined; + + return val !== undefined ? + val : + support.attributes || !documentIsHTML ? + elem.getAttribute( name ) : + ( val = elem.getAttributeNode( name ) ) && val.specified ? 
+ val.value : + null; +}; + +Sizzle.escape = function( sel ) { + return ( sel + "" ).replace( rcssescape, fcssescape ); +}; + +Sizzle.error = function( msg ) { + throw new Error( "Syntax error, unrecognized expression: " + msg ); +}; + +/** + * Document sorting and removing duplicates + * @param {ArrayLike} results + */ +Sizzle.uniqueSort = function( results ) { + var elem, + duplicates = [], + j = 0, + i = 0; + + // Unless we *know* we can detect duplicates, assume their presence + hasDuplicate = !support.detectDuplicates; + sortInput = !support.sortStable && results.slice( 0 ); + results.sort( sortOrder ); + + if ( hasDuplicate ) { + while ( ( elem = results[ i++ ] ) ) { + if ( elem === results[ i ] ) { + j = duplicates.push( i ); + } + } + while ( j-- ) { + results.splice( duplicates[ j ], 1 ); + } + } + + // Clear input after sorting to release objects + // See https://github.com/jquery/sizzle/pull/225 + sortInput = null; + + return results; +}; + +/** + * Utility function for retrieving the text value of an array of DOM nodes + * @param {Array|Element} elem + */ +getText = Sizzle.getText = function( elem ) { + var node, + ret = "", + i = 0, + nodeType = elem.nodeType; + + if ( !nodeType ) { + + // If no nodeType, this is expected to be an array + while ( ( node = elem[ i++ ] ) ) { + + // Do not traverse comment nodes + ret += getText( node ); + } + } else if ( nodeType === 1 || nodeType === 9 || nodeType === 11 ) { + + // Use textContent for elements + // innerText usage removed for consistency of new lines (jQuery #11153) + if ( typeof elem.textContent === "string" ) { + return elem.textContent; + } else { + + // Traverse its children + for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) { + ret += getText( elem ); + } + } + } else if ( nodeType === 3 || nodeType === 4 ) { + return elem.nodeValue; + } + + // Do not include comment or processing instruction nodes + + return ret; +}; + +Expr = Sizzle.selectors = { + + // Can be adjusted by the user + cacheLength: 50, + + createPseudo: markFunction, + + match: matchExpr, + + attrHandle: {}, + + find: {}, + + relative: { + ">": { dir: "parentNode", first: true }, + " ": { dir: "parentNode" }, + "+": { dir: "previousSibling", first: true }, + "~": { dir: "previousSibling" } + }, + + preFilter: { + "ATTR": function( match ) { + match[ 1 ] = match[ 1 ].replace( runescape, funescape ); + + // Move the given value to match[3] whether quoted or unquoted + match[ 3 ] = ( match[ 3 ] || match[ 4 ] || + match[ 5 ] || "" ).replace( runescape, funescape ); + + if ( match[ 2 ] === "~=" ) { + match[ 3 ] = " " + match[ 3 ] + " "; + } + + return match.slice( 0, 4 ); + }, + + "CHILD": function( match ) { + + /* matches from matchExpr["CHILD"] + 1 type (only|nth|...) + 2 what (child|of-type) + 3 argument (even|odd|\d*|\d*n([+-]\d+)?|...) + 4 xn-component of xn+y argument ([+-]?\d*n|) + 5 sign of xn-component + 6 x of xn-component + 7 sign of y-component + 8 y of y-component + */ + match[ 1 ] = match[ 1 ].toLowerCase(); + + if ( match[ 1 ].slice( 0, 3 ) === "nth" ) { + + // nth-* requires argument + if ( !match[ 3 ] ) { + Sizzle.error( match[ 0 ] ); + } + + // numeric x and y parameters for Expr.filter.CHILD + // remember that false/true cast respectively to 0/1 + match[ 4 ] = +( match[ 4 ] ? 
+ match[ 5 ] + ( match[ 6 ] || 1 ) : + 2 * ( match[ 3 ] === "even" || match[ 3 ] === "odd" ) ); + match[ 5 ] = +( ( match[ 7 ] + match[ 8 ] ) || match[ 3 ] === "odd" ); + + // other types prohibit arguments + } else if ( match[ 3 ] ) { + Sizzle.error( match[ 0 ] ); + } + + return match; + }, + + "PSEUDO": function( match ) { + var excess, + unquoted = !match[ 6 ] && match[ 2 ]; + + if ( matchExpr[ "CHILD" ].test( match[ 0 ] ) ) { + return null; + } + + // Accept quoted arguments as-is + if ( match[ 3 ] ) { + match[ 2 ] = match[ 4 ] || match[ 5 ] || ""; + + // Strip excess characters from unquoted arguments + } else if ( unquoted && rpseudo.test( unquoted ) && + + // Get excess from tokenize (recursively) + ( excess = tokenize( unquoted, true ) ) && + + // advance to the next closing parenthesis + ( excess = unquoted.indexOf( ")", unquoted.length - excess ) - unquoted.length ) ) { + + // excess is a negative index + match[ 0 ] = match[ 0 ].slice( 0, excess ); + match[ 2 ] = unquoted.slice( 0, excess ); + } + + // Return only captures needed by the pseudo filter method (type and argument) + return match.slice( 0, 3 ); + } + }, + + filter: { + + "TAG": function( nodeNameSelector ) { + var nodeName = nodeNameSelector.replace( runescape, funescape ).toLowerCase(); + return nodeNameSelector === "*" ? + function() { + return true; + } : + function( elem ) { + return elem.nodeName && elem.nodeName.toLowerCase() === nodeName; + }; + }, + + "CLASS": function( className ) { + var pattern = classCache[ className + " " ]; + + return pattern || + ( pattern = new RegExp( "(^|" + whitespace + + ")" + className + "(" + whitespace + "|$)" ) ) && classCache( + className, function( elem ) { + return pattern.test( + typeof elem.className === "string" && elem.className || + typeof elem.getAttribute !== "undefined" && + elem.getAttribute( "class" ) || + "" + ); + } ); + }, + + "ATTR": function( name, operator, check ) { + return function( elem ) { + var result = Sizzle.attr( elem, name ); + + if ( result == null ) { + return operator === "!="; + } + if ( !operator ) { + return true; + } + + result += ""; + + /* eslint-disable max-len */ + + return operator === "=" ? result === check : + operator === "!=" ? result !== check : + operator === "^=" ? check && result.indexOf( check ) === 0 : + operator === "*=" ? check && result.indexOf( check ) > -1 : + operator === "$=" ? check && result.slice( -check.length ) === check : + operator === "~=" ? ( " " + result.replace( rwhitespace, " " ) + " " ).indexOf( check ) > -1 : + operator === "|=" ? result === check || result.slice( 0, check.length + 1 ) === check + "-" : + false; + /* eslint-enable max-len */ + + }; + }, + + "CHILD": function( type, what, _argument, first, last ) { + var simple = type.slice( 0, 3 ) !== "nth", + forward = type.slice( -4 ) !== "last", + ofType = what === "of-type"; + + return first === 1 && last === 0 ? + + // Shortcut for :nth-*(n) + function( elem ) { + return !!elem.parentNode; + } : + + function( elem, _context, xml ) { + var cache, uniqueCache, outerCache, node, nodeIndex, start, + dir = simple !== forward ? "nextSibling" : "previousSibling", + parent = elem.parentNode, + name = ofType && elem.nodeName.toLowerCase(), + useCache = !xml && !ofType, + diff = false; + + if ( parent ) { + + // :(first|last|only)-(child|of-type) + if ( simple ) { + while ( dir ) { + node = elem; + while ( ( node = node[ dir ] ) ) { + if ( ofType ? 
+ node.nodeName.toLowerCase() === name : + node.nodeType === 1 ) { + + return false; + } + } + + // Reverse direction for :only-* (if we haven't yet done so) + start = dir = type === "only" && !start && "nextSibling"; + } + return true; + } + + start = [ forward ? parent.firstChild : parent.lastChild ]; + + // non-xml :nth-child(...) stores cache data on `parent` + if ( forward && useCache ) { + + // Seek `elem` from a previously-cached index + + // ...in a gzip-friendly way + node = parent; + outerCache = node[ expando ] || ( node[ expando ] = {} ); + + // Support: IE <9 only + // Defend against cloned attroperties (jQuery gh-1709) + uniqueCache = outerCache[ node.uniqueID ] || + ( outerCache[ node.uniqueID ] = {} ); + + cache = uniqueCache[ type ] || []; + nodeIndex = cache[ 0 ] === dirruns && cache[ 1 ]; + diff = nodeIndex && cache[ 2 ]; + node = nodeIndex && parent.childNodes[ nodeIndex ]; + + while ( ( node = ++nodeIndex && node && node[ dir ] || + + // Fallback to seeking `elem` from the start + ( diff = nodeIndex = 0 ) || start.pop() ) ) { + + // When found, cache indexes on `parent` and break + if ( node.nodeType === 1 && ++diff && node === elem ) { + uniqueCache[ type ] = [ dirruns, nodeIndex, diff ]; + break; + } + } + + } else { + + // Use previously-cached element index if available + if ( useCache ) { + + // ...in a gzip-friendly way + node = elem; + outerCache = node[ expando ] || ( node[ expando ] = {} ); + + // Support: IE <9 only + // Defend against cloned attroperties (jQuery gh-1709) + uniqueCache = outerCache[ node.uniqueID ] || + ( outerCache[ node.uniqueID ] = {} ); + + cache = uniqueCache[ type ] || []; + nodeIndex = cache[ 0 ] === dirruns && cache[ 1 ]; + diff = nodeIndex; + } + + // xml :nth-child(...) + // or :nth-last-child(...) or :nth(-last)?-of-type(...) + if ( diff === false ) { + + // Use the same loop as above to seek `elem` from the start + while ( ( node = ++nodeIndex && node && node[ dir ] || + ( diff = nodeIndex = 0 ) || start.pop() ) ) { + + if ( ( ofType ? + node.nodeName.toLowerCase() === name : + node.nodeType === 1 ) && + ++diff ) { + + // Cache the index of each encountered element + if ( useCache ) { + outerCache = node[ expando ] || + ( node[ expando ] = {} ); + + // Support: IE <9 only + // Defend against cloned attroperties (jQuery gh-1709) + uniqueCache = outerCache[ node.uniqueID ] || + ( outerCache[ node.uniqueID ] = {} ); + + uniqueCache[ type ] = [ dirruns, diff ]; + } + + if ( node === elem ) { + break; + } + } + } + } + } + + // Incorporate the offset, then check against cycle size + diff -= last; + return diff === first || ( diff % first === 0 && diff / first >= 0 ); + } + }; + }, + + "PSEUDO": function( pseudo, argument ) { + + // pseudo-class names are case-insensitive + // http://www.w3.org/TR/selectors/#pseudo-classes + // Prioritize by case sensitivity in case custom pseudos are added with uppercase letters + // Remember that setFilters inherits from pseudos + var args, + fn = Expr.pseudos[ pseudo ] || Expr.setFilters[ pseudo.toLowerCase() ] || + Sizzle.error( "unsupported pseudo: " + pseudo ); + + // The user may use createPseudo to indicate that + // arguments are needed to create the filter function + // just as Sizzle does + if ( fn[ expando ] ) { + return fn( argument ); + } + + // But maintain support for old signatures + if ( fn.length > 1 ) { + args = [ pseudo, pseudo, "", argument ]; + return Expr.setFilters.hasOwnProperty( pseudo.toLowerCase() ) ? 
+ markFunction( function( seed, matches ) { + var idx, + matched = fn( seed, argument ), + i = matched.length; + while ( i-- ) { + idx = indexOf( seed, matched[ i ] ); + seed[ idx ] = !( matches[ idx ] = matched[ i ] ); + } + } ) : + function( elem ) { + return fn( elem, 0, args ); + }; + } + + return fn; + } + }, + + pseudos: { + + // Potentially complex pseudos + "not": markFunction( function( selector ) { + + // Trim the selector passed to compile + // to avoid treating leading and trailing + // spaces as combinators + var input = [], + results = [], + matcher = compile( selector.replace( rtrim, "$1" ) ); + + return matcher[ expando ] ? + markFunction( function( seed, matches, _context, xml ) { + var elem, + unmatched = matcher( seed, null, xml, [] ), + i = seed.length; + + // Match elements unmatched by `matcher` + while ( i-- ) { + if ( ( elem = unmatched[ i ] ) ) { + seed[ i ] = !( matches[ i ] = elem ); + } + } + } ) : + function( elem, _context, xml ) { + input[ 0 ] = elem; + matcher( input, null, xml, results ); + + // Don't keep the element (issue #299) + input[ 0 ] = null; + return !results.pop(); + }; + } ), + + "has": markFunction( function( selector ) { + return function( elem ) { + return Sizzle( selector, elem ).length > 0; + }; + } ), + + "contains": markFunction( function( text ) { + text = text.replace( runescape, funescape ); + return function( elem ) { + return ( elem.textContent || getText( elem ) ).indexOf( text ) > -1; + }; + } ), + + // "Whether an element is represented by a :lang() selector + // is based solely on the element's language value + // being equal to the identifier C, + // or beginning with the identifier C immediately followed by "-". + // The matching of C against the element's language value is performed case-insensitively. + // The identifier C does not have to be a valid language name." + // http://www.w3.org/TR/selectors/#lang-pseudo + "lang": markFunction( function( lang ) { + + // lang value must be a valid identifier + if ( !ridentifier.test( lang || "" ) ) { + Sizzle.error( "unsupported lang: " + lang ); + } + lang = lang.replace( runescape, funescape ).toLowerCase(); + return function( elem ) { + var elemLang; + do { + if ( ( elemLang = documentIsHTML ? 
+ elem.lang : + elem.getAttribute( "xml:lang" ) || elem.getAttribute( "lang" ) ) ) { + + elemLang = elemLang.toLowerCase(); + return elemLang === lang || elemLang.indexOf( lang + "-" ) === 0; + } + } while ( ( elem = elem.parentNode ) && elem.nodeType === 1 ); + return false; + }; + } ), + + // Miscellaneous + "target": function( elem ) { + var hash = window.location && window.location.hash; + return hash && hash.slice( 1 ) === elem.id; + }, + + "root": function( elem ) { + return elem === docElem; + }, + + "focus": function( elem ) { + return elem === document.activeElement && + ( !document.hasFocus || document.hasFocus() ) && + !!( elem.type || elem.href || ~elem.tabIndex ); + }, + + // Boolean properties + "enabled": createDisabledPseudo( false ), + "disabled": createDisabledPseudo( true ), + + "checked": function( elem ) { + + // In CSS3, :checked should return both checked and selected elements + // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked + var nodeName = elem.nodeName.toLowerCase(); + return ( nodeName === "input" && !!elem.checked ) || + ( nodeName === "option" && !!elem.selected ); + }, + + "selected": function( elem ) { + + // Accessing this property makes selected-by-default + // options in Safari work properly + if ( elem.parentNode ) { + // eslint-disable-next-line no-unused-expressions + elem.parentNode.selectedIndex; + } + + return elem.selected === true; + }, + + // Contents + "empty": function( elem ) { + + // http://www.w3.org/TR/selectors/#empty-pseudo + // :empty is negated by element (1) or content nodes (text: 3; cdata: 4; entity ref: 5), + // but not by others (comment: 8; processing instruction: 7; etc.) + // nodeType < 6 works because attributes (2) do not appear as children + for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) { + if ( elem.nodeType < 6 ) { + return false; + } + } + return true; + }, + + "parent": function( elem ) { + return !Expr.pseudos[ "empty" ]( elem ); + }, + + // Element/input types + "header": function( elem ) { + return rheader.test( elem.nodeName ); + }, + + "input": function( elem ) { + return rinputs.test( elem.nodeName ); + }, + + "button": function( elem ) { + var name = elem.nodeName.toLowerCase(); + return name === "input" && elem.type === "button" || name === "button"; + }, + + "text": function( elem ) { + var attr; + return elem.nodeName.toLowerCase() === "input" && + elem.type === "text" && + + // Support: IE<8 + // New HTML5 attribute values (e.g., "search") appear with elem.type === "text" + ( ( attr = elem.getAttribute( "type" ) ) == null || + attr.toLowerCase() === "text" ); + }, + + // Position-in-collection + "first": createPositionalPseudo( function() { + return [ 0 ]; + } ), + + "last": createPositionalPseudo( function( _matchIndexes, length ) { + return [ length - 1 ]; + } ), + + "eq": createPositionalPseudo( function( _matchIndexes, length, argument ) { + return [ argument < 0 ? argument + length : argument ]; + } ), + + "even": createPositionalPseudo( function( matchIndexes, length ) { + var i = 0; + for ( ; i < length; i += 2 ) { + matchIndexes.push( i ); + } + return matchIndexes; + } ), + + "odd": createPositionalPseudo( function( matchIndexes, length ) { + var i = 1; + for ( ; i < length; i += 2 ) { + matchIndexes.push( i ); + } + return matchIndexes; + } ), + + "lt": createPositionalPseudo( function( matchIndexes, length, argument ) { + var i = argument < 0 ? + argument + length : + argument > length ? 
+ length : + argument; + for ( ; --i >= 0; ) { + matchIndexes.push( i ); + } + return matchIndexes; + } ), + + "gt": createPositionalPseudo( function( matchIndexes, length, argument ) { + var i = argument < 0 ? argument + length : argument; + for ( ; ++i < length; ) { + matchIndexes.push( i ); + } + return matchIndexes; + } ) + } +}; + +Expr.pseudos[ "nth" ] = Expr.pseudos[ "eq" ]; + +// Add button/input type pseudos +for ( i in { radio: true, checkbox: true, file: true, password: true, image: true } ) { + Expr.pseudos[ i ] = createInputPseudo( i ); +} +for ( i in { submit: true, reset: true } ) { + Expr.pseudos[ i ] = createButtonPseudo( i ); +} + +// Easy API for creating new setFilters +function setFilters() {} +setFilters.prototype = Expr.filters = Expr.pseudos; +Expr.setFilters = new setFilters(); + +tokenize = Sizzle.tokenize = function( selector, parseOnly ) { + var matched, match, tokens, type, + soFar, groups, preFilters, + cached = tokenCache[ selector + " " ]; + + if ( cached ) { + return parseOnly ? 0 : cached.slice( 0 ); + } + + soFar = selector; + groups = []; + preFilters = Expr.preFilter; + + while ( soFar ) { + + // Comma and first run + if ( !matched || ( match = rcomma.exec( soFar ) ) ) { + if ( match ) { + + // Don't consume trailing commas as valid + soFar = soFar.slice( match[ 0 ].length ) || soFar; + } + groups.push( ( tokens = [] ) ); + } + + matched = false; + + // Combinators + if ( ( match = rcombinators.exec( soFar ) ) ) { + matched = match.shift(); + tokens.push( { + value: matched, + + // Cast descendant combinators to space + type: match[ 0 ].replace( rtrim, " " ) + } ); + soFar = soFar.slice( matched.length ); + } + + // Filters + for ( type in Expr.filter ) { + if ( ( match = matchExpr[ type ].exec( soFar ) ) && ( !preFilters[ type ] || + ( match = preFilters[ type ]( match ) ) ) ) { + matched = match.shift(); + tokens.push( { + value: matched, + type: type, + matches: match + } ); + soFar = soFar.slice( matched.length ); + } + } + + if ( !matched ) { + break; + } + } + + // Return the length of the invalid excess + // if we're just parsing + // Otherwise, throw an error or return tokens + return parseOnly ? + soFar.length : + soFar ? + Sizzle.error( selector ) : + + // Cache the tokens + tokenCache( selector, groups ).slice( 0 ); +}; + +function toSelector( tokens ) { + var i = 0, + len = tokens.length, + selector = ""; + for ( ; i < len; i++ ) { + selector += tokens[ i ].value; + } + return selector; +} + +function addCombinator( matcher, combinator, base ) { + var dir = combinator.dir, + skip = combinator.next, + key = skip || dir, + checkNonElements = base && key === "parentNode", + doneName = done++; + + return combinator.first ? 
+ + // Check against closest ancestor/preceding element + function( elem, context, xml ) { + while ( ( elem = elem[ dir ] ) ) { + if ( elem.nodeType === 1 || checkNonElements ) { + return matcher( elem, context, xml ); + } + } + return false; + } : + + // Check against all ancestor/preceding elements + function( elem, context, xml ) { + var oldCache, uniqueCache, outerCache, + newCache = [ dirruns, doneName ]; + + // We can't set arbitrary data on XML nodes, so they don't benefit from combinator caching + if ( xml ) { + while ( ( elem = elem[ dir ] ) ) { + if ( elem.nodeType === 1 || checkNonElements ) { + if ( matcher( elem, context, xml ) ) { + return true; + } + } + } + } else { + while ( ( elem = elem[ dir ] ) ) { + if ( elem.nodeType === 1 || checkNonElements ) { + outerCache = elem[ expando ] || ( elem[ expando ] = {} ); + + // Support: IE <9 only + // Defend against cloned attroperties (jQuery gh-1709) + uniqueCache = outerCache[ elem.uniqueID ] || + ( outerCache[ elem.uniqueID ] = {} ); + + if ( skip && skip === elem.nodeName.toLowerCase() ) { + elem = elem[ dir ] || elem; + } else if ( ( oldCache = uniqueCache[ key ] ) && + oldCache[ 0 ] === dirruns && oldCache[ 1 ] === doneName ) { + + // Assign to newCache so results back-propagate to previous elements + return ( newCache[ 2 ] = oldCache[ 2 ] ); + } else { + + // Reuse newcache so results back-propagate to previous elements + uniqueCache[ key ] = newCache; + + // A match means we're done; a fail means we have to keep checking + if ( ( newCache[ 2 ] = matcher( elem, context, xml ) ) ) { + return true; + } + } + } + } + } + return false; + }; +} + +function elementMatcher( matchers ) { + return matchers.length > 1 ? + function( elem, context, xml ) { + var i = matchers.length; + while ( i-- ) { + if ( !matchers[ i ]( elem, context, xml ) ) { + return false; + } + } + return true; + } : + matchers[ 0 ]; +} + +function multipleContexts( selector, contexts, results ) { + var i = 0, + len = contexts.length; + for ( ; i < len; i++ ) { + Sizzle( selector, contexts[ i ], results ); + } + return results; +} + +function condense( unmatched, map, filter, context, xml ) { + var elem, + newUnmatched = [], + i = 0, + len = unmatched.length, + mapped = map != null; + + for ( ; i < len; i++ ) { + if ( ( elem = unmatched[ i ] ) ) { + if ( !filter || filter( elem, context, xml ) ) { + newUnmatched.push( elem ); + if ( mapped ) { + map.push( i ); + } + } + } + } + + return newUnmatched; +} + +function setMatcher( preFilter, selector, matcher, postFilter, postFinder, postSelector ) { + if ( postFilter && !postFilter[ expando ] ) { + postFilter = setMatcher( postFilter ); + } + if ( postFinder && !postFinder[ expando ] ) { + postFinder = setMatcher( postFinder, postSelector ); + } + return markFunction( function( seed, results, context, xml ) { + var temp, i, elem, + preMap = [], + postMap = [], + preexisting = results.length, + + // Get initial elements from seed or context + elems = seed || multipleContexts( + selector || "*", + context.nodeType ? [ context ] : context, + [] + ), + + // Prefilter to get matcher input, preserving a map for seed-results synchronization + matcherIn = preFilter && ( seed || !selector ) ? + condense( elems, preMap, preFilter, context, xml ) : + elems, + + matcherOut = matcher ? + + // If we have a postFinder, or filtered seed, or non-seed postFilter or preexisting results, + postFinder || ( seed ? preFilter : preexisting || postFilter ) ? 
+ + // ...intermediate processing is necessary + [] : + + // ...otherwise use results directly + results : + matcherIn; + + // Find primary matches + if ( matcher ) { + matcher( matcherIn, matcherOut, context, xml ); + } + + // Apply postFilter + if ( postFilter ) { + temp = condense( matcherOut, postMap ); + postFilter( temp, [], context, xml ); + + // Un-match failing elements by moving them back to matcherIn + i = temp.length; + while ( i-- ) { + if ( ( elem = temp[ i ] ) ) { + matcherOut[ postMap[ i ] ] = !( matcherIn[ postMap[ i ] ] = elem ); + } + } + } + + if ( seed ) { + if ( postFinder || preFilter ) { + if ( postFinder ) { + + // Get the final matcherOut by condensing this intermediate into postFinder contexts + temp = []; + i = matcherOut.length; + while ( i-- ) { + if ( ( elem = matcherOut[ i ] ) ) { + + // Restore matcherIn since elem is not yet a final match + temp.push( ( matcherIn[ i ] = elem ) ); + } + } + postFinder( null, ( matcherOut = [] ), temp, xml ); + } + + // Move matched elements from seed to results to keep them synchronized + i = matcherOut.length; + while ( i-- ) { + if ( ( elem = matcherOut[ i ] ) && + ( temp = postFinder ? indexOf( seed, elem ) : preMap[ i ] ) > -1 ) { + + seed[ temp ] = !( results[ temp ] = elem ); + } + } + } + + // Add elements to results, through postFinder if defined + } else { + matcherOut = condense( + matcherOut === results ? + matcherOut.splice( preexisting, matcherOut.length ) : + matcherOut + ); + if ( postFinder ) { + postFinder( null, results, matcherOut, xml ); + } else { + push.apply( results, matcherOut ); + } + } + } ); +} + +function matcherFromTokens( tokens ) { + var checkContext, matcher, j, + len = tokens.length, + leadingRelative = Expr.relative[ tokens[ 0 ].type ], + implicitRelative = leadingRelative || Expr.relative[ " " ], + i = leadingRelative ? 1 : 0, + + // The foundational matcher ensures that elements are reachable from top-level context(s) + matchContext = addCombinator( function( elem ) { + return elem === checkContext; + }, implicitRelative, true ), + matchAnyContext = addCombinator( function( elem ) { + return indexOf( checkContext, elem ) > -1; + }, implicitRelative, true ), + matchers = [ function( elem, context, xml ) { + var ret = ( !leadingRelative && ( xml || context !== outermostContext ) ) || ( + ( checkContext = context ).nodeType ? + matchContext( elem, context, xml ) : + matchAnyContext( elem, context, xml ) ); + + // Avoid hanging onto element (issue #299) + checkContext = null; + return ret; + } ]; + + for ( ; i < len; i++ ) { + if ( ( matcher = Expr.relative[ tokens[ i ].type ] ) ) { + matchers = [ addCombinator( elementMatcher( matchers ), matcher ) ]; + } else { + matcher = Expr.filter[ tokens[ i ].type ].apply( null, tokens[ i ].matches ); + + // Return special upon seeing a positional matcher + if ( matcher[ expando ] ) { + + // Find the next relative operator (if any) for proper handling + j = ++i; + for ( ; j < len; j++ ) { + if ( Expr.relative[ tokens[ j ].type ] ) { + break; + } + } + return setMatcher( + i > 1 && elementMatcher( matchers ), + i > 1 && toSelector( + + // If the preceding token was a descendant combinator, insert an implicit any-element `*` + tokens + .slice( 0, i - 1 ) + .concat( { value: tokens[ i - 2 ].type === " " ? 
"*" : "" } ) + ).replace( rtrim, "$1" ), + matcher, + i < j && matcherFromTokens( tokens.slice( i, j ) ), + j < len && matcherFromTokens( ( tokens = tokens.slice( j ) ) ), + j < len && toSelector( tokens ) + ); + } + matchers.push( matcher ); + } + } + + return elementMatcher( matchers ); +} + +function matcherFromGroupMatchers( elementMatchers, setMatchers ) { + var bySet = setMatchers.length > 0, + byElement = elementMatchers.length > 0, + superMatcher = function( seed, context, xml, results, outermost ) { + var elem, j, matcher, + matchedCount = 0, + i = "0", + unmatched = seed && [], + setMatched = [], + contextBackup = outermostContext, + + // We must always have either seed elements or outermost context + elems = seed || byElement && Expr.find[ "TAG" ]( "*", outermost ), + + // Use integer dirruns iff this is the outermost matcher + dirrunsUnique = ( dirruns += contextBackup == null ? 1 : Math.random() || 0.1 ), + len = elems.length; + + if ( outermost ) { + + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + outermostContext = context == document || context || outermost; + } + + // Add elements passing elementMatchers directly to results + // Support: IE<9, Safari + // Tolerate NodeList properties (IE: "length"; Safari: ) matching elements by id + for ( ; i !== len && ( elem = elems[ i ] ) != null; i++ ) { + if ( byElement && elem ) { + j = 0; + + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( !context && elem.ownerDocument != document ) { + setDocument( elem ); + xml = !documentIsHTML; + } + while ( ( matcher = elementMatchers[ j++ ] ) ) { + if ( matcher( elem, context || document, xml ) ) { + results.push( elem ); + break; + } + } + if ( outermost ) { + dirruns = dirrunsUnique; + } + } + + // Track unmatched elements for set filters + if ( bySet ) { + + // They will have gone through all possible matchers + if ( ( elem = !matcher && elem ) ) { + matchedCount--; + } + + // Lengthen the array for every element, matched or not + if ( seed ) { + unmatched.push( elem ); + } + } + } + + // `i` is now the count of elements visited above, and adding it to `matchedCount` + // makes the latter nonnegative. + matchedCount += i; + + // Apply set filters to unmatched elements + // NOTE: This can be skipped if there are no unmatched elements (i.e., `matchedCount` + // equals `i`), unless we didn't visit _any_ elements in the above loop because we have + // no element matchers and no seed. + // Incrementing an initially-string "0" `i` allows `i` to remain a string only in that + // case, which will result in a "00" `matchedCount` that differs from `i` but is also + // numerically zero. 
+ if ( bySet && i !== matchedCount ) { + j = 0; + while ( ( matcher = setMatchers[ j++ ] ) ) { + matcher( unmatched, setMatched, context, xml ); + } + + if ( seed ) { + + // Reintegrate element matches to eliminate the need for sorting + if ( matchedCount > 0 ) { + while ( i-- ) { + if ( !( unmatched[ i ] || setMatched[ i ] ) ) { + setMatched[ i ] = pop.call( results ); + } + } + } + + // Discard index placeholder values to get only actual matches + setMatched = condense( setMatched ); + } + + // Add matches to results + push.apply( results, setMatched ); + + // Seedless set matches succeeding multiple successful matchers stipulate sorting + if ( outermost && !seed && setMatched.length > 0 && + ( matchedCount + setMatchers.length ) > 1 ) { + + Sizzle.uniqueSort( results ); + } + } + + // Override manipulation of globals by nested matchers + if ( outermost ) { + dirruns = dirrunsUnique; + outermostContext = contextBackup; + } + + return unmatched; + }; + + return bySet ? + markFunction( superMatcher ) : + superMatcher; +} + +compile = Sizzle.compile = function( selector, match /* Internal Use Only */ ) { + var i, + setMatchers = [], + elementMatchers = [], + cached = compilerCache[ selector + " " ]; + + if ( !cached ) { + + // Generate a function of recursive functions that can be used to check each element + if ( !match ) { + match = tokenize( selector ); + } + i = match.length; + while ( i-- ) { + cached = matcherFromTokens( match[ i ] ); + if ( cached[ expando ] ) { + setMatchers.push( cached ); + } else { + elementMatchers.push( cached ); + } + } + + // Cache the compiled function + cached = compilerCache( + selector, + matcherFromGroupMatchers( elementMatchers, setMatchers ) + ); + + // Save selector and tokenization + cached.selector = selector; + } + return cached; +}; + +/** + * A low-level selection function that works with Sizzle's compiled + * selector functions + * @param {String|Function} selector A selector or a pre-compiled + * selector function built with Sizzle.compile + * @param {Element} context + * @param {Array} [results] + * @param {Array} [seed] A set of elements to match against + */ +select = Sizzle.select = function( selector, context, results, seed ) { + var i, tokens, token, type, find, + compiled = typeof selector === "function" && selector, + match = !seed && tokenize( ( selector = compiled.selector || selector ) ); + + results = results || []; + + // Try to minimize operations if there is only one selector in the list and no seed + // (the latter of which guarantees us context) + if ( match.length === 1 ) { + + // Reduce context if the leading compound selector is an ID + tokens = match[ 0 ] = match[ 0 ].slice( 0 ); + if ( tokens.length > 2 && ( token = tokens[ 0 ] ).type === "ID" && + context.nodeType === 9 && documentIsHTML && Expr.relative[ tokens[ 1 ].type ] ) { + + context = ( Expr.find[ "ID" ]( token.matches[ 0 ] + .replace( runescape, funescape ), context ) || [] )[ 0 ]; + if ( !context ) { + return results; + + // Precompiled matchers will still verify ancestry, so step up a level + } else if ( compiled ) { + context = context.parentNode; + } + + selector = selector.slice( tokens.shift().value.length ); + } + + // Fetch a seed set for right-to-left matching + i = matchExpr[ "needsContext" ].test( selector ) ? 
0 : tokens.length;
+ while ( i-- ) {
+ token = tokens[ i ];
+
+ // Abort if we hit a combinator
+ if ( Expr.relative[ ( type = token.type ) ] ) {
+ break;
+ }
+ if ( ( find = Expr.find[ type ] ) ) {
+
+ // Search, expanding context for leading sibling combinators
+ if ( ( seed = find(
+ token.matches[ 0 ].replace( runescape, funescape ),
+ rsibling.test( tokens[ 0 ].type ) && testContext( context.parentNode ) ||
+ context
+ ) ) ) {
+
+ // If seed is empty or no tokens remain, we can return early
+ tokens.splice( i, 1 );
+ selector = seed.length && toSelector( tokens );
+ if ( !selector ) {
+ push.apply( results, seed );
+ return results;
+ }
+
+ break;
+ }
+ }
+ }
+ }
+
+ // Compile and execute a filtering function if one is not provided
+ // Provide `match` to avoid retokenization if we modified the selector above
+ ( compiled || compile( selector, match ) )(
+ seed,
+ context,
+ !documentIsHTML,
+ results,
+ !context || rsibling.test( selector ) && testContext( context.parentNode ) || context
+ );
+ return results;
+};
+
+// One-time assignments
+
+// Sort stability
+support.sortStable = expando.split( "" ).sort( sortOrder ).join( "" ) === expando;
+
+// Support: Chrome 14-35+
+// Always assume duplicates if they aren't passed to the comparison function
+support.detectDuplicates = !!hasDuplicate;
+
+// Initialize against the default document
+setDocument();
+
+// Support: Webkit<537.32 - Safari 6.0.3/Chrome 25 (fixed in Chrome 27)
+// Detached nodes confoundingly follow *each other*
+support.sortDetached = assert( function( el ) {
+
+ // Should return 1, but returns 4 (following)
+ return el.compareDocumentPosition( document.createElement( "fieldset" ) ) & 1;
+} );
+
+// Support: IE<8
+// Prevent attribute/property "interpolation"
+// https://msdn.microsoft.com/en-us/library/ms536429%28VS.85%29.aspx
+if ( !assert( function( el ) {
+ el.innerHTML = "<a href='#'></a>";
+ return el.firstChild.getAttribute( "href" ) === "#";
+} ) ) {
+ addHandle( "type|href|height|width", function( elem, name, isXML ) {
+ if ( !isXML ) {
+ return elem.getAttribute( name, name.toLowerCase() === "type" ? 1 : 2 );
+ }
+ } );
+}
+
+// Support: IE<9
+// Use defaultValue in place of getAttribute("value")
+if ( !support.attributes || !assert( function( el ) {
+ el.innerHTML = "<input/>";
+ el.firstChild.setAttribute( "value", "" );
+ return el.firstChild.getAttribute( "value" ) === "";
+} ) ) {
+ addHandle( "value", function( elem, _name, isXML ) {
+ if ( !isXML && elem.nodeName.toLowerCase() === "input" ) {
+ return elem.defaultValue;
+ }
+ } );
+}
+
+// Support: IE<9
+// Use getAttributeNode to fetch booleans when getAttribute lies
+if ( !assert( function( el ) {
+ return el.getAttribute( "disabled" ) == null;
+} ) ) {
+ addHandle( booleans, function( elem, name, isXML ) {
+ var val;
+ if ( !isXML ) {
+ return elem[ name ] === true ? name.toLowerCase() :
+ ( val = elem.getAttributeNode( name ) ) && val.specified ?
+ val.value : + null; + } + } ); +} + +return Sizzle; + +} )( window ); + + + +jQuery.find = Sizzle; +jQuery.expr = Sizzle.selectors; + +// Deprecated +jQuery.expr[ ":" ] = jQuery.expr.pseudos; +jQuery.uniqueSort = jQuery.unique = Sizzle.uniqueSort; +jQuery.text = Sizzle.getText; +jQuery.isXMLDoc = Sizzle.isXML; +jQuery.contains = Sizzle.contains; +jQuery.escapeSelector = Sizzle.escape; + + + + +var dir = function( elem, dir, until ) { + var matched = [], + truncate = until !== undefined; + + while ( ( elem = elem[ dir ] ) && elem.nodeType !== 9 ) { + if ( elem.nodeType === 1 ) { + if ( truncate && jQuery( elem ).is( until ) ) { + break; + } + matched.push( elem ); + } + } + return matched; +}; + + +var siblings = function( n, elem ) { + var matched = []; + + for ( ; n; n = n.nextSibling ) { + if ( n.nodeType === 1 && n !== elem ) { + matched.push( n ); + } + } + + return matched; +}; + + +var rneedsContext = jQuery.expr.match.needsContext; + + + +function nodeName( elem, name ) { + + return elem.nodeName && elem.nodeName.toLowerCase() === name.toLowerCase(); + +} +var rsingleTag = ( /^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i ); + + + +// Implement the identical functionality for filter and not +function winnow( elements, qualifier, not ) { + if ( isFunction( qualifier ) ) { + return jQuery.grep( elements, function( elem, i ) { + return !!qualifier.call( elem, i, elem ) !== not; + } ); + } + + // Single element + if ( qualifier.nodeType ) { + return jQuery.grep( elements, function( elem ) { + return ( elem === qualifier ) !== not; + } ); + } + + // Arraylike of elements (jQuery, arguments, Array) + if ( typeof qualifier !== "string" ) { + return jQuery.grep( elements, function( elem ) { + return ( indexOf.call( qualifier, elem ) > -1 ) !== not; + } ); + } + + // Filtered directly for both simple and complex selectors + return jQuery.filter( qualifier, elements, not ); +} + +jQuery.filter = function( expr, elems, not ) { + var elem = elems[ 0 ]; + + if ( not ) { + expr = ":not(" + expr + ")"; + } + + if ( elems.length === 1 && elem.nodeType === 1 ) { + return jQuery.find.matchesSelector( elem, expr ) ? [ elem ] : []; + } + + return jQuery.find.matches( expr, jQuery.grep( elems, function( elem ) { + return elem.nodeType === 1; + } ) ); +}; + +jQuery.fn.extend( { + find: function( selector ) { + var i, ret, + len = this.length, + self = this; + + if ( typeof selector !== "string" ) { + return this.pushStack( jQuery( selector ).filter( function() { + for ( i = 0; i < len; i++ ) { + if ( jQuery.contains( self[ i ], this ) ) { + return true; + } + } + } ) ); + } + + ret = this.pushStack( [] ); + + for ( i = 0; i < len; i++ ) { + jQuery.find( selector, self[ i ], ret ); + } + + return len > 1 ? jQuery.uniqueSort( ret ) : ret; + }, + filter: function( selector ) { + return this.pushStack( winnow( this, selector || [], false ) ); + }, + not: function( selector ) { + return this.pushStack( winnow( this, selector || [], true ) ); + }, + is: function( selector ) { + return !!winnow( + this, + + // If this is a positional/relative selector, check membership in the returned set + // so $("p:first").is("p:last") won't return true for a doc with two "p". + typeof selector === "string" && rneedsContext.test( selector ) ? 
+ jQuery( selector ) : + selector || [], + false + ).length; + } +} ); + + +// Initialize a jQuery object + + +// A central reference to the root jQuery(document) +var rootjQuery, + + // A simple way to check for HTML strings + // Prioritize #id over to avoid XSS via location.hash (#9521) + // Strict HTML recognition (#11290: must start with <) + // Shortcut simple #id case for speed + rquickExpr = /^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/, + + init = jQuery.fn.init = function( selector, context, root ) { + var match, elem; + + // HANDLE: $(""), $(null), $(undefined), $(false) + if ( !selector ) { + return this; + } + + // Method init() accepts an alternate rootjQuery + // so migrate can support jQuery.sub (gh-2101) + root = root || rootjQuery; + + // Handle HTML strings + if ( typeof selector === "string" ) { + if ( selector[ 0 ] === "<" && + selector[ selector.length - 1 ] === ">" && + selector.length >= 3 ) { + + // Assume that strings that start and end with <> are HTML and skip the regex check + match = [ null, selector, null ]; + + } else { + match = rquickExpr.exec( selector ); + } + + // Match html or make sure no context is specified for #id + if ( match && ( match[ 1 ] || !context ) ) { + + // HANDLE: $(html) -> $(array) + if ( match[ 1 ] ) { + context = context instanceof jQuery ? context[ 0 ] : context; + + // Option to run scripts is true for back-compat + // Intentionally let the error be thrown if parseHTML is not present + jQuery.merge( this, jQuery.parseHTML( + match[ 1 ], + context && context.nodeType ? context.ownerDocument || context : document, + true + ) ); + + // HANDLE: $(html, props) + if ( rsingleTag.test( match[ 1 ] ) && jQuery.isPlainObject( context ) ) { + for ( match in context ) { + + // Properties of context are called as methods if possible + if ( isFunction( this[ match ] ) ) { + this[ match ]( context[ match ] ); + + // ...and otherwise set as attributes + } else { + this.attr( match, context[ match ] ); + } + } + } + + return this; + + // HANDLE: $(#id) + } else { + elem = document.getElementById( match[ 2 ] ); + + if ( elem ) { + + // Inject the element directly into the jQuery object + this[ 0 ] = elem; + this.length = 1; + } + return this; + } + + // HANDLE: $(expr, $(...)) + } else if ( !context || context.jquery ) { + return ( context || root ).find( selector ); + + // HANDLE: $(expr, context) + // (which is just equivalent to: $(context).find(expr) + } else { + return this.constructor( context ).find( selector ); + } + + // HANDLE: $(DOMElement) + } else if ( selector.nodeType ) { + this[ 0 ] = selector; + this.length = 1; + return this; + + // HANDLE: $(function) + // Shortcut for document ready + } else if ( isFunction( selector ) ) { + return root.ready !== undefined ? 
+ root.ready( selector ) : + + // Execute immediately if ready is not present + selector( jQuery ); + } + + return jQuery.makeArray( selector, this ); + }; + +// Give the init function the jQuery prototype for later instantiation +init.prototype = jQuery.fn; + +// Initialize central reference +rootjQuery = jQuery( document ); + + +var rparentsprev = /^(?:parents|prev(?:Until|All))/, + + // Methods guaranteed to produce a unique set when starting from a unique set + guaranteedUnique = { + children: true, + contents: true, + next: true, + prev: true + }; + +jQuery.fn.extend( { + has: function( target ) { + var targets = jQuery( target, this ), + l = targets.length; + + return this.filter( function() { + var i = 0; + for ( ; i < l; i++ ) { + if ( jQuery.contains( this, targets[ i ] ) ) { + return true; + } + } + } ); + }, + + closest: function( selectors, context ) { + var cur, + i = 0, + l = this.length, + matched = [], + targets = typeof selectors !== "string" && jQuery( selectors ); + + // Positional selectors never match, since there's no _selection_ context + if ( !rneedsContext.test( selectors ) ) { + for ( ; i < l; i++ ) { + for ( cur = this[ i ]; cur && cur !== context; cur = cur.parentNode ) { + + // Always skip document fragments + if ( cur.nodeType < 11 && ( targets ? + targets.index( cur ) > -1 : + + // Don't pass non-elements to Sizzle + cur.nodeType === 1 && + jQuery.find.matchesSelector( cur, selectors ) ) ) { + + matched.push( cur ); + break; + } + } + } + } + + return this.pushStack( matched.length > 1 ? jQuery.uniqueSort( matched ) : matched ); + }, + + // Determine the position of an element within the set + index: function( elem ) { + + // No argument, return index in parent + if ( !elem ) { + return ( this[ 0 ] && this[ 0 ].parentNode ) ? this.first().prevAll().length : -1; + } + + // Index in selector + if ( typeof elem === "string" ) { + return indexOf.call( jQuery( elem ), this[ 0 ] ); + } + + // Locate the position of the desired element + return indexOf.call( this, + + // If it receives a jQuery object, the first element is used + elem.jquery ? elem[ 0 ] : elem + ); + }, + + add: function( selector, context ) { + return this.pushStack( + jQuery.uniqueSort( + jQuery.merge( this.get(), jQuery( selector, context ) ) + ) + ); + }, + + addBack: function( selector ) { + return this.add( selector == null ? + this.prevObject : this.prevObject.filter( selector ) + ); + } +} ); + +function sibling( cur, dir ) { + while ( ( cur = cur[ dir ] ) && cur.nodeType !== 1 ) {} + return cur; +} + +jQuery.each( { + parent: function( elem ) { + var parent = elem.parentNode; + return parent && parent.nodeType !== 11 ? 
parent : null; + }, + parents: function( elem ) { + return dir( elem, "parentNode" ); + }, + parentsUntil: function( elem, _i, until ) { + return dir( elem, "parentNode", until ); + }, + next: function( elem ) { + return sibling( elem, "nextSibling" ); + }, + prev: function( elem ) { + return sibling( elem, "previousSibling" ); + }, + nextAll: function( elem ) { + return dir( elem, "nextSibling" ); + }, + prevAll: function( elem ) { + return dir( elem, "previousSibling" ); + }, + nextUntil: function( elem, _i, until ) { + return dir( elem, "nextSibling", until ); + }, + prevUntil: function( elem, _i, until ) { + return dir( elem, "previousSibling", until ); + }, + siblings: function( elem ) { + return siblings( ( elem.parentNode || {} ).firstChild, elem ); + }, + children: function( elem ) { + return siblings( elem.firstChild ); + }, + contents: function( elem ) { + if ( elem.contentDocument != null && + + // Support: IE 11+ + // elements with no `data` attribute has an object + // `contentDocument` with a `null` prototype. + getProto( elem.contentDocument ) ) { + + return elem.contentDocument; + } + + // Support: IE 9 - 11 only, iOS 7 only, Android Browser <=4.3 only + // Treat the template element as a regular one in browsers that + // don't support it. + if ( nodeName( elem, "template" ) ) { + elem = elem.content || elem; + } + + return jQuery.merge( [], elem.childNodes ); + } +}, function( name, fn ) { + jQuery.fn[ name ] = function( until, selector ) { + var matched = jQuery.map( this, fn, until ); + + if ( name.slice( -5 ) !== "Until" ) { + selector = until; + } + + if ( selector && typeof selector === "string" ) { + matched = jQuery.filter( selector, matched ); + } + + if ( this.length > 1 ) { + + // Remove duplicates + if ( !guaranteedUnique[ name ] ) { + jQuery.uniqueSort( matched ); + } + + // Reverse order for parents* and prev-derivatives + if ( rparentsprev.test( name ) ) { + matched.reverse(); + } + } + + return this.pushStack( matched ); + }; +} ); +var rnothtmlwhite = ( /[^\x20\t\r\n\f]+/g ); + + + +// Convert String-formatted options into Object-formatted ones +function createOptions( options ) { + var object = {}; + jQuery.each( options.match( rnothtmlwhite ) || [], function( _, flag ) { + object[ flag ] = true; + } ); + return object; +} + +/* + * Create a callback list using the following parameters: + * + * options: an optional list of space-separated options that will change how + * the callback list behaves or a more traditional option object + * + * By default a callback list will act like an event callback list and can be + * "fired" multiple times. + * + * Possible options: + * + * once: will ensure the callback list can only be fired once (like a Deferred) + * + * memory: will keep track of previous values and will call any callback added + * after the list has been fired right away with the latest "memorized" + * values (like a Deferred) + * + * unique: will ensure a callback can only be added once (no duplicate in the list) + * + * stopOnFalse: interrupt callings when a callback returns false + * + */ +jQuery.Callbacks = function( options ) { + + // Convert options from String-formatted to Object-formatted if needed + // (we check in cache first) + options = typeof options === "string" ? 
+ createOptions( options ) : + jQuery.extend( {}, options ); + + var // Flag to know if list is currently firing + firing, + + // Last fire value for non-forgettable lists + memory, + + // Flag to know if list was already fired + fired, + + // Flag to prevent firing + locked, + + // Actual callback list + list = [], + + // Queue of execution data for repeatable lists + queue = [], + + // Index of currently firing callback (modified by add/remove as needed) + firingIndex = -1, + + // Fire callbacks + fire = function() { + + // Enforce single-firing + locked = locked || options.once; + + // Execute callbacks for all pending executions, + // respecting firingIndex overrides and runtime changes + fired = firing = true; + for ( ; queue.length; firingIndex = -1 ) { + memory = queue.shift(); + while ( ++firingIndex < list.length ) { + + // Run callback and check for early termination + if ( list[ firingIndex ].apply( memory[ 0 ], memory[ 1 ] ) === false && + options.stopOnFalse ) { + + // Jump to end and forget the data so .add doesn't re-fire + firingIndex = list.length; + memory = false; + } + } + } + + // Forget the data if we're done with it + if ( !options.memory ) { + memory = false; + } + + firing = false; + + // Clean up if we're done firing for good + if ( locked ) { + + // Keep an empty list if we have data for future add calls + if ( memory ) { + list = []; + + // Otherwise, this object is spent + } else { + list = ""; + } + } + }, + + // Actual Callbacks object + self = { + + // Add a callback or a collection of callbacks to the list + add: function() { + if ( list ) { + + // If we have memory from a past run, we should fire after adding + if ( memory && !firing ) { + firingIndex = list.length - 1; + queue.push( memory ); + } + + ( function add( args ) { + jQuery.each( args, function( _, arg ) { + if ( isFunction( arg ) ) { + if ( !options.unique || !self.has( arg ) ) { + list.push( arg ); + } + } else if ( arg && arg.length && toType( arg ) !== "string" ) { + + // Inspect recursively + add( arg ); + } + } ); + } )( arguments ); + + if ( memory && !firing ) { + fire(); + } + } + return this; + }, + + // Remove a callback from the list + remove: function() { + jQuery.each( arguments, function( _, arg ) { + var index; + while ( ( index = jQuery.inArray( arg, list, index ) ) > -1 ) { + list.splice( index, 1 ); + + // Handle firing indexes + if ( index <= firingIndex ) { + firingIndex--; + } + } + } ); + return this; + }, + + // Check if a given callback is in the list. + // If no argument is given, return whether or not list has callbacks attached. + has: function( fn ) { + return fn ? + jQuery.inArray( fn, list ) > -1 : + list.length > 0; + }, + + // Remove all callbacks from the list + empty: function() { + if ( list ) { + list = []; + } + return this; + }, + + // Disable .fire and .add + // Abort any current/pending executions + // Clear all callbacks and values + disable: function() { + locked = queue = []; + list = memory = ""; + return this; + }, + disabled: function() { + return !list; + }, + + // Disable .fire + // Also disable .add unless we have memory (since it would have no effect) + // Abort any pending executions + lock: function() { + locked = queue = []; + if ( !memory && !firing ) { + list = memory = ""; + } + return this; + }, + locked: function() { + return !!locked; + }, + + // Call all callbacks with the given context and arguments + fireWith: function( context, args ) { + if ( !locked ) { + args = args || []; + args = [ context, args.slice ? 
args.slice() : args ]; + queue.push( args ); + if ( !firing ) { + fire(); + } + } + return this; + }, + + // Call all the callbacks with the given arguments + fire: function() { + self.fireWith( this, arguments ); + return this; + }, + + // To know if the callbacks have already been called at least once + fired: function() { + return !!fired; + } + }; + + return self; +}; + + +function Identity( v ) { + return v; +} +function Thrower( ex ) { + throw ex; +} + +function adoptValue( value, resolve, reject, noValue ) { + var method; + + try { + + // Check for promise aspect first to privilege synchronous behavior + if ( value && isFunction( ( method = value.promise ) ) ) { + method.call( value ).done( resolve ).fail( reject ); + + // Other thenables + } else if ( value && isFunction( ( method = value.then ) ) ) { + method.call( value, resolve, reject ); + + // Other non-thenables + } else { + + // Control `resolve` arguments by letting Array#slice cast boolean `noValue` to integer: + // * false: [ value ].slice( 0 ) => resolve( value ) + // * true: [ value ].slice( 1 ) => resolve() + resolve.apply( undefined, [ value ].slice( noValue ) ); + } + + // For Promises/A+, convert exceptions into rejections + // Since jQuery.when doesn't unwrap thenables, we can skip the extra checks appearing in + // Deferred#then to conditionally suppress rejection. + } catch ( value ) { + + // Support: Android 4.0 only + // Strict mode functions invoked without .call/.apply get global-object context + reject.apply( undefined, [ value ] ); + } +} + +jQuery.extend( { + + Deferred: function( func ) { + var tuples = [ + + // action, add listener, callbacks, + // ... .then handlers, argument index, [final state] + [ "notify", "progress", jQuery.Callbacks( "memory" ), + jQuery.Callbacks( "memory" ), 2 ], + [ "resolve", "done", jQuery.Callbacks( "once memory" ), + jQuery.Callbacks( "once memory" ), 0, "resolved" ], + [ "reject", "fail", jQuery.Callbacks( "once memory" ), + jQuery.Callbacks( "once memory" ), 1, "rejected" ] + ], + state = "pending", + promise = { + state: function() { + return state; + }, + always: function() { + deferred.done( arguments ).fail( arguments ); + return this; + }, + "catch": function( fn ) { + return promise.then( null, fn ); + }, + + // Keep pipe for back-compat + pipe: function( /* fnDone, fnFail, fnProgress */ ) { + var fns = arguments; + + return jQuery.Deferred( function( newDefer ) { + jQuery.each( tuples, function( _i, tuple ) { + + // Map tuples (progress, done, fail) to arguments (done, fail, progress) + var fn = isFunction( fns[ tuple[ 4 ] ] ) && fns[ tuple[ 4 ] ]; + + // deferred.progress(function() { bind to newDefer or newDefer.notify }) + // deferred.done(function() { bind to newDefer or newDefer.resolve }) + // deferred.fail(function() { bind to newDefer or newDefer.reject }) + deferred[ tuple[ 1 ] ]( function() { + var returned = fn && fn.apply( this, arguments ); + if ( returned && isFunction( returned.promise ) ) { + returned.promise() + .progress( newDefer.notify ) + .done( newDefer.resolve ) + .fail( newDefer.reject ); + } else { + newDefer[ tuple[ 0 ] + "With" ]( + this, + fn ? 
[ returned ] : arguments + ); + } + } ); + } ); + fns = null; + } ).promise(); + }, + then: function( onFulfilled, onRejected, onProgress ) { + var maxDepth = 0; + function resolve( depth, deferred, handler, special ) { + return function() { + var that = this, + args = arguments, + mightThrow = function() { + var returned, then; + + // Support: Promises/A+ section 2.3.3.3.3 + // https://promisesaplus.com/#point-59 + // Ignore double-resolution attempts + if ( depth < maxDepth ) { + return; + } + + returned = handler.apply( that, args ); + + // Support: Promises/A+ section 2.3.1 + // https://promisesaplus.com/#point-48 + if ( returned === deferred.promise() ) { + throw new TypeError( "Thenable self-resolution" ); + } + + // Support: Promises/A+ sections 2.3.3.1, 3.5 + // https://promisesaplus.com/#point-54 + // https://promisesaplus.com/#point-75 + // Retrieve `then` only once + then = returned && + + // Support: Promises/A+ section 2.3.4 + // https://promisesaplus.com/#point-64 + // Only check objects and functions for thenability + ( typeof returned === "object" || + typeof returned === "function" ) && + returned.then; + + // Handle a returned thenable + if ( isFunction( then ) ) { + + // Special processors (notify) just wait for resolution + if ( special ) { + then.call( + returned, + resolve( maxDepth, deferred, Identity, special ), + resolve( maxDepth, deferred, Thrower, special ) + ); + + // Normal processors (resolve) also hook into progress + } else { + + // ...and disregard older resolution values + maxDepth++; + + then.call( + returned, + resolve( maxDepth, deferred, Identity, special ), + resolve( maxDepth, deferred, Thrower, special ), + resolve( maxDepth, deferred, Identity, + deferred.notifyWith ) + ); + } + + // Handle all other returned values + } else { + + // Only substitute handlers pass on context + // and multiple values (non-spec behavior) + if ( handler !== Identity ) { + that = undefined; + args = [ returned ]; + } + + // Process the value(s) + // Default process is resolve + ( special || deferred.resolveWith )( that, args ); + } + }, + + // Only normal processors (resolve) catch and reject exceptions + process = special ? + mightThrow : + function() { + try { + mightThrow(); + } catch ( e ) { + + if ( jQuery.Deferred.exceptionHook ) { + jQuery.Deferred.exceptionHook( e, + process.stackTrace ); + } + + // Support: Promises/A+ section 2.3.3.3.4.1 + // https://promisesaplus.com/#point-61 + // Ignore post-resolution exceptions + if ( depth + 1 >= maxDepth ) { + + // Only substitute handlers pass on context + // and multiple values (non-spec behavior) + if ( handler !== Thrower ) { + that = undefined; + args = [ e ]; + } + + deferred.rejectWith( that, args ); + } + } + }; + + // Support: Promises/A+ section 2.3.3.3.1 + // https://promisesaplus.com/#point-57 + // Re-resolve promises immediately to dodge false rejection from + // subsequent errors + if ( depth ) { + process(); + } else { + + // Call an optional hook to record the stack, in case of exception + // since it's otherwise lost when execution goes async + if ( jQuery.Deferred.getStackHook ) { + process.stackTrace = jQuery.Deferred.getStackHook(); + } + window.setTimeout( process ); + } + }; + } + + return jQuery.Deferred( function( newDefer ) { + + // progress_handlers.add( ... ) + tuples[ 0 ][ 3 ].add( + resolve( + 0, + newDefer, + isFunction( onProgress ) ? + onProgress : + Identity, + newDefer.notifyWith + ) + ); + + // fulfilled_handlers.add( ... 
) + tuples[ 1 ][ 3 ].add( + resolve( + 0, + newDefer, + isFunction( onFulfilled ) ? + onFulfilled : + Identity + ) + ); + + // rejected_handlers.add( ... ) + tuples[ 2 ][ 3 ].add( + resolve( + 0, + newDefer, + isFunction( onRejected ) ? + onRejected : + Thrower + ) + ); + } ).promise(); + }, + + // Get a promise for this deferred + // If obj is provided, the promise aspect is added to the object + promise: function( obj ) { + return obj != null ? jQuery.extend( obj, promise ) : promise; + } + }, + deferred = {}; + + // Add list-specific methods + jQuery.each( tuples, function( i, tuple ) { + var list = tuple[ 2 ], + stateString = tuple[ 5 ]; + + // promise.progress = list.add + // promise.done = list.add + // promise.fail = list.add + promise[ tuple[ 1 ] ] = list.add; + + // Handle state + if ( stateString ) { + list.add( + function() { + + // state = "resolved" (i.e., fulfilled) + // state = "rejected" + state = stateString; + }, + + // rejected_callbacks.disable + // fulfilled_callbacks.disable + tuples[ 3 - i ][ 2 ].disable, + + // rejected_handlers.disable + // fulfilled_handlers.disable + tuples[ 3 - i ][ 3 ].disable, + + // progress_callbacks.lock + tuples[ 0 ][ 2 ].lock, + + // progress_handlers.lock + tuples[ 0 ][ 3 ].lock + ); + } + + // progress_handlers.fire + // fulfilled_handlers.fire + // rejected_handlers.fire + list.add( tuple[ 3 ].fire ); + + // deferred.notify = function() { deferred.notifyWith(...) } + // deferred.resolve = function() { deferred.resolveWith(...) } + // deferred.reject = function() { deferred.rejectWith(...) } + deferred[ tuple[ 0 ] ] = function() { + deferred[ tuple[ 0 ] + "With" ]( this === deferred ? undefined : this, arguments ); + return this; + }; + + // deferred.notifyWith = list.fireWith + // deferred.resolveWith = list.fireWith + // deferred.rejectWith = list.fireWith + deferred[ tuple[ 0 ] + "With" ] = list.fireWith; + } ); + + // Make the deferred a promise + promise.promise( deferred ); + + // Call given func if any + if ( func ) { + func.call( deferred, deferred ); + } + + // All done! + return deferred; + }, + + // Deferred helper + when: function( singleValue ) { + var + + // count of uncompleted subordinates + remaining = arguments.length, + + // count of unprocessed arguments + i = remaining, + + // subordinate fulfillment data + resolveContexts = Array( i ), + resolveValues = slice.call( arguments ), + + // the primary Deferred + primary = jQuery.Deferred(), + + // subordinate callback factory + updateFunc = function( i ) { + return function( value ) { + resolveContexts[ i ] = this; + resolveValues[ i ] = arguments.length > 1 ? slice.call( arguments ) : value; + if ( !( --remaining ) ) { + primary.resolveWith( resolveContexts, resolveValues ); + } + }; + }; + + // Single- and empty arguments are adopted like Promise.resolve + if ( remaining <= 1 ) { + adoptValue( singleValue, primary.done( updateFunc( i ) ).resolve, primary.reject, + !remaining ); + + // Use .then() to unwrap secondary thenables (cf. gh-3000) + if ( primary.state() === "pending" || + isFunction( resolveValues[ i ] && resolveValues[ i ].then ) ) { + + return primary.then(); + } + } + + // Multiple arguments are aggregated like Promise.all array elements + while ( i-- ) { + adoptValue( resolveValues[ i ], updateFunc( i ), primary.reject ); + } + + return primary.promise(); + } +} ); + + +// These usually indicate a programmer mistake during development, +// warn about them ASAP rather than swallowing them by default. 
+var rerrorNames = /^(Eval|Internal|Range|Reference|Syntax|Type|URI)Error$/; + +jQuery.Deferred.exceptionHook = function( error, stack ) { + + // Support: IE 8 - 9 only + // Console exists when dev tools are open, which can happen at any time + if ( window.console && window.console.warn && error && rerrorNames.test( error.name ) ) { + window.console.warn( "jQuery.Deferred exception: " + error.message, error.stack, stack ); + } +}; + + + + +jQuery.readyException = function( error ) { + window.setTimeout( function() { + throw error; + } ); +}; + + + + +// The deferred used on DOM ready +var readyList = jQuery.Deferred(); + +jQuery.fn.ready = function( fn ) { + + readyList + .then( fn ) + + // Wrap jQuery.readyException in a function so that the lookup + // happens at the time of error handling instead of callback + // registration. + .catch( function( error ) { + jQuery.readyException( error ); + } ); + + return this; +}; + +jQuery.extend( { + + // Is the DOM ready to be used? Set to true once it occurs. + isReady: false, + + // A counter to track how many items to wait for before + // the ready event fires. See #6781 + readyWait: 1, + + // Handle when the DOM is ready + ready: function( wait ) { + + // Abort if there are pending holds or we're already ready + if ( wait === true ? --jQuery.readyWait : jQuery.isReady ) { + return; + } + + // Remember that the DOM is ready + jQuery.isReady = true; + + // If a normal DOM Ready event fired, decrement, and wait if need be + if ( wait !== true && --jQuery.readyWait > 0 ) { + return; + } + + // If there are functions bound, to execute + readyList.resolveWith( document, [ jQuery ] ); + } +} ); + +jQuery.ready.then = readyList.then; + +// The ready event handler and self cleanup method +function completed() { + document.removeEventListener( "DOMContentLoaded", completed ); + window.removeEventListener( "load", completed ); + jQuery.ready(); +} + +// Catch cases where $(document).ready() is called +// after the browser event has already occurred. +// Support: IE <=9 - 10 only +// Older IE sometimes signals "interactive" too soon +if ( document.readyState === "complete" || + ( document.readyState !== "loading" && !document.documentElement.doScroll ) ) { + + // Handle it asynchronously to allow scripts the opportunity to delay ready + window.setTimeout( jQuery.ready ); + +} else { + + // Use the handy event callback + document.addEventListener( "DOMContentLoaded", completed ); + + // A fallback to window.onload, that will always work + window.addEventListener( "load", completed ); +} + + + + +// Multifunctional method to get and set values of a collection +// The value/s can optionally be executed if it's a function +var access = function( elems, fn, key, value, chainable, emptyGet, raw ) { + var i = 0, + len = elems.length, + bulk = key == null; + + // Sets many values + if ( toType( key ) === "object" ) { + chainable = true; + for ( i in key ) { + access( elems, fn, i, key[ i ], true, emptyGet, raw ); + } + + // Sets one value + } else if ( value !== undefined ) { + chainable = true; + + if ( !isFunction( value ) ) { + raw = true; + } + + if ( bulk ) { + + // Bulk operations run against the entire set + if ( raw ) { + fn.call( elems, value ); + fn = null; + + // ...except when executing function values + } else { + bulk = fn; + fn = function( elem, _key, value ) { + return bulk.call( jQuery( elem ), value ); + }; + } + } + + if ( fn ) { + for ( ; i < len; i++ ) { + fn( + elems[ i ], key, raw ? 
+ value : + value.call( elems[ i ], i, fn( elems[ i ], key ) ) + ); + } + } + } + + if ( chainable ) { + return elems; + } + + // Gets + if ( bulk ) { + return fn.call( elems ); + } + + return len ? fn( elems[ 0 ], key ) : emptyGet; +}; + + +// Matches dashed string for camelizing +var rmsPrefix = /^-ms-/, + rdashAlpha = /-([a-z])/g; + +// Used by camelCase as callback to replace() +function fcamelCase( _all, letter ) { + return letter.toUpperCase(); +} + +// Convert dashed to camelCase; used by the css and data modules +// Support: IE <=9 - 11, Edge 12 - 15 +// Microsoft forgot to hump their vendor prefix (#9572) +function camelCase( string ) { + return string.replace( rmsPrefix, "ms-" ).replace( rdashAlpha, fcamelCase ); +} +var acceptData = function( owner ) { + + // Accepts only: + // - Node + // - Node.ELEMENT_NODE + // - Node.DOCUMENT_NODE + // - Object + // - Any + return owner.nodeType === 1 || owner.nodeType === 9 || !( +owner.nodeType ); +}; + + + + +function Data() { + this.expando = jQuery.expando + Data.uid++; +} + +Data.uid = 1; + +Data.prototype = { + + cache: function( owner ) { + + // Check if the owner object already has a cache + var value = owner[ this.expando ]; + + // If not, create one + if ( !value ) { + value = {}; + + // We can accept data for non-element nodes in modern browsers, + // but we should not, see #8335. + // Always return an empty object. + if ( acceptData( owner ) ) { + + // If it is a node unlikely to be stringify-ed or looped over + // use plain assignment + if ( owner.nodeType ) { + owner[ this.expando ] = value; + + // Otherwise secure it in a non-enumerable property + // configurable must be true to allow the property to be + // deleted when data is removed + } else { + Object.defineProperty( owner, this.expando, { + value: value, + configurable: true + } ); + } + } + } + + return value; + }, + set: function( owner, data, value ) { + var prop, + cache = this.cache( owner ); + + // Handle: [ owner, key, value ] args + // Always use camelCase key (gh-2257) + if ( typeof data === "string" ) { + cache[ camelCase( data ) ] = value; + + // Handle: [ owner, { properties } ] args + } else { + + // Copy the properties one-by-one to the cache object + for ( prop in data ) { + cache[ camelCase( prop ) ] = data[ prop ]; + } + } + return cache; + }, + get: function( owner, key ) { + return key === undefined ? + this.cache( owner ) : + + // Always use camelCase key (gh-2257) + owner[ this.expando ] && owner[ this.expando ][ camelCase( key ) ]; + }, + access: function( owner, key, value ) { + + // In cases where either: + // + // 1. No key was specified + // 2. A string key was specified, but no value provided + // + // Take the "read" path and allow the get method to determine + // which value to return, respectively either: + // + // 1. The entire cache object + // 2. The data stored at the key + // + if ( key === undefined || + ( ( key && typeof key === "string" ) && value === undefined ) ) { + + return this.get( owner, key ); + } + + // When the key is not a string, or both a key and value + // are specified, set or extend (existing objects) with either: + // + // 1. An object of properties + // 2. A key and value + // + this.set( owner, key, value ); + + // Since the "set" path can have two possible entry points + // return the expected data based on which path was taken[*] + return value !== undefined ? 
value : key; + }, + remove: function( owner, key ) { + var i, + cache = owner[ this.expando ]; + + if ( cache === undefined ) { + return; + } + + if ( key !== undefined ) { + + // Support array or space separated string of keys + if ( Array.isArray( key ) ) { + + // If key is an array of keys... + // We always set camelCase keys, so remove that. + key = key.map( camelCase ); + } else { + key = camelCase( key ); + + // If a key with the spaces exists, use it. + // Otherwise, create an array by matching non-whitespace + key = key in cache ? + [ key ] : + ( key.match( rnothtmlwhite ) || [] ); + } + + i = key.length; + + while ( i-- ) { + delete cache[ key[ i ] ]; + } + } + + // Remove the expando if there's no more data + if ( key === undefined || jQuery.isEmptyObject( cache ) ) { + + // Support: Chrome <=35 - 45 + // Webkit & Blink performance suffers when deleting properties + // from DOM nodes, so set to undefined instead + // https://bugs.chromium.org/p/chromium/issues/detail?id=378607 (bug restricted) + if ( owner.nodeType ) { + owner[ this.expando ] = undefined; + } else { + delete owner[ this.expando ]; + } + } + }, + hasData: function( owner ) { + var cache = owner[ this.expando ]; + return cache !== undefined && !jQuery.isEmptyObject( cache ); + } +}; +var dataPriv = new Data(); + +var dataUser = new Data(); + + + +// Implementation Summary +// +// 1. Enforce API surface and semantic compatibility with 1.9.x branch +// 2. Improve the module's maintainability by reducing the storage +// paths to a single mechanism. +// 3. Use the same single mechanism to support "private" and "user" data. +// 4. _Never_ expose "private" data to user code (TODO: Drop _data, _removeData) +// 5. Avoid exposing implementation details on user objects (eg. expando properties) +// 6. Provide a clear path for implementation upgrade to WeakMap in 2014 + +var rbrace = /^(?:\{[\w\W]*\}|\[[\w\W]*\])$/, + rmultiDash = /[A-Z]/g; + +function getData( data ) { + if ( data === "true" ) { + return true; + } + + if ( data === "false" ) { + return false; + } + + if ( data === "null" ) { + return null; + } + + // Only convert to a number if it doesn't change the string + if ( data === +data + "" ) { + return +data; + } + + if ( rbrace.test( data ) ) { + return JSON.parse( data ); + } + + return data; +} + +function dataAttr( elem, key, data ) { + var name; + + // If nothing was found internally, try to fetch any + // data from the HTML5 data-* attribute + if ( data === undefined && elem.nodeType === 1 ) { + name = "data-" + key.replace( rmultiDash, "-$&" ).toLowerCase(); + data = elem.getAttribute( name ); + + if ( typeof data === "string" ) { + try { + data = getData( data ); + } catch ( e ) {} + + // Make sure we set the data so it isn't changed later + dataUser.set( elem, key, data ); + } else { + data = undefined; + } + } + return data; +} + +jQuery.extend( { + hasData: function( elem ) { + return dataUser.hasData( elem ) || dataPriv.hasData( elem ); + }, + + data: function( elem, name, data ) { + return dataUser.access( elem, name, data ); + }, + + removeData: function( elem, name ) { + dataUser.remove( elem, name ); + }, + + // TODO: Now that all calls to _data and _removeData have been replaced + // with direct calls to dataPriv methods, these can be deprecated. 
+ _data: function( elem, name, data ) { + return dataPriv.access( elem, name, data ); + }, + + _removeData: function( elem, name ) { + dataPriv.remove( elem, name ); + } +} ); + +jQuery.fn.extend( { + data: function( key, value ) { + var i, name, data, + elem = this[ 0 ], + attrs = elem && elem.attributes; + + // Gets all values + if ( key === undefined ) { + if ( this.length ) { + data = dataUser.get( elem ); + + if ( elem.nodeType === 1 && !dataPriv.get( elem, "hasDataAttrs" ) ) { + i = attrs.length; + while ( i-- ) { + + // Support: IE 11 only + // The attrs elements can be null (#14894) + if ( attrs[ i ] ) { + name = attrs[ i ].name; + if ( name.indexOf( "data-" ) === 0 ) { + name = camelCase( name.slice( 5 ) ); + dataAttr( elem, name, data[ name ] ); + } + } + } + dataPriv.set( elem, "hasDataAttrs", true ); + } + } + + return data; + } + + // Sets multiple values + if ( typeof key === "object" ) { + return this.each( function() { + dataUser.set( this, key ); + } ); + } + + return access( this, function( value ) { + var data; + + // The calling jQuery object (element matches) is not empty + // (and therefore has an element appears at this[ 0 ]) and the + // `value` parameter was not undefined. An empty jQuery object + // will result in `undefined` for elem = this[ 0 ] which will + // throw an exception if an attempt to read a data cache is made. + if ( elem && value === undefined ) { + + // Attempt to get data from the cache + // The key will always be camelCased in Data + data = dataUser.get( elem, key ); + if ( data !== undefined ) { + return data; + } + + // Attempt to "discover" the data in + // HTML5 custom data-* attrs + data = dataAttr( elem, key ); + if ( data !== undefined ) { + return data; + } + + // We tried really hard, but the data doesn't exist. + return; + } + + // Set the data... 
+ this.each( function() { + + // We always store the camelCased key + dataUser.set( this, key, value ); + } ); + }, null, value, arguments.length > 1, null, true ); + }, + + removeData: function( key ) { + return this.each( function() { + dataUser.remove( this, key ); + } ); + } +} ); + + +jQuery.extend( { + queue: function( elem, type, data ) { + var queue; + + if ( elem ) { + type = ( type || "fx" ) + "queue"; + queue = dataPriv.get( elem, type ); + + // Speed up dequeue by getting out quickly if this is just a lookup + if ( data ) { + if ( !queue || Array.isArray( data ) ) { + queue = dataPriv.access( elem, type, jQuery.makeArray( data ) ); + } else { + queue.push( data ); + } + } + return queue || []; + } + }, + + dequeue: function( elem, type ) { + type = type || "fx"; + + var queue = jQuery.queue( elem, type ), + startLength = queue.length, + fn = queue.shift(), + hooks = jQuery._queueHooks( elem, type ), + next = function() { + jQuery.dequeue( elem, type ); + }; + + // If the fx queue is dequeued, always remove the progress sentinel + if ( fn === "inprogress" ) { + fn = queue.shift(); + startLength--; + } + + if ( fn ) { + + // Add a progress sentinel to prevent the fx queue from being + // automatically dequeued + if ( type === "fx" ) { + queue.unshift( "inprogress" ); + } + + // Clear up the last queue stop function + delete hooks.stop; + fn.call( elem, next, hooks ); + } + + if ( !startLength && hooks ) { + hooks.empty.fire(); + } + }, + + // Not public - generate a queueHooks object, or return the current one + _queueHooks: function( elem, type ) { + var key = type + "queueHooks"; + return dataPriv.get( elem, key ) || dataPriv.access( elem, key, { + empty: jQuery.Callbacks( "once memory" ).add( function() { + dataPriv.remove( elem, [ type + "queue", key ] ); + } ) + } ); + } +} ); + +jQuery.fn.extend( { + queue: function( type, data ) { + var setter = 2; + + if ( typeof type !== "string" ) { + data = type; + type = "fx"; + setter--; + } + + if ( arguments.length < setter ) { + return jQuery.queue( this[ 0 ], type ); + } + + return data === undefined ? 
+ this : + this.each( function() { + var queue = jQuery.queue( this, type, data ); + + // Ensure a hooks for this queue + jQuery._queueHooks( this, type ); + + if ( type === "fx" && queue[ 0 ] !== "inprogress" ) { + jQuery.dequeue( this, type ); + } + } ); + }, + dequeue: function( type ) { + return this.each( function() { + jQuery.dequeue( this, type ); + } ); + }, + clearQueue: function( type ) { + return this.queue( type || "fx", [] ); + }, + + // Get a promise resolved when queues of a certain type + // are emptied (fx is the type by default) + promise: function( type, obj ) { + var tmp, + count = 1, + defer = jQuery.Deferred(), + elements = this, + i = this.length, + resolve = function() { + if ( !( --count ) ) { + defer.resolveWith( elements, [ elements ] ); + } + }; + + if ( typeof type !== "string" ) { + obj = type; + type = undefined; + } + type = type || "fx"; + + while ( i-- ) { + tmp = dataPriv.get( elements[ i ], type + "queueHooks" ); + if ( tmp && tmp.empty ) { + count++; + tmp.empty.add( resolve ); + } + } + resolve(); + return defer.promise( obj ); + } +} ); +var pnum = ( /[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/ ).source; + +var rcssNum = new RegExp( "^(?:([+-])=|)(" + pnum + ")([a-z%]*)$", "i" ); + + +var cssExpand = [ "Top", "Right", "Bottom", "Left" ]; + +var documentElement = document.documentElement; + + + + var isAttached = function( elem ) { + return jQuery.contains( elem.ownerDocument, elem ); + }, + composed = { composed: true }; + + // Support: IE 9 - 11+, Edge 12 - 18+, iOS 10.0 - 10.2 only + // Check attachment across shadow DOM boundaries when possible (gh-3504) + // Support: iOS 10.0-10.2 only + // Early iOS 10 versions support `attachShadow` but not `getRootNode`, + // leading to errors. We need to check for `getRootNode`. + if ( documentElement.getRootNode ) { + isAttached = function( elem ) { + return jQuery.contains( elem.ownerDocument, elem ) || + elem.getRootNode( composed ) === elem.ownerDocument; + }; + } +var isHiddenWithinTree = function( elem, el ) { + + // isHiddenWithinTree might be called from jQuery#filter function; + // in that case, element will be second argument + elem = el || elem; + + // Inline style trumps all + return elem.style.display === "none" || + elem.style.display === "" && + + // Otherwise, check computed style + // Support: Firefox <=43 - 45 + // Disconnected elements can have computed display: none, so first confirm that elem is + // in the document. + isAttached( elem ) && + + jQuery.css( elem, "display" ) === "none"; + }; + + + +function adjustCSS( elem, prop, valueParts, tween ) { + var adjusted, scale, + maxIterations = 20, + currentValue = tween ? + function() { + return tween.cur(); + } : + function() { + return jQuery.css( elem, prop, "" ); + }, + initial = currentValue(), + unit = valueParts && valueParts[ 3 ] || ( jQuery.cssNumber[ prop ] ? 
"" : "px" ), + + // Starting value computation is required for potential unit mismatches + initialInUnit = elem.nodeType && + ( jQuery.cssNumber[ prop ] || unit !== "px" && +initial ) && + rcssNum.exec( jQuery.css( elem, prop ) ); + + if ( initialInUnit && initialInUnit[ 3 ] !== unit ) { + + // Support: Firefox <=54 + // Halve the iteration target value to prevent interference from CSS upper bounds (gh-2144) + initial = initial / 2; + + // Trust units reported by jQuery.css + unit = unit || initialInUnit[ 3 ]; + + // Iteratively approximate from a nonzero starting point + initialInUnit = +initial || 1; + + while ( maxIterations-- ) { + + // Evaluate and update our best guess (doubling guesses that zero out). + // Finish if the scale equals or crosses 1 (making the old*new product non-positive). + jQuery.style( elem, prop, initialInUnit + unit ); + if ( ( 1 - scale ) * ( 1 - ( scale = currentValue() / initial || 0.5 ) ) <= 0 ) { + maxIterations = 0; + } + initialInUnit = initialInUnit / scale; + + } + + initialInUnit = initialInUnit * 2; + jQuery.style( elem, prop, initialInUnit + unit ); + + // Make sure we update the tween properties later on + valueParts = valueParts || []; + } + + if ( valueParts ) { + initialInUnit = +initialInUnit || +initial || 0; + + // Apply relative offset (+=/-=) if specified + adjusted = valueParts[ 1 ] ? + initialInUnit + ( valueParts[ 1 ] + 1 ) * valueParts[ 2 ] : + +valueParts[ 2 ]; + if ( tween ) { + tween.unit = unit; + tween.start = initialInUnit; + tween.end = adjusted; + } + } + return adjusted; +} + + +var defaultDisplayMap = {}; + +function getDefaultDisplay( elem ) { + var temp, + doc = elem.ownerDocument, + nodeName = elem.nodeName, + display = defaultDisplayMap[ nodeName ]; + + if ( display ) { + return display; + } + + temp = doc.body.appendChild( doc.createElement( nodeName ) ); + display = jQuery.css( temp, "display" ); + + temp.parentNode.removeChild( temp ); + + if ( display === "none" ) { + display = "block"; + } + defaultDisplayMap[ nodeName ] = display; + + return display; +} + +function showHide( elements, show ) { + var display, elem, + values = [], + index = 0, + length = elements.length; + + // Determine new display value for elements that need to change + for ( ; index < length; index++ ) { + elem = elements[ index ]; + if ( !elem.style ) { + continue; + } + + display = elem.style.display; + if ( show ) { + + // Since we force visibility upon cascade-hidden elements, an immediate (and slow) + // check is required in this first loop unless we have a nonempty display value (either + // inline or about-to-be-restored) + if ( display === "none" ) { + values[ index ] = dataPriv.get( elem, "display" ) || null; + if ( !values[ index ] ) { + elem.style.display = ""; + } + } + if ( elem.style.display === "" && isHiddenWithinTree( elem ) ) { + values[ index ] = getDefaultDisplay( elem ); + } + } else { + if ( display !== "none" ) { + values[ index ] = "none"; + + // Remember what we're overwriting + dataPriv.set( elem, "display", display ); + } + } + } + + // Set the display of the elements in a second loop to avoid constant reflow + for ( index = 0; index < length; index++ ) { + if ( values[ index ] != null ) { + elements[ index ].style.display = values[ index ]; + } + } + + return elements; +} + +jQuery.fn.extend( { + show: function() { + return showHide( this, true ); + }, + hide: function() { + return showHide( this ); + }, + toggle: function( state ) { + if ( typeof state === "boolean" ) { + return state ? 
this.show() : this.hide();
+		}
+
+		return this.each( function() {
+			if ( isHiddenWithinTree( this ) ) {
+				jQuery( this ).show();
+			} else {
+				jQuery( this ).hide();
+			}
+		} );
+	}
+} );
+var rcheckableType = ( /^(?:checkbox|radio)$/i );
+
+var rtagName = ( /<([a-z][^\/\0>\x20\t\r\n\f]*)/i );
+
+var rscriptType = ( /^$|^module$|\/(?:java|ecma)script/i );
+
+
+
+( function() {
+	var fragment = document.createDocumentFragment(),
+		div = fragment.appendChild( document.createElement( "div" ) ),
+		input = document.createElement( "input" );
+
+	// Support: Android 4.0 - 4.3 only
+	// Check state lost if the name is set (#11217)
+	// Support: Windows Web Apps (WWA)
+	// `name` and `type` must use .setAttribute for WWA (#14901)
+	input.setAttribute( "type", "radio" );
+	input.setAttribute( "checked", "checked" );
+	input.setAttribute( "name", "t" );
+
+	div.appendChild( input );
+
+	// Support: Android <=4.1 only
+	// Older WebKit doesn't clone checked state correctly in fragments
+	support.checkClone = div.cloneNode( true ).cloneNode( true ).lastChild.checked;
+
+	// Support: IE <=11 only
+	// Make sure textarea (and checkbox) defaultValue is properly cloned
+	div.innerHTML = "<textarea>x</textarea>";
+	support.noCloneChecked = !!div.cloneNode( true ).lastChild.defaultValue;
+
+	// Support: IE <=9 only
+	// IE <=9 replaces <option> tags with their contents when inserted outside of
+	// the select element.
+	div.innerHTML = "<option></option>";
+	support.option = !!div.lastChild;
+} )();
+
+
+// We have to close these tags to support XHTML (#13200)
+var wrapMap = {
+
+	// XHTML parsers do not magically insert elements in the
+	// same way that tag soup parsers do. So we cannot shorten
+	// this by omitting <tbody> or other required elements.
+	thead: [ 1, "<table>", "</table>" ],
+	col: [ 2, "<table><colgroup>", "</colgroup></table>" ],
+	tr: [ 2, "<table><tbody>", "</tbody></table>" ],
+	td: [ 3, "<table><tbody><tr>", "</tr></tbody></table>" ],
+
+	_default: [ 0, "", "" ]
+};
+
+wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead;
+wrapMap.th = wrapMap.td;
+
+// Support: IE <=9 only
+if ( !support.option ) {
+	wrapMap.optgroup = wrapMap.option = [ 1, "<select multiple='multiple'>", "</select>" ];
+}
+
+
+function getAll( context, tag ) {
+
+	// Support: IE <=9 - 11 only
+	// Use typeof to avoid zero-argument method invocation on host objects (#15151)
+	var ret;
+
+	if ( typeof context.getElementsByTagName !== "undefined" ) {
+		ret = context.getElementsByTagName( tag || "*" );
+
+	} else if ( typeof context.querySelectorAll !== "undefined" ) {
+		ret = context.querySelectorAll( tag || "*" );
+
+	} else {
+		ret = [];
+	}
+
+	if ( tag === undefined || tag && nodeName( context, tag ) ) {
+		return jQuery.merge( [ context ], ret );
+	}
+
+	return ret;
+}
+
+
+// Mark scripts as having already been evaluated
+function setGlobalEval( elems, refElements ) {
+	var i = 0,
+		l = elems.length;
+
+	for ( ; i < l; i++ ) {
+		dataPriv.set(
+			elems[ i ],
+			"globalEval",
+			!refElements || dataPriv.get( refElements[ i ], "globalEval" )
+		);
+	}
+}
+
+
+var rhtml = /<|&#?\w+;/;
+
+function buildFragment( elems, context, scripts, selection, ignored ) {
+	var elem, tmp, tag, wrap, attached, j,
+		fragment = context.createDocumentFragment(),
+		nodes = [],
+		i = 0,
+		l = elems.length;
+
+	for ( ; i < l; i++ ) {
+		elem = elems[ i ];
+
+		if ( elem || elem === 0 ) {
+
+			// Add nodes directly
+			if ( toType( elem ) === "object" ) {
+
+				// Support: Android <=4.0 only, PhantomJS 1 only
+				// push.apply(_, arraylike) throws on ancient WebKit
+				jQuery.merge( nodes, elem.nodeType ? [ elem ] : elem );
+
+			// Convert non-html into a text node
+			} else if ( !rhtml.test( elem ) ) {
+				nodes.push( context.createTextNode( elem ) );
+
+			// Convert html into DOM nodes
+			} else {
+				tmp = tmp || fragment.appendChild( context.createElement( "div" ) );
+
+				// Deserialize a standard representation
+				tag = ( rtagName.exec( elem ) || [ "", "" ] )[ 1 ].toLowerCase();
+				wrap = wrapMap[ tag ] || wrapMap._default;
+				tmp.innerHTML = wrap[ 1 ] + jQuery.htmlPrefilter( elem ) + wrap[ 2 ];
+
+				// Descend through wrappers to the right content
+				j = wrap[ 0 ];
+				while ( j-- ) {
+					tmp = tmp.lastChild;
+				}
+
+				// Support: Android <=4.0 only, PhantomJS 1 only
+				// push.apply(_, arraylike) throws on ancient WebKit
+				jQuery.merge( nodes, tmp.childNodes );
+
+				// Remember the top-level container
+				tmp = fragment.firstChild;
+
+				// Ensure the created nodes are orphaned (#12392)
+				tmp.textContent = "";
+			}
+		}
+	}
+
+	// Remove wrapper from fragment
+	fragment.textContent = "";
+
+	i = 0;
+	while ( ( elem = nodes[ i++ ] ) ) {
+
+		// Skip elements already in the context collection (trac-4087)
+		if ( selection && jQuery.inArray( elem, selection ) > -1 ) {
+			if ( ignored ) {
+				ignored.push( elem );
+			}
+			continue;
+		}
+
+		attached = isAttached( elem );
+
+		// Append to fragment
+		tmp = getAll( fragment.appendChild( elem ), "script" );
+
+		// Preserve script evaluation history
+		if ( attached ) {
+			setGlobalEval( tmp );
+		}
+
+		// Capture executables
+		if ( scripts ) {
+			j = 0;
+			while ( ( elem = tmp[ j++ ] ) ) {
+				if ( rscriptType.test( elem.type || "" ) ) {
+					scripts.push( elem );
+				}
+			}
+		}
+	}
+
+	return fragment;
+}
+
+
+var rtypenamespace = /^([^.]*)(?:\.(.+)|)/;
+
+function returnTrue() {
+	return true;
+}
+
+function returnFalse() {
+	return false;
+}
+
+// Support: IE <=9 - 11+
+// focus() and blur() are asynchronous, except when they are no-op.
+// So expect focus to be synchronous when the element is already active, +// and blur to be synchronous when the element is not already active. +// (focus and blur are always synchronous in other supported browsers, +// this just defines when we can count on it). +function expectSync( elem, type ) { + return ( elem === safeActiveElement() ) === ( type === "focus" ); +} + +// Support: IE <=9 only +// Accessing document.activeElement can throw unexpectedly +// https://bugs.jquery.com/ticket/13393 +function safeActiveElement() { + try { + return document.activeElement; + } catch ( err ) { } +} + +function on( elem, types, selector, data, fn, one ) { + var origFn, type; + + // Types can be a map of types/handlers + if ( typeof types === "object" ) { + + // ( types-Object, selector, data ) + if ( typeof selector !== "string" ) { + + // ( types-Object, data ) + data = data || selector; + selector = undefined; + } + for ( type in types ) { + on( elem, type, selector, data, types[ type ], one ); + } + return elem; + } + + if ( data == null && fn == null ) { + + // ( types, fn ) + fn = selector; + data = selector = undefined; + } else if ( fn == null ) { + if ( typeof selector === "string" ) { + + // ( types, selector, fn ) + fn = data; + data = undefined; + } else { + + // ( types, data, fn ) + fn = data; + data = selector; + selector = undefined; + } + } + if ( fn === false ) { + fn = returnFalse; + } else if ( !fn ) { + return elem; + } + + if ( one === 1 ) { + origFn = fn; + fn = function( event ) { + + // Can use an empty set, since event contains the info + jQuery().off( event ); + return origFn.apply( this, arguments ); + }; + + // Use same guid so caller can remove using origFn + fn.guid = origFn.guid || ( origFn.guid = jQuery.guid++ ); + } + return elem.each( function() { + jQuery.event.add( this, types, fn, data, selector ); + } ); +} + +/* + * Helper functions for managing events -- not part of the public interface. + * Props to Dean Edwards' addEvent library for many of the ideas. + */ +jQuery.event = { + + global: {}, + + add: function( elem, types, handler, data, selector ) { + + var handleObjIn, eventHandle, tmp, + events, t, handleObj, + special, handlers, type, namespaces, origType, + elemData = dataPriv.get( elem ); + + // Only attach events to objects that accept data + if ( !acceptData( elem ) ) { + return; + } + + // Caller can pass in an object of custom data in lieu of the handler + if ( handler.handler ) { + handleObjIn = handler; + handler = handleObjIn.handler; + selector = handleObjIn.selector; + } + + // Ensure that invalid selectors throw exceptions at attach time + // Evaluate against documentElement in case elem is a non-element node (e.g., document) + if ( selector ) { + jQuery.find.matchesSelector( documentElement, selector ); + } + + // Make sure that the handler has a unique ID, used to find/remove it later + if ( !handler.guid ) { + handler.guid = jQuery.guid++; + } + + // Init the element's event structure and main handler, if this is the first + if ( !( events = elemData.events ) ) { + events = elemData.events = Object.create( null ); + } + if ( !( eventHandle = elemData.handle ) ) { + eventHandle = elemData.handle = function( e ) { + + // Discard the second event of a jQuery.event.trigger() and + // when an event is called after a page has unloaded + return typeof jQuery !== "undefined" && jQuery.event.triggered !== e.type ? 
+ jQuery.event.dispatch.apply( elem, arguments ) : undefined; + }; + } + + // Handle multiple events separated by a space + types = ( types || "" ).match( rnothtmlwhite ) || [ "" ]; + t = types.length; + while ( t-- ) { + tmp = rtypenamespace.exec( types[ t ] ) || []; + type = origType = tmp[ 1 ]; + namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort(); + + // There *must* be a type, no attaching namespace-only handlers + if ( !type ) { + continue; + } + + // If event changes its type, use the special event handlers for the changed type + special = jQuery.event.special[ type ] || {}; + + // If selector defined, determine special event api type, otherwise given type + type = ( selector ? special.delegateType : special.bindType ) || type; + + // Update special based on newly reset type + special = jQuery.event.special[ type ] || {}; + + // handleObj is passed to all event handlers + handleObj = jQuery.extend( { + type: type, + origType: origType, + data: data, + handler: handler, + guid: handler.guid, + selector: selector, + needsContext: selector && jQuery.expr.match.needsContext.test( selector ), + namespace: namespaces.join( "." ) + }, handleObjIn ); + + // Init the event handler queue if we're the first + if ( !( handlers = events[ type ] ) ) { + handlers = events[ type ] = []; + handlers.delegateCount = 0; + + // Only use addEventListener if the special events handler returns false + if ( !special.setup || + special.setup.call( elem, data, namespaces, eventHandle ) === false ) { + + if ( elem.addEventListener ) { + elem.addEventListener( type, eventHandle ); + } + } + } + + if ( special.add ) { + special.add.call( elem, handleObj ); + + if ( !handleObj.handler.guid ) { + handleObj.handler.guid = handler.guid; + } + } + + // Add to the element's handler list, delegates in front + if ( selector ) { + handlers.splice( handlers.delegateCount++, 0, handleObj ); + } else { + handlers.push( handleObj ); + } + + // Keep track of which events have ever been used, for event optimization + jQuery.event.global[ type ] = true; + } + + }, + + // Detach an event or set of events from an element + remove: function( elem, types, handler, selector, mappedTypes ) { + + var j, origCount, tmp, + events, t, handleObj, + special, handlers, type, namespaces, origType, + elemData = dataPriv.hasData( elem ) && dataPriv.get( elem ); + + if ( !elemData || !( events = elemData.events ) ) { + return; + } + + // Once for each type.namespace in types; type may be omitted + types = ( types || "" ).match( rnothtmlwhite ) || [ "" ]; + t = types.length; + while ( t-- ) { + tmp = rtypenamespace.exec( types[ t ] ) || []; + type = origType = tmp[ 1 ]; + namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort(); + + // Unbind all events (on this namespace, if provided) for the element + if ( !type ) { + for ( type in events ) { + jQuery.event.remove( elem, type + types[ t ], handler, selector, true ); + } + continue; + } + + special = jQuery.event.special[ type ] || {}; + type = ( selector ? 
special.delegateType : special.bindType ) || type; + handlers = events[ type ] || []; + tmp = tmp[ 2 ] && + new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ); + + // Remove matching events + origCount = j = handlers.length; + while ( j-- ) { + handleObj = handlers[ j ]; + + if ( ( mappedTypes || origType === handleObj.origType ) && + ( !handler || handler.guid === handleObj.guid ) && + ( !tmp || tmp.test( handleObj.namespace ) ) && + ( !selector || selector === handleObj.selector || + selector === "**" && handleObj.selector ) ) { + handlers.splice( j, 1 ); + + if ( handleObj.selector ) { + handlers.delegateCount--; + } + if ( special.remove ) { + special.remove.call( elem, handleObj ); + } + } + } + + // Remove generic event handler if we removed something and no more handlers exist + // (avoids potential for endless recursion during removal of special event handlers) + if ( origCount && !handlers.length ) { + if ( !special.teardown || + special.teardown.call( elem, namespaces, elemData.handle ) === false ) { + + jQuery.removeEvent( elem, type, elemData.handle ); + } + + delete events[ type ]; + } + } + + // Remove data and the expando if it's no longer used + if ( jQuery.isEmptyObject( events ) ) { + dataPriv.remove( elem, "handle events" ); + } + }, + + dispatch: function( nativeEvent ) { + + var i, j, ret, matched, handleObj, handlerQueue, + args = new Array( arguments.length ), + + // Make a writable jQuery.Event from the native event object + event = jQuery.event.fix( nativeEvent ), + + handlers = ( + dataPriv.get( this, "events" ) || Object.create( null ) + )[ event.type ] || [], + special = jQuery.event.special[ event.type ] || {}; + + // Use the fix-ed jQuery.Event rather than the (read-only) native event + args[ 0 ] = event; + + for ( i = 1; i < arguments.length; i++ ) { + args[ i ] = arguments[ i ]; + } + + event.delegateTarget = this; + + // Call the preDispatch hook for the mapped type, and let it bail if desired + if ( special.preDispatch && special.preDispatch.call( this, event ) === false ) { + return; + } + + // Determine handlers + handlerQueue = jQuery.event.handlers.call( this, event, handlers ); + + // Run delegates first; they may want to stop propagation beneath us + i = 0; + while ( ( matched = handlerQueue[ i++ ] ) && !event.isPropagationStopped() ) { + event.currentTarget = matched.elem; + + j = 0; + while ( ( handleObj = matched.handlers[ j++ ] ) && + !event.isImmediatePropagationStopped() ) { + + // If the event is namespaced, then each handler is only invoked if it is + // specially universal or its namespaces are a superset of the event's. 
+ if ( !event.rnamespace || handleObj.namespace === false || + event.rnamespace.test( handleObj.namespace ) ) { + + event.handleObj = handleObj; + event.data = handleObj.data; + + ret = ( ( jQuery.event.special[ handleObj.origType ] || {} ).handle || + handleObj.handler ).apply( matched.elem, args ); + + if ( ret !== undefined ) { + if ( ( event.result = ret ) === false ) { + event.preventDefault(); + event.stopPropagation(); + } + } + } + } + } + + // Call the postDispatch hook for the mapped type + if ( special.postDispatch ) { + special.postDispatch.call( this, event ); + } + + return event.result; + }, + + handlers: function( event, handlers ) { + var i, handleObj, sel, matchedHandlers, matchedSelectors, + handlerQueue = [], + delegateCount = handlers.delegateCount, + cur = event.target; + + // Find delegate handlers + if ( delegateCount && + + // Support: IE <=9 + // Black-hole SVG instance trees (trac-13180) + cur.nodeType && + + // Support: Firefox <=42 + // Suppress spec-violating clicks indicating a non-primary pointer button (trac-3861) + // https://www.w3.org/TR/DOM-Level-3-Events/#event-type-click + // Support: IE 11 only + // ...but not arrow key "clicks" of radio inputs, which can have `button` -1 (gh-2343) + !( event.type === "click" && event.button >= 1 ) ) { + + for ( ; cur !== this; cur = cur.parentNode || this ) { + + // Don't check non-elements (#13208) + // Don't process clicks on disabled elements (#6911, #8165, #11382, #11764) + if ( cur.nodeType === 1 && !( event.type === "click" && cur.disabled === true ) ) { + matchedHandlers = []; + matchedSelectors = {}; + for ( i = 0; i < delegateCount; i++ ) { + handleObj = handlers[ i ]; + + // Don't conflict with Object.prototype properties (#13203) + sel = handleObj.selector + " "; + + if ( matchedSelectors[ sel ] === undefined ) { + matchedSelectors[ sel ] = handleObj.needsContext ? + jQuery( sel, this ).index( cur ) > -1 : + jQuery.find( sel, this, null, [ cur ] ).length; + } + if ( matchedSelectors[ sel ] ) { + matchedHandlers.push( handleObj ); + } + } + if ( matchedHandlers.length ) { + handlerQueue.push( { elem: cur, handlers: matchedHandlers } ); + } + } + } + } + + // Add the remaining (directly-bound) handlers + cur = this; + if ( delegateCount < handlers.length ) { + handlerQueue.push( { elem: cur, handlers: handlers.slice( delegateCount ) } ); + } + + return handlerQueue; + }, + + addProp: function( name, hook ) { + Object.defineProperty( jQuery.Event.prototype, name, { + enumerable: true, + configurable: true, + + get: isFunction( hook ) ? + function() { + if ( this.originalEvent ) { + return hook( this.originalEvent ); + } + } : + function() { + if ( this.originalEvent ) { + return this.originalEvent[ name ]; + } + }, + + set: function( value ) { + Object.defineProperty( this, name, { + enumerable: true, + configurable: true, + writable: true, + value: value + } ); + } + } ); + }, + + fix: function( originalEvent ) { + return originalEvent[ jQuery.expando ] ? + originalEvent : + new jQuery.Event( originalEvent ); + }, + + special: { + load: { + + // Prevent triggered image.load events from bubbling to window.load + noBubble: true + }, + click: { + + // Utilize native event to ensure correct state for checkable inputs + setup: function( data ) { + + // For mutual compressibility with _default, replace `this` access with a local var. + // `|| data` is dead code meant only to preserve the variable through minification. 
+ var el = this || data; + + // Claim the first handler + if ( rcheckableType.test( el.type ) && + el.click && nodeName( el, "input" ) ) { + + // dataPriv.set( el, "click", ... ) + leverageNative( el, "click", returnTrue ); + } + + // Return false to allow normal processing in the caller + return false; + }, + trigger: function( data ) { + + // For mutual compressibility with _default, replace `this` access with a local var. + // `|| data` is dead code meant only to preserve the variable through minification. + var el = this || data; + + // Force setup before triggering a click + if ( rcheckableType.test( el.type ) && + el.click && nodeName( el, "input" ) ) { + + leverageNative( el, "click" ); + } + + // Return non-false to allow normal event-path propagation + return true; + }, + + // For cross-browser consistency, suppress native .click() on links + // Also prevent it if we're currently inside a leveraged native-event stack + _default: function( event ) { + var target = event.target; + return rcheckableType.test( target.type ) && + target.click && nodeName( target, "input" ) && + dataPriv.get( target, "click" ) || + nodeName( target, "a" ); + } + }, + + beforeunload: { + postDispatch: function( event ) { + + // Support: Firefox 20+ + // Firefox doesn't alert if the returnValue field is not set. + if ( event.result !== undefined && event.originalEvent ) { + event.originalEvent.returnValue = event.result; + } + } + } + } +}; + +// Ensure the presence of an event listener that handles manually-triggered +// synthetic events by interrupting progress until reinvoked in response to +// *native* events that it fires directly, ensuring that state changes have +// already occurred before other listeners are invoked. +function leverageNative( el, type, expectSync ) { + + // Missing expectSync indicates a trigger call, which must force setup through jQuery.event.add + if ( !expectSync ) { + if ( dataPriv.get( el, type ) === undefined ) { + jQuery.event.add( el, type, returnTrue ); + } + return; + } + + // Register the controller as a special universal handler for all event namespaces + dataPriv.set( el, type, false ); + jQuery.event.add( el, type, { + namespace: false, + handler: function( event ) { + var notAsync, result, + saved = dataPriv.get( this, type ); + + if ( ( event.isTrigger & 1 ) && this[ type ] ) { + + // Interrupt processing of the outer synthetic .trigger()ed event + // Saved data should be false in such cases, but might be a leftover capture object + // from an async native handler (gh-4350) + if ( !saved.length ) { + + // Store arguments for use when handling the inner native event + // There will always be at least one argument (an event object), so this array + // will not be confused with a leftover capture object. + saved = slice.call( arguments ); + dataPriv.set( this, type, saved ); + + // Trigger the native event and capture its result + // Support: IE <=9 - 11+ + // focus() and blur() are asynchronous + notAsync = expectSync( this, type ); + this[ type ](); + result = dataPriv.get( this, type ); + if ( saved !== result || notAsync ) { + dataPriv.set( this, type, false ); + } else { + result = {}; + } + if ( saved !== result ) { + + // Cancel the outer synthetic event + event.stopImmediatePropagation(); + event.preventDefault(); + + // Support: Chrome 86+ + // In Chrome, if an element having a focusout handler is blurred by + // clicking outside of it, it invokes the handler synchronously. 
If + // that handler calls `.remove()` on the element, the data is cleared, + // leaving `result` undefined. We need to guard against this. + return result && result.value; + } + + // If this is an inner synthetic event for an event with a bubbling surrogate + // (focus or blur), assume that the surrogate already propagated from triggering the + // native event and prevent that from happening again here. + // This technically gets the ordering wrong w.r.t. to `.trigger()` (in which the + // bubbling surrogate propagates *after* the non-bubbling base), but that seems + // less bad than duplication. + } else if ( ( jQuery.event.special[ type ] || {} ).delegateType ) { + event.stopPropagation(); + } + + // If this is a native event triggered above, everything is now in order + // Fire an inner synthetic event with the original arguments + } else if ( saved.length ) { + + // ...and capture the result + dataPriv.set( this, type, { + value: jQuery.event.trigger( + + // Support: IE <=9 - 11+ + // Extend with the prototype to reset the above stopImmediatePropagation() + jQuery.extend( saved[ 0 ], jQuery.Event.prototype ), + saved.slice( 1 ), + this + ) + } ); + + // Abort handling of the native event + event.stopImmediatePropagation(); + } + } + } ); +} + +jQuery.removeEvent = function( elem, type, handle ) { + + // This "if" is needed for plain objects + if ( elem.removeEventListener ) { + elem.removeEventListener( type, handle ); + } +}; + +jQuery.Event = function( src, props ) { + + // Allow instantiation without the 'new' keyword + if ( !( this instanceof jQuery.Event ) ) { + return new jQuery.Event( src, props ); + } + + // Event object + if ( src && src.type ) { + this.originalEvent = src; + this.type = src.type; + + // Events bubbling up the document may have been marked as prevented + // by a handler lower down the tree; reflect the correct value. + this.isDefaultPrevented = src.defaultPrevented || + src.defaultPrevented === undefined && + + // Support: Android <=2.3 only + src.returnValue === false ? + returnTrue : + returnFalse; + + // Create target properties + // Support: Safari <=6 - 7 only + // Target should not be a text node (#504, #13143) + this.target = ( src.target && src.target.nodeType === 3 ) ? 
+ src.target.parentNode : + src.target; + + this.currentTarget = src.currentTarget; + this.relatedTarget = src.relatedTarget; + + // Event type + } else { + this.type = src; + } + + // Put explicitly provided properties onto the event object + if ( props ) { + jQuery.extend( this, props ); + } + + // Create a timestamp if incoming event doesn't have one + this.timeStamp = src && src.timeStamp || Date.now(); + + // Mark it as fixed + this[ jQuery.expando ] = true; +}; + +// jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding +// https://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html +jQuery.Event.prototype = { + constructor: jQuery.Event, + isDefaultPrevented: returnFalse, + isPropagationStopped: returnFalse, + isImmediatePropagationStopped: returnFalse, + isSimulated: false, + + preventDefault: function() { + var e = this.originalEvent; + + this.isDefaultPrevented = returnTrue; + + if ( e && !this.isSimulated ) { + e.preventDefault(); + } + }, + stopPropagation: function() { + var e = this.originalEvent; + + this.isPropagationStopped = returnTrue; + + if ( e && !this.isSimulated ) { + e.stopPropagation(); + } + }, + stopImmediatePropagation: function() { + var e = this.originalEvent; + + this.isImmediatePropagationStopped = returnTrue; + + if ( e && !this.isSimulated ) { + e.stopImmediatePropagation(); + } + + this.stopPropagation(); + } +}; + +// Includes all common event props including KeyEvent and MouseEvent specific props +jQuery.each( { + altKey: true, + bubbles: true, + cancelable: true, + changedTouches: true, + ctrlKey: true, + detail: true, + eventPhase: true, + metaKey: true, + pageX: true, + pageY: true, + shiftKey: true, + view: true, + "char": true, + code: true, + charCode: true, + key: true, + keyCode: true, + button: true, + buttons: true, + clientX: true, + clientY: true, + offsetX: true, + offsetY: true, + pointerId: true, + pointerType: true, + screenX: true, + screenY: true, + targetTouches: true, + toElement: true, + touches: true, + which: true +}, jQuery.event.addProp ); + +jQuery.each( { focus: "focusin", blur: "focusout" }, function( type, delegateType ) { + jQuery.event.special[ type ] = { + + // Utilize native event if possible so blur/focus sequence is correct + setup: function() { + + // Claim the first handler + // dataPriv.set( this, "focus", ... ) + // dataPriv.set( this, "blur", ... ) + leverageNative( this, type, expectSync ); + + // Return false to allow normal processing in the caller + return false; + }, + trigger: function() { + + // Force setup before trigger + leverageNative( this, type ); + + // Return non-false to allow normal event-path propagation + return true; + }, + + // Suppress native focus or blur as it's already being fired + // in leverageNative. + _default: function() { + return true; + }, + + delegateType: delegateType + }; +} ); + +// Create mouseenter/leave events using mouseover/out and event-time checks +// so that event delegation works in jQuery. +// Do the same for pointerenter/pointerleave and pointerover/pointerout +// +// Support: Safari 7 only +// Safari sends mouseenter too often; see: +// https://bugs.chromium.org/p/chromium/issues/detail?id=470258 +// for the description of the bug (it existed in older Chrome versions as well). 
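+//
+// Illustrative note (not part of the upstream jQuery source; selector and class
+// names below are hypothetical): this remapping is what makes *delegated*
+// enter/leave handlers possible, e.g.
+//
+//     jQuery( document ).on( "mouseenter mouseleave", ".item", function( event ) {
+//         jQuery( this ).toggleClass( "hover", event.type === "mouseenter" );
+//     } );
+//
+// Native mouseenter/mouseleave do not bubble, so jQuery listens for
+// mouseover/mouseout instead and re-labels the event when relatedTarget lies
+// outside the delegated element.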
+jQuery.each( {
+	mouseenter: "mouseover",
+	mouseleave: "mouseout",
+	pointerenter: "pointerover",
+	pointerleave: "pointerout"
+}, function( orig, fix ) {
+	jQuery.event.special[ orig ] = {
+		delegateType: fix,
+		bindType: fix,
+
+		handle: function( event ) {
+			var ret,
+				target = this,
+				related = event.relatedTarget,
+				handleObj = event.handleObj;
+
+			// For mouseenter/leave call the handler if related is outside the target.
+			// NB: No relatedTarget if the mouse left/entered the browser window
+			if ( !related || ( related !== target && !jQuery.contains( target, related ) ) ) {
+				event.type = handleObj.origType;
+				ret = handleObj.handler.apply( this, arguments );
+				event.type = fix;
+			}
+			return ret;
+		}
+	};
+} );
+
+jQuery.fn.extend( {
+
+	on: function( types, selector, data, fn ) {
+		return on( this, types, selector, data, fn );
+	},
+	one: function( types, selector, data, fn ) {
+		return on( this, types, selector, data, fn, 1 );
+	},
+	off: function( types, selector, fn ) {
+		var handleObj, type;
+		if ( types && types.preventDefault && types.handleObj ) {
+
+			// ( event ) dispatched jQuery.Event
+			handleObj = types.handleObj;
+			jQuery( types.delegateTarget ).off(
+				handleObj.namespace ?
+					handleObj.origType + "." + handleObj.namespace :
+					handleObj.origType,
+				handleObj.selector,
+				handleObj.handler
+			);
+			return this;
+		}
+		if ( typeof types === "object" ) {
+
+			// ( types-object [, selector] )
+			for ( type in types ) {
+				this.off( type, selector, types[ type ] );
+			}
+			return this;
+		}
+		if ( selector === false || typeof selector === "function" ) {
+
+			// ( types [, fn] )
+			fn = selector;
+			selector = undefined;
+		}
+		if ( fn === false ) {
+			fn = returnFalse;
+		}
+		return this.each( function() {
+			jQuery.event.remove( this, types, fn, selector );
+		} );
+	}
+} );
+
+
+var
+
+	// Support: IE <=10 - 11, Edge 12 - 13 only
+	// In IE/Edge using regex groups here causes severe slowdowns.
+	// See https://connect.microsoft.com/IE/feedback/details/1736512/
+	rnoInnerhtml = /<script|<style|<link/i,
+
+	// checked="checked" or checked
+	rchecked = /checked\s*(?:[^=]|=\s*.checked.)/i,
+
+	rcleanScript = /^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g;
+
+// Prefer a tbody over its parent table for containing new rows
+function manipulationTarget( elem, content ) {
+	if ( nodeName( elem, "table" ) &&
+		nodeName( content.nodeType !== 11 ? content : content.firstChild, "tr" ) ) {
+
+		return jQuery( elem ).children( "tbody" )[ 0 ] || elem;
+	}
+
+	return elem;
+}
+
+// Replace/restore the type attribute of script elements for safe DOM manipulation
+function disableScript( elem ) {
+	elem.type = ( elem.getAttribute( "type" ) !== null ) + "/" + elem.type;
+	return elem;
+}
+function restoreScript( elem ) {
+	if ( ( elem.type || "" ).slice( 0, 5 ) === "true/" ) {
+		elem.type = elem.type.slice( 5 );
+	} else {
+		elem.removeAttribute( "type" );
+	}
+
+	return elem;
+}
+
+function cloneCopyEvent( src, dest ) {
+	var i, l, type, pdataOld, udataOld, udataCur, events;
+
+	if ( dest.nodeType !== 1 ) {
+		return;
+	}
+
+	// 1. Copy private data: events, handlers, etc.
+	if ( dataPriv.hasData( src ) ) {
+		pdataOld = dataPriv.get( src );
+		events = pdataOld.events;
+
+		if ( events ) {
+			dataPriv.remove( dest, "handle events" );
+
+			for ( type in events ) {
+				for ( i = 0, l = events[ type ].length; i < l; i++ ) {
+					jQuery.event.add( dest, type, events[ type ][ i ] );
+				}
+			}
+		}
+	}
+
+	// 2.
Copy user data + if ( dataUser.hasData( src ) ) { + udataOld = dataUser.access( src ); + udataCur = jQuery.extend( {}, udataOld ); + + dataUser.set( dest, udataCur ); + } +} + +// Fix IE bugs, see support tests +function fixInput( src, dest ) { + var nodeName = dest.nodeName.toLowerCase(); + + // Fails to persist the checked state of a cloned checkbox or radio button. + if ( nodeName === "input" && rcheckableType.test( src.type ) ) { + dest.checked = src.checked; + + // Fails to return the selected option to the default selected state when cloning options + } else if ( nodeName === "input" || nodeName === "textarea" ) { + dest.defaultValue = src.defaultValue; + } +} + +function domManip( collection, args, callback, ignored ) { + + // Flatten any nested arrays + args = flat( args ); + + var fragment, first, scripts, hasScripts, node, doc, + i = 0, + l = collection.length, + iNoClone = l - 1, + value = args[ 0 ], + valueIsFunction = isFunction( value ); + + // We can't cloneNode fragments that contain checked, in WebKit + if ( valueIsFunction || + ( l > 1 && typeof value === "string" && + !support.checkClone && rchecked.test( value ) ) ) { + return collection.each( function( index ) { + var self = collection.eq( index ); + if ( valueIsFunction ) { + args[ 0 ] = value.call( this, index, self.html() ); + } + domManip( self, args, callback, ignored ); + } ); + } + + if ( l ) { + fragment = buildFragment( args, collection[ 0 ].ownerDocument, false, collection, ignored ); + first = fragment.firstChild; + + if ( fragment.childNodes.length === 1 ) { + fragment = first; + } + + // Require either new content or an interest in ignored elements to invoke the callback + if ( first || ignored ) { + scripts = jQuery.map( getAll( fragment, "script" ), disableScript ); + hasScripts = scripts.length; + + // Use the original fragment for the last item + // instead of the first because it can end up + // being emptied incorrectly in certain situations (#8070). + for ( ; i < l; i++ ) { + node = fragment; + + if ( i !== iNoClone ) { + node = jQuery.clone( node, true, true ); + + // Keep references to cloned scripts for later restoration + if ( hasScripts ) { + + // Support: Android <=4.0 only, PhantomJS 1 only + // push.apply(_, arraylike) throws on ancient WebKit + jQuery.merge( scripts, getAll( node, "script" ) ); + } + } + + callback.call( collection[ i ], node, i ); + } + + if ( hasScripts ) { + doc = scripts[ scripts.length - 1 ].ownerDocument; + + // Reenable scripts + jQuery.map( scripts, restoreScript ); + + // Evaluate executable scripts on first document insertion + for ( i = 0; i < hasScripts; i++ ) { + node = scripts[ i ]; + if ( rscriptType.test( node.type || "" ) && + !dataPriv.access( node, "globalEval" ) && + jQuery.contains( doc, node ) ) { + + if ( node.src && ( node.type || "" ).toLowerCase() !== "module" ) { + + // Optional AJAX dependency, but won't run scripts if not present + if ( jQuery._evalUrl && !node.noModule ) { + jQuery._evalUrl( node.src, { + nonce: node.nonce || node.getAttribute( "nonce" ) + }, doc ); + } + } else { + DOMEval( node.textContent.replace( rcleanScript, "" ), node, doc ); + } + } + } + } + } + } + + return collection; +} + +function remove( elem, selector, keepData ) { + var node, + nodes = selector ? 
jQuery.filter( selector, elem ) : elem, + i = 0; + + for ( ; ( node = nodes[ i ] ) != null; i++ ) { + if ( !keepData && node.nodeType === 1 ) { + jQuery.cleanData( getAll( node ) ); + } + + if ( node.parentNode ) { + if ( keepData && isAttached( node ) ) { + setGlobalEval( getAll( node, "script" ) ); + } + node.parentNode.removeChild( node ); + } + } + + return elem; +} + +jQuery.extend( { + htmlPrefilter: function( html ) { + return html; + }, + + clone: function( elem, dataAndEvents, deepDataAndEvents ) { + var i, l, srcElements, destElements, + clone = elem.cloneNode( true ), + inPage = isAttached( elem ); + + // Fix IE cloning issues + if ( !support.noCloneChecked && ( elem.nodeType === 1 || elem.nodeType === 11 ) && + !jQuery.isXMLDoc( elem ) ) { + + // We eschew Sizzle here for performance reasons: https://jsperf.com/getall-vs-sizzle/2 + destElements = getAll( clone ); + srcElements = getAll( elem ); + + for ( i = 0, l = srcElements.length; i < l; i++ ) { + fixInput( srcElements[ i ], destElements[ i ] ); + } + } + + // Copy the events from the original to the clone + if ( dataAndEvents ) { + if ( deepDataAndEvents ) { + srcElements = srcElements || getAll( elem ); + destElements = destElements || getAll( clone ); + + for ( i = 0, l = srcElements.length; i < l; i++ ) { + cloneCopyEvent( srcElements[ i ], destElements[ i ] ); + } + } else { + cloneCopyEvent( elem, clone ); + } + } + + // Preserve script evaluation history + destElements = getAll( clone, "script" ); + if ( destElements.length > 0 ) { + setGlobalEval( destElements, !inPage && getAll( elem, "script" ) ); + } + + // Return the cloned set + return clone; + }, + + cleanData: function( elems ) { + var data, elem, type, + special = jQuery.event.special, + i = 0; + + for ( ; ( elem = elems[ i ] ) !== undefined; i++ ) { + if ( acceptData( elem ) ) { + if ( ( data = elem[ dataPriv.expando ] ) ) { + if ( data.events ) { + for ( type in data.events ) { + if ( special[ type ] ) { + jQuery.event.remove( elem, type ); + + // This is a shortcut to avoid jQuery.event.remove's overhead + } else { + jQuery.removeEvent( elem, type, data.handle ); + } + } + } + + // Support: Chrome <=35 - 45+ + // Assign undefined instead of using delete, see Data#remove + elem[ dataPriv.expando ] = undefined; + } + if ( elem[ dataUser.expando ] ) { + + // Support: Chrome <=35 - 45+ + // Assign undefined instead of using delete, see Data#remove + elem[ dataUser.expando ] = undefined; + } + } + } + } +} ); + +jQuery.fn.extend( { + detach: function( selector ) { + return remove( this, selector, true ); + }, + + remove: function( selector ) { + return remove( this, selector ); + }, + + text: function( value ) { + return access( this, function( value ) { + return value === undefined ? 
+ jQuery.text( this ) : + this.empty().each( function() { + if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { + this.textContent = value; + } + } ); + }, null, value, arguments.length ); + }, + + append: function() { + return domManip( this, arguments, function( elem ) { + if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { + var target = manipulationTarget( this, elem ); + target.appendChild( elem ); + } + } ); + }, + + prepend: function() { + return domManip( this, arguments, function( elem ) { + if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { + var target = manipulationTarget( this, elem ); + target.insertBefore( elem, target.firstChild ); + } + } ); + }, + + before: function() { + return domManip( this, arguments, function( elem ) { + if ( this.parentNode ) { + this.parentNode.insertBefore( elem, this ); + } + } ); + }, + + after: function() { + return domManip( this, arguments, function( elem ) { + if ( this.parentNode ) { + this.parentNode.insertBefore( elem, this.nextSibling ); + } + } ); + }, + + empty: function() { + var elem, + i = 0; + + for ( ; ( elem = this[ i ] ) != null; i++ ) { + if ( elem.nodeType === 1 ) { + + // Prevent memory leaks + jQuery.cleanData( getAll( elem, false ) ); + + // Remove any remaining nodes + elem.textContent = ""; + } + } + + return this; + }, + + clone: function( dataAndEvents, deepDataAndEvents ) { + dataAndEvents = dataAndEvents == null ? false : dataAndEvents; + deepDataAndEvents = deepDataAndEvents == null ? dataAndEvents : deepDataAndEvents; + + return this.map( function() { + return jQuery.clone( this, dataAndEvents, deepDataAndEvents ); + } ); + }, + + html: function( value ) { + return access( this, function( value ) { + var elem = this[ 0 ] || {}, + i = 0, + l = this.length; + + if ( value === undefined && elem.nodeType === 1 ) { + return elem.innerHTML; + } + + // See if we can take a shortcut and just use innerHTML + if ( typeof value === "string" && !rnoInnerhtml.test( value ) && + !wrapMap[ ( rtagName.exec( value ) || [ "", "" ] )[ 1 ].toLowerCase() ] ) { + + value = jQuery.htmlPrefilter( value ); + + try { + for ( ; i < l; i++ ) { + elem = this[ i ] || {}; + + // Remove element nodes and prevent memory leaks + if ( elem.nodeType === 1 ) { + jQuery.cleanData( getAll( elem, false ) ); + elem.innerHTML = value; + } + } + + elem = 0; + + // If using innerHTML throws an exception, use the fallback method + } catch ( e ) {} + } + + if ( elem ) { + this.empty().append( value ); + } + }, null, value, arguments.length ); + }, + + replaceWith: function() { + var ignored = []; + + // Make the changes, replacing each non-ignored context element with the new content + return domManip( this, arguments, function( elem ) { + var parent = this.parentNode; + + if ( jQuery.inArray( this, ignored ) < 0 ) { + jQuery.cleanData( getAll( this ) ); + if ( parent ) { + parent.replaceChild( elem, this ); + } + } + + // Force callback invocation + }, ignored ); + } +} ); + +jQuery.each( { + appendTo: "append", + prependTo: "prepend", + insertBefore: "before", + insertAfter: "after", + replaceAll: "replaceWith" +}, function( name, original ) { + jQuery.fn[ name ] = function( selector ) { + var elems, + ret = [], + insert = jQuery( selector ), + last = insert.length - 1, + i = 0; + + for ( ; i <= last; i++ ) { + elems = i === last ? 
this : this.clone( true ); + jQuery( insert[ i ] )[ original ]( elems ); + + // Support: Android <=4.0 only, PhantomJS 1 only + // .get() because push.apply(_, arraylike) throws on ancient WebKit + push.apply( ret, elems.get() ); + } + + return this.pushStack( ret ); + }; +} ); +var rnumnonpx = new RegExp( "^(" + pnum + ")(?!px)[a-z%]+$", "i" ); + +var getStyles = function( elem ) { + + // Support: IE <=11 only, Firefox <=30 (#15098, #14150) + // IE throws on elements created in popups + // FF meanwhile throws on frame elements through "defaultView.getComputedStyle" + var view = elem.ownerDocument.defaultView; + + if ( !view || !view.opener ) { + view = window; + } + + return view.getComputedStyle( elem ); + }; + +var swap = function( elem, options, callback ) { + var ret, name, + old = {}; + + // Remember the old values, and insert the new ones + for ( name in options ) { + old[ name ] = elem.style[ name ]; + elem.style[ name ] = options[ name ]; + } + + ret = callback.call( elem ); + + // Revert the old values + for ( name in options ) { + elem.style[ name ] = old[ name ]; + } + + return ret; +}; + + +var rboxStyle = new RegExp( cssExpand.join( "|" ), "i" ); + + + +( function() { + + // Executing both pixelPosition & boxSizingReliable tests require only one layout + // so they're executed at the same time to save the second computation. + function computeStyleTests() { + + // This is a singleton, we need to execute it only once + if ( !div ) { + return; + } + + container.style.cssText = "position:absolute;left:-11111px;width:60px;" + + "margin-top:1px;padding:0;border:0"; + div.style.cssText = + "position:relative;display:block;box-sizing:border-box;overflow:scroll;" + + "margin:auto;border:1px;padding:1px;" + + "width:60%;top:1%"; + documentElement.appendChild( container ).appendChild( div ); + + var divStyle = window.getComputedStyle( div ); + pixelPositionVal = divStyle.top !== "1%"; + + // Support: Android 4.0 - 4.3 only, Firefox <=3 - 44 + reliableMarginLeftVal = roundPixelMeasures( divStyle.marginLeft ) === 12; + + // Support: Android 4.0 - 4.3 only, Safari <=9.1 - 10.1, iOS <=7.0 - 9.3 + // Some styles come back with percentage values, even though they shouldn't + div.style.right = "60%"; + pixelBoxStylesVal = roundPixelMeasures( divStyle.right ) === 36; + + // Support: IE 9 - 11 only + // Detect misreporting of content dimensions for box-sizing:border-box elements + boxSizingReliableVal = roundPixelMeasures( divStyle.width ) === 36; + + // Support: IE 9 only + // Detect overflow:scroll screwiness (gh-3699) + // Support: Chrome <=64 + // Don't get tricked when zoom affects offsetWidth (gh-4029) + div.style.position = "absolute"; + scrollboxSizeVal = roundPixelMeasures( div.offsetWidth / 3 ) === 12; + + documentElement.removeChild( container ); + + // Nullify the div so it wouldn't be stored in the memory and + // it will also be a sign that checks already performed + div = null; + } + + function roundPixelMeasures( measure ) { + return Math.round( parseFloat( measure ) ); + } + + var pixelPositionVal, boxSizingReliableVal, scrollboxSizeVal, pixelBoxStylesVal, + reliableTrDimensionsVal, reliableMarginLeftVal, + container = document.createElement( "div" ), + div = document.createElement( "div" ); + + // Finish early in limited (non-browser) environments + if ( !div.style ) { + return; + } + + // Support: IE <=9 - 11 only + // Style of cloned element affects source element cloned (#8908) + div.style.backgroundClip = "content-box"; + div.cloneNode( true ).style.backgroundClip = ""; + 
support.clearCloneStyle = div.style.backgroundClip === "content-box"; + + jQuery.extend( support, { + boxSizingReliable: function() { + computeStyleTests(); + return boxSizingReliableVal; + }, + pixelBoxStyles: function() { + computeStyleTests(); + return pixelBoxStylesVal; + }, + pixelPosition: function() { + computeStyleTests(); + return pixelPositionVal; + }, + reliableMarginLeft: function() { + computeStyleTests(); + return reliableMarginLeftVal; + }, + scrollboxSize: function() { + computeStyleTests(); + return scrollboxSizeVal; + }, + + // Support: IE 9 - 11+, Edge 15 - 18+ + // IE/Edge misreport `getComputedStyle` of table rows with width/height + // set in CSS while `offset*` properties report correct values. + // Behavior in IE 9 is more subtle than in newer versions & it passes + // some versions of this test; make sure not to make it pass there! + // + // Support: Firefox 70+ + // Only Firefox includes border widths + // in computed dimensions. (gh-4529) + reliableTrDimensions: function() { + var table, tr, trChild, trStyle; + if ( reliableTrDimensionsVal == null ) { + table = document.createElement( "table" ); + tr = document.createElement( "tr" ); + trChild = document.createElement( "div" ); + + table.style.cssText = "position:absolute;left:-11111px;border-collapse:separate"; + tr.style.cssText = "border:1px solid"; + + // Support: Chrome 86+ + // Height set through cssText does not get applied. + // Computed height then comes back as 0. + tr.style.height = "1px"; + trChild.style.height = "9px"; + + // Support: Android 8 Chrome 86+ + // In our bodyBackground.html iframe, + // display for all div elements is set to "inline", + // which causes a problem only in Android 8 Chrome 86. + // Ensuring the div is display: block + // gets around this issue. + trChild.style.display = "block"; + + documentElement + .appendChild( table ) + .appendChild( tr ) + .appendChild( trChild ); + + trStyle = window.getComputedStyle( tr ); + reliableTrDimensionsVal = ( parseInt( trStyle.height, 10 ) + + parseInt( trStyle.borderTopWidth, 10 ) + + parseInt( trStyle.borderBottomWidth, 10 ) ) === tr.offsetHeight; + + documentElement.removeChild( table ); + } + return reliableTrDimensionsVal; + } + } ); +} )(); + + +function curCSS( elem, name, computed ) { + var width, minWidth, maxWidth, ret, + + // Support: Firefox 51+ + // Retrieving style before computed somehow + // fixes an issue with getting wrong values + // on detached elements + style = elem.style; + + computed = computed || getStyles( elem ); + + // getPropertyValue is needed for: + // .css('filter') (IE 9 only, #12537) + // .css('--customProperty) (#3144) + if ( computed ) { + ret = computed.getPropertyValue( name ) || computed[ name ]; + + if ( ret === "" && !isAttached( elem ) ) { + ret = jQuery.style( elem, name ); + } + + // A tribute to the "awesome hack by Dean Edwards" + // Android Browser returns percentage for some values, + // but width seems to be reliably pixels. 
+ // This is against the CSSOM draft spec: + // https://drafts.csswg.org/cssom/#resolved-values + if ( !support.pixelBoxStyles() && rnumnonpx.test( ret ) && rboxStyle.test( name ) ) { + + // Remember the original values + width = style.width; + minWidth = style.minWidth; + maxWidth = style.maxWidth; + + // Put in the new values to get a computed value out + style.minWidth = style.maxWidth = style.width = ret; + ret = computed.width; + + // Revert the changed values + style.width = width; + style.minWidth = minWidth; + style.maxWidth = maxWidth; + } + } + + return ret !== undefined ? + + // Support: IE <=9 - 11 only + // IE returns zIndex value as an integer. + ret + "" : + ret; +} + + +function addGetHookIf( conditionFn, hookFn ) { + + // Define the hook, we'll check on the first run if it's really needed. + return { + get: function() { + if ( conditionFn() ) { + + // Hook not needed (or it's not possible to use it due + // to missing dependency), remove it. + delete this.get; + return; + } + + // Hook needed; redefine it so that the support test is not executed again. + return ( this.get = hookFn ).apply( this, arguments ); + } + }; +} + + +var cssPrefixes = [ "Webkit", "Moz", "ms" ], + emptyStyle = document.createElement( "div" ).style, + vendorProps = {}; + +// Return a vendor-prefixed property or undefined +function vendorPropName( name ) { + + // Check for vendor prefixed names + var capName = name[ 0 ].toUpperCase() + name.slice( 1 ), + i = cssPrefixes.length; + + while ( i-- ) { + name = cssPrefixes[ i ] + capName; + if ( name in emptyStyle ) { + return name; + } + } +} + +// Return a potentially-mapped jQuery.cssProps or vendor prefixed property +function finalPropName( name ) { + var final = jQuery.cssProps[ name ] || vendorProps[ name ]; + + if ( final ) { + return final; + } + if ( name in emptyStyle ) { + return name; + } + return vendorProps[ name ] = vendorPropName( name ) || name; +} + + +var + + // Swappable if display is none or starts with table + // except "table", "table-cell", or "table-caption" + // See here for display values: https://developer.mozilla.org/en-US/docs/CSS/display + rdisplayswap = /^(none|table(?!-c[ea]).+)/, + rcustomProp = /^--/, + cssShow = { position: "absolute", visibility: "hidden", display: "block" }, + cssNormalTransform = { + letterSpacing: "0", + fontWeight: "400" + }; + +function setPositiveNumber( _elem, value, subtract ) { + + // Any relative (+/-) values have already been + // normalized at this point + var matches = rcssNum.exec( value ); + return matches ? + + // Guard against undefined "subtract", e.g., when used as in cssHooks + Math.max( 0, matches[ 2 ] - ( subtract || 0 ) ) + ( matches[ 3 ] || "px" ) : + value; +} + +function boxModelAdjustment( elem, dimension, box, isBorderBox, styles, computedVal ) { + var i = dimension === "width" ? 1 : 0, + extra = 0, + delta = 0; + + // Adjustment may not be necessary + if ( box === ( isBorderBox ? 
"border" : "content" ) ) { + return 0; + } + + for ( ; i < 4; i += 2 ) { + + // Both box models exclude margin + if ( box === "margin" ) { + delta += jQuery.css( elem, box + cssExpand[ i ], true, styles ); + } + + // If we get here with a content-box, we're seeking "padding" or "border" or "margin" + if ( !isBorderBox ) { + + // Add padding + delta += jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); + + // For "border" or "margin", add border + if ( box !== "padding" ) { + delta += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); + + // But still keep track of it otherwise + } else { + extra += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); + } + + // If we get here with a border-box (content + padding + border), we're seeking "content" or + // "padding" or "margin" + } else { + + // For "content", subtract padding + if ( box === "content" ) { + delta -= jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); + } + + // For "content" or "padding", subtract border + if ( box !== "margin" ) { + delta -= jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); + } + } + } + + // Account for positive content-box scroll gutter when requested by providing computedVal + if ( !isBorderBox && computedVal >= 0 ) { + + // offsetWidth/offsetHeight is a rounded sum of content, padding, scroll gutter, and border + // Assuming integer scroll gutter, subtract the rest and round down + delta += Math.max( 0, Math.ceil( + elem[ "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ) ] - + computedVal - + delta - + extra - + 0.5 + + // If offsetWidth/offsetHeight is unknown, then we can't determine content-box scroll gutter + // Use an explicit zero to avoid NaN (gh-3964) + ) ) || 0; + } + + return delta; +} + +function getWidthOrHeight( elem, dimension, extra ) { + + // Start with computed style + var styles = getStyles( elem ), + + // To avoid forcing a reflow, only fetch boxSizing if we need it (gh-4322). + // Fake content-box until we know it's needed to know the true value. + boxSizingNeeded = !support.boxSizingReliable() || extra, + isBorderBox = boxSizingNeeded && + jQuery.css( elem, "boxSizing", false, styles ) === "border-box", + valueIsBorderBox = isBorderBox, + + val = curCSS( elem, dimension, styles ), + offsetProp = "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ); + + // Support: Firefox <=54 + // Return a confounding non-pixel value or feign ignorance, as appropriate. + if ( rnumnonpx.test( val ) ) { + if ( !extra ) { + return val; + } + val = "auto"; + } + + + // Support: IE 9 - 11 only + // Use offsetWidth/offsetHeight for when box sizing is unreliable. + // In those cases, the computed value can be trusted to be border-box. + if ( ( !support.boxSizingReliable() && isBorderBox || + + // Support: IE 10 - 11+, Edge 15 - 18+ + // IE/Edge misreport `getComputedStyle` of table rows with width/height + // set in CSS while `offset*` properties report correct values. + // Interestingly, in some cases IE 9 doesn't suffer from this issue. 
+ !support.reliableTrDimensions() && nodeName( elem, "tr" ) || + + // Fall back to offsetWidth/offsetHeight when value is "auto" + // This happens for inline elements with no explicit setting (gh-3571) + val === "auto" || + + // Support: Android <=4.1 - 4.3 only + // Also use offsetWidth/offsetHeight for misreported inline dimensions (gh-3602) + !parseFloat( val ) && jQuery.css( elem, "display", false, styles ) === "inline" ) && + + // Make sure the element is visible & connected + elem.getClientRects().length ) { + + isBorderBox = jQuery.css( elem, "boxSizing", false, styles ) === "border-box"; + + // Where available, offsetWidth/offsetHeight approximate border box dimensions. + // Where not available (e.g., SVG), assume unreliable box-sizing and interpret the + // retrieved value as a content box dimension. + valueIsBorderBox = offsetProp in elem; + if ( valueIsBorderBox ) { + val = elem[ offsetProp ]; + } + } + + // Normalize "" and auto + val = parseFloat( val ) || 0; + + // Adjust for the element's box model + return ( val + + boxModelAdjustment( + elem, + dimension, + extra || ( isBorderBox ? "border" : "content" ), + valueIsBorderBox, + styles, + + // Provide the current computed size to request scroll gutter calculation (gh-3589) + val + ) + ) + "px"; +} + +jQuery.extend( { + + // Add in style property hooks for overriding the default + // behavior of getting and setting a style property + cssHooks: { + opacity: { + get: function( elem, computed ) { + if ( computed ) { + + // We should always get a number back from opacity + var ret = curCSS( elem, "opacity" ); + return ret === "" ? "1" : ret; + } + } + } + }, + + // Don't automatically add "px" to these possibly-unitless properties + cssNumber: { + "animationIterationCount": true, + "columnCount": true, + "fillOpacity": true, + "flexGrow": true, + "flexShrink": true, + "fontWeight": true, + "gridArea": true, + "gridColumn": true, + "gridColumnEnd": true, + "gridColumnStart": true, + "gridRow": true, + "gridRowEnd": true, + "gridRowStart": true, + "lineHeight": true, + "opacity": true, + "order": true, + "orphans": true, + "widows": true, + "zIndex": true, + "zoom": true + }, + + // Add in properties whose names you wish to fix before + // setting or getting the value + cssProps: {}, + + // Get and set the style property on a DOM Node + style: function( elem, name, value, extra ) { + + // Don't set styles on text and comment nodes + if ( !elem || elem.nodeType === 3 || elem.nodeType === 8 || !elem.style ) { + return; + } + + // Make sure that we're working with the right name + var ret, type, hooks, + origName = camelCase( name ), + isCustomProp = rcustomProp.test( name ), + style = elem.style; + + // Make sure that we're working with the right name. We don't + // want to query the value if it is a CSS custom property + // since they are user-defined. 
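+		// Illustrative example (not from the upstream jQuery source; the property
+		// name is hypothetical): this branch is what lets calls such as
+		//     jQuery( elem ).css( "--main-color", "#c00" );
+		//     jQuery( elem ).css( "--main-color" );
+		// address CSS custom properties directly, skipping camelCase conversion
+		// and vendor-prefix lookup, since their names are user-defined.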
+ if ( !isCustomProp ) { + name = finalPropName( origName ); + } + + // Gets hook for the prefixed version, then unprefixed version + hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; + + // Check if we're setting a value + if ( value !== undefined ) { + type = typeof value; + + // Convert "+=" or "-=" to relative numbers (#7345) + if ( type === "string" && ( ret = rcssNum.exec( value ) ) && ret[ 1 ] ) { + value = adjustCSS( elem, name, ret ); + + // Fixes bug #9237 + type = "number"; + } + + // Make sure that null and NaN values aren't set (#7116) + if ( value == null || value !== value ) { + return; + } + + // If a number was passed in, add the unit (except for certain CSS properties) + // The isCustomProp check can be removed in jQuery 4.0 when we only auto-append + // "px" to a few hardcoded values. + if ( type === "number" && !isCustomProp ) { + value += ret && ret[ 3 ] || ( jQuery.cssNumber[ origName ] ? "" : "px" ); + } + + // background-* props affect original clone's values + if ( !support.clearCloneStyle && value === "" && name.indexOf( "background" ) === 0 ) { + style[ name ] = "inherit"; + } + + // If a hook was provided, use that value, otherwise just set the specified value + if ( !hooks || !( "set" in hooks ) || + ( value = hooks.set( elem, value, extra ) ) !== undefined ) { + + if ( isCustomProp ) { + style.setProperty( name, value ); + } else { + style[ name ] = value; + } + } + + } else { + + // If a hook was provided get the non-computed value from there + if ( hooks && "get" in hooks && + ( ret = hooks.get( elem, false, extra ) ) !== undefined ) { + + return ret; + } + + // Otherwise just get the value from the style object + return style[ name ]; + } + }, + + css: function( elem, name, extra, styles ) { + var val, num, hooks, + origName = camelCase( name ), + isCustomProp = rcustomProp.test( name ); + + // Make sure that we're working with the right name. We don't + // want to modify the value if it is a CSS custom property + // since they are user-defined. + if ( !isCustomProp ) { + name = finalPropName( origName ); + } + + // Try prefixed name followed by the unprefixed name + hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; + + // If a hook was provided get the computed value from there + if ( hooks && "get" in hooks ) { + val = hooks.get( elem, true, extra ); + } + + // Otherwise, if a way to get the computed value exists, use that + if ( val === undefined ) { + val = curCSS( elem, name, styles ); + } + + // Convert "normal" to computed value + if ( val === "normal" && name in cssNormalTransform ) { + val = cssNormalTransform[ name ]; + } + + // Make numeric if forced or a qualifier was provided and val looks numeric + if ( extra === "" || extra ) { + num = parseFloat( val ); + return extra === true || isFinite( num ) ? num || 0 : val; + } + + return val; + } +} ); + +jQuery.each( [ "height", "width" ], function( _i, dimension ) { + jQuery.cssHooks[ dimension ] = { + get: function( elem, computed, extra ) { + if ( computed ) { + + // Certain elements can have dimension info if we invisibly show them + // but it must have a current display style that would benefit + return rdisplayswap.test( jQuery.css( elem, "display" ) ) && + + // Support: Safari 8+ + // Table columns in Safari have non-zero offsetWidth & zero + // getBoundingClientRect().width unless display is changed. + // Support: IE <=11 only + // Running getBoundingClientRect on a disconnected node + // in IE throws an error. 
+ ( !elem.getClientRects().length || !elem.getBoundingClientRect().width ) ? + swap( elem, cssShow, function() { + return getWidthOrHeight( elem, dimension, extra ); + } ) : + getWidthOrHeight( elem, dimension, extra ); + } + }, + + set: function( elem, value, extra ) { + var matches, + styles = getStyles( elem ), + + // Only read styles.position if the test has a chance to fail + // to avoid forcing a reflow. + scrollboxSizeBuggy = !support.scrollboxSize() && + styles.position === "absolute", + + // To avoid forcing a reflow, only fetch boxSizing if we need it (gh-3991) + boxSizingNeeded = scrollboxSizeBuggy || extra, + isBorderBox = boxSizingNeeded && + jQuery.css( elem, "boxSizing", false, styles ) === "border-box", + subtract = extra ? + boxModelAdjustment( + elem, + dimension, + extra, + isBorderBox, + styles + ) : + 0; + + // Account for unreliable border-box dimensions by comparing offset* to computed and + // faking a content-box to get border and padding (gh-3699) + if ( isBorderBox && scrollboxSizeBuggy ) { + subtract -= Math.ceil( + elem[ "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ) ] - + parseFloat( styles[ dimension ] ) - + boxModelAdjustment( elem, dimension, "border", false, styles ) - + 0.5 + ); + } + + // Convert to pixels if value adjustment is needed + if ( subtract && ( matches = rcssNum.exec( value ) ) && + ( matches[ 3 ] || "px" ) !== "px" ) { + + elem.style[ dimension ] = value; + value = jQuery.css( elem, dimension ); + } + + return setPositiveNumber( elem, value, subtract ); + } + }; +} ); + +jQuery.cssHooks.marginLeft = addGetHookIf( support.reliableMarginLeft, + function( elem, computed ) { + if ( computed ) { + return ( parseFloat( curCSS( elem, "marginLeft" ) ) || + elem.getBoundingClientRect().left - + swap( elem, { marginLeft: 0 }, function() { + return elem.getBoundingClientRect().left; + } ) + ) + "px"; + } + } +); + +// These hooks are used by animate to expand properties +jQuery.each( { + margin: "", + padding: "", + border: "Width" +}, function( prefix, suffix ) { + jQuery.cssHooks[ prefix + suffix ] = { + expand: function( value ) { + var i = 0, + expanded = {}, + + // Assumes a single number if not a string + parts = typeof value === "string" ? value.split( " " ) : [ value ]; + + for ( ; i < 4; i++ ) { + expanded[ prefix + cssExpand[ i ] + suffix ] = + parts[ i ] || parts[ i - 2 ] || parts[ 0 ]; + } + + return expanded; + } + }; + + if ( prefix !== "margin" ) { + jQuery.cssHooks[ prefix + suffix ].set = setPositiveNumber; + } +} ); + +jQuery.fn.extend( { + css: function( name, value ) { + return access( this, function( elem, name, value ) { + var styles, len, + map = {}, + i = 0; + + if ( Array.isArray( name ) ) { + styles = getStyles( elem ); + len = name.length; + + for ( ; i < len; i++ ) { + map[ name[ i ] ] = jQuery.css( elem, name[ i ], false, styles ); + } + + return map; + } + + return value !== undefined ? + jQuery.style( elem, name, value ) : + jQuery.css( elem, name ); + }, name, value, arguments.length > 1 ); + } +} ); + + +function Tween( elem, options, prop, end, easing ) { + return new Tween.prototype.init( elem, options, prop, end, easing ); +} +jQuery.Tween = Tween; + +Tween.prototype = { + constructor: Tween, + init: function( elem, options, prop, end, easing, unit ) { + this.elem = elem; + this.prop = prop; + this.easing = easing || jQuery.easing._default; + this.options = options; + this.start = this.now = this.cur(); + this.end = end; + this.unit = unit || ( jQuery.cssNumber[ prop ] ? 
"" : "px" ); + }, + cur: function() { + var hooks = Tween.propHooks[ this.prop ]; + + return hooks && hooks.get ? + hooks.get( this ) : + Tween.propHooks._default.get( this ); + }, + run: function( percent ) { + var eased, + hooks = Tween.propHooks[ this.prop ]; + + if ( this.options.duration ) { + this.pos = eased = jQuery.easing[ this.easing ]( + percent, this.options.duration * percent, 0, 1, this.options.duration + ); + } else { + this.pos = eased = percent; + } + this.now = ( this.end - this.start ) * eased + this.start; + + if ( this.options.step ) { + this.options.step.call( this.elem, this.now, this ); + } + + if ( hooks && hooks.set ) { + hooks.set( this ); + } else { + Tween.propHooks._default.set( this ); + } + return this; + } +}; + +Tween.prototype.init.prototype = Tween.prototype; + +Tween.propHooks = { + _default: { + get: function( tween ) { + var result; + + // Use a property on the element directly when it is not a DOM element, + // or when there is no matching style property that exists. + if ( tween.elem.nodeType !== 1 || + tween.elem[ tween.prop ] != null && tween.elem.style[ tween.prop ] == null ) { + return tween.elem[ tween.prop ]; + } + + // Passing an empty string as a 3rd parameter to .css will automatically + // attempt a parseFloat and fallback to a string if the parse fails. + // Simple values such as "10px" are parsed to Float; + // complex values such as "rotate(1rad)" are returned as-is. + result = jQuery.css( tween.elem, tween.prop, "" ); + + // Empty strings, null, undefined and "auto" are converted to 0. + return !result || result === "auto" ? 0 : result; + }, + set: function( tween ) { + + // Use step hook for back compat. + // Use cssHook if its there. + // Use .style if available and use plain properties where available. + if ( jQuery.fx.step[ tween.prop ] ) { + jQuery.fx.step[ tween.prop ]( tween ); + } else if ( tween.elem.nodeType === 1 && ( + jQuery.cssHooks[ tween.prop ] || + tween.elem.style[ finalPropName( tween.prop ) ] != null ) ) { + jQuery.style( tween.elem, tween.prop, tween.now + tween.unit ); + } else { + tween.elem[ tween.prop ] = tween.now; + } + } + } +}; + +// Support: IE <=9 only +// Panic based approach to setting things on disconnected nodes +Tween.propHooks.scrollTop = Tween.propHooks.scrollLeft = { + set: function( tween ) { + if ( tween.elem.nodeType && tween.elem.parentNode ) { + tween.elem[ tween.prop ] = tween.now; + } + } +}; + +jQuery.easing = { + linear: function( p ) { + return p; + }, + swing: function( p ) { + return 0.5 - Math.cos( p * Math.PI ) / 2; + }, + _default: "swing" +}; + +jQuery.fx = Tween.prototype.init; + +// Back compat <1.8 extension point +jQuery.fx.step = {}; + + + + +var + fxNow, inProgress, + rfxtypes = /^(?:toggle|show|hide)$/, + rrun = /queueHooks$/; + +function schedule() { + if ( inProgress ) { + if ( document.hidden === false && window.requestAnimationFrame ) { + window.requestAnimationFrame( schedule ); + } else { + window.setTimeout( schedule, jQuery.fx.interval ); + } + + jQuery.fx.tick(); + } +} + +// Animations created synchronously will run synchronously +function createFxNow() { + window.setTimeout( function() { + fxNow = undefined; + } ); + return ( fxNow = Date.now() ); +} + +// Generate parameters to create a standard animation +function genFx( type, includeWidth ) { + var which, + i = 0, + attrs = { height: type }; + + // If we include width, step value is 1 to do all cssExpand values, + // otherwise step value is 2 to skip over Left and Right + includeWidth = includeWidth ? 
1 : 0; + for ( ; i < 4; i += 2 - includeWidth ) { + which = cssExpand[ i ]; + attrs[ "margin" + which ] = attrs[ "padding" + which ] = type; + } + + if ( includeWidth ) { + attrs.opacity = attrs.width = type; + } + + return attrs; +} + +function createTween( value, prop, animation ) { + var tween, + collection = ( Animation.tweeners[ prop ] || [] ).concat( Animation.tweeners[ "*" ] ), + index = 0, + length = collection.length; + for ( ; index < length; index++ ) { + if ( ( tween = collection[ index ].call( animation, prop, value ) ) ) { + + // We're done with this property + return tween; + } + } +} + +function defaultPrefilter( elem, props, opts ) { + var prop, value, toggle, hooks, oldfire, propTween, restoreDisplay, display, + isBox = "width" in props || "height" in props, + anim = this, + orig = {}, + style = elem.style, + hidden = elem.nodeType && isHiddenWithinTree( elem ), + dataShow = dataPriv.get( elem, "fxshow" ); + + // Queue-skipping animations hijack the fx hooks + if ( !opts.queue ) { + hooks = jQuery._queueHooks( elem, "fx" ); + if ( hooks.unqueued == null ) { + hooks.unqueued = 0; + oldfire = hooks.empty.fire; + hooks.empty.fire = function() { + if ( !hooks.unqueued ) { + oldfire(); + } + }; + } + hooks.unqueued++; + + anim.always( function() { + + // Ensure the complete handler is called before this completes + anim.always( function() { + hooks.unqueued--; + if ( !jQuery.queue( elem, "fx" ).length ) { + hooks.empty.fire(); + } + } ); + } ); + } + + // Detect show/hide animations + for ( prop in props ) { + value = props[ prop ]; + if ( rfxtypes.test( value ) ) { + delete props[ prop ]; + toggle = toggle || value === "toggle"; + if ( value === ( hidden ? "hide" : "show" ) ) { + + // Pretend to be hidden if this is a "show" and + // there is still data from a stopped show/hide + if ( value === "show" && dataShow && dataShow[ prop ] !== undefined ) { + hidden = true; + + // Ignore all other no-op show/hide data + } else { + continue; + } + } + orig[ prop ] = dataShow && dataShow[ prop ] || jQuery.style( elem, prop ); + } + } + + // Bail out if this is a no-op like .hide().hide() + propTween = !jQuery.isEmptyObject( props ); + if ( !propTween && jQuery.isEmptyObject( orig ) ) { + return; + } + + // Restrict "overflow" and "display" styles during box animations + if ( isBox && elem.nodeType === 1 ) { + + // Support: IE <=9 - 11, Edge 12 - 15 + // Record all 3 overflow attributes because IE does not infer the shorthand + // from identically-valued overflowX and overflowY and Edge just mirrors + // the overflowX value there. 
+ opts.overflow = [ style.overflow, style.overflowX, style.overflowY ]; + + // Identify a display type, preferring old show/hide data over the CSS cascade + restoreDisplay = dataShow && dataShow.display; + if ( restoreDisplay == null ) { + restoreDisplay = dataPriv.get( elem, "display" ); + } + display = jQuery.css( elem, "display" ); + if ( display === "none" ) { + if ( restoreDisplay ) { + display = restoreDisplay; + } else { + + // Get nonempty value(s) by temporarily forcing visibility + showHide( [ elem ], true ); + restoreDisplay = elem.style.display || restoreDisplay; + display = jQuery.css( elem, "display" ); + showHide( [ elem ] ); + } + } + + // Animate inline elements as inline-block + if ( display === "inline" || display === "inline-block" && restoreDisplay != null ) { + if ( jQuery.css( elem, "float" ) === "none" ) { + + // Restore the original display value at the end of pure show/hide animations + if ( !propTween ) { + anim.done( function() { + style.display = restoreDisplay; + } ); + if ( restoreDisplay == null ) { + display = style.display; + restoreDisplay = display === "none" ? "" : display; + } + } + style.display = "inline-block"; + } + } + } + + if ( opts.overflow ) { + style.overflow = "hidden"; + anim.always( function() { + style.overflow = opts.overflow[ 0 ]; + style.overflowX = opts.overflow[ 1 ]; + style.overflowY = opts.overflow[ 2 ]; + } ); + } + + // Implement show/hide animations + propTween = false; + for ( prop in orig ) { + + // General show/hide setup for this element animation + if ( !propTween ) { + if ( dataShow ) { + if ( "hidden" in dataShow ) { + hidden = dataShow.hidden; + } + } else { + dataShow = dataPriv.access( elem, "fxshow", { display: restoreDisplay } ); + } + + // Store hidden/visible for toggle so `.stop().toggle()` "reverses" + if ( toggle ) { + dataShow.hidden = !hidden; + } + + // Show elements before animating them + if ( hidden ) { + showHide( [ elem ], true ); + } + + /* eslint-disable no-loop-func */ + + anim.done( function() { + + /* eslint-enable no-loop-func */ + + // The final step of a "hide" animation is actually hiding the element + if ( !hidden ) { + showHide( [ elem ] ); + } + dataPriv.remove( elem, "fxshow" ); + for ( prop in orig ) { + jQuery.style( elem, prop, orig[ prop ] ); + } + } ); + } + + // Per-property setup + propTween = createTween( hidden ? dataShow[ prop ] : 0, prop, anim ); + if ( !( prop in dataShow ) ) { + dataShow[ prop ] = propTween.start; + if ( hidden ) { + propTween.end = propTween.start; + propTween.start = 0; + } + } + } +} + +function propFilter( props, specialEasing ) { + var index, name, easing, value, hooks; + + // camelCase, specialEasing and expand cssHook pass + for ( index in props ) { + name = camelCase( index ); + easing = specialEasing[ name ]; + value = props[ index ]; + if ( Array.isArray( value ) ) { + easing = value[ 1 ]; + value = props[ index ] = value[ 0 ]; + } + + if ( index !== name ) { + props[ name ] = value; + delete props[ index ]; + } + + hooks = jQuery.cssHooks[ name ]; + if ( hooks && "expand" in hooks ) { + value = hooks.expand( value ); + delete props[ name ]; + + // Not quite $.extend, this won't overwrite existing keys. 
+ // Reusing 'index' because we have the correct "name" + for ( index in value ) { + if ( !( index in props ) ) { + props[ index ] = value[ index ]; + specialEasing[ index ] = easing; + } + } + } else { + specialEasing[ name ] = easing; + } + } +} + +function Animation( elem, properties, options ) { + var result, + stopped, + index = 0, + length = Animation.prefilters.length, + deferred = jQuery.Deferred().always( function() { + + // Don't match elem in the :animated selector + delete tick.elem; + } ), + tick = function() { + if ( stopped ) { + return false; + } + var currentTime = fxNow || createFxNow(), + remaining = Math.max( 0, animation.startTime + animation.duration - currentTime ), + + // Support: Android 2.3 only + // Archaic crash bug won't allow us to use `1 - ( 0.5 || 0 )` (#12497) + temp = remaining / animation.duration || 0, + percent = 1 - temp, + index = 0, + length = animation.tweens.length; + + for ( ; index < length; index++ ) { + animation.tweens[ index ].run( percent ); + } + + deferred.notifyWith( elem, [ animation, percent, remaining ] ); + + // If there's more to do, yield + if ( percent < 1 && length ) { + return remaining; + } + + // If this was an empty animation, synthesize a final progress notification + if ( !length ) { + deferred.notifyWith( elem, [ animation, 1, 0 ] ); + } + + // Resolve the animation and report its conclusion + deferred.resolveWith( elem, [ animation ] ); + return false; + }, + animation = deferred.promise( { + elem: elem, + props: jQuery.extend( {}, properties ), + opts: jQuery.extend( true, { + specialEasing: {}, + easing: jQuery.easing._default + }, options ), + originalProperties: properties, + originalOptions: options, + startTime: fxNow || createFxNow(), + duration: options.duration, + tweens: [], + createTween: function( prop, end ) { + var tween = jQuery.Tween( elem, animation.opts, prop, end, + animation.opts.specialEasing[ prop ] || animation.opts.easing ); + animation.tweens.push( tween ); + return tween; + }, + stop: function( gotoEnd ) { + var index = 0, + + // If we are going to the end, we want to run all the tweens + // otherwise we skip this part + length = gotoEnd ? 
animation.tweens.length : 0; + if ( stopped ) { + return this; + } + stopped = true; + for ( ; index < length; index++ ) { + animation.tweens[ index ].run( 1 ); + } + + // Resolve when we played the last frame; otherwise, reject + if ( gotoEnd ) { + deferred.notifyWith( elem, [ animation, 1, 0 ] ); + deferred.resolveWith( elem, [ animation, gotoEnd ] ); + } else { + deferred.rejectWith( elem, [ animation, gotoEnd ] ); + } + return this; + } + } ), + props = animation.props; + + propFilter( props, animation.opts.specialEasing ); + + for ( ; index < length; index++ ) { + result = Animation.prefilters[ index ].call( animation, elem, props, animation.opts ); + if ( result ) { + if ( isFunction( result.stop ) ) { + jQuery._queueHooks( animation.elem, animation.opts.queue ).stop = + result.stop.bind( result ); + } + return result; + } + } + + jQuery.map( props, createTween, animation ); + + if ( isFunction( animation.opts.start ) ) { + animation.opts.start.call( elem, animation ); + } + + // Attach callbacks from options + animation + .progress( animation.opts.progress ) + .done( animation.opts.done, animation.opts.complete ) + .fail( animation.opts.fail ) + .always( animation.opts.always ); + + jQuery.fx.timer( + jQuery.extend( tick, { + elem: elem, + anim: animation, + queue: animation.opts.queue + } ) + ); + + return animation; +} + +jQuery.Animation = jQuery.extend( Animation, { + + tweeners: { + "*": [ function( prop, value ) { + var tween = this.createTween( prop, value ); + adjustCSS( tween.elem, prop, rcssNum.exec( value ), tween ); + return tween; + } ] + }, + + tweener: function( props, callback ) { + if ( isFunction( props ) ) { + callback = props; + props = [ "*" ]; + } else { + props = props.match( rnothtmlwhite ); + } + + var prop, + index = 0, + length = props.length; + + for ( ; index < length; index++ ) { + prop = props[ index ]; + Animation.tweeners[ prop ] = Animation.tweeners[ prop ] || []; + Animation.tweeners[ prop ].unshift( callback ); + } + }, + + prefilters: [ defaultPrefilter ], + + prefilter: function( callback, prepend ) { + if ( prepend ) { + Animation.prefilters.unshift( callback ); + } else { + Animation.prefilters.push( callback ); + } + } +} ); + +jQuery.speed = function( speed, easing, fn ) { + var opt = speed && typeof speed === "object" ? 
jQuery.extend( {}, speed ) : { + complete: fn || !fn && easing || + isFunction( speed ) && speed, + duration: speed, + easing: fn && easing || easing && !isFunction( easing ) && easing + }; + + // Go to the end state if fx are off + if ( jQuery.fx.off ) { + opt.duration = 0; + + } else { + if ( typeof opt.duration !== "number" ) { + if ( opt.duration in jQuery.fx.speeds ) { + opt.duration = jQuery.fx.speeds[ opt.duration ]; + + } else { + opt.duration = jQuery.fx.speeds._default; + } + } + } + + // Normalize opt.queue - true/undefined/null -> "fx" + if ( opt.queue == null || opt.queue === true ) { + opt.queue = "fx"; + } + + // Queueing + opt.old = opt.complete; + + opt.complete = function() { + if ( isFunction( opt.old ) ) { + opt.old.call( this ); + } + + if ( opt.queue ) { + jQuery.dequeue( this, opt.queue ); + } + }; + + return opt; +}; + +jQuery.fn.extend( { + fadeTo: function( speed, to, easing, callback ) { + + // Show any hidden elements after setting opacity to 0 + return this.filter( isHiddenWithinTree ).css( "opacity", 0 ).show() + + // Animate to the value specified + .end().animate( { opacity: to }, speed, easing, callback ); + }, + animate: function( prop, speed, easing, callback ) { + var empty = jQuery.isEmptyObject( prop ), + optall = jQuery.speed( speed, easing, callback ), + doAnimation = function() { + + // Operate on a copy of prop so per-property easing won't be lost + var anim = Animation( this, jQuery.extend( {}, prop ), optall ); + + // Empty animations, or finishing resolves immediately + if ( empty || dataPriv.get( this, "finish" ) ) { + anim.stop( true ); + } + }; + + doAnimation.finish = doAnimation; + + return empty || optall.queue === false ? + this.each( doAnimation ) : + this.queue( optall.queue, doAnimation ); + }, + stop: function( type, clearQueue, gotoEnd ) { + var stopQueue = function( hooks ) { + var stop = hooks.stop; + delete hooks.stop; + stop( gotoEnd ); + }; + + if ( typeof type !== "string" ) { + gotoEnd = clearQueue; + clearQueue = type; + type = undefined; + } + if ( clearQueue ) { + this.queue( type || "fx", [] ); + } + + return this.each( function() { + var dequeue = true, + index = type != null && type + "queueHooks", + timers = jQuery.timers, + data = dataPriv.get( this ); + + if ( index ) { + if ( data[ index ] && data[ index ].stop ) { + stopQueue( data[ index ] ); + } + } else { + for ( index in data ) { + if ( data[ index ] && data[ index ].stop && rrun.test( index ) ) { + stopQueue( data[ index ] ); + } + } + } + + for ( index = timers.length; index--; ) { + if ( timers[ index ].elem === this && + ( type == null || timers[ index ].queue === type ) ) { + + timers[ index ].anim.stop( gotoEnd ); + dequeue = false; + timers.splice( index, 1 ); + } + } + + // Start the next in the queue if the last step wasn't forced. + // Timers currently will call their complete callbacks, which + // will dequeue but only if they were gotoEnd. + if ( dequeue || !gotoEnd ) { + jQuery.dequeue( this, type ); + } + } ); + }, + finish: function( type ) { + if ( type !== false ) { + type = type || "fx"; + } + return this.each( function() { + var index, + data = dataPriv.get( this ), + queue = data[ type + "queue" ], + hooks = data[ type + "queueHooks" ], + timers = jQuery.timers, + length = queue ? 
queue.length : 0; + + // Enable finishing flag on private data + data.finish = true; + + // Empty the queue first + jQuery.queue( this, type, [] ); + + if ( hooks && hooks.stop ) { + hooks.stop.call( this, true ); + } + + // Look for any active animations, and finish them + for ( index = timers.length; index--; ) { + if ( timers[ index ].elem === this && timers[ index ].queue === type ) { + timers[ index ].anim.stop( true ); + timers.splice( index, 1 ); + } + } + + // Look for any animations in the old queue and finish them + for ( index = 0; index < length; index++ ) { + if ( queue[ index ] && queue[ index ].finish ) { + queue[ index ].finish.call( this ); + } + } + + // Turn off finishing flag + delete data.finish; + } ); + } +} ); + +jQuery.each( [ "toggle", "show", "hide" ], function( _i, name ) { + var cssFn = jQuery.fn[ name ]; + jQuery.fn[ name ] = function( speed, easing, callback ) { + return speed == null || typeof speed === "boolean" ? + cssFn.apply( this, arguments ) : + this.animate( genFx( name, true ), speed, easing, callback ); + }; +} ); + +// Generate shortcuts for custom animations +jQuery.each( { + slideDown: genFx( "show" ), + slideUp: genFx( "hide" ), + slideToggle: genFx( "toggle" ), + fadeIn: { opacity: "show" }, + fadeOut: { opacity: "hide" }, + fadeToggle: { opacity: "toggle" } +}, function( name, props ) { + jQuery.fn[ name ] = function( speed, easing, callback ) { + return this.animate( props, speed, easing, callback ); + }; +} ); + +jQuery.timers = []; +jQuery.fx.tick = function() { + var timer, + i = 0, + timers = jQuery.timers; + + fxNow = Date.now(); + + for ( ; i < timers.length; i++ ) { + timer = timers[ i ]; + + // Run the timer and safely remove it when done (allowing for external removal) + if ( !timer() && timers[ i ] === timer ) { + timers.splice( i--, 1 ); + } + } + + if ( !timers.length ) { + jQuery.fx.stop(); + } + fxNow = undefined; +}; + +jQuery.fx.timer = function( timer ) { + jQuery.timers.push( timer ); + jQuery.fx.start(); +}; + +jQuery.fx.interval = 13; +jQuery.fx.start = function() { + if ( inProgress ) { + return; + } + + inProgress = true; + schedule(); +}; + +jQuery.fx.stop = function() { + inProgress = null; +}; + +jQuery.fx.speeds = { + slow: 600, + fast: 200, + + // Default speed + _default: 400 +}; + + +// Based off of the plugin by Clint Helfers, with permission. +// https://web.archive.org/web/20100324014747/http://blindsignals.com/index.php/2009/07/jquery-delay/ +jQuery.fn.delay = function( time, type ) { + time = jQuery.fx ? 
jQuery.fx.speeds[ time ] || time : time; + type = type || "fx"; + + return this.queue( type, function( next, hooks ) { + var timeout = window.setTimeout( next, time ); + hooks.stop = function() { + window.clearTimeout( timeout ); + }; + } ); +}; + + +( function() { + var input = document.createElement( "input" ), + select = document.createElement( "select" ), + opt = select.appendChild( document.createElement( "option" ) ); + + input.type = "checkbox"; + + // Support: Android <=4.3 only + // Default value for a checkbox should be "on" + support.checkOn = input.value !== ""; + + // Support: IE <=11 only + // Must access selectedIndex to make default options select + support.optSelected = opt.selected; + + // Support: IE <=11 only + // An input loses its value after becoming a radio + input = document.createElement( "input" ); + input.value = "t"; + input.type = "radio"; + support.radioValue = input.value === "t"; +} )(); + + +var boolHook, + attrHandle = jQuery.expr.attrHandle; + +jQuery.fn.extend( { + attr: function( name, value ) { + return access( this, jQuery.attr, name, value, arguments.length > 1 ); + }, + + removeAttr: function( name ) { + return this.each( function() { + jQuery.removeAttr( this, name ); + } ); + } +} ); + +jQuery.extend( { + attr: function( elem, name, value ) { + var ret, hooks, + nType = elem.nodeType; + + // Don't get/set attributes on text, comment and attribute nodes + if ( nType === 3 || nType === 8 || nType === 2 ) { + return; + } + + // Fallback to prop when attributes are not supported + if ( typeof elem.getAttribute === "undefined" ) { + return jQuery.prop( elem, name, value ); + } + + // Attribute hooks are determined by the lowercase version + // Grab necessary hook if one is defined + if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) { + hooks = jQuery.attrHooks[ name.toLowerCase() ] || + ( jQuery.expr.match.bool.test( name ) ? boolHook : undefined ); + } + + if ( value !== undefined ) { + if ( value === null ) { + jQuery.removeAttr( elem, name ); + return; + } + + if ( hooks && "set" in hooks && + ( ret = hooks.set( elem, value, name ) ) !== undefined ) { + return ret; + } + + elem.setAttribute( name, value + "" ); + return value; + } + + if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) { + return ret; + } + + ret = jQuery.find.attr( elem, name ); + + // Non-existent attributes return null, we normalize to undefined + return ret == null ? 
undefined : ret; + }, + + attrHooks: { + type: { + set: function( elem, value ) { + if ( !support.radioValue && value === "radio" && + nodeName( elem, "input" ) ) { + var val = elem.value; + elem.setAttribute( "type", value ); + if ( val ) { + elem.value = val; + } + return value; + } + } + } + }, + + removeAttr: function( elem, value ) { + var name, + i = 0, + + // Attribute names can contain non-HTML whitespace characters + // https://html.spec.whatwg.org/multipage/syntax.html#attributes-2 + attrNames = value && value.match( rnothtmlwhite ); + + if ( attrNames && elem.nodeType === 1 ) { + while ( ( name = attrNames[ i++ ] ) ) { + elem.removeAttribute( name ); + } + } + } +} ); + +// Hooks for boolean attributes +boolHook = { + set: function( elem, value, name ) { + if ( value === false ) { + + // Remove boolean attributes when set to false + jQuery.removeAttr( elem, name ); + } else { + elem.setAttribute( name, name ); + } + return name; + } +}; + +jQuery.each( jQuery.expr.match.bool.source.match( /\w+/g ), function( _i, name ) { + var getter = attrHandle[ name ] || jQuery.find.attr; + + attrHandle[ name ] = function( elem, name, isXML ) { + var ret, handle, + lowercaseName = name.toLowerCase(); + + if ( !isXML ) { + + // Avoid an infinite loop by temporarily removing this function from the getter + handle = attrHandle[ lowercaseName ]; + attrHandle[ lowercaseName ] = ret; + ret = getter( elem, name, isXML ) != null ? + lowercaseName : + null; + attrHandle[ lowercaseName ] = handle; + } + return ret; + }; +} ); + + + + +var rfocusable = /^(?:input|select|textarea|button)$/i, + rclickable = /^(?:a|area)$/i; + +jQuery.fn.extend( { + prop: function( name, value ) { + return access( this, jQuery.prop, name, value, arguments.length > 1 ); + }, + + removeProp: function( name ) { + return this.each( function() { + delete this[ jQuery.propFix[ name ] || name ]; + } ); + } +} ); + +jQuery.extend( { + prop: function( elem, name, value ) { + var ret, hooks, + nType = elem.nodeType; + + // Don't get/set properties on text, comment and attribute nodes + if ( nType === 3 || nType === 8 || nType === 2 ) { + return; + } + + if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) { + + // Fix name and attach hooks + name = jQuery.propFix[ name ] || name; + hooks = jQuery.propHooks[ name ]; + } + + if ( value !== undefined ) { + if ( hooks && "set" in hooks && + ( ret = hooks.set( elem, value, name ) ) !== undefined ) { + return ret; + } + + return ( elem[ name ] = value ); + } + + if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) { + return ret; + } + + return elem[ name ]; + }, + + propHooks: { + tabIndex: { + get: function( elem ) { + + // Support: IE <=9 - 11 only + // elem.tabIndex doesn't always return the + // correct value when it hasn't been explicitly set + // https://web.archive.org/web/20141116233347/http://fluidproject.org/blog/2008/01/09/getting-setting-and-removing-tabindex-values-with-javascript/ + // Use proper attribute retrieval(#12072) + var tabindex = jQuery.find.attr( elem, "tabindex" ); + + if ( tabindex ) { + return parseInt( tabindex, 10 ); + } + + if ( + rfocusable.test( elem.nodeName ) || + rclickable.test( elem.nodeName ) && + elem.href + ) { + return 0; + } + + return -1; + } + } + }, + + propFix: { + "for": "htmlFor", + "class": "className" + } +} ); + +// Support: IE <=11 only +// Accessing the selectedIndex property +// forces the browser to respect setting selected +// on the option +// The getter ensures a default option is selected +// when in an 
optgroup +// eslint rule "no-unused-expressions" is disabled for this code +// since it considers such accessions noop +if ( !support.optSelected ) { + jQuery.propHooks.selected = { + get: function( elem ) { + + /* eslint no-unused-expressions: "off" */ + + var parent = elem.parentNode; + if ( parent && parent.parentNode ) { + parent.parentNode.selectedIndex; + } + return null; + }, + set: function( elem ) { + + /* eslint no-unused-expressions: "off" */ + + var parent = elem.parentNode; + if ( parent ) { + parent.selectedIndex; + + if ( parent.parentNode ) { + parent.parentNode.selectedIndex; + } + } + } + }; +} + +jQuery.each( [ + "tabIndex", + "readOnly", + "maxLength", + "cellSpacing", + "cellPadding", + "rowSpan", + "colSpan", + "useMap", + "frameBorder", + "contentEditable" +], function() { + jQuery.propFix[ this.toLowerCase() ] = this; +} ); + + + + + // Strip and collapse whitespace according to HTML spec + // https://infra.spec.whatwg.org/#strip-and-collapse-ascii-whitespace + function stripAndCollapse( value ) { + var tokens = value.match( rnothtmlwhite ) || []; + return tokens.join( " " ); + } + + +function getClass( elem ) { + return elem.getAttribute && elem.getAttribute( "class" ) || ""; +} + +function classesToArray( value ) { + if ( Array.isArray( value ) ) { + return value; + } + if ( typeof value === "string" ) { + return value.match( rnothtmlwhite ) || []; + } + return []; +} + +jQuery.fn.extend( { + addClass: function( value ) { + var classes, elem, cur, curValue, clazz, j, finalValue, + i = 0; + + if ( isFunction( value ) ) { + return this.each( function( j ) { + jQuery( this ).addClass( value.call( this, j, getClass( this ) ) ); + } ); + } + + classes = classesToArray( value ); + + if ( classes.length ) { + while ( ( elem = this[ i++ ] ) ) { + curValue = getClass( elem ); + cur = elem.nodeType === 1 && ( " " + stripAndCollapse( curValue ) + " " ); + + if ( cur ) { + j = 0; + while ( ( clazz = classes[ j++ ] ) ) { + if ( cur.indexOf( " " + clazz + " " ) < 0 ) { + cur += clazz + " "; + } + } + + // Only assign if different to avoid unneeded rendering. + finalValue = stripAndCollapse( cur ); + if ( curValue !== finalValue ) { + elem.setAttribute( "class", finalValue ); + } + } + } + } + + return this; + }, + + removeClass: function( value ) { + var classes, elem, cur, curValue, clazz, j, finalValue, + i = 0; + + if ( isFunction( value ) ) { + return this.each( function( j ) { + jQuery( this ).removeClass( value.call( this, j, getClass( this ) ) ); + } ); + } + + if ( !arguments.length ) { + return this.attr( "class", "" ); + } + + classes = classesToArray( value ); + + if ( classes.length ) { + while ( ( elem = this[ i++ ] ) ) { + curValue = getClass( elem ); + + // This expression is here for better compressibility (see addClass) + cur = elem.nodeType === 1 && ( " " + stripAndCollapse( curValue ) + " " ); + + if ( cur ) { + j = 0; + while ( ( clazz = classes[ j++ ] ) ) { + + // Remove *all* instances + while ( cur.indexOf( " " + clazz + " " ) > -1 ) { + cur = cur.replace( " " + clazz + " ", " " ); + } + } + + // Only assign if different to avoid unneeded rendering. + finalValue = stripAndCollapse( cur ); + if ( curValue !== finalValue ) { + elem.setAttribute( "class", finalValue ); + } + } + } + } + + return this; + }, + + toggleClass: function( value, stateVal ) { + var type = typeof value, + isValidValue = type === "string" || Array.isArray( value ); + + if ( typeof stateVal === "boolean" && isValidValue ) { + return stateVal ? 
this.addClass( value ) : this.removeClass( value ); + } + + if ( isFunction( value ) ) { + return this.each( function( i ) { + jQuery( this ).toggleClass( + value.call( this, i, getClass( this ), stateVal ), + stateVal + ); + } ); + } + + return this.each( function() { + var className, i, self, classNames; + + if ( isValidValue ) { + + // Toggle individual class names + i = 0; + self = jQuery( this ); + classNames = classesToArray( value ); + + while ( ( className = classNames[ i++ ] ) ) { + + // Check each className given, space separated list + if ( self.hasClass( className ) ) { + self.removeClass( className ); + } else { + self.addClass( className ); + } + } + + // Toggle whole class name + } else if ( value === undefined || type === "boolean" ) { + className = getClass( this ); + if ( className ) { + + // Store className if set + dataPriv.set( this, "__className__", className ); + } + + // If the element has a class name or if we're passed `false`, + // then remove the whole classname (if there was one, the above saved it). + // Otherwise bring back whatever was previously saved (if anything), + // falling back to the empty string if nothing was stored. + if ( this.setAttribute ) { + this.setAttribute( "class", + className || value === false ? + "" : + dataPriv.get( this, "__className__" ) || "" + ); + } + } + } ); + }, + + hasClass: function( selector ) { + var className, elem, + i = 0; + + className = " " + selector + " "; + while ( ( elem = this[ i++ ] ) ) { + if ( elem.nodeType === 1 && + ( " " + stripAndCollapse( getClass( elem ) ) + " " ).indexOf( className ) > -1 ) { + return true; + } + } + + return false; + } +} ); + + + + +var rreturn = /\r/g; + +jQuery.fn.extend( { + val: function( value ) { + var hooks, ret, valueIsFunction, + elem = this[ 0 ]; + + if ( !arguments.length ) { + if ( elem ) { + hooks = jQuery.valHooks[ elem.type ] || + jQuery.valHooks[ elem.nodeName.toLowerCase() ]; + + if ( hooks && + "get" in hooks && + ( ret = hooks.get( elem, "value" ) ) !== undefined + ) { + return ret; + } + + ret = elem.value; + + // Handle most common string cases + if ( typeof ret === "string" ) { + return ret.replace( rreturn, "" ); + } + + // Handle cases where value is null/undef or number + return ret == null ? "" : ret; + } + + return; + } + + valueIsFunction = isFunction( value ); + + return this.each( function( i ) { + var val; + + if ( this.nodeType !== 1 ) { + return; + } + + if ( valueIsFunction ) { + val = value.call( this, i, jQuery( this ).val() ); + } else { + val = value; + } + + // Treat null/undefined as ""; convert numbers to string + if ( val == null ) { + val = ""; + + } else if ( typeof val === "number" ) { + val += ""; + + } else if ( Array.isArray( val ) ) { + val = jQuery.map( val, function( value ) { + return value == null ? "" : value + ""; + } ); + } + + hooks = jQuery.valHooks[ this.type ] || jQuery.valHooks[ this.nodeName.toLowerCase() ]; + + // If set returns undefined, fall back to normal setting + if ( !hooks || !( "set" in hooks ) || hooks.set( this, val, "value" ) === undefined ) { + this.value = val; + } + } ); + } +} ); + +jQuery.extend( { + valHooks: { + option: { + get: function( elem ) { + + var val = jQuery.find.attr( elem, "value" ); + return val != null ? 
+ val : + + // Support: IE <=10 - 11 only + // option.text throws exceptions (#14686, #14858) + // Strip and collapse whitespace + // https://html.spec.whatwg.org/#strip-and-collapse-whitespace + stripAndCollapse( jQuery.text( elem ) ); + } + }, + select: { + get: function( elem ) { + var value, option, i, + options = elem.options, + index = elem.selectedIndex, + one = elem.type === "select-one", + values = one ? null : [], + max = one ? index + 1 : options.length; + + if ( index < 0 ) { + i = max; + + } else { + i = one ? index : 0; + } + + // Loop through all the selected options + for ( ; i < max; i++ ) { + option = options[ i ]; + + // Support: IE <=9 only + // IE8-9 doesn't update selected after form reset (#2551) + if ( ( option.selected || i === index ) && + + // Don't return options that are disabled or in a disabled optgroup + !option.disabled && + ( !option.parentNode.disabled || + !nodeName( option.parentNode, "optgroup" ) ) ) { + + // Get the specific value for the option + value = jQuery( option ).val(); + + // We don't need an array for one selects + if ( one ) { + return value; + } + + // Multi-Selects return an array + values.push( value ); + } + } + + return values; + }, + + set: function( elem, value ) { + var optionSet, option, + options = elem.options, + values = jQuery.makeArray( value ), + i = options.length; + + while ( i-- ) { + option = options[ i ]; + + /* eslint-disable no-cond-assign */ + + if ( option.selected = + jQuery.inArray( jQuery.valHooks.option.get( option ), values ) > -1 + ) { + optionSet = true; + } + + /* eslint-enable no-cond-assign */ + } + + // Force browsers to behave consistently when non-matching value is set + if ( !optionSet ) { + elem.selectedIndex = -1; + } + return values; + } + } + } +} ); + +// Radios and checkboxes getter/setter +jQuery.each( [ "radio", "checkbox" ], function() { + jQuery.valHooks[ this ] = { + set: function( elem, value ) { + if ( Array.isArray( value ) ) { + return ( elem.checked = jQuery.inArray( jQuery( elem ).val(), value ) > -1 ); + } + } + }; + if ( !support.checkOn ) { + jQuery.valHooks[ this ].get = function( elem ) { + return elem.getAttribute( "value" ) === null ? "on" : elem.value; + }; + } +} ); + + + + +// Return jQuery for attributes-only inclusion + + +support.focusin = "onfocusin" in window; + + +var rfocusMorph = /^(?:focusinfocus|focusoutblur)$/, + stopPropagationCallback = function( e ) { + e.stopPropagation(); + }; + +jQuery.extend( jQuery.event, { + + trigger: function( event, data, elem, onlyHandlers ) { + + var i, cur, tmp, bubbleType, ontype, handle, special, lastElement, + eventPath = [ elem || document ], + type = hasOwn.call( event, "type" ) ? event.type : event, + namespaces = hasOwn.call( event, "namespace" ) ? event.namespace.split( "." ) : []; + + cur = lastElement = tmp = elem = elem || document; + + // Don't do events on text and comment nodes + if ( elem.nodeType === 3 || elem.nodeType === 8 ) { + return; + } + + // focus/blur morphs to focusin/out; ensure we're not firing them right now + if ( rfocusMorph.test( type + jQuery.event.triggered ) ) { + return; + } + + if ( type.indexOf( "." ) > -1 ) { + + // Namespaced trigger; create a regexp to match event type in handle() + namespaces = type.split( "." ); + type = namespaces.shift(); + namespaces.sort(); + } + ontype = type.indexOf( ":" ) < 0 && "on" + type; + + // Caller can pass in a jQuery.Event object, Object, or just an event type string + event = event[ jQuery.expando ] ? 
+ event : + new jQuery.Event( type, typeof event === "object" && event ); + + // Trigger bitmask: & 1 for native handlers; & 2 for jQuery (always true) + event.isTrigger = onlyHandlers ? 2 : 3; + event.namespace = namespaces.join( "." ); + event.rnamespace = event.namespace ? + new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ) : + null; + + // Clean up the event in case it is being reused + event.result = undefined; + if ( !event.target ) { + event.target = elem; + } + + // Clone any incoming data and prepend the event, creating the handler arg list + data = data == null ? + [ event ] : + jQuery.makeArray( data, [ event ] ); + + // Allow special events to draw outside the lines + special = jQuery.event.special[ type ] || {}; + if ( !onlyHandlers && special.trigger && special.trigger.apply( elem, data ) === false ) { + return; + } + + // Determine event propagation path in advance, per W3C events spec (#9951) + // Bubble up to document, then to window; watch for a global ownerDocument var (#9724) + if ( !onlyHandlers && !special.noBubble && !isWindow( elem ) ) { + + bubbleType = special.delegateType || type; + if ( !rfocusMorph.test( bubbleType + type ) ) { + cur = cur.parentNode; + } + for ( ; cur; cur = cur.parentNode ) { + eventPath.push( cur ); + tmp = cur; + } + + // Only add window if we got to document (e.g., not plain obj or detached DOM) + if ( tmp === ( elem.ownerDocument || document ) ) { + eventPath.push( tmp.defaultView || tmp.parentWindow || window ); + } + } + + // Fire handlers on the event path + i = 0; + while ( ( cur = eventPath[ i++ ] ) && !event.isPropagationStopped() ) { + lastElement = cur; + event.type = i > 1 ? + bubbleType : + special.bindType || type; + + // jQuery handler + handle = ( dataPriv.get( cur, "events" ) || Object.create( null ) )[ event.type ] && + dataPriv.get( cur, "handle" ); + if ( handle ) { + handle.apply( cur, data ); + } + + // Native handler + handle = ontype && cur[ ontype ]; + if ( handle && handle.apply && acceptData( cur ) ) { + event.result = handle.apply( cur, data ); + if ( event.result === false ) { + event.preventDefault(); + } + } + } + event.type = type; + + // If nobody prevented the default action, do it now + if ( !onlyHandlers && !event.isDefaultPrevented() ) { + + if ( ( !special._default || + special._default.apply( eventPath.pop(), data ) === false ) && + acceptData( elem ) ) { + + // Call a native DOM method on the target with the same name as the event. 
+ // Don't do default actions on window, that's where global variables be (#6170) + if ( ontype && isFunction( elem[ type ] ) && !isWindow( elem ) ) { + + // Don't re-trigger an onFOO event when we call its FOO() method + tmp = elem[ ontype ]; + + if ( tmp ) { + elem[ ontype ] = null; + } + + // Prevent re-triggering of the same event, since we already bubbled it above + jQuery.event.triggered = type; + + if ( event.isPropagationStopped() ) { + lastElement.addEventListener( type, stopPropagationCallback ); + } + + elem[ type ](); + + if ( event.isPropagationStopped() ) { + lastElement.removeEventListener( type, stopPropagationCallback ); + } + + jQuery.event.triggered = undefined; + + if ( tmp ) { + elem[ ontype ] = tmp; + } + } + } + } + + return event.result; + }, + + // Piggyback on a donor event to simulate a different one + // Used only for `focus(in | out)` events + simulate: function( type, elem, event ) { + var e = jQuery.extend( + new jQuery.Event(), + event, + { + type: type, + isSimulated: true + } + ); + + jQuery.event.trigger( e, null, elem ); + } + +} ); + +jQuery.fn.extend( { + + trigger: function( type, data ) { + return this.each( function() { + jQuery.event.trigger( type, data, this ); + } ); + }, + triggerHandler: function( type, data ) { + var elem = this[ 0 ]; + if ( elem ) { + return jQuery.event.trigger( type, data, elem, true ); + } + } +} ); + + +// Support: Firefox <=44 +// Firefox doesn't have focus(in | out) events +// Related ticket - https://bugzilla.mozilla.org/show_bug.cgi?id=687787 +// +// Support: Chrome <=48 - 49, Safari <=9.0 - 9.1 +// focus(in | out) events fire after focus & blur events, +// which is spec violation - http://www.w3.org/TR/DOM-Level-3-Events/#events-focusevent-event-order +// Related ticket - https://bugs.chromium.org/p/chromium/issues/detail?id=449857 +if ( !support.focusin ) { + jQuery.each( { focus: "focusin", blur: "focusout" }, function( orig, fix ) { + + // Attach a single capturing handler on the document while someone wants focusin/focusout + var handler = function( event ) { + jQuery.event.simulate( fix, event.target, jQuery.event.fix( event ) ); + }; + + jQuery.event.special[ fix ] = { + setup: function() { + + // Handle: regular nodes (via `this.ownerDocument`), window + // (via `this.document`) & document (via `this`). + var doc = this.ownerDocument || this.document || this, + attaches = dataPriv.access( doc, fix ); + + if ( !attaches ) { + doc.addEventListener( orig, handler, true ); + } + dataPriv.access( doc, fix, ( attaches || 0 ) + 1 ); + }, + teardown: function() { + var doc = this.ownerDocument || this.document || this, + attaches = dataPriv.access( doc, fix ) - 1; + + if ( !attaches ) { + doc.removeEventListener( orig, handler, true ); + dataPriv.remove( doc, fix ); + + } else { + dataPriv.access( doc, fix, attaches ); + } + } + }; + } ); +} +var location = window.location; + +var nonce = { guid: Date.now() }; + +var rquery = ( /\?/ ); + + + +// Cross-browser xml parsing +jQuery.parseXML = function( data ) { + var xml, parserErrorElem; + if ( !data || typeof data !== "string" ) { + return null; + } + + // Support: IE 9 - 11 only + // IE throws on parseFromString with invalid input. + try { + xml = ( new window.DOMParser() ).parseFromString( data, "text/xml" ); + } catch ( e ) {} + + parserErrorElem = xml && xml.getElementsByTagName( "parsererror" )[ 0 ]; + if ( !xml || parserErrorElem ) { + jQuery.error( "Invalid XML: " + ( + parserErrorElem ? 
+ jQuery.map( parserErrorElem.childNodes, function( el ) { + return el.textContent; + } ).join( "\n" ) : + data + ) ); + } + return xml; +}; + + +var + rbracket = /\[\]$/, + rCRLF = /\r?\n/g, + rsubmitterTypes = /^(?:submit|button|image|reset|file)$/i, + rsubmittable = /^(?:input|select|textarea|keygen)/i; + +function buildParams( prefix, obj, traditional, add ) { + var name; + + if ( Array.isArray( obj ) ) { + + // Serialize array item. + jQuery.each( obj, function( i, v ) { + if ( traditional || rbracket.test( prefix ) ) { + + // Treat each array item as a scalar. + add( prefix, v ); + + } else { + + // Item is non-scalar (array or object), encode its numeric index. + buildParams( + prefix + "[" + ( typeof v === "object" && v != null ? i : "" ) + "]", + v, + traditional, + add + ); + } + } ); + + } else if ( !traditional && toType( obj ) === "object" ) { + + // Serialize object item. + for ( name in obj ) { + buildParams( prefix + "[" + name + "]", obj[ name ], traditional, add ); + } + + } else { + + // Serialize scalar item. + add( prefix, obj ); + } +} + +// Serialize an array of form elements or a set of +// key/values into a query string +jQuery.param = function( a, traditional ) { + var prefix, + s = [], + add = function( key, valueOrFunction ) { + + // If value is a function, invoke it and use its return value + var value = isFunction( valueOrFunction ) ? + valueOrFunction() : + valueOrFunction; + + s[ s.length ] = encodeURIComponent( key ) + "=" + + encodeURIComponent( value == null ? "" : value ); + }; + + if ( a == null ) { + return ""; + } + + // If an array was passed in, assume that it is an array of form elements. + if ( Array.isArray( a ) || ( a.jquery && !jQuery.isPlainObject( a ) ) ) { + + // Serialize the form elements + jQuery.each( a, function() { + add( this.name, this.value ); + } ); + + } else { + + // If traditional, encode the "old" way (the way 1.3.2 or older + // did it), otherwise encode params recursively. + for ( prefix in a ) { + buildParams( prefix, a[ prefix ], traditional, add ); + } + } + + // Return the resulting serialization + return s.join( "&" ); +}; + +jQuery.fn.extend( { + serialize: function() { + return jQuery.param( this.serializeArray() ); + }, + serializeArray: function() { + return this.map( function() { + + // Can add propHook for "elements" to filter or add form elements + var elements = jQuery.prop( this, "elements" ); + return elements ? 
jQuery.makeArray( elements ) : this; + } ).filter( function() { + var type = this.type; + + // Use .is( ":disabled" ) so that fieldset[disabled] works + return this.name && !jQuery( this ).is( ":disabled" ) && + rsubmittable.test( this.nodeName ) && !rsubmitterTypes.test( type ) && + ( this.checked || !rcheckableType.test( type ) ); + } ).map( function( _i, elem ) { + var val = jQuery( this ).val(); + + if ( val == null ) { + return null; + } + + if ( Array.isArray( val ) ) { + return jQuery.map( val, function( val ) { + return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; + } ); + } + + return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; + } ).get(); + } +} ); + + +var + r20 = /%20/g, + rhash = /#.*$/, + rantiCache = /([?&])_=[^&]*/, + rheaders = /^(.*?):[ \t]*([^\r\n]*)$/mg, + + // #7653, #8125, #8152: local protocol detection + rlocalProtocol = /^(?:about|app|app-storage|.+-extension|file|res|widget):$/, + rnoContent = /^(?:GET|HEAD)$/, + rprotocol = /^\/\//, + + /* Prefilters + * 1) They are useful to introduce custom dataTypes (see ajax/jsonp.js for an example) + * 2) These are called: + * - BEFORE asking for a transport + * - AFTER param serialization (s.data is a string if s.processData is true) + * 3) key is the dataType + * 4) the catchall symbol "*" can be used + * 5) execution will start with transport dataType and THEN continue down to "*" if needed + */ + prefilters = {}, + + /* Transports bindings + * 1) key is the dataType + * 2) the catchall symbol "*" can be used + * 3) selection will start with transport dataType and THEN go to "*" if needed + */ + transports = {}, + + // Avoid comment-prolog char sequence (#10098); must appease lint and evade compression + allTypes = "*/".concat( "*" ), + + // Anchor tag for parsing the document origin + originAnchor = document.createElement( "a" ); + +originAnchor.href = location.href; + +// Base "constructor" for jQuery.ajaxPrefilter and jQuery.ajaxTransport +function addToPrefiltersOrTransports( structure ) { + + // dataTypeExpression is optional and defaults to "*" + return function( dataTypeExpression, func ) { + + if ( typeof dataTypeExpression !== "string" ) { + func = dataTypeExpression; + dataTypeExpression = "*"; + } + + var dataType, + i = 0, + dataTypes = dataTypeExpression.toLowerCase().match( rnothtmlwhite ) || []; + + if ( isFunction( func ) ) { + + // For each dataType in the dataTypeExpression + while ( ( dataType = dataTypes[ i++ ] ) ) { + + // Prepend if requested + if ( dataType[ 0 ] === "+" ) { + dataType = dataType.slice( 1 ) || "*"; + ( structure[ dataType ] = structure[ dataType ] || [] ).unshift( func ); + + // Otherwise append + } else { + ( structure[ dataType ] = structure[ dataType ] || [] ).push( func ); + } + } + } + }; +} + +// Base inspection function for prefilters and transports +function inspectPrefiltersOrTransports( structure, options, originalOptions, jqXHR ) { + + var inspected = {}, + seekingTransport = ( structure === transports ); + + function inspect( dataType ) { + var selected; + inspected[ dataType ] = true; + jQuery.each( structure[ dataType ] || [], function( _, prefilterOrFactory ) { + var dataTypeOrTransport = prefilterOrFactory( options, originalOptions, jqXHR ); + if ( typeof dataTypeOrTransport === "string" && + !seekingTransport && !inspected[ dataTypeOrTransport ] ) { + + options.dataTypes.unshift( dataTypeOrTransport ); + inspect( dataTypeOrTransport ); + return false; + } else if ( seekingTransport ) { + return !( selected = dataTypeOrTransport ); + } + } 
); + return selected; + } + + return inspect( options.dataTypes[ 0 ] ) || !inspected[ "*" ] && inspect( "*" ); +} + +// A special extend for ajax options +// that takes "flat" options (not to be deep extended) +// Fixes #9887 +function ajaxExtend( target, src ) { + var key, deep, + flatOptions = jQuery.ajaxSettings.flatOptions || {}; + + for ( key in src ) { + if ( src[ key ] !== undefined ) { + ( flatOptions[ key ] ? target : ( deep || ( deep = {} ) ) )[ key ] = src[ key ]; + } + } + if ( deep ) { + jQuery.extend( true, target, deep ); + } + + return target; +} + +/* Handles responses to an ajax request: + * - finds the right dataType (mediates between content-type and expected dataType) + * - returns the corresponding response + */ +function ajaxHandleResponses( s, jqXHR, responses ) { + + var ct, type, finalDataType, firstDataType, + contents = s.contents, + dataTypes = s.dataTypes; + + // Remove auto dataType and get content-type in the process + while ( dataTypes[ 0 ] === "*" ) { + dataTypes.shift(); + if ( ct === undefined ) { + ct = s.mimeType || jqXHR.getResponseHeader( "Content-Type" ); + } + } + + // Check if we're dealing with a known content-type + if ( ct ) { + for ( type in contents ) { + if ( contents[ type ] && contents[ type ].test( ct ) ) { + dataTypes.unshift( type ); + break; + } + } + } + + // Check to see if we have a response for the expected dataType + if ( dataTypes[ 0 ] in responses ) { + finalDataType = dataTypes[ 0 ]; + } else { + + // Try convertible dataTypes + for ( type in responses ) { + if ( !dataTypes[ 0 ] || s.converters[ type + " " + dataTypes[ 0 ] ] ) { + finalDataType = type; + break; + } + if ( !firstDataType ) { + firstDataType = type; + } + } + + // Or just use first one + finalDataType = finalDataType || firstDataType; + } + + // If we found a dataType + // We add the dataType to the list if needed + // and return the corresponding response + if ( finalDataType ) { + if ( finalDataType !== dataTypes[ 0 ] ) { + dataTypes.unshift( finalDataType ); + } + return responses[ finalDataType ]; + } +} + +/* Chain conversions given the request and the original response + * Also sets the responseXXX fields on the jqXHR instance + */ +function ajaxConvert( s, response, jqXHR, isSuccess ) { + var conv2, current, conv, tmp, prev, + converters = {}, + + // Work with a copy of dataTypes in case we need to modify it for conversion + dataTypes = s.dataTypes.slice(); + + // Create converters map with lowercased keys + if ( dataTypes[ 1 ] ) { + for ( conv in s.converters ) { + converters[ conv.toLowerCase() ] = s.converters[ conv ]; + } + } + + current = dataTypes.shift(); + + // Convert to each sequential dataType + while ( current ) { + + if ( s.responseFields[ current ] ) { + jqXHR[ s.responseFields[ current ] ] = response; + } + + // Apply the dataFilter if provided + if ( !prev && isSuccess && s.dataFilter ) { + response = s.dataFilter( response, s.dataType ); + } + + prev = current; + current = dataTypes.shift(); + + if ( current ) { + + // There's only work to do if current dataType is non-auto + if ( current === "*" ) { + + current = prev; + + // Convert response if prev dataType is non-auto and differs from current + } else if ( prev !== "*" && prev !== current ) { + + // Seek a direct converter + conv = converters[ prev + " " + current ] || converters[ "* " + current ]; + + // If none found, seek a pair + if ( !conv ) { + for ( conv2 in converters ) { + + // If conv2 outputs current + tmp = conv2.split( " " ); + if ( tmp[ 1 ] === current ) { + + // If prev 
can be converted to accepted input + conv = converters[ prev + " " + tmp[ 0 ] ] || + converters[ "* " + tmp[ 0 ] ]; + if ( conv ) { + + // Condense equivalence converters + if ( conv === true ) { + conv = converters[ conv2 ]; + + // Otherwise, insert the intermediate dataType + } else if ( converters[ conv2 ] !== true ) { + current = tmp[ 0 ]; + dataTypes.unshift( tmp[ 1 ] ); + } + break; + } + } + } + } + + // Apply converter (if not an equivalence) + if ( conv !== true ) { + + // Unless errors are allowed to bubble, catch and return them + if ( conv && s.throws ) { + response = conv( response ); + } else { + try { + response = conv( response ); + } catch ( e ) { + return { + state: "parsererror", + error: conv ? e : "No conversion from " + prev + " to " + current + }; + } + } + } + } + } + } + + return { state: "success", data: response }; +} + +jQuery.extend( { + + // Counter for holding the number of active queries + active: 0, + + // Last-Modified header cache for next request + lastModified: {}, + etag: {}, + + ajaxSettings: { + url: location.href, + type: "GET", + isLocal: rlocalProtocol.test( location.protocol ), + global: true, + processData: true, + async: true, + contentType: "application/x-www-form-urlencoded; charset=UTF-8", + + /* + timeout: 0, + data: null, + dataType: null, + username: null, + password: null, + cache: null, + throws: false, + traditional: false, + headers: {}, + */ + + accepts: { + "*": allTypes, + text: "text/plain", + html: "text/html", + xml: "application/xml, text/xml", + json: "application/json, text/javascript" + }, + + contents: { + xml: /\bxml\b/, + html: /\bhtml/, + json: /\bjson\b/ + }, + + responseFields: { + xml: "responseXML", + text: "responseText", + json: "responseJSON" + }, + + // Data converters + // Keys separate source (or catchall "*") and destination types with a single space + converters: { + + // Convert anything to text + "* text": String, + + // Text to html (true = no transformation) + "text html": true, + + // Evaluate text as a json expression + "text json": JSON.parse, + + // Parse text as xml + "text xml": jQuery.parseXML + }, + + // For options that shouldn't be deep extended: + // you can add your own custom options here if + // and when you create one that shouldn't be + // deep extended (see ajaxExtend) + flatOptions: { + url: true, + context: true + } + }, + + // Creates a full fledged settings object into target + // with both ajaxSettings and settings fields. + // If target is omitted, writes into ajaxSettings. + ajaxSetup: function( target, settings ) { + return settings ? 
+ + // Building a settings object + ajaxExtend( ajaxExtend( target, jQuery.ajaxSettings ), settings ) : + + // Extending ajaxSettings + ajaxExtend( jQuery.ajaxSettings, target ); + }, + + ajaxPrefilter: addToPrefiltersOrTransports( prefilters ), + ajaxTransport: addToPrefiltersOrTransports( transports ), + + // Main method + ajax: function( url, options ) { + + // If url is an object, simulate pre-1.5 signature + if ( typeof url === "object" ) { + options = url; + url = undefined; + } + + // Force options to be an object + options = options || {}; + + var transport, + + // URL without anti-cache param + cacheURL, + + // Response headers + responseHeadersString, + responseHeaders, + + // timeout handle + timeoutTimer, + + // Url cleanup var + urlAnchor, + + // Request state (becomes false upon send and true upon completion) + completed, + + // To know if global events are to be dispatched + fireGlobals, + + // Loop variable + i, + + // uncached part of the url + uncached, + + // Create the final options object + s = jQuery.ajaxSetup( {}, options ), + + // Callbacks context + callbackContext = s.context || s, + + // Context for global events is callbackContext if it is a DOM node or jQuery collection + globalEventContext = s.context && + ( callbackContext.nodeType || callbackContext.jquery ) ? + jQuery( callbackContext ) : + jQuery.event, + + // Deferreds + deferred = jQuery.Deferred(), + completeDeferred = jQuery.Callbacks( "once memory" ), + + // Status-dependent callbacks + statusCode = s.statusCode || {}, + + // Headers (they are sent all at once) + requestHeaders = {}, + requestHeadersNames = {}, + + // Default abort message + strAbort = "canceled", + + // Fake xhr + jqXHR = { + readyState: 0, + + // Builds headers hashtable if needed + getResponseHeader: function( key ) { + var match; + if ( completed ) { + if ( !responseHeaders ) { + responseHeaders = {}; + while ( ( match = rheaders.exec( responseHeadersString ) ) ) { + responseHeaders[ match[ 1 ].toLowerCase() + " " ] = + ( responseHeaders[ match[ 1 ].toLowerCase() + " " ] || [] ) + .concat( match[ 2 ] ); + } + } + match = responseHeaders[ key.toLowerCase() + " " ]; + } + return match == null ? null : match.join( ", " ); + }, + + // Raw string + getAllResponseHeaders: function() { + return completed ? 
responseHeadersString : null; + }, + + // Caches the header + setRequestHeader: function( name, value ) { + if ( completed == null ) { + name = requestHeadersNames[ name.toLowerCase() ] = + requestHeadersNames[ name.toLowerCase() ] || name; + requestHeaders[ name ] = value; + } + return this; + }, + + // Overrides response content-type header + overrideMimeType: function( type ) { + if ( completed == null ) { + s.mimeType = type; + } + return this; + }, + + // Status-dependent callbacks + statusCode: function( map ) { + var code; + if ( map ) { + if ( completed ) { + + // Execute the appropriate callbacks + jqXHR.always( map[ jqXHR.status ] ); + } else { + + // Lazy-add the new callbacks in a way that preserves old ones + for ( code in map ) { + statusCode[ code ] = [ statusCode[ code ], map[ code ] ]; + } + } + } + return this; + }, + + // Cancel the request + abort: function( statusText ) { + var finalText = statusText || strAbort; + if ( transport ) { + transport.abort( finalText ); + } + done( 0, finalText ); + return this; + } + }; + + // Attach deferreds + deferred.promise( jqXHR ); + + // Add protocol if not provided (prefilters might expect it) + // Handle falsy url in the settings object (#10093: consistency with old signature) + // We also use the url parameter if available + s.url = ( ( url || s.url || location.href ) + "" ) + .replace( rprotocol, location.protocol + "//" ); + + // Alias method option to type as per ticket #12004 + s.type = options.method || options.type || s.method || s.type; + + // Extract dataTypes list + s.dataTypes = ( s.dataType || "*" ).toLowerCase().match( rnothtmlwhite ) || [ "" ]; + + // A cross-domain request is in order when the origin doesn't match the current origin. + if ( s.crossDomain == null ) { + urlAnchor = document.createElement( "a" ); + + // Support: IE <=8 - 11, Edge 12 - 15 + // IE throws exception on accessing the href property if url is malformed, + // e.g. 
http://example.com:80x/ + try { + urlAnchor.href = s.url; + + // Support: IE <=8 - 11 only + // Anchor's host property isn't correctly set when s.url is relative + urlAnchor.href = urlAnchor.href; + s.crossDomain = originAnchor.protocol + "//" + originAnchor.host !== + urlAnchor.protocol + "//" + urlAnchor.host; + } catch ( e ) { + + // If there is an error parsing the URL, assume it is crossDomain, + // it can be rejected by the transport if it is invalid + s.crossDomain = true; + } + } + + // Convert data if not already a string + if ( s.data && s.processData && typeof s.data !== "string" ) { + s.data = jQuery.param( s.data, s.traditional ); + } + + // Apply prefilters + inspectPrefiltersOrTransports( prefilters, s, options, jqXHR ); + + // If request was aborted inside a prefilter, stop there + if ( completed ) { + return jqXHR; + } + + // We can fire global events as of now if asked to + // Don't fire events if jQuery.event is undefined in an AMD-usage scenario (#15118) + fireGlobals = jQuery.event && s.global; + + // Watch for a new set of requests + if ( fireGlobals && jQuery.active++ === 0 ) { + jQuery.event.trigger( "ajaxStart" ); + } + + // Uppercase the type + s.type = s.type.toUpperCase(); + + // Determine if request has content + s.hasContent = !rnoContent.test( s.type ); + + // Save the URL in case we're toying with the If-Modified-Since + // and/or If-None-Match header later on + // Remove hash to simplify url manipulation + cacheURL = s.url.replace( rhash, "" ); + + // More options handling for requests with no content + if ( !s.hasContent ) { + + // Remember the hash so we can put it back + uncached = s.url.slice( cacheURL.length ); + + // If data is available and should be processed, append data to url + if ( s.data && ( s.processData || typeof s.data === "string" ) ) { + cacheURL += ( rquery.test( cacheURL ) ? "&" : "?" ) + s.data; + + // #9682: remove data so that it's not used in an eventual retry + delete s.data; + } + + // Add or update anti-cache param if needed + if ( s.cache === false ) { + cacheURL = cacheURL.replace( rantiCache, "$1" ); + uncached = ( rquery.test( cacheURL ) ? "&" : "?" ) + "_=" + ( nonce.guid++ ) + + uncached; + } + + // Put hash and anti-cache on the URL that will be requested (gh-1732) + s.url = cacheURL + uncached; + + // Change '%20' to '+' if this is encoded form body content (gh-2658) + } else if ( s.data && s.processData && + ( s.contentType || "" ).indexOf( "application/x-www-form-urlencoded" ) === 0 ) { + s.data = s.data.replace( r20, "+" ); + } + + // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode. + if ( s.ifModified ) { + if ( jQuery.lastModified[ cacheURL ] ) { + jqXHR.setRequestHeader( "If-Modified-Since", jQuery.lastModified[ cacheURL ] ); + } + if ( jQuery.etag[ cacheURL ] ) { + jqXHR.setRequestHeader( "If-None-Match", jQuery.etag[ cacheURL ] ); + } + } + + // Set the correct header, if data is being sent + if ( s.data && s.hasContent && s.contentType !== false || options.contentType ) { + jqXHR.setRequestHeader( "Content-Type", s.contentType ); + } + + // Set the Accepts header for the server, depending on the dataType + jqXHR.setRequestHeader( + "Accept", + s.dataTypes[ 0 ] && s.accepts[ s.dataTypes[ 0 ] ] ? + s.accepts[ s.dataTypes[ 0 ] ] + + ( s.dataTypes[ 0 ] !== "*" ? 
", " + allTypes + "; q=0.01" : "" ) : + s.accepts[ "*" ] + ); + + // Check for headers option + for ( i in s.headers ) { + jqXHR.setRequestHeader( i, s.headers[ i ] ); + } + + // Allow custom headers/mimetypes and early abort + if ( s.beforeSend && + ( s.beforeSend.call( callbackContext, jqXHR, s ) === false || completed ) ) { + + // Abort if not done already and return + return jqXHR.abort(); + } + + // Aborting is no longer a cancellation + strAbort = "abort"; + + // Install callbacks on deferreds + completeDeferred.add( s.complete ); + jqXHR.done( s.success ); + jqXHR.fail( s.error ); + + // Get transport + transport = inspectPrefiltersOrTransports( transports, s, options, jqXHR ); + + // If no transport, we auto-abort + if ( !transport ) { + done( -1, "No Transport" ); + } else { + jqXHR.readyState = 1; + + // Send global event + if ( fireGlobals ) { + globalEventContext.trigger( "ajaxSend", [ jqXHR, s ] ); + } + + // If request was aborted inside ajaxSend, stop there + if ( completed ) { + return jqXHR; + } + + // Timeout + if ( s.async && s.timeout > 0 ) { + timeoutTimer = window.setTimeout( function() { + jqXHR.abort( "timeout" ); + }, s.timeout ); + } + + try { + completed = false; + transport.send( requestHeaders, done ); + } catch ( e ) { + + // Rethrow post-completion exceptions + if ( completed ) { + throw e; + } + + // Propagate others as results + done( -1, e ); + } + } + + // Callback for when everything is done + function done( status, nativeStatusText, responses, headers ) { + var isSuccess, success, error, response, modified, + statusText = nativeStatusText; + + // Ignore repeat invocations + if ( completed ) { + return; + } + + completed = true; + + // Clear timeout if it exists + if ( timeoutTimer ) { + window.clearTimeout( timeoutTimer ); + } + + // Dereference transport for early garbage collection + // (no matter how long the jqXHR object will be used) + transport = undefined; + + // Cache response headers + responseHeadersString = headers || ""; + + // Set readyState + jqXHR.readyState = status > 0 ? 4 : 0; + + // Determine if successful + isSuccess = status >= 200 && status < 300 || status === 304; + + // Get response data + if ( responses ) { + response = ajaxHandleResponses( s, jqXHR, responses ); + } + + // Use a noop converter for missing script but not if jsonp + if ( !isSuccess && + jQuery.inArray( "script", s.dataTypes ) > -1 && + jQuery.inArray( "json", s.dataTypes ) < 0 ) { + s.converters[ "text script" ] = function() {}; + } + + // Convert no matter what (that way responseXXX fields are always set) + response = ajaxConvert( s, response, jqXHR, isSuccess ); + + // If successful, handle type chaining + if ( isSuccess ) { + + // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode. 
+ if ( s.ifModified ) { + modified = jqXHR.getResponseHeader( "Last-Modified" ); + if ( modified ) { + jQuery.lastModified[ cacheURL ] = modified; + } + modified = jqXHR.getResponseHeader( "etag" ); + if ( modified ) { + jQuery.etag[ cacheURL ] = modified; + } + } + + // if no content + if ( status === 204 || s.type === "HEAD" ) { + statusText = "nocontent"; + + // if not modified + } else if ( status === 304 ) { + statusText = "notmodified"; + + // If we have data, let's convert it + } else { + statusText = response.state; + success = response.data; + error = response.error; + isSuccess = !error; + } + } else { + + // Extract error from statusText and normalize for non-aborts + error = statusText; + if ( status || !statusText ) { + statusText = "error"; + if ( status < 0 ) { + status = 0; + } + } + } + + // Set data for the fake xhr object + jqXHR.status = status; + jqXHR.statusText = ( nativeStatusText || statusText ) + ""; + + // Success/Error + if ( isSuccess ) { + deferred.resolveWith( callbackContext, [ success, statusText, jqXHR ] ); + } else { + deferred.rejectWith( callbackContext, [ jqXHR, statusText, error ] ); + } + + // Status-dependent callbacks + jqXHR.statusCode( statusCode ); + statusCode = undefined; + + if ( fireGlobals ) { + globalEventContext.trigger( isSuccess ? "ajaxSuccess" : "ajaxError", + [ jqXHR, s, isSuccess ? success : error ] ); + } + + // Complete + completeDeferred.fireWith( callbackContext, [ jqXHR, statusText ] ); + + if ( fireGlobals ) { + globalEventContext.trigger( "ajaxComplete", [ jqXHR, s ] ); + + // Handle the global AJAX counter + if ( !( --jQuery.active ) ) { + jQuery.event.trigger( "ajaxStop" ); + } + } + } + + return jqXHR; + }, + + getJSON: function( url, data, callback ) { + return jQuery.get( url, data, callback, "json" ); + }, + + getScript: function( url, callback ) { + return jQuery.get( url, undefined, callback, "script" ); + } +} ); + +jQuery.each( [ "get", "post" ], function( _i, method ) { + jQuery[ method ] = function( url, data, callback, type ) { + + // Shift arguments if data argument was omitted + if ( isFunction( data ) ) { + type = type || callback; + callback = data; + data = undefined; + } + + // The url can be an options object (which then must have .url) + return jQuery.ajax( jQuery.extend( { + url: url, + type: method, + dataType: type, + data: data, + success: callback + }, jQuery.isPlainObject( url ) && url ) ); + }; +} ); + +jQuery.ajaxPrefilter( function( s ) { + var i; + for ( i in s.headers ) { + if ( i.toLowerCase() === "content-type" ) { + s.contentType = s.headers[ i ] || ""; + } + } +} ); + + +jQuery._evalUrl = function( url, options, doc ) { + return jQuery.ajax( { + url: url, + + // Make this explicit, since user can override this through ajaxSetup (#11264) + type: "GET", + dataType: "script", + cache: true, + async: false, + global: false, + + // Only evaluate the response if it is successful (gh-4126) + // dataFilter is not invoked for failure responses, so using it instead + // of the default converter is kludgy but it works. 
+ converters: { + "text script": function() {} + }, + dataFilter: function( response ) { + jQuery.globalEval( response, options, doc ); + } + } ); +}; + + +jQuery.fn.extend( { + wrapAll: function( html ) { + var wrap; + + if ( this[ 0 ] ) { + if ( isFunction( html ) ) { + html = html.call( this[ 0 ] ); + } + + // The elements to wrap the target around + wrap = jQuery( html, this[ 0 ].ownerDocument ).eq( 0 ).clone( true ); + + if ( this[ 0 ].parentNode ) { + wrap.insertBefore( this[ 0 ] ); + } + + wrap.map( function() { + var elem = this; + + while ( elem.firstElementChild ) { + elem = elem.firstElementChild; + } + + return elem; + } ).append( this ); + } + + return this; + }, + + wrapInner: function( html ) { + if ( isFunction( html ) ) { + return this.each( function( i ) { + jQuery( this ).wrapInner( html.call( this, i ) ); + } ); + } + + return this.each( function() { + var self = jQuery( this ), + contents = self.contents(); + + if ( contents.length ) { + contents.wrapAll( html ); + + } else { + self.append( html ); + } + } ); + }, + + wrap: function( html ) { + var htmlIsFunction = isFunction( html ); + + return this.each( function( i ) { + jQuery( this ).wrapAll( htmlIsFunction ? html.call( this, i ) : html ); + } ); + }, + + unwrap: function( selector ) { + this.parent( selector ).not( "body" ).each( function() { + jQuery( this ).replaceWith( this.childNodes ); + } ); + return this; + } +} ); + + +jQuery.expr.pseudos.hidden = function( elem ) { + return !jQuery.expr.pseudos.visible( elem ); +}; +jQuery.expr.pseudos.visible = function( elem ) { + return !!( elem.offsetWidth || elem.offsetHeight || elem.getClientRects().length ); +}; + + + + +jQuery.ajaxSettings.xhr = function() { + try { + return new window.XMLHttpRequest(); + } catch ( e ) {} +}; + +var xhrSuccessStatus = { + + // File protocol always yields status code 0, assume 200 + 0: 200, + + // Support: IE <=9 only + // #1450: sometimes IE returns 1223 when it should be 204 + 1223: 204 + }, + xhrSupported = jQuery.ajaxSettings.xhr(); + +support.cors = !!xhrSupported && ( "withCredentials" in xhrSupported ); +support.ajax = xhrSupported = !!xhrSupported; + +jQuery.ajaxTransport( function( options ) { + var callback, errorCallback; + + // Cross domain only allowed if supported through XMLHttpRequest + if ( support.cors || xhrSupported && !options.crossDomain ) { + return { + send: function( headers, complete ) { + var i, + xhr = options.xhr(); + + xhr.open( + options.type, + options.url, + options.async, + options.username, + options.password + ); + + // Apply custom fields if provided + if ( options.xhrFields ) { + for ( i in options.xhrFields ) { + xhr[ i ] = options.xhrFields[ i ]; + } + } + + // Override mime type if needed + if ( options.mimeType && xhr.overrideMimeType ) { + xhr.overrideMimeType( options.mimeType ); + } + + // X-Requested-With header + // For cross-domain requests, seeing as conditions for a preflight are + // akin to a jigsaw puzzle, we simply never set it to be sure. + // (it can always be set on a per-request basis or even using ajaxSetup) + // For same-domain requests, won't change header if already provided. 
+ if ( !options.crossDomain && !headers[ "X-Requested-With" ] ) { + headers[ "X-Requested-With" ] = "XMLHttpRequest"; + } + + // Set headers + for ( i in headers ) { + xhr.setRequestHeader( i, headers[ i ] ); + } + + // Callback + callback = function( type ) { + return function() { + if ( callback ) { + callback = errorCallback = xhr.onload = + xhr.onerror = xhr.onabort = xhr.ontimeout = + xhr.onreadystatechange = null; + + if ( type === "abort" ) { + xhr.abort(); + } else if ( type === "error" ) { + + // Support: IE <=9 only + // On a manual native abort, IE9 throws + // errors on any property access that is not readyState + if ( typeof xhr.status !== "number" ) { + complete( 0, "error" ); + } else { + complete( + + // File: protocol always yields status 0; see #8605, #14207 + xhr.status, + xhr.statusText + ); + } + } else { + complete( + xhrSuccessStatus[ xhr.status ] || xhr.status, + xhr.statusText, + + // Support: IE <=9 only + // IE9 has no XHR2 but throws on binary (trac-11426) + // For XHR2 non-text, let the caller handle it (gh-2498) + ( xhr.responseType || "text" ) !== "text" || + typeof xhr.responseText !== "string" ? + { binary: xhr.response } : + { text: xhr.responseText }, + xhr.getAllResponseHeaders() + ); + } + } + }; + }; + + // Listen to events + xhr.onload = callback(); + errorCallback = xhr.onerror = xhr.ontimeout = callback( "error" ); + + // Support: IE 9 only + // Use onreadystatechange to replace onabort + // to handle uncaught aborts + if ( xhr.onabort !== undefined ) { + xhr.onabort = errorCallback; + } else { + xhr.onreadystatechange = function() { + + // Check readyState before timeout as it changes + if ( xhr.readyState === 4 ) { + + // Allow onerror to be called first, + // but that will not handle a native abort + // Also, save errorCallback to a variable + // as xhr.onerror cannot be accessed + window.setTimeout( function() { + if ( callback ) { + errorCallback(); + } + } ); + } + }; + } + + // Create the abort callback + callback = callback( "abort" ); + + try { + + // Do send the request (this may raise an exception) + xhr.send( options.hasContent && options.data || null ); + } catch ( e ) { + + // #14683: Only rethrow if this hasn't been notified as an error yet + if ( callback ) { + throw e; + } + } + }, + + abort: function() { + if ( callback ) { + callback(); + } + } + }; + } +} ); + + + + +// Prevent auto-execution of scripts when no explicit dataType was provided (See gh-2432) +jQuery.ajaxPrefilter( function( s ) { + if ( s.crossDomain ) { + s.contents.script = false; + } +} ); + +// Install script dataType +jQuery.ajaxSetup( { + accepts: { + script: "text/javascript, application/javascript, " + + "application/ecmascript, application/x-ecmascript" + }, + contents: { + script: /\b(?:java|ecma)script\b/ + }, + converters: { + "text script": function( text ) { + jQuery.globalEval( text ); + return text; + } + } +} ); + +// Handle cache's special case and crossDomain +jQuery.ajaxPrefilter( "script", function( s ) { + if ( s.cache === undefined ) { + s.cache = false; + } + if ( s.crossDomain ) { + s.type = "GET"; + } +} ); + +// Bind script tag hack transport +jQuery.ajaxTransport( "script", function( s ) { + + // This transport only deals with cross domain or forced-by-attrs requests + if ( s.crossDomain || s.scriptAttrs ) { + var script, callback; + return { + send: function( _, complete ) { + script = jQuery( " + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +
+
+
+

Package apache-airflow-providers-apache-hive

+

Apache Hive

+

This is the detailed commit list of changes for versions of the apache.hive provider package. +For a high-level changelog, see the package information page, which includes the changelog.

+
+

5.0.0

+

Latest change: 2022-12-06

+ +++++ + + + + + + + + + + + + + + + + +

Commit

Committed

Subject

5f8481c799

2022-12-06

Move hive_cli_params to hook parameters (#28101)

2d45f9d6c3

2022-11-27

Improve filtering for invalid schemas in Hive hook (#27808)

+
+
+

4.1.1

+

Latest change: 2022-11-26

+ +++++ + + + + + + + + + + + + + + + + + + + + +

Commit

Committed

Subject

25bdbc8e67

2022-11-26

Updated docs for RC3 wave of providers (#27937)

2e20e9f7eb

2022-11-24

Prepare for follow-up relase for November providers (#27774)

80c327bd3b

2022-11-24

Bump common.sql provider to 1.3.1 (#27888)

+
+
+

4.1.0

+

Latest change: 2022-11-15

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Commit

Committed

Subject

12c3c39d1a

2022-11-15

pRepare docs for November 2022 wave of Providers (#27613)

150dd927c3

2022-11-14

Filter out invalid schemas in Hive hook (#27647)

9ab1a6a3e7

2022-10-27

Update old style typing (#26872)

78b8ea2f22

2022-10-24

Move min airflow version to 2.3.0 for all providers (#27196)

2a34dc9e84

2022-10-23

Enable string normalization in python formatting - providers (#27205)

+
+
+

4.0.1

+

Latest change: 2022-09-28

+ +++++ + + + + + + + + + + + + + + + + + + + + +

Commit

Committed

Subject

f8db64c35c

2022-09-28

Update docs for September Provider's release (#26731)

06acf40a43

2022-09-13

Apply PEP-563 (Postponed Evaluation of Annotations) to non-core airflow (#26289)

ca9229b6fe

2022-08-18

Add common-sql lower bound for common-sql (#25789)

+
+
+

4.0.0

+

Latest change: 2022-08-10

+ +++++ + + + + + + + + + + + + + + + + + + + + +

Commit

Committed

Subject

e5ac6c7cfb

2022-08-10

Prepare docs for new providers release (August 2022) (#25618)

7e3d2350db

2022-08-04

Remove Smart Sensors (#25507)

5d4abbd58c

2022-07-27

Deprecate hql parameters and synchronize DBApiHook method APIs (#25299)

+
+
+

3.1.0

+

Latest change: 2022-07-13

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Commit

Committed

Subject

d2459a241b

2022-07-13

Add documentation for July 2022 Provider's release (#25030)

46bbfdade0

2022-07-07

Move all SQL classes to common-sql provider (#24836)

0de31bd73a

2022-06-29

Move provider dependencies to inside provider folders (#24672)

cef97fccd5

2022-06-29

fix connection extra parameter 'auth_mechanism' in 'HiveMetastoreHook' and 'HiveServer2Hook' (#24713)

510a6bab45

2022-06-28

Remove 'hook-class-names' from provider.yaml (#24702)

+
+
+

3.0.0

+

Latest change: 2022-06-09

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Commit

Committed

Subject

dcdcf3a2b8

2022-06-09

Update release notes for RC2 release of Providers for May 2022 (#24307)

717a7588bc

2022-06-07

Update package description to remove double min-airflow specification (#24292)

aeabe994b3

2022-06-07

Prepare docs for May 2022 provider's release (#24231)

b4a5783a2a

2022-06-06

chore: Refactoring and Cleaning Apache Providers (#24219)

027b707d21

2022-06-05

Add explanatory note for contributors about updating Changelog (#24229)

100ea9d1fc

2022-06-05

AIP-47 - Migrate hive DAGs to new design #22439 (#24204)

71e4deb1b0

2022-05-16

Add typing for airflow/configuration.py (#23716)

+
+
+

2.3.3

+

Latest change: 2022-05-12

+ +++++ + + + + + + + + + + + + + + + + + + + + +

Commit

Committed

Subject

75c60923e0

2022-05-12

Prepare provider documentation 2022.05.11 (#23631)

2d109401b3

2022-05-04

Bump pre-commit hook versions (#22887)

0c9c1cf94a

2022-04-28

Fix HiveToMySqlOperator's wrong docstring (#23316)

+
+
+

2.3.2

+

Latest change: 2022-03-22

+ +++++ + + + + + + + + + + + + +

Commit

Committed

Subject

d7dbfb7e26

2022-03-22

Add documentation for bugfix release of Providers (#22383)

+
+
+

2.3.1

+

Latest change: 2022-03-14

+ +++++ + + + + + + + + + + + + +

Commit

Committed

Subject

16adc035b1

2022-03-14

Add documentation for Classifier release for March 2022 (#22226)

+
+
+

2.3.0

+

Latest change: 2022-03-07

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Commit

Committed

Subject

f5b96315fe

2022-03-07

Add documentation for Feb Providers release (#22056)

563ecfa053

2022-03-01

Add Python 3.9 support to Hive (#21893)

f6e0ed0dcc

2022-02-15

Add how-to guide for hive operator (#21590)

041babb060

2022-02-15

Fix mypy issues in 'example_twitter_dag' (#21571)

2d6282d6b7

2022-02-15

Remove unnecessary/stale comments (#21572)

06010fa12a

2022-02-11

Fix key typo in 'template_fields_renderers' for 'HiveOperator' (#21525)

d927507899

2022-02-11

Set larger limit get_partitions_by_filter in HiveMetastoreHook (#21504)

+
+
+

2.2.0

+

Latest change: 2022-02-08

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Commit

Committed

Subject

d94fa37830

2022-02-08

Fixed changelog for January 2022 (delayed) provider's release (#21439)

8f81b9a01c

2022-02-08

Add conditional 'template_fields_renderers' check for new SQL lexers (#21403)

6c3a67d4fc

2022-02-05

Add documentation for January 2021 providers release (#21257)

39e395f981

2022-02-04

Add more SQL template fields renderers (#21237)

602abe8394

2022-01-20

Remove ':type' lines now sphinx-autoapi supports typehints (#20951)

5569b868a9

2022-01-09

Fix MyPy Errors for providers: Tableau, CNCF, Apache (#20654)

f77417eb0d

2021-12-31

Fix K8S changelog to be PyPI-compatible (#20614)

97496ba2b4

2021-12-31

Update documentation for provider December 2021 release (#20523)

83f8e178ba

2021-12-31

Even more typing in operators (template_fields/ext) (#20608)

d56e7b56bb

2021-12-30

Fix template_fields type to have MyPy friendly Sequence type (#20571)

a0821235fb

2021-12-30

Use typed Context EVERYWHERE (#20565)

485ff6cc64

2021-12-29

Fix MyPy errors in Apache Providers (#20422)

f760823b4a

2021-12-11

Add some type hints for Hive providers (#20210)

+
+
+

2.1.0

+

Latest change: 2021-11-30

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Commit

Committed

Subject

853576d901

2021-11-30

Update documentation for November 2021 provider's release (#19882)

16b3ab5860

2021-11-29

Improve various docstrings in Apache Hive providers (#19866)

ac752e777b

2021-11-24

hive provider: restore HA support for metastore (#19777)

f50f677514

2021-11-08

Fix typos in Hive transfer operator docstrings (#19474)

ae044884d1

2021-11-03

Cleanup of start_date and default arg use for Apache example DAGs (#18657)

+
+
+

2.0.3

+

Latest change: 2021-10-29

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Commit

Committed

Subject

d9567eb106

2021-10-29

Prepare documentation for October Provider's release (#19321)

86a2a19ad2

2021-10-17

More f-strings (#18855)

80b5e65a6a

2021-10-17

Remove unnecessary string concatenations in AirflowException in s3_to_hive.py (#19026)

232f7d1587

2021-10-10

fix get_connections deprecation warn in hivemetastore hook (#18854)

840ea3efb9

2021-09-30

Update documentation for September providers release (#18613)

a458fcc573

2021-09-27

Updating miscellaneous provider DAGs to use TaskFlow API where applicable (#18278)

+
+
+

2.0.2

+

Latest change: 2021-08-30

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + +

Commit

Committed

Subject

0a68588479

2021-08-30

Add August 2021 Provider's documentation (#17890)

da99c3fa6c

2021-08-30

HiveHook fix get_pandas_df() failure when it tries to read an empty table (#17777)

be75dcd39c

2021-08-23

Update description about the new ''connection-types'' provider meta-data

76ed2a49c6

2021-08-19

Import Hooks lazily individually in providers manager (#17682)

+
+
+

2.0.1

+

Latest change: 2021-07-26

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Commit

Committed

Subject

87f408b1e7

2021-07-26

Prepares docs for Rc2 release of July providers (#17116)

91f4d80ff0

2021-07-23

Updating Apache example DAGs to use XComArgs (#16869)

d02ded65ea

2021-07-15

Fixed wrongly escaped characters in amazon's changelog (#17020)

b916b75079

2021-07-15

Prepare documentation for July release of providers. (#17015)

866a601b76

2021-06-28

Removes pylint from our toolchain (#16682)

ce44b62890

2021-06-25

Add Python 3.9 support (#15515)

+
+
+

2.0.0

+

Latest change: 2021-06-18

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Commit

Committed

Subject

bbc627a3da

2021-06-18

Prepares documentation for rc2 release of Providers (#16501)

cbf8001d76

2021-06-16

Synchronizes updated changelog after buggfix release (#16464)

1fba5402bb

2021-06-15

More documentation update for June providers release (#16405)

9c94b72d44

2021-06-07

Updated documentation for June 2021 provider release (#16294)

476d0f6e3d

2021-05-22

Bump pyupgrade v2.13.0 to v2.18.1 (#15991)

736a62f824

2021-05-08

Remove duplicate key from Python dictionary (#15735)

37681bca00

2021-05-07

Auto-apply apply_default decorator (#15667)

9953a047c4

2021-05-07

Add Connection Documentation for the Hive Provider (#15704)

807ad32ce5

2021-05-01

Prepares provider release after PIP 21 compatibility (#15576)

4b031d39e1

2021-04-27

Make Airflow code Pylint 2.8 compatible (#15534)

e229f3541d

2021-04-27

Use Pip 21.* to install airflow officially (#15513)

+
+
+

1.0.3

+

Latest change: 2021-04-06

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Commit

Committed

Subject

042be2e4e0

2021-04-06

Updated documentation for provider packages before April release (#15236)

53dafa593f

2021-04-04

Fix mistake and typos in doc/docstrings (#15180)

85e0e76074

2021-03-29

Pin flynt to fix failing PRs (#15076)

68e4c4dcb0

2021-03-20

Remove Backport Providers (#14886)

6dc24c95e3

2021-03-07

Fix grammar and remove duplicate words (#14647)

b0d6069d25

2021-03-05

Fix broken static check on Master  (#14633)

d9e4454c66

2021-03-01

Resolve issue related to HiveCliHook kill (#14542)

+
+
+

1.0.2

+

Latest change: 2021-02-27

+ +++++ + + + + + + + + + + + + + + + + +

Commit

Committed

Subject

589d6dec92

2021-02-27

Prepare to release the next wave of providers: (#14487)

10343ec29f

2021-02-05

Corrections in docs and tools after releasing provider RCs (#14082)

+
+
+

1.0.1

+

Latest change: 2021-02-04

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Commit

Committed

Subject

88bdcfa0df

2021-02-04

Prepare to release a new wave of providers. (#14013)

ac2f72c98d

2021-02-01

Implement provider versioning tools (#13767)

a9ac2b040b

2021-01-23

Switch to f-strings using flynt. (#13732)

5f81fc73c8

2021-01-03

Fix: Remove password if in LDAP or CUSTOM mode HiveServer2Hook (#11767)

4f494d4d92

2021-01-03

Fix few typos (#13450)

295d66f914

2020-12-30

Fix Grammar in PIP warning (#13380)

6cf76d7ac0

2020-12-18

Fix typo in pip upgrade command :( (#13148)

5090fb0c89

2020-12-15

Add script to generate integrations.json (#13073)

+
+
+

1.0.0

+

Latest change: 2020-12-09

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Commit

Committed

Subject

32971a1a2d

2020-12-09

Updates providers versions to 1.0.0 (#12955)

a075b6df99

2020-12-09

Rename remaining Sensors to match AIP-21 (#12927)

b40dffa085

2020-12-08

Rename remaing modules to match AIP-21 (#12917)

9b39f24780

2020-12-08

Add support for dynamic connection form fields per provider (#12558)

2037303eef

2020-11-29

Adds support for Connection/Hook discovery from providers (#12466)

c34ef853c8

2020-11-20

Separate out documentation building per provider  (#12444)

0080354502

2020-11-18

Update provider READMEs for 1.0.0b2 batch release (#12449)

ae7cb4a1e2

2020-11-17

Update wrong commit hash in backport provider changes (#12390)

6889a333cf

2020-11-15

Improvements for operators and hooks ref docs (#12366)

7825e8f590

2020-11-13

Docs installation improvements (#12304)

250436d962

2020-11-10

Fix spelling in Python files (#12230)

502ba309ea

2020-11-10

Enable Markdownlint rule - MD022/blanks-around-headings (#12225)

85a18e13d9

2020-11-09

Point at pypi project pages for cross-dependency of provider packages (#12212)

59eb5de78c

2020-11-09

Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)

b2a28d1590

2020-11-09

Moves provider packages scripts to dev (#12082)

41bf172c1d

2020-11-04

Simplify string expressions (#12093)

4e8f9cc8d0

2020-11-03

Enable Black - Python Auto Formmatter (#9550)

8c42cf1b00

2020-11-03

Use PyUpgrade to use Python 3.6 features (#11447)

5a439e84eb

2020-10-26

Prepare providers release 0.0.2a1 (#11855)

872b1566a1

2020-10-25

Generated backport providers readmes/setup for 2020.10.29 (#11826)

349b0811c3

2020-10-20

Add D200 pydocstyle check (#11688)

16e7129719

2020-10-13

Added support for provider packages for Airflow 2.0 (#11487)

0a0e1af800

2020-10-03

Fix Broken Markdown links in Providers README TOC (#11249)

ca4238eb4d

2020-10-02

Fixed month in backport packages to October (#11242)

5220e4c384

2020-10-02

Prepare Backport release 2020.09.07 (#11238)

e3f96ce7a8

2020-09-24

Fix incorrect Usage of Optional[bool] (#11138)

f3e87c5030

2020-09-22

Add D202 pydocstyle check (#11032)

9549274d11

2020-09-09

Upgrade black to 20.8b1 (#10818)

ac943c9e18

2020-09-08

[AIRFLOW-3964][AIP-17] Consolidate and de-dup sensor tasks using Smart Sensor (#5499)

fdd9b6f65b

2020-08-25

Enable Black on Providers Packages (#10543)

d760265452

2020-08-25

PyDocStyle: No whitespaces allowed surrounding docstring text (#10533)

3696c34c28

2020-08-24

Fix typo in the word "release" (#10528)

ee7ca128a1

2020-08-22

Fix broken Markdown refernces in Providers README (#10483)

27339a5a0f

2020-08-22

Remove mentions of Airflow Gitter (#10460)

7c206a82a6

2020-08-22

Replace assigment with Augmented assignment (#10468)

8f8db8959e

2020-08-12

DbApiHook: Support kwargs in get_pandas_df (#9730)

b43f90abf4

2020-08-09

Fix various typos in the repo (#10263)

3b3287d7ac

2020-08-05

Enforce keyword only arguments on apache operators (#10170)

7d24b088cd

2020-07-25

Stop using start_date in default_args in example_dags (2) (#9985)

33f0cd2657

2020-07-22

apply_default keeps the function signature for mypy (#9784)

c2db0dfeb1

2020-07-22

More strict rules in mypy (#9705) (#9906)

5013fda8f0

2020-07-20

Add drop_partition functionality for HiveMetastoreHook (#9472)

4d74ac2111

2020-07-19

Increase typing for Apache and http provider package (#9729)

44d4ae809c

2020-07-06

Upgrade to latest pre-commit checks (#9686)

e13a14c873

2020-06-21

Enable & Fix Whitespace related PyDocStyle Checks (#9458)

d0e7db4024

2020-06-19

Fixed release number for fresh release (#9408)

12af6a0800

2020-06-19

Final cleanup for 2020.6.23rc1 release preparation (#9404)

c7e5bce57f

2020-06-19

Prepare backport release candidate for 2020.6.23rc1 (#9370)

f6bd817a3a

2020-06-16

Introduce 'transfers' packages (#9320)

c78e2a5fea

2020-06-16

Make hive macros py3 compatible (#8598)

6350fd6ebb

2020-06-08

Don't use the term "whitelist" - language matters (#9174)

10796cb7ce

2020-06-03

Remove Hive/Hadoop/Java dependency from unit tests (#9029)

0b0e4f7a4c

2020-05-26

Preparing for RC3 relase of backports (#9026)

00642a46d0

2020-05-26

Fixed name of 20 remaining wrongly named operators. (#8994)

cdb3f25456

2020-05-26

All classes in backport providers are now importable in Airflow 1.10 (#8991)

375d1ca229

2020-05-19

Release candidate 2 for backport packages 2020.05.20 (#8898)

12c5e5d8ae

2020-05-17

Prepare release candidate for backport packages (#8891)

f3521fb0e3

2020-05-16

Regenerate readme files for backport package release (#8886)

92585ca4cb

2020-05-15

Added automated release notes generation for backport operators (#8807)

93ea058802

2020-04-21

[AIRFLOW-7059] pass hive_conf to get_pandas_df in HiveServer2Hook (#8380)

87969a350d

2020-04-09

[AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170)

cb0bf4a142

2020-03-30

Remove sql like function in base_hook (#7901)

4bde99f132

2020-03-23

Make airflow/providers pylint compatible (#7802)

7e6372a681

2020-03-23

Add call to Super call in apache providers (#7820)

3320e432a1

2020-02-24

[AIRFLOW-6817] Lazy-load 'airflow.DAG' to keep user-facing API untouched (#7517)

4d03e33c11

2020-02-22

[AIRFLOW-6817] remove imports from 'airflow/__init__.py', replaced implicit imports with explicit imports, added entry to 'UPDATING.MD' - squashed/rebased (#7456)

f3ad5cf618

2020-02-03

[AIRFLOW-4681] Make sensors module pylint compatible (#7309)

97a429f9d0

2020-02-02

[AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)

83c037873f

2020-01-30

[AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)

057f3ae3a4

2020-01-29

[AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286)

059eda05f8

2020-01-21

[AIRFLOW-6610] Move software classes to providers package (#7231)

0481b9a957

2020-01-12

[AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142)

+
+
+ + + +
+ +
+
+
+
+
+

Was this entry helpful?

+
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+
+ +
+ + + + + + +
+ + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/connections/hive_cli.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/connections/hive_cli.html new file mode 100644 index 00000000000..a43f0855d22 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/connections/hive_cli.html @@ -0,0 +1,903 @@ + + + + + + + + + + + + Hive CLI Connection — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +
+
+
+

Hive CLI Connection

+

The Hive CLI connection type enables the Hive CLI Integrations.

+
+

Authenticating to Hive CLI

+

There are two ways to connect to Hive using Airflow.

+
    +
  1. Use the Hive Beeline. +i.e. make a JDBC connection string with host, port, and schema. Optionally you can connect with a proxy user, and specify a login and password.

  2. +
  3. Use the Hive CLI. +i.e. specify Hive CLI params in the extras field.

  4. +
+

Only one authorization method can be used at a time. If you need to manage multiple credentials or keys then you should +configure multiple connections.

+
+
+

Default Connection IDs

+

All hooks and operators related to the Hive CLI use hive_cli_default by default.

+
+
+

Configuring the Connection

+
+
Login (optional)

Specify your username for a proxy user or for the Beeline CLI.

+
+
Password (optional)

Specify your Beeline CLI password.

+
+
Host (optional)

Specify your JDBC Hive host that is used for Hive Beeline.

+
+
Port (optional)

Specify your JDBC Hive port that is used for Hive Beeline.

+
+
Schema (optional)

Specify your JDBC Hive database that you want to connect to with Beeline +or specify a schema for an HQL statement to run with the Hive CLI.

+
+
Extra (optional)

Specify the extra parameters (as a JSON dictionary) that can be used in the Hive CLI connection. +The following parameters are all optional:

+
    +
  • use_beeline +Specify as True if using the Beeline CLI. Default is False.

  • +
  • auth +Specify the auth type for use with Hive Beeline CLI.

  • +
  • proxy_user +Specify a proxy user as an owner or login or keep blank if using a +custom proxy user.

  • +
  • principal +Specify the JDBC Hive principal to be used with Hive Beeline.

  • +
+
+
+

When specifying the connection as an environment variable, you should specify +it using URI syntax.

+

Note that all components of the URI should be URL-encoded.

+

For example:

+
export AIRFLOW_CONN_HIVE_CLI_DEFAULT='hive-cli://beeline-username:beeline-password@jdbc-hive-host:80/hive-database?hive_cli_params=params&use_beeline=True&auth=noSasl&principal=hive%2F_HOST%40EXAMPLE.COM'
+
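The same connection can then be consumed from Python code. The snippet below is a minimal sketch that assumes the provider's HiveCliHook (its hive_cli_conn_id argument and run_cli method); the HQL statement and schema are illustrative placeholders only:

# Minimal sketch: run an HQL statement through the connection configured above.
# Assumes HiveCliHook from this provider; the query and schema are placeholders.
from airflow.providers.apache.hive.hooks.hive import HiveCliHook

hook = HiveCliHook(hive_cli_conn_id="hive_cli_default")
hook.run_cli(hql="SHOW TABLES;", schema="hive-database")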
+
+
+
+ + + +
+ +
+
+
+
+
+

Was this entry helpful?

+
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+
+ +
+ + + + + + +
+ + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/connections/hive_metastore.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/connections/hive_metastore.html new file mode 100644 index 00000000000..63a3dd8bc3e --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/connections/hive_metastore.html @@ -0,0 +1,882 @@ + + + + + + + + + + + + Hive Metastore Connection — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +
+

Hive Metastore Connection

+

The Hive Metastore connection type enables the Hive Metastore Integrations.

+
+

Authenticating to Hive Metastore

+

Authenticate with the Hive Metastore through the Apache Thrift Hive Server +and the hmsclient library.

+
+
+

Default Connection IDs

+

All hooks and operators related to the Hive Metastore use metastore_default by default.

+
+
+

Configuring the Connection

+
+
Host (optional)

The host of your Hive Metastore node. It is possible to specify multiple hosts as a comma-separated list.

+
+
Port (optional)

Your Hive Metastore port number.

+
+
Extra (optional)

Specify the extra parameters (as a JSON dictionary) that can be used in the Hive Metastore connection. +The following parameters are all optional:

+
    +
  • auth_mechanism +Specify the mechanism for authentication. Default is NOSASL.

  • +
  • kerberos_service_name +Specify the kerberos service name. Default is hive.

  • +
+
+
+

When specifying the connection as an environment variable, you should specify +it using URI syntax.

+

Note that all components of the URI should be URL-encoded.

+

For example:

+
export AIRFLOW_CONN_METASTORE_DEFAULT='hive-metastore://hive-metastore-node:80?auth_mechanism=NOSASL'
+
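Once the connection is defined, it can be used from the metastore hook. The snippet below is a minimal sketch assuming the provider's HiveMetastoreHook (its metastore_conn_id argument and the table_exists and check_for_partition methods); the database, table, and partition filter are placeholders:

# Minimal sketch: query the metastore through the connection configured above.
# Assumes HiveMetastoreHook from this provider; the names below are placeholders.
from airflow.providers.apache.hive.hooks.hive import HiveMetastoreHook

hook = HiveMetastoreHook(metastore_conn_id="metastore_default")
if hook.table_exists("my_table", db="default"):
    print(hook.check_for_partition("default", "my_table", "ds='2022-12-01'"))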
+
+
+
+ + + +
+ +
+
+
+
+
+

Was this entry helpful?

+
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+
+ +
+ + + + + + +
+ + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/connections/hiveserver2.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/connections/hiveserver2.html new file mode 100644 index 00000000000..c7f7d46a2be --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/connections/hiveserver2.html @@ -0,0 +1,892 @@ + + + + + + + + + + + + Hive Server2 Connection — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +
+
+
+

Hive Server2 Connection

+

The Hive Server2 connection type enables the Hive Server2 Integrations.

+
+

Authenticating to Hive Server2

+

Connect to Hive Server2 using PyHive. +Choose between authenticating via LDAP, Kerberos, or custom.

+
+
+

Default Connection IDs

+

All hooks and operators related to Hive Server2 use hiveserver2_default by default.

+
+
+

Configuring the Connection

+
+
Login (optional)

Specify your Hive Server2 username.

+
+
Password (optional)

Specify your Hive password for use with LDAP and custom authentication.

+
+
Host (optional)

Specify the host node for Hive Server2.

+
+
Port (optional)

Specify your Hive Server2 port number.

+
+
Schema (optional)

Specify the name for the database you would like to connect to with Hive Server2.

+
+
Extra (optional)

Specify the extra parameters (as a JSON dictionary) that can be used in the Hive Server2 connection. +The following parameters are all optional:

+
    +
  • auth_mechanism +Specify the authentication method for PyHive. Choose between PLAIN, LDAP, KERBEROS or Custom. Default is PLAIN.

  • +
  • kerberos_service_name +If authenticating with Kerberos specify the Kerberos service name. Default is hive.

  • +
  • run_set_variable_statements +Specify if you want to run set variable statements. Default is True.

  • +
+
+
+

When specifying the connection as an environment variable, you should specify +it using URI syntax.

+

Note that all components of the URI should be URL-encoded.

+

For example:

+
export AIRFLOW_CONN_HIVESERVER2_DEFAULT='hiveserver2://username:password@hiveserver2-node:80/database?auth_mechanism=LDAP'
+
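With the connection in place, queries can be issued through the Hive Server2 hook. The snippet below is a minimal sketch assuming the provider's HiveServer2Hook (its hiveserver2_conn_id argument and get_records method, which takes the SQL statement first and accepts schema as a keyword argument); the query and schema are placeholders:

# Minimal sketch: run a query through the connection configured above.
# Assumes HiveServer2Hook from this provider; the query and schema are placeholders.
from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook

hook = HiveServer2Hook(hiveserver2_conn_id="hiveserver2_default")
records = hook.get_records("SELECT * FROM my_table LIMIT 10", schema="database")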
+
+
+
+ + + +
+ +
+
+
+
+
+

Was this entry helpful?

+
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+
+ +
+ + + + + + +
+ + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/connections/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/connections/index.html new file mode 100644 index 00000000000..4d8ec7e833c --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/connections/index.html @@ -0,0 +1,849 @@ + + + + + + + + + + + + Connection Types — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+ +
+
+
+
+

Was this entry helpful?

+
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+
+ +
+ + + + + + +
+ + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/genindex.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/genindex.html new file mode 100644 index 00000000000..06e640effd2 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/genindex.html @@ -0,0 +1,1423 @@ + + + + + + + + + + + + Index — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ + +

Index

+ +
+ _ + | A + | C + | D + | E + | F + | G + | H + | K + | L + | M + | N + | O + | P + | R + | S + | T + | U + | V + +
+

_

+ + + +
+ +

A

+ + + +
    +
  • + airflow.providers.apache.hive + +
  • +
  • + airflow.providers.apache.hive.hooks + +
  • +
  • + airflow.providers.apache.hive.hooks.hive + +
  • +
  • + airflow.providers.apache.hive.operators + +
  • +
  • + airflow.providers.apache.hive.operators.hive + +
  • +
  • + airflow.providers.apache.hive.operators.hive_stats + +
  • +
  • + airflow.providers.apache.hive.sensors + +
  • +
  • + airflow.providers.apache.hive.sensors.hive_partition + +
  • +
  • + airflow.providers.apache.hive.sensors.metastore_partition + +
  • +
    +
  • + airflow.providers.apache.hive.sensors.named_hive_partition + +
  • +
  • + airflow.providers.apache.hive.transfers + +
  • +
  • + airflow.providers.apache.hive.transfers.hive_to_mysql + +
  • +
  • + airflow.providers.apache.hive.transfers.hive_to_samba + +
  • +
  • + airflow.providers.apache.hive.transfers.mssql_to_hive + +
  • +
  • + airflow.providers.apache.hive.transfers.mysql_to_hive + +
  • +
  • + airflow.providers.apache.hive.transfers.s3_to_hive + +
  • +
  • + airflow.providers.apache.hive.transfers.vertica_to_hive + +
  • +
  • analyze_tweets() (in module tests.system.providers.apache.hive.example_twitter_dag) +
  • +
+ +

C

+ + + +
+ +

D

+ + + +
+ +

E

+ + +
+ +

F

+ + + +
+ +

G

+ + + +
+ +

H

+ + + +
+ +

K

+ + +
+ +

L

+ + + +
+ +

M

+ + + +
+ +

N

+ + +
+ +

O

+ + +
+ +

P

+ + + +
+ +

R

+ + +
+ +

S

+ + + +
+ +

T

+ + + +
+ +

U

+ + +
+ +

V

+ + +
+ + + + +
+ +
+
+
+
+
+

Was this entry helpful?

+
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+
+ +
+ + + + + + +
+ + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/5.0.0/index.html b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/index.html new file mode 100644 index 00000000000..6e620608258 --- /dev/null +++ b/docs-archive/apache-airflow-providers-apache-hive/5.0.0/index.html @@ -0,0 +1,1339 @@ + + + + + + + + + + + + apache-airflow-providers-apache-hive — apache-airflow-providers-apache-hive Documentation + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +
+
+
+

apache-airflow-providers-apache-hive

+
+

Content

+
+

Guides

+ +
+
+

References

+ +
+
+
+ +
+

Commits

+ +
+
+
+

Package apache-airflow-providers-apache-hive

+

Apache Hive

+

Release: 5.0.0

+
+
+

Provider package

+

This is a provider package for the apache.hive provider. All classes for this provider package +are in the airflow.providers.apache.hive Python package.

+
+
+

Installation

+

You can install this package on top of an existing Airflow 2 installation (see Requirements below +for the minimum Airflow version supported) via +pip install apache-airflow-providers-apache-hive

+
+
+

Requirements

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

PIP package

Version required

apache-airflow

>=2.3.0

apache-airflow-providers-common-sql

>=1.3.1

hmsclient

>=0.1.0

pandas

>=0.17.1

pyhive[hive]

>=0.6.0

sasl

>=0.3.1; python_version>="3.9"

thrift

>=0.9.2

+
+
+

Cross provider package dependencies

+

Those are dependencies that might be needed in order to use all the features of the package. +You need to install the specified provider packages in order to use them.

+

You can install such cross-provider dependencies when installing from PyPI. For example:

+
pip install apache-airflow-providers-apache-hive[amazon]
+
+
+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Dependent package

Extra

apache-airflow-providers-amazon

amazon

apache-airflow-providers-common-sql

common.sql

apache-airflow-providers-microsoft-mssql

microsoft.mssql

apache-airflow-providers-mysql

mysql

apache-airflow-providers-presto

presto

apache-airflow-providers-samba

samba

apache-airflow-providers-vertica

vertica

+
+
+

Downloading official packages

+

You can download officially released packages and verify their checksums and signatures from the +Official Apache Download site

+ +
+
+
+
+

Changelog

+
+

5.0.0

+
+

Breaking changes

+

The hive_cli_params from the connection were moved to the Hook. If you have extra parameters defined in your +connections as the hive_cli_params extra, you should move them to the DAG where your HiveOperator is used, as in the sketch below.

+
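A minimal sketch of the new placement, assuming the hive_cli_params argument added to HiveOperator by #28101; the parameter value shown is only a placeholder:

# Minimal sketch: pass hive_cli_params at the DAG/operator level instead of in the
# connection extra. Assumes the hive_cli_params argument added in 5.0.0; the value
# shown is a placeholder.
from airflow.providers.apache.hive.operators.hive import HiveOperator

hive_task = HiveOperator(
    task_id="run_hql",
    hql="SELECT 1",
    hive_cli_conn_id="hive_cli_default",
    hive_cli_params="-hiveconf mapred.job.queue.name=default",
)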
    +
  • Move hive_cli_params to hook parameters (#28101)

  • +
+
+
+

Features

+
    +
  • Improve filtering for invalid schemas in Hive hook (#27808)

  • +
+
+
+
+

4.1.1

+
+

Bug Fixes

+
    +
  • Bump common.sql provider to 1.3.1 (#27888)

  • +
+
+
+
+

4.1.0

+

This release of the provider is only available for Airflow 2.3+ as explained in the +Apache Airflow providers support policy.

+
+

Misc

+
    +
  • Move min airflow version to 2.3.0 for all providers (#27196)

  • +
+
+
+

Bug Fixes

+
    +
  • Filter out invalid schemas in Hive hook (#27647)

  • +
+
+
+
+

4.0.1

+
+

Misc

+
    +
  • Add common-sql lower bound for common-sql (#25789)

  • +
+
+
+
+

4.0.0

+
+

Breaking Changes

+
    +
The hql parameter in get_records of HiveServer2Hook has been renamed to sql to match the +get_records DbApiHook signature. If you used it as a positional parameter, this is no change for you, +but if you used it as a keyword argument, you need to rename it (see the sketch after this list).

  • +
The hive_conf parameter has been renamed to parameters and it is now the second parameter, to match the get_records +signature from the DbApiHook. You need to rename it if you used it.

  • +
The schema parameter in get_records is an optional keyword argument that you can add, to match +the schema parameter of get_records from the DbApiHook.

  • +
  • Deprecate hql parameters and synchronize DBApiHook method APIs (#25299)

  • +
  • Remove Smart Sensors (#25507)

  • +
+
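The renames listed above amount to the following call-site change. This is a minimal sketch assuming HiveServer2Hook, with the query, configuration key, and schema values as placeholders:

# Minimal sketch of the get_records rename described above; all values are placeholders.
from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook

hook = HiveServer2Hook(hiveserver2_conn_id="hiveserver2_default")

# Before 4.0.0 (keyword form):
#     hook.get_records(hql="SELECT * FROM my_table LIMIT 10", hive_conf={"tez.queue.name": "default"})
# From 4.0.0 on, the first argument is sql and the former hive_conf dict is passed as parameters:
records = hook.get_records(
    "SELECT * FROM my_table LIMIT 10",
    parameters={"tez.queue.name": "default"},
    schema="default",
)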
+
+
+

3.1.0

+
+

Features

+
    +
  • Move all SQL classes to common-sql provider (#24836)

  • +
+
+
+

Bug Fixes

+
    +
  • fix connection extra parameter 'auth_mechanism' in 'HiveMetastoreHook' and 'HiveServer2Hook' (#24713)

  • +
+
+
+
+

3.0.0

+
+

Breaking changes

+ +
+
+

Misc

  • chore: Refactoring and Cleaning Apache Providers (#24219)

  • AIP-47 - Migrate hive DAGs to new design #22439 (#24204)

2.3.3


Bug Fixes

  • Fix HiveToMySqlOperator's wrong docstring (#23316)

2.3.2


Bug Fixes

  • Fix mistakenly added install_requires for all providers (#22382)

2.3.1


Misc

  • Add Trove classifiers in PyPI (Framework :: Apache Airflow :: Provider)

2.3.0


Features

  • Set larger limit get_partitions_by_filter in HiveMetastoreHook (#21504)

Bug Fixes

  • Fix Python 3.9 support in Hive (#21893)

  • Fix key typo in 'template_fields_renderers' for 'HiveOperator' (#21525)

Misc

  • Support for Python 3.10

  • Add how-to guide for hive operator (#21590)

2.2.0


Features

  • Add more SQL template fields renderers (#21237)

  • Add conditional 'template_fields_renderers' check for new SQL lexers (#21403)

2.1.0


Features

  • hive provider: restore HA support for metastore (#19777)

Bug Fixes


2.0.3


Bug Fixes

  • fix get_connections deprecation warn in hivemetastore hook (#18854)

2.0.2


Bug fixes

  • HiveHook fix get_pandas_df() failure when it tries to read an empty table (#17777)

Misc

  • Optimise connection importing for Airflow 2.2.0

2.0.1


Features

  • Add Python 3.9 support (#15515)

2.0.0


Breaking changes

  • Auto-apply apply_default decorator (#15667)

Warning

Due to the apply_default decorator removal, this version of the provider requires Airflow 2.1.0+.
If your Airflow version is < 2.1.0, and you want to install this provider version, first upgrade
Airflow to at least version 2.1.0. Otherwise your Airflow package version will be upgraded
automatically and you will have to manually run airflow db upgrade to complete the migration.

1.0.3


Bug fixes

  • Fix mistake and typos in doc/docstrings (#15180)

  • Fix grammar and remove duplicate words (#14647)

  • Resolve issue related to HiveCliHook kill (#14542)

1.0.2


Bug fixes

  • Corrections in docs and tools after releasing provider RCs (#14082)

1.0.1


Updated documentation and readme files.


Bug fixes

  • Remove password if in LDAP or CUSTOM mode HiveServer2Hook (#11767)

1.0.0


Initial version of the provider.


Installing from sources


Released packages

This page describes downloading and verifying apache-airflow-providers-apache-hive provider version
5.0.0 using officially released packages. You can also install the provider package - as most Python
packages - via PyPI. You can choose a different version of the provider by selecting it from the
drop-down at the top-left of the page.

The sdist and whl packages released are the “official” sources of installation that you can use if
you want to verify the origin of the packages and verify their checksums and signatures. The packages
are available via the Official Apache Software Foundation Downloads.

The downloads are available at:

If you want to install from the source code, you can download from the sources link above; it will contain
an INSTALL file with details on how you can build and install the provider.

Release integrity

PGP signatures KEYS

It is essential that you verify the integrity of the downloaded files using the PGP or SHA signatures.
The PGP signatures can be verified using GPG or PGP. Please download the KEYS as well as the asc
signature files for the relevant distribution. It is recommended to get these files from the
main distribution directory and not from the mirrors.

gpg --import KEYS

or

pgpk -a KEYS

or

pgp -ka KEYS

To verify the binaries/sources you can download the relevant asc files from the main
distribution directory and follow the guide below.

gpg --verify apache-airflow-providers-********.asc apache-airflow-*********

or

pgpv apache-airflow-providers-********.asc

or

pgp apache-airflow-providers-********.asc

Example:

$ gpg --verify apache-airflow-providers-apache-hive-5.0.0.tar.gz.asc apache-airflow-providers-apache-hive-5.0.0.tar.gz
  gpg: Signature made Sat 11 Sep 12:49:54 2021 BST
  gpg:                using RSA key CDE15C6E4D3A8EC4ECF4BA4B6674E08AD7DE406F
  gpg:                issuer "kaxilnaik@apache.org"
  gpg: Good signature from "Kaxil Naik <kaxilnaik@apache.org>" [unknown]
  gpg:                 aka "Kaxil Naik <kaxilnaik@gmail.com>" [unknown]
  gpg: WARNING: The key's User ID is not certified with a trusted signature!
  gpg:          There is no indication that the signature belongs to the owner.
  Primary key fingerprint: CDE1 5C6E 4D3A 8EC4 ECF4  BA4B 6674 E08A D7DE 406F

The “Good signature from …” line indicates that the signatures are correct.
Do not worry about the “not certified with a trusted signature” warning. Most of the certificates used
by release managers are self-signed, which is why you get this warning. By importing the key from the
KEYS page in the previous step, you know that it is already a valid key.

For the SHA512 sum check, download the relevant sha512 file and run the following:

shasum -a 512 apache-airflow-providers-********  | diff - apache-airflow-providers-********.sha512

The SHASUM of the file should match the one provided in the .sha512 file.

Example:

shasum -a 512 apache-airflow-providers-apache-hive-5.0.0.tar.gz  | diff - apache-airflow-providers-apache-hive-5.0.0.tar.gz.sha512

Verifying PyPI releases

You can verify the Provider .whl packages from PyPI by locally downloading the package, signature
and SHA sum files with the script below:

#!/bin/bash
PACKAGE_VERSION=5.0.0
PACKAGE_NAME=apache-airflow-providers-apache-hive
provider_download_dir=$(mktemp -d)
# Download the wheel from PyPI (no dependencies are needed just for verification)
pip download --no-deps "${PACKAGE_NAME}==${PACKAGE_VERSION}" --dest "${provider_download_dir}"
# Fetch the detached signature and SHA512 checksum published on the Apache download servers
curl "https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-5.0.0-py3-none-any.whl.asc" \
    -L -o "${provider_download_dir}/apache_airflow_providers_apache_hive-5.0.0-py3-none-any.whl.asc"
curl "https://downloads.apache.org/airflow/providers/apache_airflow_providers_apache_hive-5.0.0-py3-none-any.whl.sha512" \
    -L -o "${provider_download_dir}/apache_airflow_providers_apache_hive-5.0.0-py3-none-any.whl.sha512"
echo
echo "Please verify files downloaded to ${provider_download_dir}"
ls -la "${provider_download_dir}"
echo

Once you have verified the files following the instructions in the previous chapter, you can remove the
temporary folder that was created.


Apache Hive Operators

The Apache Hive data warehouse software facilitates reading, writing,
and managing large datasets residing in distributed storage using SQL.
Structure can be projected onto data already in storage.

HiveOperator

This operator executes HQL code or a Hive script in a specific Hive database.

tests/system/providers/apache/hive/example_twitter_dag.py[source]

    load_to_hive = HiveOperator(
        task_id=f"load_{channel}_to_hive",
        hql=(
            f"LOAD DATA INPATH '{hdfs_dir}{channel}/{file_name}'"
            f"INTO TABLE {channel}"
            f"PARTITION(dt='{dt}')"
        ),
    )
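
The hql argument can also point at a script file; a minimal sketch, assuming a my_query.hql file (the
file name and database are illustrative) that Airflow can find through the DAG folder or
template_searchpath, since .hql and .sql files are rendered as Jinja templates before execution:

from airflow.providers.apache.hive.operators.hive import HiveOperator

run_script = HiveOperator(
    task_id="run_hql_script",
    hql="my_query.hql",    # rendered as a Jinja template, then executed against the database below
    schema="my_database",  # the Hive database the statements run against
)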

Reference

For more information, check the Apache Hive documentation.
(airflow.providers.apache.hive.hooks.hive.hivemetastorehook method)": [[0, "airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_partitions"]], "get_records() (airflow.providers.apache.hive.hooks.hive.hiveserver2hook method)": [[0, "airflow.providers.apache.hive.hooks.hive.HiveServer2Hook.get_records"]], "get_results() (airflow.providers.apache.hive.hooks.hive.hiveserver2hook method)": [[0, "airflow.providers.apache.hive.hooks.hive.HiveServer2Hook.get_results"]], "get_table() (airflow.providers.apache.hive.hooks.hive.hivemetastorehook method)": [[0, "airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_table"]], "get_tables() (airflow.providers.apache.hive.hooks.hive.hivemetastorehook method)": [[0, "airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_tables"]], "hook_name (airflow.providers.apache.hive.hooks.hive.hiveclihook attribute)": [[0, "airflow.providers.apache.hive.hooks.hive.HiveCliHook.hook_name"]], "hook_name (airflow.providers.apache.hive.hooks.hive.hivemetastorehook attribute)": [[0, "airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.hook_name"]], "hook_name (airflow.providers.apache.hive.hooks.hive.hiveserver2hook attribute)": [[0, "airflow.providers.apache.hive.hooks.hive.HiveServer2Hook.hook_name"]], "kill() (airflow.providers.apache.hive.hooks.hive.hiveclihook method)": [[0, "airflow.providers.apache.hive.hooks.hive.HiveCliHook.kill"]], "load_df() (airflow.providers.apache.hive.hooks.hive.hiveclihook method)": [[0, "airflow.providers.apache.hive.hooks.hive.HiveCliHook.load_df"]], "load_file() (airflow.providers.apache.hive.hooks.hive.hiveclihook method)": [[0, "airflow.providers.apache.hive.hooks.hive.HiveCliHook.load_file"]], "max_partition() (airflow.providers.apache.hive.hooks.hive.hivemetastorehook method)": [[0, "airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.max_partition"]], "module": [[0, "module-airflow.providers.apache.hive.hooks.hive"], [1, "module-airflow.providers.apache.hive.hooks"], [2, "module-airflow.providers.apache.hive"], [3, "module-airflow.providers.apache.hive.operators.hive"], [4, "module-airflow.providers.apache.hive.operators.hive_stats"], [5, "module-airflow.providers.apache.hive.operators"], [6, "module-airflow.providers.apache.hive.sensors.hive_partition"], [7, "module-airflow.providers.apache.hive.sensors"], [8, "module-airflow.providers.apache.hive.sensors.metastore_partition"], [9, "module-airflow.providers.apache.hive.sensors.named_hive_partition"], [10, "module-airflow.providers.apache.hive.transfers.hive_to_mysql"], [11, "module-airflow.providers.apache.hive.transfers.hive_to_samba"], [12, "module-airflow.providers.apache.hive.transfers"], [13, "module-airflow.providers.apache.hive.transfers.mssql_to_hive"], [14, "module-airflow.providers.apache.hive.transfers.mysql_to_hive"], [15, "module-airflow.providers.apache.hive.transfers.s3_to_hive"], [16, "module-airflow.providers.apache.hive.transfers.vertica_to_hive"], [17, "module-tests.system.providers.apache.hive.example_twitter_dag"], [18, "module-tests.system.providers.apache.hive"]], "run_cli() (airflow.providers.apache.hive.hooks.hive.hiveclihook method)": [[0, "airflow.providers.apache.hive.hooks.hive.HiveCliHook.run_cli"]], "supports_autocommit (airflow.providers.apache.hive.hooks.hive.hiveserver2hook attribute)": [[0, "airflow.providers.apache.hive.hooks.hive.HiveServer2Hook.supports_autocommit"]], "table_exists() (airflow.providers.apache.hive.hooks.hive.hivemetastorehook method)": [[0, 
"airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.table_exists"]], "test_hql() (airflow.providers.apache.hive.hooks.hive.hiveclihook method)": [[0, "airflow.providers.apache.hive.hooks.hive.HiveCliHook.test_hql"]], "to_csv() (airflow.providers.apache.hive.hooks.hive.hiveserver2hook method)": [[0, "airflow.providers.apache.hive.hooks.hive.HiveServer2Hook.to_csv"]], "airflow.providers.apache.hive.hooks": [[1, "module-airflow.providers.apache.hive.hooks"]], "airflow.providers.apache.hive": [[2, "module-airflow.providers.apache.hive"]], "hiveoperator (class in airflow.providers.apache.hive.operators.hive)": [[3, "airflow.providers.apache.hive.operators.hive.HiveOperator"]], "airflow.providers.apache.hive.operators.hive": [[3, "module-airflow.providers.apache.hive.operators.hive"]], "clear_airflow_vars() (airflow.providers.apache.hive.operators.hive.hiveoperator method)": [[3, "airflow.providers.apache.hive.operators.hive.HiveOperator.clear_airflow_vars"]], "dry_run() (airflow.providers.apache.hive.operators.hive.hiveoperator method)": [[3, "airflow.providers.apache.hive.operators.hive.HiveOperator.dry_run"]], "execute() (airflow.providers.apache.hive.operators.hive.hiveoperator method)": [[3, "airflow.providers.apache.hive.operators.hive.HiveOperator.execute"]], "get_hook() (airflow.providers.apache.hive.operators.hive.hiveoperator method)": [[3, "airflow.providers.apache.hive.operators.hive.HiveOperator.get_hook"]], "on_kill() (airflow.providers.apache.hive.operators.hive.hiveoperator method)": [[3, "airflow.providers.apache.hive.operators.hive.HiveOperator.on_kill"]], "prepare_template() (airflow.providers.apache.hive.operators.hive.hiveoperator method)": [[3, "airflow.providers.apache.hive.operators.hive.HiveOperator.prepare_template"]], "template_ext (airflow.providers.apache.hive.operators.hive.hiveoperator attribute)": [[3, "airflow.providers.apache.hive.operators.hive.HiveOperator.template_ext"]], "template_fields (airflow.providers.apache.hive.operators.hive.hiveoperator attribute)": [[3, "airflow.providers.apache.hive.operators.hive.HiveOperator.template_fields"]], "template_fields_renderers (airflow.providers.apache.hive.operators.hive.hiveoperator attribute)": [[3, "airflow.providers.apache.hive.operators.hive.HiveOperator.template_fields_renderers"]], "ui_color (airflow.providers.apache.hive.operators.hive.hiveoperator attribute)": [[3, "airflow.providers.apache.hive.operators.hive.HiveOperator.ui_color"]], "hivestatscollectionoperator (class in airflow.providers.apache.hive.operators.hive_stats)": [[4, "airflow.providers.apache.hive.operators.hive_stats.HiveStatsCollectionOperator"]], "airflow.providers.apache.hive.operators.hive_stats": [[4, "module-airflow.providers.apache.hive.operators.hive_stats"]], "execute() (airflow.providers.apache.hive.operators.hive_stats.hivestatscollectionoperator method)": [[4, "airflow.providers.apache.hive.operators.hive_stats.HiveStatsCollectionOperator.execute"]], "get_default_exprs() (airflow.providers.apache.hive.operators.hive_stats.hivestatscollectionoperator method)": [[4, "airflow.providers.apache.hive.operators.hive_stats.HiveStatsCollectionOperator.get_default_exprs"]], "template_fields (airflow.providers.apache.hive.operators.hive_stats.hivestatscollectionoperator attribute)": [[4, "airflow.providers.apache.hive.operators.hive_stats.HiveStatsCollectionOperator.template_fields"]], "ui_color (airflow.providers.apache.hive.operators.hive_stats.hivestatscollectionoperator attribute)": [[4, 
"airflow.providers.apache.hive.operators.hive_stats.HiveStatsCollectionOperator.ui_color"]], "airflow.providers.apache.hive.operators": [[5, "module-airflow.providers.apache.hive.operators"]], "hivepartitionsensor (class in airflow.providers.apache.hive.sensors.hive_partition)": [[6, "airflow.providers.apache.hive.sensors.hive_partition.HivePartitionSensor"]], "airflow.providers.apache.hive.sensors.hive_partition": [[6, "module-airflow.providers.apache.hive.sensors.hive_partition"]], "poke() (airflow.providers.apache.hive.sensors.hive_partition.hivepartitionsensor method)": [[6, "airflow.providers.apache.hive.sensors.hive_partition.HivePartitionSensor.poke"]], "template_fields (airflow.providers.apache.hive.sensors.hive_partition.hivepartitionsensor attribute)": [[6, "airflow.providers.apache.hive.sensors.hive_partition.HivePartitionSensor.template_fields"]], "ui_color (airflow.providers.apache.hive.sensors.hive_partition.hivepartitionsensor attribute)": [[6, "airflow.providers.apache.hive.sensors.hive_partition.HivePartitionSensor.ui_color"]], "airflow.providers.apache.hive.sensors": [[7, "module-airflow.providers.apache.hive.sensors"]], "metastorepartitionsensor (class in airflow.providers.apache.hive.sensors.metastore_partition)": [[8, "airflow.providers.apache.hive.sensors.metastore_partition.MetastorePartitionSensor"]], "airflow.providers.apache.hive.sensors.metastore_partition": [[8, "module-airflow.providers.apache.hive.sensors.metastore_partition"]], "poke() (airflow.providers.apache.hive.sensors.metastore_partition.metastorepartitionsensor method)": [[8, "airflow.providers.apache.hive.sensors.metastore_partition.MetastorePartitionSensor.poke"]], "template_fields (airflow.providers.apache.hive.sensors.metastore_partition.metastorepartitionsensor attribute)": [[8, "airflow.providers.apache.hive.sensors.metastore_partition.MetastorePartitionSensor.template_fields"]], "ui_color (airflow.providers.apache.hive.sensors.metastore_partition.metastorepartitionsensor attribute)": [[8, "airflow.providers.apache.hive.sensors.metastore_partition.MetastorePartitionSensor.ui_color"]], "namedhivepartitionsensor (class in airflow.providers.apache.hive.sensors.named_hive_partition)": [[9, "airflow.providers.apache.hive.sensors.named_hive_partition.NamedHivePartitionSensor"]], "airflow.providers.apache.hive.sensors.named_hive_partition": [[9, "module-airflow.providers.apache.hive.sensors.named_hive_partition"]], "parse_partition_name() (airflow.providers.apache.hive.sensors.named_hive_partition.namedhivepartitionsensor static method)": [[9, "airflow.providers.apache.hive.sensors.named_hive_partition.NamedHivePartitionSensor.parse_partition_name"]], "poke() (airflow.providers.apache.hive.sensors.named_hive_partition.namedhivepartitionsensor method)": [[9, "airflow.providers.apache.hive.sensors.named_hive_partition.NamedHivePartitionSensor.poke"]], "poke_partition() (airflow.providers.apache.hive.sensors.named_hive_partition.namedhivepartitionsensor method)": [[9, "airflow.providers.apache.hive.sensors.named_hive_partition.NamedHivePartitionSensor.poke_partition"]], "template_fields (airflow.providers.apache.hive.sensors.named_hive_partition.namedhivepartitionsensor attribute)": [[9, "airflow.providers.apache.hive.sensors.named_hive_partition.NamedHivePartitionSensor.template_fields"]], "ui_color (airflow.providers.apache.hive.sensors.named_hive_partition.namedhivepartitionsensor attribute)": [[9, "airflow.providers.apache.hive.sensors.named_hive_partition.NamedHivePartitionSensor.ui_color"]], 
"hivetomysqloperator (class in airflow.providers.apache.hive.transfers.hive_to_mysql)": [[10, "airflow.providers.apache.hive.transfers.hive_to_mysql.HiveToMySqlOperator"]], "airflow.providers.apache.hive.transfers.hive_to_mysql": [[10, "module-airflow.providers.apache.hive.transfers.hive_to_mysql"]], "execute() (airflow.providers.apache.hive.transfers.hive_to_mysql.hivetomysqloperator method)": [[10, "airflow.providers.apache.hive.transfers.hive_to_mysql.HiveToMySqlOperator.execute"]], "template_ext (airflow.providers.apache.hive.transfers.hive_to_mysql.hivetomysqloperator attribute)": [[10, "airflow.providers.apache.hive.transfers.hive_to_mysql.HiveToMySqlOperator.template_ext"]], "template_fields (airflow.providers.apache.hive.transfers.hive_to_mysql.hivetomysqloperator attribute)": [[10, "airflow.providers.apache.hive.transfers.hive_to_mysql.HiveToMySqlOperator.template_fields"]], "template_fields_renderers (airflow.providers.apache.hive.transfers.hive_to_mysql.hivetomysqloperator attribute)": [[10, "airflow.providers.apache.hive.transfers.hive_to_mysql.HiveToMySqlOperator.template_fields_renderers"]], "ui_color (airflow.providers.apache.hive.transfers.hive_to_mysql.hivetomysqloperator attribute)": [[10, "airflow.providers.apache.hive.transfers.hive_to_mysql.HiveToMySqlOperator.ui_color"]], "hivetosambaoperator (class in airflow.providers.apache.hive.transfers.hive_to_samba)": [[11, "airflow.providers.apache.hive.transfers.hive_to_samba.HiveToSambaOperator"]], "airflow.providers.apache.hive.transfers.hive_to_samba": [[11, "module-airflow.providers.apache.hive.transfers.hive_to_samba"]], "execute() (airflow.providers.apache.hive.transfers.hive_to_samba.hivetosambaoperator method)": [[11, "airflow.providers.apache.hive.transfers.hive_to_samba.HiveToSambaOperator.execute"]], "template_ext (airflow.providers.apache.hive.transfers.hive_to_samba.hivetosambaoperator attribute)": [[11, "airflow.providers.apache.hive.transfers.hive_to_samba.HiveToSambaOperator.template_ext"]], "template_fields (airflow.providers.apache.hive.transfers.hive_to_samba.hivetosambaoperator attribute)": [[11, "airflow.providers.apache.hive.transfers.hive_to_samba.HiveToSambaOperator.template_fields"]], "template_fields_renderers (airflow.providers.apache.hive.transfers.hive_to_samba.hivetosambaoperator attribute)": [[11, "airflow.providers.apache.hive.transfers.hive_to_samba.HiveToSambaOperator.template_fields_renderers"]], "airflow.providers.apache.hive.transfers": [[12, "module-airflow.providers.apache.hive.transfers"]], "mssqltohiveoperator (class in airflow.providers.apache.hive.transfers.mssql_to_hive)": [[13, "airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlToHiveOperator"]], "airflow.providers.apache.hive.transfers.mssql_to_hive": [[13, "module-airflow.providers.apache.hive.transfers.mssql_to_hive"]], "execute() (airflow.providers.apache.hive.transfers.mssql_to_hive.mssqltohiveoperator method)": [[13, "airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlToHiveOperator.execute"]], "template_ext (airflow.providers.apache.hive.transfers.mssql_to_hive.mssqltohiveoperator attribute)": [[13, "airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlToHiveOperator.template_ext"]], "template_fields (airflow.providers.apache.hive.transfers.mssql_to_hive.mssqltohiveoperator attribute)": [[13, "airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlToHiveOperator.template_fields"]], "template_fields_renderers (airflow.providers.apache.hive.transfers.mssql_to_hive.mssqltohiveoperator attribute)": 
[[13, "airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlToHiveOperator.template_fields_renderers"]], "type_map() (airflow.providers.apache.hive.transfers.mssql_to_hive.mssqltohiveoperator class method)": [[13, "airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlToHiveOperator.type_map"]], "ui_color (airflow.providers.apache.hive.transfers.mssql_to_hive.mssqltohiveoperator attribute)": [[13, "airflow.providers.apache.hive.transfers.mssql_to_hive.MsSqlToHiveOperator.ui_color"]], "mysqltohiveoperator (class in airflow.providers.apache.hive.transfers.mysql_to_hive)": [[14, "airflow.providers.apache.hive.transfers.mysql_to_hive.MySqlToHiveOperator"]], "airflow.providers.apache.hive.transfers.mysql_to_hive": [[14, "module-airflow.providers.apache.hive.transfers.mysql_to_hive"]], "execute() (airflow.providers.apache.hive.transfers.mysql_to_hive.mysqltohiveoperator method)": [[14, "airflow.providers.apache.hive.transfers.mysql_to_hive.MySqlToHiveOperator.execute"]], "template_ext (airflow.providers.apache.hive.transfers.mysql_to_hive.mysqltohiveoperator attribute)": [[14, "airflow.providers.apache.hive.transfers.mysql_to_hive.MySqlToHiveOperator.template_ext"]], "template_fields (airflow.providers.apache.hive.transfers.mysql_to_hive.mysqltohiveoperator attribute)": [[14, "airflow.providers.apache.hive.transfers.mysql_to_hive.MySqlToHiveOperator.template_fields"]], "template_fields_renderers (airflow.providers.apache.hive.transfers.mysql_to_hive.mysqltohiveoperator attribute)": [[14, "airflow.providers.apache.hive.transfers.mysql_to_hive.MySqlToHiveOperator.template_fields_renderers"]], "type_map() (airflow.providers.apache.hive.transfers.mysql_to_hive.mysqltohiveoperator class method)": [[14, "airflow.providers.apache.hive.transfers.mysql_to_hive.MySqlToHiveOperator.type_map"]], "ui_color (airflow.providers.apache.hive.transfers.mysql_to_hive.mysqltohiveoperator attribute)": [[14, "airflow.providers.apache.hive.transfers.mysql_to_hive.MySqlToHiveOperator.ui_color"]], "s3tohiveoperator (class in airflow.providers.apache.hive.transfers.s3_to_hive)": [[15, "airflow.providers.apache.hive.transfers.s3_to_hive.S3ToHiveOperator"]], "airflow.providers.apache.hive.transfers.s3_to_hive": [[15, "module-airflow.providers.apache.hive.transfers.s3_to_hive"]], "execute() (airflow.providers.apache.hive.transfers.s3_to_hive.s3tohiveoperator method)": [[15, "airflow.providers.apache.hive.transfers.s3_to_hive.S3ToHiveOperator.execute"]], "template_ext (airflow.providers.apache.hive.transfers.s3_to_hive.s3tohiveoperator attribute)": [[15, "airflow.providers.apache.hive.transfers.s3_to_hive.S3ToHiveOperator.template_ext"]], "template_fields (airflow.providers.apache.hive.transfers.s3_to_hive.s3tohiveoperator attribute)": [[15, "airflow.providers.apache.hive.transfers.s3_to_hive.S3ToHiveOperator.template_fields"]], "ui_color (airflow.providers.apache.hive.transfers.s3_to_hive.s3tohiveoperator attribute)": [[15, "airflow.providers.apache.hive.transfers.s3_to_hive.S3ToHiveOperator.ui_color"]], "verticatohiveoperator (class in airflow.providers.apache.hive.transfers.vertica_to_hive)": [[16, "airflow.providers.apache.hive.transfers.vertica_to_hive.VerticaToHiveOperator"]], "airflow.providers.apache.hive.transfers.vertica_to_hive": [[16, "module-airflow.providers.apache.hive.transfers.vertica_to_hive"]], "execute() (airflow.providers.apache.hive.transfers.vertica_to_hive.verticatohiveoperator method)": [[16, "airflow.providers.apache.hive.transfers.vertica_to_hive.VerticaToHiveOperator.execute"]], 
"template_ext (airflow.providers.apache.hive.transfers.vertica_to_hive.verticatohiveoperator attribute)": [[16, "airflow.providers.apache.hive.transfers.vertica_to_hive.VerticaToHiveOperator.template_ext"]], "template_fields (airflow.providers.apache.hive.transfers.vertica_to_hive.verticatohiveoperator attribute)": [[16, "airflow.providers.apache.hive.transfers.vertica_to_hive.VerticaToHiveOperator.template_fields"]], "template_fields_renderers (airflow.providers.apache.hive.transfers.vertica_to_hive.verticatohiveoperator attribute)": [[16, "airflow.providers.apache.hive.transfers.vertica_to_hive.VerticaToHiveOperator.template_fields_renderers"]], "type_map() (airflow.providers.apache.hive.transfers.vertica_to_hive.verticatohiveoperator class method)": [[16, "airflow.providers.apache.hive.transfers.vertica_to_hive.VerticaToHiveOperator.type_map"]], "ui_color (airflow.providers.apache.hive.transfers.vertica_to_hive.verticatohiveoperator attribute)": [[16, "airflow.providers.apache.hive.transfers.vertica_to_hive.VerticaToHiveOperator.ui_color"]], "dag_id (in module tests.system.providers.apache.hive.example_twitter_dag)": [[17, "tests.system.providers.apache.hive.example_twitter_dag.DAG_ID"]], "env_id (in module tests.system.providers.apache.hive.example_twitter_dag)": [[17, "tests.system.providers.apache.hive.example_twitter_dag.ENV_ID"]], "analyze_tweets() (in module tests.system.providers.apache.hive.example_twitter_dag)": [[17, "tests.system.providers.apache.hive.example_twitter_dag.analyze_tweets"]], "clean_tweets() (in module tests.system.providers.apache.hive.example_twitter_dag)": [[17, "tests.system.providers.apache.hive.example_twitter_dag.clean_tweets"]], "fetch (in module tests.system.providers.apache.hive.example_twitter_dag)": [[17, "tests.system.providers.apache.hive.example_twitter_dag.fetch"]], "fetch_tweets() (in module tests.system.providers.apache.hive.example_twitter_dag)": [[17, "tests.system.providers.apache.hive.example_twitter_dag.fetch_tweets"]], "test_run (in module tests.system.providers.apache.hive.example_twitter_dag)": [[17, "tests.system.providers.apache.hive.example_twitter_dag.test_run"]], "tests.system.providers.apache.hive.example_twitter_dag": [[17, "module-tests.system.providers.apache.hive.example_twitter_dag"]], "transfer_to_db() (in module tests.system.providers.apache.hive.example_twitter_dag)": [[17, "tests.system.providers.apache.hive.example_twitter_dag.transfer_to_db"]], "tests.system.providers.apache.hive": [[18, "module-tests.system.providers.apache.hive"]]}}) \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-apache-hive/stable.txt b/docs-archive/apache-airflow-providers-apache-hive/stable.txt index 2582dddfd54..28cbf7c0aae 100644 --- a/docs-archive/apache-airflow-providers-apache-hive/stable.txt +++ b/docs-archive/apache-airflow-providers-apache-hive/stable.txt @@ -1 +1 @@ -4.1.1 \ No newline at end of file +5.0.0 \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/.buildinfo b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/.buildinfo new file mode 100644 index 00000000000..ac4ec85fc43 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/.buildinfo @@ -0,0 +1,4 @@ +# Sphinx build info version 1 +# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. 
+config: d51aae5dbae8ca89bad4562697f61300
+tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/adx/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/adx/index.html
new file mode 100644
index 00000000000..91b216f3776
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/adx/index.html
@@ -0,0 +1,997 @@
airflow.providers.microsoft.azure.hooks.adx — apache-airflow-providers-microsoft-azure Documentation

airflow.providers.microsoft.azure.hooks.adx

+

This module contains Azure Data Explorer hook.

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

AzureDataExplorerHook

Interacts with Azure Data Explorer (Kusto).

+
+
+class airflow.providers.microsoft.azure.hooks.adx.AzureDataExplorerHook(azure_data_explorer_conn_id=default_conn_name)[source]
+

Bases: airflow.hooks.base.BaseHook

+

Interacts with Azure Data Explorer (Kusto).

+

Cluster:

+

Azure Data Explorer cluster is specified by a URL, for example: “https://help.kusto.windows.net”. +The parameter must be provided through the Data Explorer Cluster URL connection detail.

+

Tenant ID:

+

To learn about tenants refer to: https://docs.microsoft.com/en-us/onedrive/find-your-office-365-tenant-id

+

Authentication methods:

+

Available authentication methods are:

+
+
    +
  • AAD_APP: Authentication with an AAD application (service principal). A Tenant ID is required when using this method. Provide the application ID and application key through the Username and Password parameters.

  • +
  • AAD_APP_CERT: Authentication with AAD application certificate. Tenant ID, Application PEM Certificate, +and Application Certificate Thumbprint are required when using this method.

  • +
  • AAD_CREDS: Authentication with AAD username and password. A Tenant ID is required when using this +method. Username and Password parameters are used for authentication with AAD.

  • +
  • AAD_DEVICE: Authenticate with AAD device code. Please note that if you choose this option, you’ll need +to authenticate for every new instance that is initialized. It is highly recommended to create one +instance and use it for all queries.

  • +
+
+
+
Parameters
+

azure_data_explorer_conn_id (str) – Reference to the +Azure Data Explorer connection.

+
+
+
+
+conn_name_attr = azure_data_explorer_conn_id[source]
+
+ +
+
+default_conn_name = azure_data_explorer_default[source]
+
+ +
+
+conn_type = azure_data_explorer[source]
+
+ +
+
+hook_name = Azure Data Explorer[source]
+
+ +
+
+static get_connection_form_widgets()[source]
+

Returns connection widgets to add to connection form

+
+
+
+ +
+
+static get_ui_field_behaviour()[source]
+

Returns custom field behaviour

+
+
+
+ +
+
+get_conn()[source]
+

Return a KustoClient object.

+
+
+
+ +
+
+run_query(query, database, options=None)[source]
+

Run a KQL query using the provided configuration, and return an azure.kusto.data.response.KustoResponseDataSet instance. If the query is unsuccessful, an AirflowException is raised.

+
+
Parameters
+
+
+
Returns
+

dict

+
+
Return type
+

azure.kusto.data.response.KustoResponseDataSetV2

+
+
+
+ +
+ +
+
+
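As a usage illustration (not part of the generated reference), a minimal sketch of running a KQL query through this hook; the connection id, database and query text are assumptions::

    from airflow.providers.microsoft.azure.hooks.adx import AzureDataExplorerHook

    # Assumes an Airflow connection "azure_data_explorer_default" configured as described above.
    hook = AzureDataExplorerHook(azure_data_explorer_conn_id="azure_data_explorer_default")
    response = hook.run_query(
        query="StormEvents | summarize event_count = count()",
        database="Samples",
        options={"servertimeout": 60},
    )
    # The returned KustoResponseDataSet exposes the result tables via primary_results.
    rows = response.primary_results[0]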
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/asb/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/asb/index.html
new file mode 100644
index 00000000000..9bc06badc16
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/asb/index.html
@@ -0,0 +1,1127 @@
airflow.providers.microsoft.azure.hooks.asb — apache-airflow-providers-microsoft-azure Documentation

airflow.providers.microsoft.azure.hooks.asb

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + + + + + + + +

BaseAzureServiceBusHook

BaseAzureServiceBusHook class to create session and create connection using connection string

AdminClientHook

Interacts with ServiceBusAdministrationClient client

MessageHook

Interacts with ServiceBusClient and acts as a high level interface

+
+
+class airflow.providers.microsoft.azure.hooks.asb.BaseAzureServiceBusHook(azure_service_bus_conn_id=default_conn_name)[source]
+

Bases: airflow.hooks.base.BaseHook

+

BaseAzureServiceBusHook creates the Service Bus session and connection using the connection string.

+
+
Parameters
+

azure_service_bus_conn_id (str) – Reference to the +Azure Service Bus connection.

+
+
+
+
+conn_name_attr = azure_service_bus_conn_id[source]
+
+ +
+
+default_conn_name = azure_service_bus_default[source]
+
+ +
+
+conn_type = azure_service_bus[source]
+
+ +
+
+hook_name = Azure Service Bus[source]
+
+ +
+
+static get_ui_field_behaviour()[source]
+

Returns custom field behaviour

+
+
+
+ +
+
+abstract get_conn()[source]
+

Returns connection for the hook.

+
+ +
+ +
+
+class airflow.providers.microsoft.azure.hooks.asb.AdminClientHook(azure_service_bus_conn_id=default_conn_name)[source]
+

Bases: BaseAzureServiceBusHook

+

Interacts with the ServiceBusAdministrationClient to create, update, list, and delete resources of a Service Bus namespace. This hook uses the same Azure Service Bus client connection inherited from the base class.

+
+
+
+
+get_conn()[source]
+

Create and return a ServiceBusAdministrationClient using the connection string in the connection details.

+
+
+
+ +
+
+create_queue(queue_name, max_delivery_count=10, dead_lettering_on_message_expiration=True, enable_batched_operations=True)[source]
+

Create a queue via the Service Bus Admin client and return the resulting QueueProperties.

+
+
Parameters
+
    +
  • queue_name (str) – The name of the queue or a QueueProperties with name.

  • +
  • max_delivery_count (int) – The maximum delivery count. A message is automatically dead lettered after this number of deliveries. Default value is 10.

  • +
  • dead_lettering_on_message_expiration (bool) – A value that indicates whether this subscription has +dead letter support when a message expires.

  • +
  • enable_batched_operations (bool) – Value that indicates whether server-side batched +operations are enabled.

  • +
+
+
+
+ +
+
+delete_queue(queue_name)[source]
+

Delete the queue identified by queue_name from the Service Bus namespace.

+
+
Parameters
+

queue_name (str) – The name of the queue or a QueueProperties with name.

+
+
+
+ +
+
+delete_subscription(subscription_name, topic_name)[source]
+

Delete a topic subscription entity under a Service Bus namespace.

+
+
Parameters
+
    +
  • subscription_name (str) – The subscription name that will own the rule in topic

  • +
  • topic_name (str) – The topic that will own the subscription rule.

  • +
+
+
+
+ +
+ +
+
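A minimal usage sketch (not part of the generated reference) of the admin client hook; the connection id and queue name are assumptions::

    from airflow.providers.microsoft.azure.hooks.asb import AdminClientHook

    hook = AdminClientHook(azure_service_bus_conn_id="azure_service_bus_default")
    # Create a queue with the defaults documented above, then delete it again.
    hook.create_queue("example-queue", max_delivery_count=10)
    hook.delete_queue("example-queue")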
+class airflow.providers.microsoft.azure.hooks.asb.MessageHook(azure_service_bus_conn_id=default_conn_name)[source]
+

Bases: BaseAzureServiceBusHook

+

Interacts with ServiceBusClient and acts as a high level interface +for getting ServiceBusSender and ServiceBusReceiver.

+
+
+
+
+get_conn()[source]
+

Create and return a ServiceBusClient using the connection string in the connection details.

+
+
+
+ +
+
+send_message(queue_name, messages, batch_message_flag=False)[source]
+

Send one or more messages to a Service Bus queue using the ServiceBusClient. When batch_message_flag is set, the messages are sent as a single batch message.

+
+
Parameters
+
    +
  • queue_name (str) – The name of the queue or a QueueProperties with name.

  • +
  • messages (str | list[str]) – Message which needs to be sent to the queue. It can be string or list of string.

  • +
  • batch_message_flag (bool) – bool flag, can be set to True if message needs to be +sent as batch message.

  • +
+
+
+
+ +
+
+static send_list_messages(sender, messages)[source]
+
+
+
+ +
+
+static send_batch_message(sender, messages)[source]
+
+
+
+ +
+
+receive_message(queue_name, max_message_count=1, max_wait_time=None)[source]
+

Receive a batch of messages at once from the specified queue.

+
+
Parameters
+
    +
  • queue_name – The name of the queue name or a QueueProperties with name.

  • +
  • max_message_count (int | None) – Maximum number of messages in the batch.

  • +
  • max_wait_time (float | None) – Maximum time to wait in seconds for the first message to arrive.

  • +
+
+
+
+ +
+
+receive_subscription_message(topic_name, subscription_name, max_message_count, max_wait_time)[source]
+

Receive a batch of subscription messages at once. This approach is optimal if you wish to process multiple messages simultaneously, or perform an ad-hoc receive as a single call.

+
+
Parameters
+
    +
  • subscription_name (str) – The subscription name that will own the rule in topic

  • +
  • topic_name (str) – The topic that will own the subscription rule.

  • +
  • max_message_count (int | None) – Maximum number of messages in the batch. +Actual number returned will depend on prefetch_count and incoming stream rate. +Setting to None will fully depend on the prefetch config. The default value is 1.

  • +
  • max_wait_time (float | None) – Maximum time to wait in seconds for the first message to arrive. If no messages arrive and no timeout is specified, this call will not return until the connection is closed. If specified, and no messages arrive within the timeout period, an empty list will be returned.

  • +
+
+
+
+ +
+ +
+
+
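A minimal usage sketch (not part of the generated reference) of sending and receiving queue messages; the connection id and queue name are assumptions::

    from airflow.providers.microsoft.azure.hooks.asb import MessageHook

    hook = MessageHook(azure_service_bus_conn_id="azure_service_bus_default")
    # Send two messages as a single batch, then drain up to ten of them.
    hook.send_message("example-queue", ["first", "second"], batch_message_flag=True)
    hook.receive_message("example-queue", max_message_count=10, max_wait_time=5)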
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/base_azure/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/base_azure/index.html
new file mode 100644
index 00000000000..d4fc3e8690c
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/base_azure/index.html
@@ -0,0 +1,960 @@
airflow.providers.microsoft.azure.hooks.base_azure — apache-airflow-providers-microsoft-azure Documentation

airflow.providers.microsoft.azure.hooks.base_azure

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

AzureBaseHook

This hook acts as a base hook for azure services. It offers several authentication mechanisms to

+
+
+class airflow.providers.microsoft.azure.hooks.base_azure.AzureBaseHook(sdk_client, conn_id='azure_default')[source]
+

Bases: airflow.hooks.base.BaseHook

+

This hook acts as a base hook for Azure services. It offers several authentication mechanisms to authenticate the client library used for upstream Azure hooks.

+
+
Parameters
+
    +
  • sdk_client (Any) – The SDKClient to use.

  • +
  • conn_id (str) – The Azure connection id +which refers to the information to connect to the service.

  • +
+
+
+
+
+conn_name_attr = azure_conn_id[source]
+
+ +
+
+default_conn_name = azure_default[source]
+
+ +
+
+conn_type = azure[source]
+
+ +
+
+hook_name = Azure[source]
+
+ +
+
+static get_connection_form_widgets()[source]
+

Returns connection widgets to add to connection form

+
+
+
+ +
+
+static get_ui_field_behaviour()[source]
+

Returns custom field behaviour

+
+
+
+ +
+
+get_conn()[source]
+

Authenticates the resource using the connection id passed during init.

+
+
Returns
+

the authenticated client.

+
+
Return type
+

Any

+
+
+
+ +
+ +
+
+
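A minimal usage sketch (not part of the generated reference); the SDK client class and connection id are assumptions::

    from azure.mgmt.resource import ResourceManagementClient

    from airflow.providers.microsoft.azure.hooks.base_azure import AzureBaseHook

    # The hook authenticates the given SDK client class with the credentials
    # stored in the "azure_default" connection and returns an instance of it.
    hook = AzureBaseHook(sdk_client=ResourceManagementClient, conn_id="azure_default")
    client = hook.get_conn()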
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/batch/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/batch/index.html
new file mode 100644
index 00000000000..73ccd268dff
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/batch/index.html
@@ -0,0 +1,1103 @@
airflow.providers.microsoft.azure.hooks.batch — apache-airflow-providers-microsoft-azure Documentation

airflow.providers.microsoft.azure.hooks.batch

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

AzureBatchHook

Hook for Azure Batch APIs

+
+
+class airflow.providers.microsoft.azure.hooks.batch.AzureBatchHook(azure_batch_conn_id=default_conn_name)[source]
+

Bases: airflow.hooks.base.BaseHook

+

Hook for Azure Batch APIs

+
+
Parameters
+

azure_batch_conn_id (str) – Azure Batch connection id of a service principal which will be used to interact with the Azure Batch account.

+
+
+
+
+conn_name_attr = azure_batch_conn_id[source]
+
+ +
+
+default_conn_name = azure_batch_default[source]
+
+ +
+
+conn_type = azure_batch[source]
+
+ +
+
+hook_name = Azure Batch Service[source]
+
+ +
+
+static get_connection_form_widgets()[source]
+

Returns connection widgets to add to connection form

+
+
+
+ +
+
+static get_ui_field_behaviour()[source]
+

Returns custom field behaviour

+
+
+
+ +
+
+get_conn()[source]
+

Get the Batch client connection

+
+
Returns
+

Azure Batch client

+
+
+
+ +
+
+configure_pool(pool_id, vm_size, vm_node_agent_sku_id, vm_publisher=None, vm_offer=None, sku_starts_with=None, vm_sku=None, vm_version=None, os_family=None, os_version=None, display_name=None, target_dedicated_nodes=None, use_latest_image_and_sku=False, **kwargs)[source]
+

Configures a pool

+
+
Parameters
+
    +
  • pool_id (str) – A string that uniquely identifies the Pool within the Account

  • +
  • vm_size (str) – The size of virtual machines in the Pool.

  • +
  • display_name (str | None) – The display name for the Pool

  • +
  • target_dedicated_nodes (int | None) – The desired number of dedicated Compute Nodes in the Pool.

  • +
  • use_latest_image_and_sku (bool) – Whether to use the latest verified vm image and sku

  • +
  • vm_publisher (str | None) – The publisher of the Azure Virtual Machines Marketplace Image. +For example, Canonical or MicrosoftWindowsServer.

  • +
  • vm_offer (str | None) – The offer type of the Azure Virtual Machines Marketplace Image. +For example, UbuntuServer or WindowsServer.

  • +
  • sku_starts_with (str | None) – The start name of the sku to search

  • +
  • vm_sku (str | None) – The name of the virtual machine sku to use

  • +
  • vm_version (str | None) – The version of the virtual machine

  • +
  • vm_version – str

  • +
  • vm_node_agent_sku_id (str) – The node agent sku id of the virtual machine

  • +
  • os_family (str | None) – The Azure Guest OS family to be installed on the virtual machines in the Pool.

  • +
  • os_version (str | None) – The OS family version

  • +
+
+
+
+ +
+
+create_pool(pool)[source]
+

Creates a pool if it does not already exist

+
+
Parameters
+

pool (azure.batch.models.PoolAddParameter) – the pool object to create

+
+
+
+ +
+
+wait_for_all_node_state(pool_id, node_state)[source]
+

Wait for all nodes in a pool to reach given states

+
+
Parameters
+
    +
  • pool_id (str) – A string that identifies the pool

  • +
  • node_state (set) – A set of batch_models.ComputeNodeState

  • +
+
+
+
+ +
+
+configure_job(job_id, pool_id, display_name=None, **kwargs)[source]
+

Configures a job for use in the pool

+
+
Parameters
+
    +
  • job_id (str) – A string that uniquely identifies the job within the account

  • +
  • pool_id (str) – A string that identifies the pool

  • +
  • display_name (str | None) – The display name for the job

  • +
+
+
+
+ +
+
+create_job(job)[source]
+

Creates a job in the pool

+
+
Parameters
+

job (azure.batch.models.JobAddParameter) – The job object to create

+
+
+
+ +
+
+configure_task(task_id, command_line, display_name=None, container_settings=None, **kwargs)[source]
+

Creates a task

+
+
Parameters
+
    +
  • task_id (str) – A string that identifies the task to create

  • +
  • command_line (str) – The command line of the Task.

  • +
  • display_name (str | None) – A display name for the Task

  • +
  • container_settings – The settings for the container under which the Task runs. +If the Pool that will run this Task has containerConfiguration set, +this must be set as well. If the Pool that will run this Task doesn’t have +containerConfiguration set, this must not be set.

  • +
+
+
+
+ +
+
+add_single_task_to_job(job_id, task)[source]
+

Add a single task to the given job if it doesn't already exist

+
+
Parameters
+
    +
  • job_id (str) – A string that identifies the given job

  • +
  • task (azure.batch.models.TaskAddParameter) – The task to add

  • +
+
+
+
+ +
+
+wait_for_job_tasks_to_complete(job_id, timeout)[source]
+

Wait for tasks in a particular job to complete

+
+
Parameters
+
    +
  • job_id (str) – A string that identifies the job

  • +
  • timeout (int) – The amount of time to wait before timing out in minutes

  • +
+
+
+
+ +
+
+test_connection()[source]
+

Test a configured Azure Batch connection.
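A minimal end-to-end sketch (not part of the generated reference) combining the calls above; the connection id, VM image values and identifiers are assumptions::

    from airflow.providers.microsoft.azure.hooks.batch import AzureBatchHook

    hook = AzureBatchHook(azure_batch_conn_id="azure_batch_default")
    pool = hook.configure_pool(
        pool_id="example-pool",
        vm_size="Standard_A1_v2",
        vm_node_agent_sku_id="batch.node.ubuntu 18.04",
        vm_publisher="canonical",
        vm_offer="ubuntuserver",
        vm_sku="18.04-lts",
        target_dedicated_nodes=1,
    )
    hook.create_pool(pool)
    job = hook.configure_job(job_id="example-job", pool_id="example-pool")
    hook.create_job(job)
    task = hook.configure_task(task_id="example-task", command_line="/bin/bash -c 'echo hello'")
    hook.add_single_task_to_job(job_id="example-job", task=task)
    hook.wait_for_job_tasks_to_complete(job_id="example-job", timeout=25)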

+
+ +
+ +
+
+
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/container_instance/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/container_instance/index.html
new file mode 100644
index 00000000000..08bd36f7658
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/container_instance/index.html
@@ -0,0 +1,1072 @@
airflow.providers.microsoft.azure.hooks.container_instance — apache-airflow-providers-microsoft-azure Documentation

airflow.providers.microsoft.azure.hooks.container_instance

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

AzureContainerInstanceHook

A hook to communicate with Azure Container Instances.

+
+
+class airflow.providers.microsoft.azure.hooks.container_instance.AzureContainerInstanceHook(azure_conn_id=default_conn_name)[source]
+

Bases: airflow.providers.microsoft.azure.hooks.base_azure.AzureBaseHook

+

A hook to communicate with Azure Container Instances.

+

This hook requires a service principal in order to work. After creating this service principal (Azure Active Directory/App Registrations), you need to fill in the client_id (Application ID) as login, the generated password as password, and the tenantId and subscriptionId in the extras field as JSON.

+
+
Parameters
+

azure_conn_id (str) – Azure connection id of +a service principal which will be used to start the container instance.

+
+
+
+
+conn_name_attr = azure_conn_id[source]
+
+ +
+
+default_conn_name = azure_default[source]
+
+ +
+
+conn_type = azure_container_instance[source]
+
+ +
+
+hook_name = Azure Container Instance[source]
+
+ +
+
+create_or_update(resource_group, name, container_group)[source]
+

Create a new container group

+
+
Parameters
+
    +
  • resource_group (str) – the name of the resource group

  • +
  • name (str) – the name of the container group

  • +
  • container_group (azure.mgmt.containerinstance.models.ContainerGroup) – the properties of the container group

  • +
+
+
+
+ +
+
+get_state_exitcode_details(resource_group, name)[source]
+

Get the state and exitcode of a container group

+
+
Parameters
+
    +
  • resource_group (str) – the name of the resource group

  • +
  • name (str) – the name of the container group

  • +
+
+
Returns
+

A tuple with the state, exitcode, and details. +If the exitcode is unknown 0 is returned.

+
+
Return type
+

tuple

+
+
+
+ +
+
+get_messages(resource_group, name)[source]
+

Get the messages of a container group

+
+
Parameters
+
    +
  • resource_group (str) – the name of the resource group

  • +
  • name (str) – the name of the container group

  • +
+
+
Returns
+

A list of the event messages

+
+
Return type
+

list

+
+
+
+ +
+
+get_state(resource_group, name)[source]
+

Get the state of a container group

+
+
Parameters
+
    +
  • resource_group (str) – the name of the resource group

  • +
  • name (str) – the name of the container group

  • +
+
+
Returns
+

ContainerGroup

+
+
Return type
+

azure.mgmt.containerinstance.models.ContainerGroup

+
+
+
+ +
+
+get_logs(resource_group, name, tail=1000)[source]
+

Get the tail from logs of a container group

+
+
Parameters
+
    +
  • resource_group (str) – the name of the resource group

  • +
  • name (str) – the name of the container group

  • +
  • tail (int) – the size of the tail

  • +
+
+
Returns
+

A list of log messages

+
+
Return type
+

list

+
+
+
+ +
+
+delete(resource_group, name)[source]
+

Delete a container group

+
+
Parameters
+
    +
  • resource_group (str) – the name of the resource group

  • +
  • name (str) – the name of the container group

  • +
+
+
+
+ +
+
+exists(resource_group, name)[source]
+

Test if a container group exists

+
+
Parameters
+
    +
  • resource_group (str) – the name of the resource group

  • +
  • name (str) – the name of the container group

  • +
+
+
+
+ +
+
+test_connection()[source]
+

Test a configured Azure Container Instance connection.
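A minimal usage sketch (not part of the generated reference) of inspecting an existing container group; the resource group and group name are assumptions::

    from airflow.providers.microsoft.azure.hooks.container_instance import AzureContainerInstanceHook

    hook = AzureContainerInstanceHook(azure_conn_id="azure_default")
    if hook.exists("example-rg", "example-container-group"):
        # State, exit code and detail message of the group, plus the last 100 log lines.
        state, exit_code, detail = hook.get_state_exitcode_details("example-rg", "example-container-group")
        logs = hook.get_logs("example-rg", "example-container-group", tail=100)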

+
+ +
+ +
+
+
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/container_registry/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/container_registry/index.html
new file mode 100644
index 00000000000..4e88512db8a
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/container_registry/index.html
@@ -0,0 +1,941 @@
airflow.providers.microsoft.azure.hooks.container_registry — apache-airflow-providers-microsoft-azure Documentation

airflow.providers.microsoft.azure.hooks.container_registry

+

Hook for Azure Container Registry

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

AzureContainerRegistryHook

A hook to communicate with an Azure Container Registry.

+
+
+class airflow.providers.microsoft.azure.hooks.container_registry.AzureContainerRegistryHook(conn_id='azure_registry')[source]
+

Bases: airflow.hooks.base.BaseHook

+

A hook to communicate with an Azure Container Registry.

+
+
Parameters
+

conn_id (str) – Azure Container Registry connection id +of a service principal which will be used to start the container instance

+
+
+
+
+conn_name_attr = azure_container_registry_conn_id[source]
+
+ +
+
+default_conn_name = azure_container_registry_default[source]
+
+ +
+
+conn_type = azure_container_registry[source]
+
+ +
+
+hook_name = Azure Container Registry[source]
+
+ +
+
+static get_ui_field_behaviour()[source]
+

Returns custom field behaviour

+
+
+
+ +
+
+get_conn()[source]
+

Returns connection for the hook.
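A minimal usage sketch (not part of the generated reference); the connection id is an assumption, and the returned object is whatever registry credential this provider version builds from the connection's login, password and host::

    from airflow.providers.microsoft.azure.hooks.container_registry import AzureContainerRegistryHook

    hook = AzureContainerRegistryHook(conn_id="azure_registry")
    # Typically passed into a container group definition as its image registry credential.
    registry_credential = hook.get_conn()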

+
+
+
+ +
+ +
+
+
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/container_volume/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/container_volume/index.html
new file mode 100644
index 00000000000..8a06d016c6d
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/container_volume/index.html
@@ -0,0 +1,961 @@
airflow.providers.microsoft.azure.hooks.container_volume — apache-airflow-providers-microsoft-azure Documentation

airflow.providers.microsoft.azure.hooks.container_volume

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

AzureContainerVolumeHook

A hook which wraps an Azure Volume.

+
+
+class airflow.providers.microsoft.azure.hooks.container_volume.AzureContainerVolumeHook(azure_container_volume_conn_id='azure_container_volume_default')[source]
+

Bases: airflow.hooks.base.BaseHook

+

A hook which wraps an Azure Volume.

+
+
Parameters
+

azure_container_volume_conn_id (str) – Reference to the Azure Container Volume connection id of the Azure account whose container volumes should be used.

+
+
+
+
+conn_name_attr = azure_container_volume_conn_id[source]
+
+ +
+
+default_conn_name = azure_container_volume_default[source]
+
+ +
+
+conn_type = azure_container_volume[source]
+
+ +
+
+hook_name = Azure Container Volume[source]
+
+ +
+
+static get_connection_form_widgets()[source]
+

Returns connection widgets to add to connection form

+
+
+
+ +
+
+static get_ui_field_behaviour()[source]
+

Returns custom field behaviour

+
+
+
+ +
+
+get_storagekey()[source]
+

Get Azure File Volume storage key

+
+
+
+ +
+
+get_file_volume(mount_name, share_name, storage_account_name, read_only=False)[source]
+

Get Azure File Volume
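A minimal usage sketch (not part of the generated reference); the connection id, share name and storage account name are assumptions::

    from airflow.providers.microsoft.azure.hooks.container_volume import AzureContainerVolumeHook

    hook = AzureContainerVolumeHook(azure_container_volume_conn_id="azure_container_volume_default")
    # Build a read-only Azure File volume that can be mounted into a container group.
    volume = hook.get_file_volume(
        mount_name="data",
        share_name="example-share",
        storage_account_name="examplestorageaccount",
        read_only=True,
    )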

+
+
+
+ +
+ +
+
+
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/cosmos/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/cosmos/index.html
new file mode 100644
index 00000000000..e9cedce6eef
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/cosmos/index.html
@@ -0,0 +1,1126 @@
airflow.providers.microsoft.azure.hooks.cosmos — apache-airflow-providers-microsoft-azure Documentation

airflow.providers.microsoft.azure.hooks.cosmos

+

This module contains integration with Azure CosmosDB.

+

AzureCosmosDBHook communicates via the Azure Cosmos library. Make sure that an Airflow connection of type azure_cosmos exists. Authorization can be done by supplying a login (the endpoint URI), a password (the secret key), and the extra fields database_name and collection_name to specify the default database and collection to use (see the connection azure_cosmos_default for an example).

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

AzureCosmosDBHook

Interacts with Azure CosmosDB.

+
+
+

Functions

+ ++++ + + + + + + + + + + + +

get_database_link(database_id)

Get Azure CosmosDB database link

get_collection_link(database_id, collection_id)

Get Azure CosmosDB collection link

get_document_link(database_id, collection_id, document_id)

Get Azure CosmosDB document link

+
+
+class airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook(azure_cosmos_conn_id=default_conn_name)[source]
+

Bases: airflow.hooks.base.BaseHook

+

Interacts with Azure CosmosDB.

+

login should be the endpoint URI and password should be the master key. Optionally, you can use the following extras to set default values for these: {“database_name”: “<DATABASE_NAME>”, “collection_name”: “<COLLECTION_NAME>”}. A short usage sketch follows the parameter list below.

+
+
Parameters
+

azure_cosmos_conn_id (str) – Reference to the +Azure CosmosDB connection.

+
+
+
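As a quick orientation, the sketch below shows typical hook usage, assuming an azure_cosmos_default connection whose extras define the default database and collection; the document contents and id are illustrative.

from airflow.providers.microsoft.azure.hooks.cosmos import AzureCosmosDBHook

hook = AzureCosmosDBHook(azure_cosmos_conn_id="azure_cosmos_default")

# Upsert an illustrative document into the default database/collection
# configured in the connection extras.
hook.upsert_document({"id": "example-id", "status": "ok"})

# Read it back; database_name and collection_name fall back to the
# defaults from the connection extras when omitted.
doc = hook.get_document(document_id="example-id")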
+
+conn_name_attr = azure_cosmos_conn_id[source]
+
+ +
+
+default_conn_name = azure_cosmos_default[source]
+
+ +
+
+conn_type = azure_cosmos[source]
+
+ +
+
+hook_name = Azure CosmosDB[source]
+
+ +
+
+static get_connection_form_widgets()[source]
+

Returns connection widgets to add to connection form

+
+
+
+ +
+
+static get_ui_field_behaviour()[source]
+

Returns custom field behaviour

+
+
+
+ +
+
+get_conn()[source]
+

Return a cosmos db client.

+
+
+
+ +
+
+does_collection_exist(collection_name, database_name)[source]
+

Checks if a collection exists in CosmosDB.

+
+
+
+ +
+
+create_collection(collection_name, database_name=None, partition_key=None)[source]
+

Creates a new collection in the CosmosDB database.

+
+
+
+ +
+
+does_database_exist(database_name)[source]
+

Checks if a database exists in CosmosDB.

+
+
+
+ +
+
+create_database(database_name)[source]
+

Creates a new database in CosmosDB.

+
+
+
+ +
+
+delete_database(database_name)[source]
+

Deletes an existing database in CosmosDB.

+
+
+
+ +
+
+delete_collection(collection_name, database_name=None)[source]
+

Deletes an existing collection in the CosmosDB database.

+
+
+
+ +
+
+upsert_document(document, database_name=None, collection_name=None, document_id=None)[source]
+

Inserts a new document (or updates an existing one) into an existing +collection in the CosmosDB database.

+
+ +
+
+insert_documents(documents, database_name=None, collection_name=None)[source]
+

Insert a list of new documents into an existing collection in the CosmosDB database.

+
+
+
+ +
+
+delete_document(document_id, database_name=None, collection_name=None, partition_key=None)[source]
+

Delete an existing document out of a collection in the CosmosDB database.

+
+
+
+ +
+
+get_document(document_id, database_name=None, collection_name=None, partition_key=None)[source]
+

Get a document from an existing collection in the CosmosDB database.

+
+
+
+ +
+
+get_documents(sql_string, database_name=None, collection_name=None, partition_key=None)[source]
+

Get a list of documents from an existing collection in the CosmosDB database via SQL query.

+
+
+
+ +
+
+test_connection()[source]
+

Test a configured Azure Cosmos connection.

+
+ +
+ +
+ +

Get Azure CosmosDB database link

+
+
+
+ +
+ +

Get Azure CosmosDB collection link

+
+
+
+ +
+ +

Get Azure CosmosDB document link

+
+
+
+ +
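The sketch below illustrates these module-level helpers; the ids are placeholders, and the link strings shown in the comments are only the expected Cosmos DB resource-link format.

from airflow.providers.microsoft.azure.hooks.cosmos import (
    get_collection_link,
    get_database_link,
    get_document_link,
)

# These helpers only build Cosmos DB resource-link strings; no API call is made.
print(get_database_link("mydb"))                   # e.g. dbs/mydb
print(get_collection_link("mydb", "mycol"))        # e.g. dbs/mydb/colls/mycol
print(get_document_link("mydb", "mycol", "doc1"))  # e.g. dbs/mydb/colls/mycol/docs/doc1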
+
+
+ + + +
+ +
+
+
+
+
+

\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/data_factory/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/data_factory/index.html
new file mode 100644
index 00000000000..2be8f0d4803
@@ -0,0 +1,1917 @@

airflow.providers.microsoft.azure.hooks.data_factory

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + + + + + + + +

PipelineRunInfo

Type class for the pipeline run info dictionary.

AzureDataFactoryPipelineRunStatus

Azure Data Factory pipeline operation statuses.

AzureDataFactoryHook

A hook to interact with Azure Data Factory.

+
+
+

Functions

+ ++++ + + + + + + + + +

provide_targeted_factory(func)

Provide the targeted factory to the decorated function in case it isn't specified.

get_field(extras, field_name[, strict])

Get field from extra, first checking short name, then for backcompat we check for prefixed name.

+
+
+

Attributes

+ ++++ + + + + + +

Credentials

+
+
+airflow.providers.microsoft.azure.hooks.data_factory.Credentials[source]
+
+ +
+
+airflow.providers.microsoft.azure.hooks.data_factory.provide_targeted_factory(func)[source]
+

Provide the targeted factory to the decorated function in case it isn’t specified.

+

If resource_group_name or factory_name is not provided it defaults to the value specified in +the connection extras.

+
+
+
+ +
+
+class airflow.providers.microsoft.azure.hooks.data_factory.PipelineRunInfo[source]
+

Bases: airflow.typing_compat.TypedDict

+

Type class for the pipeline run info dictionary.

+
+
+run_id :str[source]
+
+ +
+
+factory_name :str | None[source]
+
+ +
+
+resource_group_name :str | None[source]
+
+ +
+ +
+
+class airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryPipelineRunStatus[source]
+

Azure Data Factory pipeline operation statuses.

+
+
+QUEUED = Queued[source]
+
+ +
+
+IN_PROGRESS = InProgress[source]
+
+ +
+
+SUCCEEDED = Succeeded[source]
+
+ +
+
+FAILED = Failed[source]
+
+ +
+
+CANCELING = Canceling[source]
+
+ +
+
+CANCELLED = Cancelled[source]
+
+ +
+
+TERMINAL_STATUSES[source]
+
+ +
+ +
+
+exception airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryPipelineRunException[source]
+

Bases: airflow.exceptions.AirflowException

+

An exception that indicates a pipeline run failed to complete.

+
+ +
+
+airflow.providers.microsoft.azure.hooks.data_factory.get_field(extras, field_name, strict=False)[source]
+

Get field from extra, first checking short name, then for backcompat we check for prefixed name.

+
+
+
+ +
+
+class airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook(azure_data_factory_conn_id=default_conn_name)[source]
+

Bases: airflow.hooks.base.BaseHook

+

A hook to interact with Azure Data Factory.

+
+
Parameters
+

azure_data_factory_conn_id (str) – The Azure Data Factory connection id.

+
+
+
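To illustrate the typical flow, here is a minimal sketch that triggers a pipeline run and waits for it to succeed, using the run_pipeline and wait_for_pipeline_run_status methods documented below; the resource group, factory and pipeline names are placeholders, and run_id is read from the CreateRunResponse returned by the ADF client.

from airflow.providers.microsoft.azure.hooks.data_factory import (
    AzureDataFactoryHook,
    AzureDataFactoryPipelineRunStatus,
)

hook = AzureDataFactoryHook(azure_data_factory_conn_id="azure_data_factory_default")

# "my_pipeline", "my-rg" and "my-factory" are placeholder names.
run = hook.run_pipeline(
    "my_pipeline",
    resource_group_name="my-rg",
    factory_name="my-factory",
)

# Poll every 30 seconds until the run reaches the expected status (or times out).
hook.wait_for_pipeline_run_status(
    run_id=run.run_id,
    expected_statuses=AzureDataFactoryPipelineRunStatus.SUCCEEDED,
    resource_group_name="my-rg",
    factory_name="my-factory",
    check_interval=30,
)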
+
+conn_type :str = azure_data_factory[source]
+
+ +
+
+conn_name_attr :str = azure_data_factory_conn_id[source]
+
+ +
+
+default_conn_name :str = azure_data_factory_default[source]
+
+ +
+
+hook_name :str = Azure Data Factory[source]
+
+ +
+
+static get_connection_form_widgets()[source]
+

Returns connection widgets to add to connection form

+
+
+
+ +
+
+static get_ui_field_behaviour()[source]
+

Returns custom field behaviour

+
+
+
+ +
+
+get_conn()[source]
+

Returns connection for the hook.

+
+
+
+ +
+
+get_factory(resource_group_name=None, factory_name=None, **config)[source]
+

Get the factory.

+
+
Parameters
+
    +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Returns
+

The factory.

+
+
Return type
+

azure.mgmt.datafactory.models.Factory

+
+
+
+ +
+
+update_factory(factory, resource_group_name=None, factory_name=None, **config)[source]
+

Update the factory.

+
+
Parameters
+
    +
  • factory (azure.mgmt.datafactory.models.Factory) – The factory resource definition.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Raises
+

AirflowException – If the factory does not exist.

+
+
Returns
+

The factory.

+
+
Return type
+

azure.mgmt.datafactory.models.Factory

+
+
+
+ +
+
+create_factory(factory, resource_group_name=None, factory_name=None, **config)[source]
+

Create the factory.

+
+
Parameters
+
    +
  • factory (azure.mgmt.datafactory.models.Factory) – The factory resource definition.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Raises
+

AirflowException – If the factory already exists.

+
+
Returns
+

The factory.

+
+
Return type
+

azure.mgmt.datafactory.models.Factory

+
+
+
+ +
+
+delete_factory(resource_group_name=None, factory_name=None, **config)[source]
+

Delete the factory.

+
+
Parameters
+
    +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
+
+ +
+
+get_linked_service(linked_service_name, resource_group_name=None, factory_name=None, **config)[source]
+

Get the linked service.

+
+
Parameters
+
    +
  • linked_service_name (str) – The linked service name.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Returns
+

The linked service.

+
+
Return type
+

azure.mgmt.datafactory.models.LinkedServiceResource

+
+
+
+ +
+
+update_linked_service(linked_service_name, linked_service, resource_group_name=None, factory_name=None, **config)[source]
+

Update the linked service.

+
+
Parameters
+
    +
  • linked_service_name (str) – The linked service name.

  • +
  • linked_service (azure.mgmt.datafactory.models.LinkedServiceResource) – The linked service resource definition.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Raises
+

AirflowException – If the linked service does not exist.

+
+
Returns
+

The linked service.

+
+
Return type
+

azure.mgmt.datafactory.models.LinkedServiceResource

+
+
+
+ +
+
+create_linked_service(linked_service_name, linked_service, resource_group_name=None, factory_name=None, **config)[source]
+

Create the linked service.

+
+
Parameters
+
    +
  • linked_service_name (str) – The linked service name.

  • +
  • linked_service (azure.mgmt.datafactory.models.LinkedServiceResource) – The linked service resource definition.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Raises
+

AirflowException – If the linked service already exists.

+
+
Returns
+

The linked service.

+
+
Return type
+

azure.mgmt.datafactory.models.LinkedServiceResource

+
+
+
+ +
+
+delete_linked_service(linked_service_name, resource_group_name=None, factory_name=None, **config)[source]
+

Delete the linked service.

+
+
Parameters
+
    +
  • linked_service_name (str) – The linked service name.

  • +
  • resource_group_name (str | None) – The linked service name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
+
+ +
+
+get_dataset(dataset_name, resource_group_name=None, factory_name=None, **config)[source]
+

Get the dataset.

+
+
Parameters
+
    +
  • dataset_name (str) – The dataset name.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Returns
+

The dataset.

+
+
Return type
+

azure.mgmt.datafactory.models.DatasetResource

+
+
+
+ +
+
+update_dataset(dataset_name, dataset, resource_group_name=None, factory_name=None, **config)[source]
+

Update the dataset.

+
+
Parameters
+
    +
  • dataset_name (str) – The dataset name.

  • +
  • dataset (azure.mgmt.datafactory.models.DatasetResource) – The dataset resource definition.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Raises
+

AirflowException – If the dataset does not exist.

+
+
Returns
+

The dataset.

+
+
Return type
+

azure.mgmt.datafactory.models.DatasetResource

+
+
+
+ +
+
+create_dataset(dataset_name, dataset, resource_group_name=None, factory_name=None, **config)[source]
+

Create the dataset.

+
+
Parameters
+
    +
  • dataset_name (str) – The dataset name.

  • +
  • dataset (azure.mgmt.datafactory.models.DatasetResource) – The dataset resource definition.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Raises
+

AirflowException – If the dataset already exists.

+
+
Returns
+

The dataset.

+
+
Return type
+

azure.mgmt.datafactory.models.DatasetResource

+
+
+
+ +
+
+delete_dataset(dataset_name, resource_group_name=None, factory_name=None, **config)[source]
+

Delete the dataset.

+
+
Parameters
+
    +
  • dataset_name (str) – The dataset name.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
+
+ +
+
+get_dataflow(dataflow_name, resource_group_name=None, factory_name=None, **config)[source]
+

Get the dataflow.

+
+
Parameters
+
    +
  • dataflow_name (str) – The dataflow name.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Returns
+

The dataflow.

+
+
Return type
+

azure.mgmt.datafactory.models.DataFlow

+
+
+
+ +
+
+update_dataflow(dataflow_name, dataflow, resource_group_name=None, factory_name=None, **config)[source]
+

Update the dataflow.

+
+
Parameters
+
    +
  • dataflow_name (str) – The dataflow name.

  • +
  • dataflow (azure.mgmt.datafactory.models.DataFlow) – The dataflow resource definition.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Raises
+

AirflowException – If the dataset does not exist.

+
+
Returns
+

The dataflow.

+
+
Return type
+

azure.mgmt.datafactory.models.DataFlow

+
+
+
+ +
+
+create_dataflow(dataflow_name, dataflow, resource_group_name=None, factory_name=None, **config)[source]
+

Create the dataflow.

+
+
Parameters
+
    +
  • dataflow_name (str) – The dataflow name.

  • +
  • dataflow (azure.mgmt.datafactory.models.DataFlow) – The dataflow resource definition.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Raises
+

AirflowException – If the dataset already exists.

+
+
Returns
+

The dataset.

+
+
Return type
+

azure.mgmt.datafactory.models.DataFlow

+
+
+
+ +
+
+delete_dataflow(dataflow_name, resource_group_name=None, factory_name=None, **config)[source]
+

Delete the dataflow.

+
+
Parameters
+
    +
  • dataflow_name (str) – The dataflow name.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
+
+ +
+
+get_pipeline(pipeline_name, resource_group_name=None, factory_name=None, **config)[source]
+

Get the pipeline.

+
+
Parameters
+
    +
  • pipeline_name (str) – The pipeline name.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Returns
+

The pipeline.

+
+
Return type
+

azure.mgmt.datafactory.models.PipelineResource

+
+
+
+ +
+
+update_pipeline(pipeline_name, pipeline, resource_group_name=None, factory_name=None, **config)[source]
+

Update the pipeline.

+
+
Parameters
+
    +
  • pipeline_name (str) – The pipeline name.

  • +
  • pipeline (azure.mgmt.datafactory.models.PipelineResource) – The pipeline resource definition.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Raises
+

AirflowException – If the pipeline does not exist.

+
+
Returns
+

The pipeline.

+
+
Return type
+

azure.mgmt.datafactory.models.PipelineResource

+
+
+
+ +
+
+create_pipeline(pipeline_name, pipeline, resource_group_name=None, factory_name=None, **config)[source]
+

Create the pipeline.

+
+
Parameters
+
    +
  • pipeline_name (str) – The pipeline name.

  • +
  • pipeline (azure.mgmt.datafactory.models.PipelineResource) – The pipeline resource definition.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Raises
+

AirflowException – If the pipeline already exists.

+
+
Returns
+

The pipeline.

+
+
Return type
+

azure.mgmt.datafactory.models.PipelineResource

+
+
+
+ +
+
+delete_pipeline(pipeline_name, resource_group_name=None, factory_name=None, **config)[source]
+

Delete the pipeline.

+
+
Parameters
+
    +
  • pipeline_name (str) – The pipeline name.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
+
+ +
+
+run_pipeline(pipeline_name, resource_group_name=None, factory_name=None, **config)[source]
+

Run a pipeline.

+
+
Parameters
+
    +
  • pipeline_name (str) – The pipeline name.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Returns
+

The pipeline run.

+
+
Return type
+

azure.mgmt.datafactory.models.CreateRunResponse

+
+
+
+ +
+
+get_pipeline_run(run_id, resource_group_name=None, factory_name=None, **config)[source]
+

Get the pipeline run.

+
+
Parameters
+
    +
  • run_id (str) – The pipeline run identifier.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Returns
+

The pipeline run.

+
+
Return type
+

azure.mgmt.datafactory.models.PipelineRun

+
+
+
+ +
+
+get_pipeline_run_status(run_id, resource_group_name=None, factory_name=None)[source]
+

Get a pipeline run’s current status.

+
+
Parameters
+
    +
  • run_id (str) – The pipeline run identifier.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
+
+
Returns
+

The status of the pipeline run.

+
+
Return type
+

str

+
+
+
+ +
+
+wait_for_pipeline_run_status(run_id, expected_statuses, resource_group_name=None, factory_name=None, check_interval=60, timeout=60 * 60 * 24 * 7)[source]
+

Waits for a pipeline run to match an expected status.

+
+
Parameters
+
    +
  • run_id (str) – The pipeline run identifier.

  • +
  • expected_statuses (str | set[str]) – The desired status(es) to check against a pipeline run’s current status.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • check_interval (int) – Time in seconds to check on a pipeline run’s status.

  • +
  • timeout (int) – Time in seconds to wait for a pipeline to reach a terminal status or the expected +status.

  • +
+
+
Returns
+

Boolean indicating if the pipeline run has reached the expected_status.

+
+
Return type
+

bool

+
+
+
+ +
+
+cancel_pipeline_run(run_id, resource_group_name=None, factory_name=None, **config)[source]
+

Cancel the pipeline run.

+
+
Parameters
+
    +
  • run_id (str) – The pipeline run identifier.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
+
+ +
+
+get_trigger(trigger_name, resource_group_name=None, factory_name=None, **config)[source]
+

Get the trigger.

+
+
Parameters
+
    +
  • trigger_name (str) – The trigger name.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Returns
+

The trigger.

+
+
Return type
+

azure.mgmt.datafactory.models.TriggerResource

+
+
+
+ +
+
+update_trigger(trigger_name, trigger, resource_group_name=None, factory_name=None, **config)[source]
+

Update the trigger.

+
+
Parameters
+
    +
  • trigger_name (str) – The trigger name.

  • +
  • trigger (azure.mgmt.datafactory.models.TriggerResource) – The trigger resource definition.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Raises
+

AirflowException – If the trigger does not exist.

+
+
Returns
+

The trigger.

+
+
Return type
+

azure.mgmt.datafactory.models.TriggerResource

+
+
+
+ +
+
+create_trigger(trigger_name, trigger, resource_group_name=None, factory_name=None, **config)[source]
+

Create the trigger.

+
+
Parameters
+
    +
  • trigger_name (str) – The trigger name.

  • +
  • trigger (azure.mgmt.datafactory.models.TriggerResource) – The trigger resource definition.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Raises
+

AirflowException – If the trigger already exists.

+
+
Returns
+

The trigger.

+
+
Return type
+

azure.mgmt.datafactory.models.TriggerResource

+
+
+
+ +
+
+delete_trigger(trigger_name, resource_group_name=None, factory_name=None, **config)[source]
+

Delete the trigger.

+
+
Parameters
+
    +
  • trigger_name (str) – The trigger name.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
+
+ +
+
+start_trigger(trigger_name, resource_group_name=None, factory_name=None, **config)[source]
+

Start the trigger.

+
+
Parameters
+
    +
  • trigger_name (str) – The trigger name.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Returns
+

An Azure operation poller.

+
+
Return type
+

azure.core.polling.LROPoller

+
+
+
+ +
+
+stop_trigger(trigger_name, resource_group_name=None, factory_name=None, **config)[source]
+

Stop the trigger.

+
+
Parameters
+
    +
  • trigger_name (str) – The trigger name.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
Returns
+

An Azure operation poller.

+
+
Return type
+

azure.core.polling.LROPoller

+
+
+
+ +
+
+rerun_trigger(trigger_name, run_id, resource_group_name=None, factory_name=None, **config)[source]
+

Rerun the trigger.

+
+
Parameters
+
    +
  • trigger_name (str) – The trigger name.

  • +
  • run_id (str) – The trigger run identifier.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
+
+ +
+
+cancel_trigger(trigger_name, run_id, resource_group_name=None, factory_name=None, **config)[source]
+

Cancel the trigger.

+
+
Parameters
+
    +
  • trigger_name (str) – The trigger name.

  • +
  • run_id (str) – The trigger run identifier.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The factory name.

  • +
  • config (Any) – Extra parameters for the ADF client.

  • +
+
+
+
+ +
+
+test_connection()[source]
+

Test a configured Azure Data Factory connection.

+
+
+
+ +
+ +
+
+
+ + + +
+ +
+
+
+
+
+

\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/data_lake/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/data_lake/index.html
new file mode 100644
index 00000000000..b4081bc8dea
@@ -0,0 +1,1067 @@

airflow.providers.microsoft.azure.hooks.data_lake

+

This module contains integration with Azure Data Lake.

+

AzureDataLakeHook communicates via a REST API compatible with WebHDFS. Make sure that an Airflow connection of type azure_data_lake exists. Authorization can be done by supplying a login (the Client ID), a password (the Client Secret) and the extra fields tenant (Tenant) and account_name (Account Name) (see the connection azure_data_lake_default for an example).

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

AzureDataLakeHook

Interacts with Azure Data Lake.

+
+
+class airflow.providers.microsoft.azure.hooks.data_lake.AzureDataLakeHook(azure_data_lake_conn_id=default_conn_name)[source]
+

Bases: airflow.hooks.base.BaseHook

+

Interacts with Azure Data Lake.

+

Client ID and client secret should be in the user and password parameters. Tenant and account name should be in the extra fields as {“tenant”: “<TENANT>”, “account_name”: “<ACCOUNT_NAME>”}. A short usage sketch follows the parameter list below.

+
+
Parameters
+

azure_data_lake_conn_id (str) – Reference to the Azure Data Lake connection.

+
+
+
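A minimal usage sketch, assuming an azure_data_lake_default connection configured as described above; the local and remote paths are illustrative.

from airflow.providers.microsoft.azure.hooks.data_lake import AzureDataLakeHook

hook = AzureDataLakeHook(azure_data_lake_conn_id="azure_data_lake_default")

# "raw/example.csv" is an illustrative ADLS path.
if hook.check_for_file("raw/example.csv"):
    hook.download_file(local_path="/tmp/example.csv", remote_path="raw/example.csv")
else:
    hook.upload_file(local_path="/tmp/example.csv", remote_path="raw/example.csv")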
+
+conn_name_attr = azure_data_lake_conn_id[source]
+
+ +
+
+default_conn_name = azure_data_lake_default[source]
+
+ +
+
+conn_type = azure_data_lake[source]
+
+ +
+
+hook_name = Azure Data Lake[source]
+
+ +
+
+static get_connection_form_widgets()[source]
+

Returns connection widgets to add to connection form

+
+
+
+ +
+
+static get_ui_field_behaviour()[source]
+

Returns custom field behaviour

+
+
+
+ +
+
+get_conn()[source]
+

Return an AzureDLFileSystem object.

+
+
+
+ +
+
+check_for_file(file_path)[source]
+

Check if a file exists on Azure Data Lake.

+
+
Parameters
+

file_path (str) – Path and name of the file.

+
+
Returns
+

True if the file exists, False otherwise.

+
+
Return type
+

bool

+
+
+
+ +
+
+upload_file(local_path, remote_path, nthreads=64, overwrite=True, buffersize=4194304, blocksize=4194304, **kwargs)[source]
+

Upload a file to Azure Data Lake.

+
+
Parameters
+
    +
  • local_path (str) – local path. Can be single file, directory (in which case, +upload recursively) or glob pattern. Recursive glob patterns using ** +are not supported.

  • +
  • remote_path (str) – Remote path to upload to; if multiple files, this is the +directory root to write within.

  • +
  • nthreads (int) – Number of threads to use. If None, uses the number of cores.

  • +
  • overwrite (bool) – Whether to forcibly overwrite existing files/directories. +If False and remote path is a directory, will quit regardless if any files +would be overwritten or not. If True, only matching filenames are actually +overwritten.

  • +
  • buffersize (int) – int [2**22] +Number of bytes for internal buffer. This block cannot be bigger than +a chunk and cannot be smaller than a block.

  • +
  • blocksize (int) – int [2**22] +Number of bytes for a block. Within each chunk, we write a smaller +block for each API call. This block cannot be bigger than a chunk.

  • +
+
+
+
+ +
+
+download_file(local_path, remote_path, nthreads=64, overwrite=True, buffersize=4194304, blocksize=4194304, **kwargs)[source]
+

Download a file from Azure Data Lake.

+
+
Parameters
+
    +
  • local_path (str) – local path. If downloading a single file, will write to this +specific file, unless it is an existing directory, in which case a file is +created within it. If downloading multiple files, this is the root +directory to write within. Will create directories as required.

  • +
  • remote_path (str) – remote path/globstring to use to find remote files. +Recursive glob patterns using ** are not supported.

  • +
  • nthreads (int) – Number of threads to use. If None, uses the number of cores.

  • +
  • overwrite (bool) – Whether to forcibly overwrite existing files/directories. +If False and remote path is a directory, will quit regardless if any files +would be overwritten or not. If True, only matching filenames are actually +overwritten.

  • +
  • buffersize (int) – int [2**22] +Number of bytes for internal buffer. This block cannot be bigger than +a chunk and cannot be smaller than a block.

  • +
  • blocksize (int) – int [2**22] +Number of bytes for a block. Within each chunk, we write a smaller +block for each API call. This block cannot be bigger than a chunk.

  • +
+
+
+
+ +
+
+list(path)[source]
+

List files in Azure Data Lake Storage

+
+
Parameters
+

path (str) – full path/globstring to use to list files in ADLS

+
+
+
+ +
+
+remove(path, recursive=False, ignore_not_found=True)[source]
+

Remove files in Azure Data Lake Storage

+
+
Parameters
+
    +
  • path (str) – A directory or file to remove in ADLS

  • +
  • recursive (bool) – Whether to loop into directories in the location and remove the files

  • +
  • ignore_not_found (bool) – Whether to raise error if file to delete is not found

  • +
+
+
+
+ +
+ +
+
+
+ + + +
+ +
+
+
+
+
+

\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/fileshare/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/fileshare/index.html
new file mode 100644
index 00000000000..38b6d14299c
@@ -0,0 +1,1227 @@

airflow.providers.microsoft.azure.hooks.fileshare

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

AzureFileShareHook

Interacts with Azure FileShare Storage.

+
+
+class airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook(azure_fileshare_conn_id='azure_fileshare_default')[source]
+

Bases: airflow.hooks.base.BaseHook

+

Interacts with Azure FileShare Storage.

+
+
Parameters
+

azure_fileshare_conn_id (str) – Reference to the Azure FileShare connection id of the Azure account whose file shares should be used.

+
+
+
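A minimal usage sketch, assuming an azure_fileshare_default connection; the share, directory and file names are placeholders.

from airflow.providers.microsoft.azure.hooks.fileshare import AzureFileShareHook

hook = AzureFileShareHook(azure_fileshare_conn_id="azure_fileshare_default")

# Create the (placeholder) share and directory, then upload a local file into it.
hook.create_share("myshare")
hook.create_directory(share_name="myshare", directory_name="reports")
hook.load_file(
    file_path="/tmp/report.csv",
    share_name="myshare",
    directory_name="reports",
    file_name="report.csv",
)
print(hook.list_files(share_name="myshare", directory_name="reports"))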
+
+conn_name_attr = azure_fileshare_conn_id[source]
+
+ +
+
+default_conn_name = azure_fileshare_default[source]
+
+ +
+
+conn_type = azure_fileshare[source]
+
+ +
+
+hook_name = Azure FileShare[source]
+
+ +
+
+static get_connection_form_widgets()[source]
+

Returns connection widgets to add to connection form

+
+
+
+ +
+
+static get_ui_field_behaviour()[source]
+

Returns custom field behaviour

+
+
+
+ +
+
+get_conn()[source]
+

Return the FileService object.

+
+
+
+ +
+
+check_for_directory(share_name, directory_name, **kwargs)[source]
+

Check if a directory exists on Azure File Share.

+
+
Parameters
+
    +
  • share_name (str) – Name of the share.

  • +
  • directory_name (str) – Name of the directory.

  • +
  • kwargs – Optional keyword arguments that +FileService.exists() takes.

  • +
+
+
Returns
+

True if the file exists, False otherwise.

+
+
Return type
+

bool

+
+
+
+ +
+
+check_for_file(share_name, directory_name, file_name, **kwargs)[source]
+

Check if a file exists on Azure File Share.

+
+
Parameters
+
    +
  • share_name (str) – Name of the share.

  • +
  • directory_name (str) – Name of the directory.

  • +
  • file_name (str) – Name of the file.

  • +
  • kwargs – Optional keyword arguments that +FileService.exists() takes.

  • +
+
+
Returns
+

True if the file exists, False otherwise.

+
+
Return type
+

bool

+
+
+
+ +
+
+list_directories_and_files(share_name, directory_name=None, **kwargs)[source]
+

Return the list of directories and files stored on an Azure File Share.

+
+
Parameters
+
    +
  • share_name (str) – Name of the share.

  • +
  • directory_name (str | None) – Name of the directory.

  • +
  • kwargs – Optional keyword arguments that +FileService.list_directories_and_files() takes.

  • +
+
+
Returns
+

A list of files and directories

+
+
Return type
+

list

+
+
+
+ +
+
+list_files(share_name, directory_name=None, **kwargs)[source]
+

Return the list of files stored on an Azure File Share.

+
+
Parameters
+
    +
  • share_name (str) – Name of the share.

  • +
  • directory_name (str | None) – Name of the directory.

  • +
  • kwargs – Optional keyword arguments that +FileService.list_directories_and_files() takes.

  • +
+
+
Returns
+

A list of files

+
+
Return type
+

list[str]

+
+
+
+ +
+
+create_share(share_name, **kwargs)[source]
+

Create new Azure File Share.

+
+
Parameters
+
    +
  • share_name (str) – Name of the share.

  • +
  • kwargs – Optional keyword arguments that +FileService.create_share() takes.

  • +
+
+
Returns
+

True if share is created, False if share already exists.

+
+
Return type
+

bool

+
+
+
+ +
+
+delete_share(share_name, **kwargs)[source]
+

Delete existing Azure File Share.

+
+
Parameters
+
    +
  • share_name (str) – Name of the share.

  • +
  • kwargs – Optional keyword arguments that +FileService.delete_share() takes.

  • +
+
+
Returns
+

True if share is deleted, False if share does not exist.

+
+
Return type
+

bool

+
+
+
+ +
+
+create_directory(share_name, directory_name, **kwargs)[source]
+

Create a new directory on an Azure File Share.

+
+
Parameters
+
    +
  • share_name (str) – Name of the share.

  • +
  • directory_name (str) – Name of the directory.

  • +
  • kwargs – Optional keyword arguments that +FileService.create_directory() takes.

  • +
+
+
Returns
+

A list of files and directories

+
+
Return type
+

list

+
+
+
+ +
+
+get_file(file_path, share_name, directory_name, file_name, **kwargs)[source]
+

Download a file from Azure File Share.

+
+
Parameters
+
    +
  • file_path (str) – Where to store the file.

  • +
  • share_name (str) – Name of the share.

  • +
  • directory_name (str) – Name of the directory.

  • +
  • file_name (str) – Name of the file.

  • +
  • kwargs – Optional keyword arguments that +FileService.get_file_to_path() takes.

  • +
+
+
+
+ +
+
+get_file_to_stream(stream, share_name, directory_name, file_name, **kwargs)[source]
+

Download a file from Azure File Share.

+
+
Parameters
+
    +
  • stream (IO) – A filehandle to store the file to.

  • +
  • share_name (str) – Name of the share.

  • +
  • directory_name (str) – Name of the directory.

  • +
  • file_name (str) – Name of the file.

  • +
  • kwargs – Optional keyword arguments that +FileService.get_file_to_stream() takes.

  • +
+
+
+
+ +
+
+load_file(file_path, share_name, directory_name, file_name, **kwargs)[source]
+

Upload a file to Azure File Share.

+
+
Parameters
+
    +
  • file_path (str) – Path to the file to load.

  • +
  • share_name (str) – Name of the share.

  • +
  • directory_name (str) – Name of the directory.

  • +
  • file_name (str) – Name of the file.

  • +
  • kwargs – Optional keyword arguments that +FileService.create_file_from_path() takes.

  • +
+
+
+
+ +
+
+load_string(string_data, share_name, directory_name, file_name, **kwargs)[source]
+

Upload a string to Azure File Share.

+
+
Parameters
+
    +
  • string_data (str) – String to load.

  • +
  • share_name (str) – Name of the share.

  • +
  • directory_name (str) – Name of the directory.

  • +
  • file_name (str) – Name of the file.

  • +
  • kwargs – Optional keyword arguments that +FileService.create_file_from_text() takes.

  • +
+
+
+
+ +
+
+load_stream(stream, share_name, directory_name, file_name, count, **kwargs)[source]
+

Upload a stream to Azure File Share.

+
+
Parameters
+
    +
  • stream (str) – Opened file/stream to upload as the file content.

  • +
  • share_name (str) – Name of the share.

  • +
  • directory_name (str) – Name of the directory.

  • +
  • file_name (str) – Name of the file.

  • +
  • count (str) – Size of the stream in bytes

  • +
  • kwargs – Optional keyword arguments that +FileService.create_file_from_stream() takes.

  • +
+
+
+
+ +
+
+test_connection()[source]
+

Test Azure FileShare connection.

+
+ +
+ +
+
+
+ + + +
+ +
+
+
+
+
+

\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/index.html
new file mode 100644
index 00000000000..bb8a11b4235
@@ -0,0 +1,869 @@

airflow.providers.microsoft.azure.hooks
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/synapse/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/synapse/index.html
new file mode 100644
index 00000000000..001b7ed90f2
@@ -0,0 +1,1111 @@

airflow.providers.microsoft.azure.hooks.synapse

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + + + + +

AzureSynapseSparkBatchRunStatus

Azure Synapse Spark Job operation statuses.

AzureSynapseHook

A hook to interact with Azure Synapse.

+
+
+

Attributes

+ ++++ + + + + + +

Credentials

+
+
+airflow.providers.microsoft.azure.hooks.synapse.Credentials[source]
+
+ +
+
+class airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseSparkBatchRunStatus[source]
+

Azure Synapse Spark Job operation statuses.

+
+
+NOT_STARTED = not_started[source]
+
+ +
+
+STARTING = starting[source]
+
+ +
+
+RUNNING = running[source]
+
+ +
+
+IDLE = idle[source]
+
+ +
+
+BUSY = busy[source]
+
+ +
+
+SHUTTING_DOWN = shutting_down[source]
+
+ +
+
+ERROR = error[source]
+
+ +
+
+DEAD = dead[source]
+
+ +
+
+KILLED = killed[source]
+
+ +
+
+SUCCESS = success[source]
+
+ +
+
+TERMINAL_STATUSES[source]
+
+ +
+ +
+
+class airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseHook(azure_synapse_conn_id=default_conn_name, spark_pool='')[source]
+

Bases: airflow.hooks.base.BaseHook

+

A hook to interact with Azure Synapse.

Parameters

  • azure_synapse_conn_id – The Azure Synapse connection id.

  • spark_pool – The Apache Spark pool used to submit the job.

+
+
+
+
+conn_type :str = azure_synapse[source]
+
+ +
+
+conn_name_attr :str = azure_synapse_conn_id[source]
+
+ +
+
+default_conn_name :str = azure_synapse_default[source]
+
+ +
+
+hook_name :str = Azure Synapse[source]
+
+ +
+
+static get_connection_form_widgets()[source]
+

Returns connection widgets to add to connection form

+
+
+
+ +
+
+static get_ui_field_behaviour()[source]
+

Returns custom field behaviour

+
+
+
+ +
+
+get_conn()[source]
+

Returns connection for the hook.

+
+
+
+ +
+
+run_spark_job(payload)[source]
+

Run a job in an Apache Spark pool.

Parameters

payload – Livy compatible payload which represents the spark job that a user wants to submit.

+
+
+
+ +
+
+get_job_run_status()[source]
+

Get the job run status.

+
+ +
+
+wait_for_job_run_status(job_id, expected_statuses, check_interval=60, timeout=60 * 60 * 24 * 7)[source]
+

Waits for a job run to match an expected status.

+
+
Parameters
+
    +
  • job_id (int | None) – The job run identifier.

  • +
  • expected_statuses (str | set[str]) – The desired status(es) to check against a job run’s current status.

  • +
  • check_interval (int) – Time in seconds to check on a job run’s status.

  • +
  • timeout (int) – Time in seconds to wait for a job to reach a terminal status or the expected +status.

  • +
+
+
+
+ +
+
+cancel_job_run(job_id)[source]
+

Cancel the spark job run.

Parameters

job_id – The Synapse spark job identifier.

+
+
+
+ +
+ +
+
+
+ + + +
+ +
+
+
+
+
+

\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/wasb/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/hooks/wasb/index.html
new file mode 100644
index 00000000000..de2e88bd19e
@@ -0,0 +1,1218 @@

airflow.providers.microsoft.azure.hooks.wasb

+

This module contains integration with Azure Blob Storage.

+

It communicates via the Windows Azure Storage Blob protocol. Make sure that an Airflow connection of type wasb exists. Authorization can be done by supplying a login (the storage account name) and password (the key), or a login and SAS token in the extra field (see the connection wasb_default for an example).

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

WasbHook

Interacts with Azure Blob Storage through the wasb:// protocol.

+
+
+class airflow.providers.microsoft.azure.hooks.wasb.WasbHook(wasb_conn_id=default_conn_name, public_read=False)[source]
+

Bases: airflow.hooks.base.BaseHook

+

Interacts with Azure Blob Storage through the wasb:// protocol.

+

These parameters have to be stored in the Airflow connection (in the Airflow metadata database): account_name and account_key.

+

Additional options passed in the ‘extra’ field of the connection will be passed to the BlockBlobService() constructor. For example, authenticate using a SAS token by adding {“sas_token”: “YOUR_TOKEN”}.

+

If no authentication configuration is provided, DefaultAzureCredential will be used (applicable +when using Azure compute infrastructure).

+
+
Parameters
+
    +
  • wasb_conn_id (str) – Reference to the wasb connection.

  • +
  • public_read (bool) – Whether an anonymous public read access should be used. default is False

  • +
+
+
+
+
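A minimal usage sketch, assuming a wasb_default connection; the container and blob names are placeholders.

from airflow.providers.microsoft.azure.hooks.wasb import WasbHook

hook = WasbHook(wasb_conn_id="wasb_default")

# Upload a string, creating the placeholder container on the fly if needed.
hook.load_string(
    "hello from airflow",
    container_name="example-container",
    blob_name="greetings/hello.txt",
    create_container=True,
)

# Read the blob back and check that it exists.
print(hook.read_file("example-container", "greetings/hello.txt"))
print(hook.check_for_blob("example-container", "greetings/hello.txt"))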
+conn_name_attr = wasb_conn_id[source]
+
+ +
+
+default_conn_name = wasb_default[source]
+
+ +
+
+conn_type = wasb[source]
+
+ +
+
+hook_name = Azure Blob Storage[source]
+
+ +
+
+static get_connection_form_widgets()[source]
+

Returns connection widgets to add to connection form

+
+
+
+ +
+
+static get_ui_field_behaviour()[source]
+

Returns custom field behaviour

+
+
+
+ +
+
+get_conn()[source]
+

Return the BlobServiceClient object.

+
+
+
+ +
+
+check_for_blob(container_name, blob_name, **kwargs)[source]
+

Check if a blob exists on Azure Blob Storage.

+
+
Parameters
+
    +
  • container_name (str) – Name of the container.

  • +
  • blob_name (str) – Name of the blob.

  • +
  • kwargs – Optional keyword arguments for BlobClient.get_blob_properties takes.

  • +
+
+
Returns
+

True if the blob exists, False otherwise.

+
+
Return type
+

bool

+
+
+
+ +
+
+check_for_prefix(container_name, prefix, **kwargs)[source]
+

Check if a prefix exists on Azure Blob storage.

+
+
Parameters
+
    +
  • container_name (str) – Name of the container.

  • +
  • prefix (str) – Prefix of the blob.

  • +
  • kwargs – Optional keyword arguments that ContainerClient.walk_blobs takes

  • +
+
+
Returns
+

True if blobs matching the prefix exist, False otherwise.

+
+
Return type
+

bool

+
+
+
+ +
+
+get_blobs_list(container_name, prefix=None, include=None, delimiter='/', **kwargs)[source]
+

List blobs in a given container

+
+
Parameters
+
    +
  • container_name (str) – The name of the container

  • +
  • prefix (str | None) – Filters the results to return only blobs whose names +begin with the specified prefix.

  • +
  • include (list[str] | None) – Specifies one or more additional datasets to include in the response. Options include: snapshots, metadata, uncommittedblobs, copy, deleted.

  • +
  • delimiter (str) – Filters objects based on the delimiter (e.g. ‘.csv’)

  • +
+
+
+
+ +
+
+load_file(file_path, container_name, blob_name, create_container=False, **kwargs)[source]
+

Upload a file to Azure Blob Storage.

+
+
Parameters
+
    +
  • file_path (str) – Path to the file to load.

  • +
  • container_name (str) – Name of the container.

  • +
  • blob_name (str) – Name of the blob.

  • +
  • create_container (bool) – Attempt to create the target container prior to uploading the blob. This is +useful if the target container may not exist yet. Defaults to False.

  • +
  • kwargs – Optional keyword arguments that BlobClient.upload_blob() takes.

  • +
+
+
+
+ +
+
+load_string(string_data, container_name, blob_name, create_container=False, **kwargs)[source]
+

Upload a string to Azure Blob Storage.

+
+
Parameters
+
    +
  • string_data (str) – String to load.

  • +
  • container_name (str) – Name of the container.

  • +
  • blob_name (str) – Name of the blob.

  • +
  • create_container (bool) – Attempt to create the target container prior to uploading the blob. This is +useful if the target container may not exist yet. Defaults to False.

  • +
  • kwargs – Optional keyword arguments that BlobClient.upload() takes.

  • +
+
+
+
+ +
+
+get_file(file_path, container_name, blob_name, **kwargs)[source]
+

Download a file from Azure Blob Storage.

+
+
Parameters
+
    +
  • file_path (str) – Path to the file to download.

  • +
  • container_name (str) – Name of the container.

  • +
  • blob_name (str) – Name of the blob.

  • +
  • kwargs – Optional keyword arguments that BlobClient.download_blob() takes.

  • +
+
+
+
+ +
+
+read_file(container_name, blob_name, **kwargs)[source]
+

Read a file from Azure Blob Storage and return as a string.

+
+
Parameters
+
    +
  • container_name (str) – Name of the container.

  • +
  • blob_name (str) – Name of the blob.

  • +
  • kwargs – Optional keyword arguments that BlobClient.download_blob takes.

  • +
+
+
+
+ +
+
+upload(container_name, blob_name, data, blob_type='BlockBlob', length=None, create_container=False, **kwargs)[source]
+

Creates a new blob from a data source with automatic chunking.

+
+
Parameters
+
    +
  • container_name (str) – The name of the container to upload data

  • +
  • blob_name (str) – The name of the blob to upload. This need not exist in the container

  • +
  • data (Any) – The blob data to upload

  • +
  • blob_type (str) – The type of the blob. This can be either BlockBlob, +PageBlob or AppendBlob. The default value is BlockBlob.

  • +
  • length (int | None) – Number of bytes to read from the stream. This is optional, +but should be supplied for optimal performance.

  • +
  • create_container (bool) – Attempt to create the target container prior to uploading the blob. This is +useful if the target container may not exist yet. Defaults to False.

  • +
+
+
+
+ +
+
+download(container_name, blob_name, offset=None, length=None, **kwargs)[source]
+

Downloads a blob to the StorageStreamDownloader

+
+
Parameters
+
    +
  • container_name – The name of the container containing the blob

  • +
  • blob_name – The name of the blob to download

  • +
  • offset (int | None) – Start of byte range to use for downloading a section of the blob. +Must be set if length is provided.

  • +
  • length (int | None) – Number of bytes to read from the stream.

  • +
+
+
+
+ +
+
+create_container(container_name)[source]
+

Create container object if not already existing

+
+
Parameters
+

container_name (str) – The name of the container to create

+
+
+
+ +
+
+delete_container(container_name)[source]
+

Delete a container object

+
+
Parameters
+

container_name (str) – The name of the container

+
+
+
+ +
+
+delete_blobs(container_name, *blobs, **kwargs)[source]
+

Marks the specified blobs or snapshots for deletion.

+
+
Parameters
+
    +
  • container_name (str) – The name of the container containing the blobs

  • +
  • blobs – The blobs to delete. This can be a single blob, or multiple values +can be supplied, where each value is either the name of the blob (str) or BlobProperties.

  • +
+
+
+
+ +
+
+delete_file(container_name, blob_name, is_prefix=False, ignore_if_missing=False, delimiter='', **kwargs)[source]
+

Delete a file from Azure Blob Storage.

+
+
Parameters
+
    +
  • container_name (str) – Name of the container.

  • +
  • blob_name (str) – Name of the blob.

  • +
  • is_prefix (bool) – If blob_name is a prefix, delete all matching files

  • +
  • ignore_if_missing (bool) – if True, then return success even if the +blob does not exist.

  • +
  • kwargs – Optional keyword arguments that ContainerClient.delete_blobs() takes.

  • +
+
+
+
+ +
+
+test_connection()[source]
+

Test Azure Blob Storage connection.

+
+ +
+ +
+
+
+ + + +
+ +
+
+
+
+
+

\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/index.html
new file mode 100644
index 00000000000..6dcebd83314
@@ -0,0 +1,913 @@

airflow.providers.microsoft.azure

+
+

Subpackages

+
+ +
+
+ +
+ + + +
+ +
+
+
+
+
+

\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/log/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/log/index.html
new file mode 100644
index 00000000000..ef6696effbd
@@ -0,0 +1,857 @@

airflow.providers.microsoft.azure.log
+

\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/log/wasb_task_handler/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/log/wasb_task_handler/index.html
new file mode 100644
index 00000000000..0d079a7b21b
@@ -0,0 +1,977 @@

airflow.providers.microsoft.azure.log.wasb_task_handler

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

WasbTaskHandler

WasbTaskHandler is a python log handler that handles and reads

+
+
+class airflow.providers.microsoft.azure.log.wasb_task_handler.WasbTaskHandler(base_log_folder, wasb_log_folder, wasb_container, delete_local_copy, *, filename_template=None)[source]
+

Bases: airflow.utils.log.file_task_handler.FileTaskHandler, airflow.utils.log.logging_mixin.LoggingMixin

+

WasbTaskHandler is a python log handler that handles and reads +task instance logs. It extends airflow FileTaskHandler and +uploads to and reads from Wasb remote storage.

+
+
+
+
+hook()[source]
+

Returns WasbHook.

+
+ +
+
+set_context(ti)[source]
+

Provide task_instance context to airflow task handler.

+
+
Parameters
+

ti – task instance object

+
+
+
+ +
+
+close()[source]
+

Close and upload local log file to remote storage Wasb.

+
+
+
+ +
+
+wasb_log_exists(remote_log_location)[source]
+

Check if remote_log_location exists in remote storage

+
+
Parameters
+

remote_log_location (str) – log’s location in remote storage

+
+
Returns
+

True if location exists else False

+
+
Return type
+

bool

+
+
+
+ +
+
+wasb_read(remote_log_location, return_error=False)[source]
+

Returns the log found at the remote_log_location. Returns ‘’ if no +logs are found or there is an error.

+
+
Parameters
+
    +
  • remote_log_location (str) – the log’s location in remote storage

  • +
  • return_error (bool) – if True, returns a string error message if an +error occurs. Otherwise returns ‘’ when an error occurs.

  • +
+
+
+
+ +
+
+wasb_write(log, remote_log_location, append=True)[source]
+

Writes the log to the remote_log_location. Fails silently if no hook +was created.

+
+
Parameters
+
    +
  • log (str) – the log to write to the remote_log_location

  • +
  • remote_log_location (str) – the log’s location in remote storage

  • +
  • append (bool) – if False, any existing log file is overwritten. If True, +the new log is appended to any existing logs.

  • +
+
+
+
+ +
+ +
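Below is a minimal, illustrative sketch of instantiating the handler directly with the constructor arguments shown above; the folder paths and container name are assumptions, and in a real deployment Airflow normally builds this handler from the logging configuration when remote logging is enabled.

from airflow.providers.microsoft.azure.log.wasb_task_handler import WasbTaskHandler

# All literal values below are assumed for illustration only.
handler = WasbTaskHandler(
    base_log_folder="/opt/airflow/logs",   # local folder used by FileTaskHandler (assumed path)
    wasb_log_folder="wasb-logs",           # remote folder inside the container (assumed)
    wasb_container="airflow-logs",         # target Blob Storage container (assumed name)
    delete_local_copy=True,                # remove the local file after upload
)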
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/adls/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/adls/index.html
new file mode 100644
index 00000000000..513c74cece8
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/adls/index.html
@@ -0,0 +1,996 @@

airflow.providers.microsoft.azure.operators.adls

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + + + + +

ADLSDeleteOperator

Delete files in the specified path.

ADLSListOperator

List all files from the specified path

+
+
+class airflow.providers.microsoft.azure.operators.adls.ADLSDeleteOperator(*, path, recursive=False, ignore_not_found=True, azure_data_lake_conn_id='azure_data_lake_default', **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Delete files in the specified path.

+
+
+

See also

+

For more information on how to use this operator, take a look at the guide: +ADLSDeleteOperator

+
+
+
+
Parameters
+
    +
  • path (str) – A directory or file to remove

  • +
  • recursive (bool) – Whether to loop into directories in the location and remove the files

  • +
  • ignore_not_found (bool) – Whether to ignore (rather than raise an error) when the file to delete is not found

  • +
  • azure_data_lake_conn_id (str) – Reference to the Azure Data Lake connection.

  • +
+
+
+
+
+template_fields :Sequence[str] = ['path'][source]
+
+ +
+
+ui_color = #901dd2[source]
+
+ +
+
+execute(context)[source]
+

This is the main method to derive when creating an operator. +Context is the same dictionary used as when rendering jinja templates.

+

Refer to get_template_context for more context.

+
+
+
+ +
+ +
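A minimal usage sketch inside a DAG definition; the task id and path below are illustrative assumptions:

from airflow.providers.microsoft.azure.operators.adls import ADLSDeleteOperator

remove_run_output = ADLSDeleteOperator(
    task_id="remove_run_output",            # assumed task id
    path="folder/output",                   # assumed directory to remove
    recursive=True,                         # also remove files in sub-directories
    ignore_not_found=True,                  # do not fail if the path is already gone
    azure_data_lake_conn_id="azure_data_lake_default",
)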
+
+class airflow.providers.microsoft.azure.operators.adls.ADLSListOperator(*, path, azure_data_lake_conn_id='azure_data_lake_default', **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

List all files from the specified path

+
+
This operator returns a Python list with the names of files which can be used by XCom in the downstream tasks.

+
+
+
+
Parameters
+
    +
  • path (str) – The Azure Data Lake path to find the objects. Supports glob +strings (templated)

  • +
  • azure_data_lake_conn_id (str) – Reference to the Azure Data Lake connection.

  • +
+
+
+
+
Example:

The following Operator would list all the Parquet files from folder/output/ +folder in the specified ADLS account

+
adls_files = ADLSListOperator(
+    task_id='adls_files',
+    path='folder/output/*.parquet',
+    azure_data_lake_conn_id='azure_data_lake_default'
+)
+
+
+
+
+
+
+template_fields :Sequence[str] = ['path'][source]
+
+ +
+
+ui_color = #901dd2[source]
+
+ +
+
+execute(context)[source]
+

This is the main method to derive when creating an operator. +Context is the same dictionary used as when rendering jinja templates.

+

Refer to get_template_context for more context.

+
+
+
+ +
+ +
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/adx/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/adx/index.html
new file mode 100644
index 00000000000..b3665e23624
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/adx/index.html
@@ -0,0 +1,942 @@
+

airflow.providers.microsoft.azure.operators.adx

+

This module contains Azure Data Explorer operators

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

AzureDataExplorerQueryOperator

Operator for querying Azure Data Explorer (Kusto).

+
+
+class airflow.providers.microsoft.azure.operators.adx.AzureDataExplorerQueryOperator(*, query, database, options=None, azure_data_explorer_conn_id='azure_data_explorer_default', **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Operator for querying Azure Data Explorer (Kusto).

+
+
Parameters
+
+
+
+
+
+ui_color = #00a1f2[source]
+
+ +
+
+template_fields :Sequence[str] = ['query', 'database'][source]
+
+ +
+
+template_ext :Sequence[str] = ['.kql'][source]
+
+ +
+
+get_hook()[source]
+

Returns new instance of AzureDataExplorerHook

+
+
+
+ +
+
+execute(context)[source]
+

Run KQL Query on Azure Data Explorer (Kusto). +Returns PrimaryResult of Query v2 HTTP response contents +(https://docs.microsoft.com/en-us/azure/kusto/api/rest/response2)

+
+
+
+ +
+ +
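A minimal usage sketch; the task id, KQL query and database name are illustrative assumptions:

from airflow.providers.microsoft.azure.operators.adx import AzureDataExplorerQueryOperator

adx_query = AzureDataExplorerQueryOperator(
    task_id="adx_query",                    # assumed task id
    query="StormEvents | take 10",          # assumed KQL query
    database="mydatabase",                  # assumed database name
    azure_data_explorer_conn_id="azure_data_explorer_default",
)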
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/asb/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/asb/index.html
new file mode 100644
index 00000000000..69f7eb6ec86
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/asb/index.html
@@ -0,0 +1,1499 @@
+

airflow.providers.microsoft.azure.operators.asb

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

AzureServiceBusCreateQueueOperator

Creates an Azure Service Bus queue under a Service Bus Namespace by using ServiceBusAdministrationClient

AzureServiceBusSendMessageOperator

Send Message or batch message to the Service Bus queue

AzureServiceBusReceiveMessageOperator

Receive a batch of messages at once in a specified Queue name

AzureServiceBusDeleteQueueOperator

Deletes the Queue in the Azure Service Bus namespace

AzureServiceBusTopicCreateOperator

Create an Azure Service Bus Topic under a Service Bus Namespace by using ServiceBusAdministrationClient

AzureServiceBusSubscriptionCreateOperator

Create an Azure Service Bus Topic Subscription under a Service Bus Namespace

AzureServiceBusUpdateSubscriptionOperator

Update an Azure ServiceBus Topic Subscription under a ServiceBus Namespace

ASBReceiveSubscriptionMessageOperator

Receive a Batch messages from a Service Bus Subscription under specific Topic.

AzureServiceBusSubscriptionDeleteOperator

Deletes the topic subscription in the Azure ServiceBus namespace

AzureServiceBusTopicDeleteOperator

Deletes the topic in the Azure Service Bus namespace

+
+
+class airflow.providers.microsoft.azure.operators.asb.AzureServiceBusCreateQueueOperator(*, queue_name, max_delivery_count=10, dead_lettering_on_message_expiration=True, enable_batched_operations=True, azure_service_bus_conn_id='azure_service_bus_default', **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Creates an Azure Service Bus queue under a Service Bus Namespace by using ServiceBusAdministrationClient

+
+

See also

+

For more information on how to use this operator, take a look at the guide: +Create Azure Service Bus Queue

+
+
+
Parameters
+
    +
  • queue_name (str) – The name of the queue; should be unique.

  • +
  • max_delivery_count (int) – The maximum delivery count. A message is automatically dead lettered after this number of deliveries. Default value is 10.

  • +
  • dead_lettering_on_message_expiration (bool) – A value that indicates whether this queue has dead letter support when a message expires.

  • +
  • enable_batched_operations (bool) – Value that indicates whether server-side batched +operations are enabled.

  • +
  • azure_service_bus_conn_id (str) – Reference to the +Azure Service Bus connection.

  • +
+
+
+
+
+template_fields :Sequence[str] = ['queue_name'][source]
+
+ +
+
+ui_color = #e4f0e8[source]
+
+ +
+
+execute(context)[source]
+

Creates Queue in Azure Service Bus namespace, by connecting to Service Bus Admin client in hook

+
+
+
+ +
+ +
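A minimal usage sketch; the task id and queue name are illustrative assumptions:

from airflow.providers.microsoft.azure.operators.asb import AzureServiceBusCreateQueueOperator

create_queue = AzureServiceBusCreateQueueOperator(
    task_id="create_service_bus_queue",     # assumed task id
    queue_name="sb-queue",                  # assumed queue name, unique within the namespace
    max_delivery_count=10,
    dead_lettering_on_message_expiration=True,
    azure_service_bus_conn_id="azure_service_bus_default",
)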
+
+class airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSendMessageOperator(*, queue_name, message, batch=False, azure_service_bus_conn_id='azure_service_bus_default', **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Send Message or batch message to the Service Bus queue

+
+

See also

+

For more information on how to use this operator, take a look at the guide: +Send Message to Azure Service Bus Queue

+
+
+
Parameters
+
    +
  • queue_name (str) – The name of the queue; should be unique.

  • +
  • message (str | list[str]) – Message which needs to be sent to the queue. It can be string or list of string.

  • +
  • batch (bool) – Boolean flag, False by default; set it to True if the message needs to be sent as a batch message.

  • +
  • azure_service_bus_conn_id (str) – Reference to the Azure Service Bus connection.

  • +
+
+
+
+
+template_fields :Sequence[str] = ['queue_name'][source]
+
+ +
+
+ui_color = #e4f0e8[source]
+
+ +
+
+execute(context)[source]
+

Sends Message to the specific queue in Service Bus namespace, by +connecting to Service Bus client

+
+
+
+ +
+ +
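A minimal usage sketch; the task id, queue name and message are illustrative assumptions:

from airflow.providers.microsoft.azure.operators.asb import AzureServiceBusSendMessageOperator

send_message = AzureServiceBusSendMessageOperator(
    task_id="send_message_to_queue",        # assumed task id
    queue_name="sb-queue",                  # assumed queue name
    message="hello from airflow",           # a str, or a list[str] when batch=True
    batch=False,
    azure_service_bus_conn_id="azure_service_bus_default",
)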
+
+class airflow.providers.microsoft.azure.operators.asb.AzureServiceBusReceiveMessageOperator(*, queue_name, azure_service_bus_conn_id='azure_service_bus_default', max_message_count=10, max_wait_time=5, **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Receive a batch of messages at once in a specified Queue name

+
+

See also

+

For more information on how to use this operator, take a look at the guide: +Receive Message Azure Service Bus Queue

+
+
+
Parameters
+
    +
  • queue_name (str) – The name of the queue, or a QueueProperties instance with a name.

  • +
  • max_message_count (int) – Maximum number of messages in the batch.

  • +
  • max_wait_time (float) – Maximum time to wait in seconds for the first message to arrive.

  • +
  • azure_service_bus_conn_id (str) – Reference to the Azure Service Bus connection.

  • +
+
+
+
+
+template_fields :Sequence[str] = ['queue_name'][source]
+
+ +
+
+ui_color = #e4f0e8[source]
+
+ +
+
+execute(context)[source]
+

Receive Message in specific queue in Service Bus namespace, +by connecting to Service Bus client

+
+
+
+ +
+ +
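A minimal usage sketch; the task id and queue name are illustrative assumptions:

from airflow.providers.microsoft.azure.operators.asb import AzureServiceBusReceiveMessageOperator

receive_message = AzureServiceBusReceiveMessageOperator(
    task_id="receive_message_from_queue",   # assumed task id
    queue_name="sb-queue",                  # assumed queue name
    max_message_count=10,                   # up to ten messages per batch
    max_wait_time=5,                        # wait at most five seconds for the first message
    azure_service_bus_conn_id="azure_service_bus_default",
)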
+
+class airflow.providers.microsoft.azure.operators.asb.AzureServiceBusDeleteQueueOperator(*, queue_name, azure_service_bus_conn_id='azure_service_bus_default', **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Deletes the Queue in the Azure Service Bus namespace

+
+

See also

+

For more information on how to use this operator, take a look at the guide: +Delete Azure Service Bus Queue

+
+
+
Parameters
+
    +
  • queue_name (str) – The name of the queue in Service Bus namespace.

  • +
  • azure_service_bus_conn_id (str) – Reference to the Azure Service Bus connection.

  • +
+
+
+
+
+template_fields :Sequence[str] = ['queue_name'][source]
+
+ +
+
+ui_color = #e4f0e8[source]
+
+ +
+
+execute(context)[source]
+

Delete Queue in Service Bus namespace, by connecting to Service Bus Admin client

+
+
+
+ +
+ +
+
+class airflow.providers.microsoft.azure.operators.asb.AzureServiceBusTopicCreateOperator(*, topic_name, azure_service_bus_conn_id='azure_service_bus_default', default_message_time_to_live=None, max_size_in_megabytes=None, requires_duplicate_detection=None, duplicate_detection_history_time_window=None, enable_batched_operations=None, size_in_bytes=None, filtering_messages_before_publishing=None, authorization_rules=None, support_ordering=None, auto_delete_on_idle=None, enable_partitioning=None, enable_express=None, user_metadata=None, max_message_size_in_kilobytes=None, **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Create an Azure Service Bus Topic under a Service Bus Namespace by using ServiceBusAdministrationClient

+
+

See also

+

For more information on how to use this operator, take a look at the guide: +Create Azure Service Bus Topic

+
+
+
Parameters
+
    +
  • topic_name (str) – Name of the topic.

  • +
  • default_message_time_to_live (datetime.timedelta | str | None) – ISO 8601 default message time span to live value. This is +the duration after which the message expires, starting from when the message is sent to Service +Bus. This is the default value used when TimeToLive is not set on a message itself. +Input value of either type ~datetime.timedelta or string in ISO 8601 duration format +like “PT300S” is accepted.

  • +
  • max_size_in_megabytes (int | None) – The maximum size of the topic in megabytes, which is the size of +memory allocated for the topic.

  • +
  • requires_duplicate_detection (bool | None) – A value indicating if this topic requires duplicate +detection.

  • +
  • duplicate_detection_history_time_window (datetime.timedelta | str | None) – ISO 8601 time span structure that defines the +duration of the duplicate detection history. The default value is 10 minutes. +Input value of either type ~datetime.timedelta or string in ISO 8601 duration format +like “PT300S” is accepted.

  • +
  • enable_batched_operations (bool | None) – Value that indicates whether server-side batched operations +are enabled.

  • +
  • size_in_bytes (int | None) – The size of the topic, in bytes.

  • +
  • filtering_messages_before_publishing (bool | None) – Filter messages before publishing.

  • +
  • authorization_rules (list[AuthorizationRule] | None) – List of Authorization rules for resource.

  • +
  • support_ordering (bool | None) – A value that indicates whether the topic supports ordering.

  • +
  • auto_delete_on_idle (datetime.timedelta | str | None) – ISO 8601 time span idle interval after which the topic is +automatically deleted. The minimum duration is 5 minutes. +Input value of either type ~datetime.timedelta or string in ISO 8601 duration format +like “PT300S” is accepted.

  • +
  • enable_partitioning (bool | None) – A value that indicates whether the topic is to be partitioned +across multiple message brokers.

  • +
  • enable_express (bool | None) – A value that indicates whether Express Entities are enabled. An express +queue holds a message in memory temporarily before writing it to persistent storage.

  • +
  • user_metadata (str | None) – Metadata associated with the topic.

  • +
  • max_message_size_in_kilobytes (int | None) – The maximum size in kilobytes of message payload that +can be accepted by the queue. This feature is only available when using a Premium namespace +and Service Bus API version “2021-05” or higher. +The minimum allowed value is 1024 while the maximum allowed value is 102400. Default value is 1024.

  • +
+
+
+
+
+template_fields :Sequence[str] = ['topic_name'][source]
+
+ +
+
+ui_color = #e4f0e8[source]
+
+ +
+
+execute(context)[source]
+

Creates Topic in Service Bus namespace, by connecting to Service Bus Admin client

+
+
+
+ +
+ +
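A minimal usage sketch; the task id and topic name are illustrative assumptions, and the optional properties documented above are left at their service defaults:

from airflow.providers.microsoft.azure.operators.asb import AzureServiceBusTopicCreateOperator

create_topic = AzureServiceBusTopicCreateOperator(
    task_id="create_service_bus_topic",     # assumed task id
    topic_name="sb-topic",                  # assumed topic name
    azure_service_bus_conn_id="azure_service_bus_default",
)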
+
+class airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSubscriptionCreateOperator(*, topic_name, subscription_name, azure_service_bus_conn_id='azure_service_bus_default', lock_duration=None, requires_session=None, default_message_time_to_live=None, dead_lettering_on_message_expiration=True, dead_lettering_on_filter_evaluation_exceptions=None, max_delivery_count=10, enable_batched_operations=True, forward_to=None, user_metadata=None, forward_dead_lettered_messages_to=None, auto_delete_on_idle=None, **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Create an Azure Service Bus Topic Subscription under a Service Bus Namespace +by using ServiceBusAdministrationClient

+
+

See also

+

For more information on how to use this operator, take a look at the guide: +Create Azure Service Bus Subscription

+
+
+
Parameters
+
    +
  • topic_name (str) – The topic that will own the to-be-created subscription.

  • +
  • subscription_name (str) – Name of the subscription that needs to be created

  • +
  • lock_duration (datetime.timedelta | str | None) – ISO 8601 time span duration of a peek-lock; that is, the amount of time that +the message is locked for other receivers. The maximum value for LockDuration is 5 minutes; the +default value is 1 minute. Input value of either type ~datetime.timedelta or string in ISO 8601 +duration format like “PT300S” is accepted.

  • +
  • requires_session (bool | None) – A value that indicates whether the queue supports the concept of sessions.

  • +
  • default_message_time_to_live (datetime.timedelta | str | None) – ISO 8601 default message time span to live value. This is the +duration after which the message expires, starting from when the message is sent to +Service Bus. This is the default value used when TimeToLive is not set on a message itself. +Input value of either type ~datetime.timedelta or string in ISO 8601 duration +format like “PT300S” is accepted.

  • +
  • dead_lettering_on_message_expiration (bool | None) – A value that indicates whether this subscription has +dead letter support when a message expires.

  • +
  • dead_lettering_on_filter_evaluation_exceptions (bool | None) – A value that indicates whether this subscription has dead letter support on filter evaluation exceptions.

  • +
  • max_delivery_count (int | None) – The maximum delivery count. A message is automatically dead lettered +after this number of deliveries. Default value is 10.

  • +
  • enable_batched_operations (bool | None) – Value that indicates whether server-side batched +operations are enabled.

  • +
  • forward_to (str | None) – The name of the recipient entity to which all the messages sent to the subscription are forwarded.

  • +
  • user_metadata (str | None) – Metadata associated with the subscription. Maximum number of characters is 1024.

  • +
  • forward_dead_lettered_messages_to (str | None) – The name of the recipient entity to which all the dead-lettered messages of this subscription are forwarded.

  • +
  • auto_delete_on_idle (datetime.timedelta | str | None) – ISO 8601 time Span idle interval after which the subscription is +automatically deleted. The minimum duration is 5 minutes. Input value of either +type ~datetime.timedelta or string in ISO 8601 duration format like “PT300S” is accepted.

  • +
  • azure_service_bus_conn_id (str) – Reference to the +Azure Service Bus connection.

  • +
+
+
+
+
+template_fields :Sequence[str] = ['topic_name', 'subscription_name'][source]
+
+ +
+
+ui_color = #e4f0e8[source]
+
+ +
+
+execute(context)[source]
+

Creates Subscription in Service Bus namespace, by connecting to Service Bus Admin client

+
+
+
+ +
+ +
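A minimal usage sketch; the task id, topic and subscription names are illustrative assumptions:

from airflow.providers.microsoft.azure.operators.asb import AzureServiceBusSubscriptionCreateOperator

create_subscription = AzureServiceBusSubscriptionCreateOperator(
    task_id="create_service_bus_subscription",  # assumed task id
    topic_name="sb-topic",                      # assumed existing topic
    subscription_name="sb-subscription",        # assumed subscription name
    azure_service_bus_conn_id="azure_service_bus_default",
)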
+
+class airflow.providers.microsoft.azure.operators.asb.AzureServiceBusUpdateSubscriptionOperator(*, topic_name, subscription_name, max_delivery_count=None, dead_lettering_on_message_expiration=None, enable_batched_operations=None, azure_service_bus_conn_id='azure_service_bus_default', **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Update an Azure ServiceBus Topic Subscription under a ServiceBus Namespace +by using ServiceBusAdministrationClient

+
+

See also

+

For more information on how to use this operator, take a look at the guide: +Update Azure Service Bus Subscription

+
+
+
Parameters
+
    +
  • topic_name (str) – The topic that will own the to-be-created subscription.

  • +
  • subscription_name (str) – Name of the subscription that needs to be created.

  • +
  • max_delivery_count (int | None) – The maximum delivery count. A message is automatically dead lettered +after this number of deliveries. Default value is 10.

  • +
  • dead_lettering_on_message_expiration (bool | None) – A value that indicates whether this subscription +has dead letter support when a message expires.

  • +
  • enable_batched_operations (bool | None) – Value that indicates whether server-side batched +operations are enabled.

  • +
  • azure_service_bus_conn_id (str) – Reference to the +Azure Service Bus connection.

  • +
+
+
+
+
+template_fields :Sequence[str] = ['topic_name', 'subscription_name'][source]
+
+ +
+
+ui_color = #e4f0e8[source]
+
+ +
+
+execute(context)[source]
+

Updates Subscription properties, by connecting to Service Bus Admin client

+
+
+
+ +
+ +
+
+class airflow.providers.microsoft.azure.operators.asb.ASBReceiveSubscriptionMessageOperator(*, topic_name, subscription_name, max_message_count=1, max_wait_time=5, azure_service_bus_conn_id='azure_service_bus_default', **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Receive a Batch messages from a Service Bus Subscription under specific Topic.

+
+

See also

+

For more information on how to use this operator, take a look at the guide: +Receive Azure Service Bus Subscription Message

+
+
+
Parameters
+
    +
  • subscription_name (str) – The subscription name that will own the rule in topic

  • +
  • topic_name (str) – The topic that will own the subscription rule.

  • +
  • max_message_count (int | None) – Maximum number of messages in the batch. +Actual number returned will depend on prefetch_count and incoming stream rate. +Setting to None will fully depend on the prefetch config. The default value is 1.

  • +
  • max_wait_time (float | None) – Maximum time to wait in seconds for the first message to arrive. If no messages arrive, and no timeout is specified, this call will not return until the connection is closed. If specified, and no messages arrive within the timeout period, an empty list will be returned.

  • +
  • azure_service_bus_conn_id (str) – Reference to the +Azure Service Bus connection.

  • +
+
+
+
+
+template_fields :Sequence[str] = ['topic_name', 'subscription_name'][source]
+
+ +
+
+ui_color = #e4f0e8[source]
+
+ +
+
+execute(context)[source]
+

Receive a batch of messages from the topic subscription in the Service Bus namespace, by connecting to the Service Bus client

+
+
+
+ +
+ +
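A minimal usage sketch; the task id, topic and subscription names are illustrative assumptions:

from airflow.providers.microsoft.azure.operators.asb import ASBReceiveSubscriptionMessageOperator

receive_subscription_message = ASBReceiveSubscriptionMessageOperator(
    task_id="receive_subscription_message",     # assumed task id
    topic_name="sb-topic",                      # assumed topic name
    subscription_name="sb-subscription",        # assumed subscription name
    max_message_count=1,
    max_wait_time=5,
    azure_service_bus_conn_id="azure_service_bus_default",
)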
+
+class airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSubscriptionDeleteOperator(*, topic_name, subscription_name, azure_service_bus_conn_id='azure_service_bus_default', **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Deletes the topic subscription in the Azure ServiceBus namespace

+
+

See also

+

For more information on how to use this operator, take a look at the guide: +Delete Azure Service Bus Subscription

+
+
+
Parameters
+
    +
  • topic_name (str) – The topic that will own the to-be-created subscription.

  • +
  • subscription_name (str) – Name of the subscription that needs to be created

  • +
  • azure_service_bus_conn_id (str) – Reference to the +Azure Service Bus connection.

  • +
+
+
+
+
+template_fields :Sequence[str] = ['topic_name', 'subscription_name'][source]
+
+ +
+
+ui_color = #e4f0e8[source]
+
+ +
+
+execute(context)[source]
+

Delete topic subscription in Service Bus namespace, by connecting to Service Bus Admin client

+
+
+
+ +
+ +
+
+class airflow.providers.microsoft.azure.operators.asb.AzureServiceBusTopicDeleteOperator(*, topic_name, azure_service_bus_conn_id='azure_service_bus_default', **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Deletes the topic in the Azure Service Bus namespace

+
+

See also

+

For more information on how to use this operator, take a look at the guide: +Delete Azure Service Bus Topic

+
+
+
Parameters
+
+
+
+
+
+template_fields :Sequence[str] = ['topic_name'][source]
+
+ +
+
+ui_color = #e4f0e8[source]
+
+ +
+
+execute(context)[source]
+

Delete topic in Service Bus namespace, by connecting to Service Bus Admin client

+
+
+
+ +
+ +
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/batch/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/batch/index.html
new file mode 100644
index 00000000000..1898e3ddc97
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/batch/index.html
@@ -0,0 +1,1014 @@
+

airflow.providers.microsoft.azure.operators.batch

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

AzureBatchOperator

Executes a job on Azure Batch Service

+
+
+class airflow.providers.microsoft.azure.operators.batch.AzureBatchOperator(*, batch_pool_id, batch_pool_vm_size, batch_job_id, batch_task_command_line, batch_task_id, vm_node_agent_sku_id, vm_publisher=None, vm_offer=None, sku_starts_with=None, vm_sku=None, vm_version=None, os_family=None, os_version=None, batch_pool_display_name=None, batch_job_display_name=None, batch_job_manager_task=None, batch_job_preparation_task=None, batch_job_release_task=None, batch_task_display_name=None, batch_task_container_settings=None, batch_start_task=None, batch_max_retries=3, batch_task_resource_files=None, batch_task_output_files=None, batch_task_user_identity=None, target_low_priority_nodes=None, target_dedicated_nodes=None, enable_auto_scale=False, auto_scale_formula=None, azure_batch_conn_id='azure_batch_default', use_latest_verified_vm_image_and_sku=False, timeout=25, should_delete_job=False, should_delete_pool=False, **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Executes a job on Azure Batch Service

+
+
Parameters
+
    +
  • batch_pool_id (str) – A string that uniquely identifies the Pool within the Account.

  • +
  • batch_pool_vm_size (str) – The size of virtual machines in the Pool

  • +
  • batch_job_id (str) – A string that uniquely identifies the Job within the Account.

  • +
  • batch_task_command_line (str) – The command line of the Task

  • +
  • batch_task_id (str) – A string that uniquely identifies the task within the Job.

  • +
  • batch_pool_display_name (str | None) – The display name for the Pool. +The display name need not be unique

  • +
  • batch_job_display_name (str | None) – The display name for the Job. +The display name need not be unique

  • +
  • batch_job_manager_task (batch_models.JobManagerTask | None) – Details of a Job Manager Task to be launched when the Job is started.

  • +
  • batch_job_preparation_task (batch_models.JobPreparationTask | None) – The Job Preparation Task. If set, the Batch service will +run the Job Preparation Task on a Node before starting any Tasks of that +Job on that Compute Node. Required if batch_job_release_task is set.

  • +
  • batch_job_release_task (batch_models.JobReleaseTask | None) – The Job Release Task. Use to undo changes to Compute Nodes +made by the Job Preparation Task

  • +
  • batch_task_display_name (str | None) – The display name for the task. +The display name need not be unique

  • +
  • batch_task_container_settings (batch_models.TaskContainerSettings | None) – The settings for the container under which the Task runs

  • +
  • batch_start_task (batch_models.StartTask | None) – A Task specified to run on each Compute Node as it joins the Pool. +The Task runs when the Compute Node is added to the Pool or +when the Compute Node is restarted.

  • +
  • batch_max_retries (int) – The number of times to retry this batch operation before it’s +considered a failed operation. Default is 3

  • +
  • batch_task_resource_files (list[batch_models.ResourceFile] | None) – A list of files that the Batch service will +download to the Compute Node before running the command line.

  • +
  • batch_task_output_files (list[batch_models.OutputFile] | None) – A list of files that the Batch service will upload +from the Compute Node after running the command line.

  • +
  • batch_task_user_identity (batch_models.UserIdentity | None) – The user identity under which the Task runs. +If omitted, the Task runs as a non-administrative user unique to the Task.

  • +
  • target_low_priority_nodes (int | None) – The desired number of low-priority Compute Nodes in the Pool. +This property must not be specified if enable_auto_scale is set to true.

  • +
  • target_dedicated_nodes (int | None) – The desired number of dedicated Compute Nodes in the Pool. +This property must not be specified if enable_auto_scale is set to true.

  • +
  • enable_auto_scale (bool) – Whether the Pool size should automatically adjust over time. Default is false

  • +
  • auto_scale_formula (str | None) – A formula for the desired number of Compute Nodes in the Pool. +This property must not be specified if enableAutoScale is set to false. +It is required if enableAutoScale is set to true.

  • +
  • azure_batch_conn_id – The Azure Batch connection id

  • +
  • use_latest_verified_vm_image_and_sku (bool) – Whether to use the latest verified virtual +machine image and sku in the batch account. Default is false.

  • +
  • vm_publisher (str | None) – The publisher of the Azure Virtual Machines Marketplace Image. +For example, Canonical or MicrosoftWindowsServer. Required if +use_latest_image_and_sku is set to True

  • +
  • vm_offer (str | None) – The offer type of the Azure Virtual Machines Marketplace Image. +For example, UbuntuServer or WindowsServer. Required if +use_latest_image_and_sku is set to True

  • +
  • sku_starts_with (str | None) – The starting string of the Virtual Machine SKU. Required if +use_latest_image_and_sku is set to True

  • +
  • vm_sku (str | None) – The name of the virtual machine sku to use

  • +
  • vm_version (str | None) – The version of the virtual machine

  • +
  • vm_node_agent_sku_id (str) – The node agent sku id of the virtual machine

  • +
  • os_family (str | None) – The Azure Guest OS family to be installed on the virtual machines in the Pool.

  • +
  • os_version (str | None) – The OS family version

  • +
  • timeout (int) – The amount of time to wait for the job to complete in minutes. Default is 25

  • +
  • should_delete_job (bool) – Whether to delete job after execution. Default is False

  • +
  • should_delete_pool (bool) – Whether to delete pool after execution of jobs. Default is False

  • +
+
+
+
+
+template_fields :Sequence[str] = ['batch_pool_id', 'batch_pool_vm_size', 'batch_job_id', 'batch_task_id', 'batch_task_command_line'][source]
+
+ +
+
+ui_color = #f0f0e4[source]
+
+ +
+
+execute(context)[source]
+

This is the main method to derive when creating an operator. +Context is the same dictionary used as when rendering jinja templates.

+

Refer to get_template_context for more context.

+
+
+
+ +
+
+on_kill()[source]
+

Override this method to cleanup subprocesses when a task instance +gets killed. Any use of the threading, subprocess or multiprocessing +module within an operator needs to be cleaned up or it will leave +ghost processes behind.

+
+
+
+ +
+
+get_hook()[source]
+

Create and return an AzureBatchHook.

+
+
+
+ +
+
+clean_up(pool_id=None, job_id=None)[source]
+

Delete the given pool and job in the batch account

+
+
Parameters
+
    +
  • pool_id (str | None) – The id of the pool to delete

  • +
  • job_id (str | None) – The id of the job to delete

  • +
+
+
+
+ +
+ +
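A minimal usage sketch running a single command-line task on a small dedicated pool; the ids, VM image values and sizes shown are illustrative assumptions:

from airflow.providers.microsoft.azure.operators.batch import AzureBatchOperator

azure_batch_task = AzureBatchOperator(
    task_id="azure_batch_task",             # assumed task id
    batch_pool_id="example-pool",           # assumed pool id
    batch_pool_vm_size="Standard_A1_v2",    # assumed VM size
    batch_job_id="example-job",             # assumed job id
    batch_task_id="example-task",           # assumed task id within the job
    batch_task_command_line="/bin/bash -c 'echo hello world'",
    vm_publisher="Canonical",               # assumed Marketplace image values
    vm_offer="UbuntuServer",
    vm_sku="18.04-LTS",
    vm_version="latest",
    vm_node_agent_sku_id="batch.node.ubuntu 18.04",
    target_dedicated_nodes=1,
    azure_batch_conn_id="azure_batch_default",
)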
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/container_instances/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/container_instances/index.html
new file mode 100644
index 00000000000..7319cd91cb1
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/container_instances/index.html
@@ -0,0 +1,1058 @@

airflow.providers.microsoft.azure.operators.container_instances

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

AzureContainerInstancesOperator

Start a container on Azure Container Instances

+
+
+

Attributes

+ ++++ + + + + + + + + + + + + + + + + + + + + +

Volume

DEFAULT_ENVIRONMENT_VARIABLES

DEFAULT_SECURED_VARIABLES

DEFAULT_VOLUMES

DEFAULT_MEMORY_IN_GB

DEFAULT_CPU

+
+
+airflow.providers.microsoft.azure.operators.container_instances.Volume[source]
+
+ +
+
+airflow.providers.microsoft.azure.operators.container_instances.DEFAULT_ENVIRONMENT_VARIABLES :dict[str, str][source]
+
+ +
+
+airflow.providers.microsoft.azure.operators.container_instances.DEFAULT_SECURED_VARIABLES :Sequence[str] = [][source]
+
+ +
+
+airflow.providers.microsoft.azure.operators.container_instances.DEFAULT_VOLUMES :Sequence[Volume] = [][source]
+
+ +
+
+airflow.providers.microsoft.azure.operators.container_instances.DEFAULT_MEMORY_IN_GB = 2.0[source]
+
+ +
+
+airflow.providers.microsoft.azure.operators.container_instances.DEFAULT_CPU = 1.0[source]
+
+ +
+
+class airflow.providers.microsoft.azure.operators.container_instances.AzureContainerInstancesOperator(*, ci_conn_id, registry_conn_id, resource_group, name, image, region, environment_variables=None, secured_variables=None, volumes=None, memory_in_gb=None, cpu=None, gpu=None, command=None, remove_on_error=True, fail_if_exists=True, tags=None, os_type='Linux', restart_policy='Never', ip_address=None, ports=None, network_profile=None, **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Start a container on Azure Container Instances

+
+
Parameters
+
    +
  • ci_conn_id (str) – connection id of a service principal which will be used +to start the container instance

  • +
  • registry_conn_id (str | None) – connection id of a user which can login to a +private docker registry. For Azure use Azure connection id

  • +
  • resource_group (str) – name of the resource group wherein this container +instance should be started

  • +
  • name (str) – name of this container instance. Please note this name has +to be unique in order to run containers in parallel.

  • +
  • image (str) – the docker image to be used

  • +
  • region (str) – the region wherein this container instance should be started

  • +
  • environment_variables (dict | None) – key,value pairs containing environment +variables which will be passed to the running container

  • +
  • secured_variables (str | None) – names of environmental variables that should not +be exposed outside the container (typically passwords).

  • +
  • volumes (list | None) – list of Volume tuples to be mounted to the container. +Currently only Azure Fileshares are supported.

  • +
  • memory_in_gb (Any | None) – the amount of memory to allocate to this container

  • +
  • cpu (Any | None) – the number of cpus to allocate to this container

  • +
  • gpu (Any | None) – GPU Resource for the container.

  • +
  • command (list[str] | None) – the command to run inside the container

  • +
  • container_timeout – max time allowed for the execution of +the container instance.

  • +
  • tags (dict[str, str] | None) – azure tags as dict of str:str

  • +
  • os_type (str) – The operating system type required by the containers +in the container group. Possible values include: ‘Windows’, ‘Linux’

  • +
  • restart_policy (str) – Restart policy for all containers within the container group. +Possible values include: ‘Always’, ‘OnFailure’, ‘Never’

  • +
  • ip_address (IpAddress | None) – The IP address type of the container group.

  • +
  • network_profile (ContainerGroupNetworkProfile | None) – The network profile information for a container group.

  • +
+
+
+

Example:

+
AzureContainerInstancesOperator(
+    ci_conn_id = "azure_service_principal",
+    registry_conn_id = "azure_registry_user",
+    resource_group = "my-resource-group",
+    name = "my-container-name-{{ ds }}",
+    image = "myprivateregistry.azurecr.io/my_container:latest",
+    region = "westeurope",
+    environment_variables = {"MODEL_PATH":  "my_value",
+     "POSTGRES_LOGIN": "{{ macros.connection('postgres_default').login }}",
+     "POSTGRES_PASSWORD": "{{ macros.connection('postgres_default').password }}",
+     "JOB_GUID": "{{ ti.xcom_pull(task_ids='task1', key='guid') }}" },
+    secured_variables = ['POSTGRES_PASSWORD'],
+    volumes = [("azure_container_instance_conn_id",
+            "my_storage_container",
+            "my_fileshare",
+            "/input-data",
+        True),],
+    memory_in_gb=14.0,
+    cpu=4.0,
+    gpu=GpuResource(count=1, sku='K80'),
+    command=["/bin/echo", "world"],
+    task_id="start_container"
+)
+
+
+
+
+template_fields :Sequence[str] = ['name', 'image', 'command', 'environment_variables'][source]
+
+ +
+
+template_fields_renderers[source]
+
+ +
+
+execute(context)[source]
+

This is the main method to derive when creating an operator. +Context is the same dictionary used as when rendering jinja templates.

+

Refer to get_template_context for more context.

+
+
+
+ +
+
+on_kill()[source]
+

Override this method to cleanup subprocesses when a task instance +gets killed. Any use of the threading, subprocess or multiprocessing +module within an operator needs to be cleaned up or it will leave +ghost processes behind.

+
+
+
+ +
+ +
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/cosmos/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/cosmos/index.html
new file mode 100644
index 00000000000..1e9f92a006c
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/cosmos/index.html
@@ -0,0 +1,924 @@

airflow.providers.microsoft.azure.operators.cosmos

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

AzureCosmosInsertDocumentOperator

Inserts a new document into the specified Cosmos database and collection

+
+
+class airflow.providers.microsoft.azure.operators.cosmos.AzureCosmosInsertDocumentOperator(*, database_name, collection_name, document, azure_cosmos_conn_id='azure_cosmos_default', **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Inserts a new document into the specified Cosmos database and collection. It will create both the database and collection if they do not already exist.

+
+
Parameters
+
    +
  • database_name (str) – The name of the database. (templated)

  • +
  • collection_name (str) – The name of the collection. (templated)

  • +
  • document (dict) – The document to insert

  • +
  • azure_cosmos_conn_id (str) – Reference to the +Azure CosmosDB connection.

  • +
+
+
+
+
+template_fields :Sequence[str] = ['database_name', 'collection_name'][source]
+
+ +
+
+ui_color = #e4f0e8[source]
+
+ +
+
+execute(context)[source]
+

This is the main method to derive when creating an operator. +Context is the same dictionary used as when rendering jinja templates.

+

Refer to get_template_context for more context.

+
+
+
+ +
+ +
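A minimal usage sketch; the task id, database and collection names and the document are illustrative assumptions:

from airflow.providers.microsoft.azure.operators.cosmos import AzureCosmosInsertDocumentOperator

insert_document = AzureCosmosInsertDocumentOperator(
    task_id="insert_cosmos_document",       # assumed task id
    database_name="airflow_example_db",     # assumed database name (templated)
    collection_name="airflow_example_coll", # assumed collection name (templated)
    document={"id": "item-0001", "value": 42},  # assumed example document
    azure_cosmos_conn_id="azure_cosmos_default",
)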
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/data_factory/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/data_factory/index.html
new file mode 100644
index 00000000000..5816418dcdf
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/data_factory/index.html
@@ -0,0 +1,1023 @@

airflow.providers.microsoft.azure.operators.data_factory

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + + + + +

AzureDataFactoryPipelineRunLink

Constructs a link to monitor a pipeline run in Azure Data Factory.

AzureDataFactoryRunPipelineOperator

Executes a data factory pipeline.

+
+class airflow.providers.microsoft.azure.operators.data_factory.AzureDataFactoryPipelineRunLink[source]

Bases: airflow.utils.log.logging_mixin.LoggingMixin, airflow.models.BaseOperatorLink

+

Constructs a link to monitor a pipeline run in Azure Data Factory.

+
+
+name = Monitor Pipeline Run[source]
+
+ +
+ +

Link to external system.

+

Note: The old signature of this function was (self, operator, dttm: datetime). That is still +supported at runtime but is deprecated.

+
+
Parameters
+
+
+
Returns
+

link to external system

+
+
Return type
+

str

+
+
+
+ +
+ +
+
+class airflow.providers.microsoft.azure.operators.data_factory.AzureDataFactoryRunPipelineOperator(*, pipeline_name, azure_data_factory_conn_id=AzureDataFactoryHook.default_conn_name, wait_for_termination=True, resource_group_name=None, factory_name=None, reference_pipeline_run_id=None, is_recovery=None, start_activity_name=None, start_from_failure=None, parameters=None, timeout=60 * 60 * 24 * 7, check_interval=60, **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Executes a data factory pipeline.

+
+

See also

+

For more information on how to use this operator, take a look at the guide: +AzureDataFactoryRunPipelineOperator

+
+
+
Parameters
+
    +
  • azure_data_factory_conn_id (str) – The connection identifier for connecting to Azure Data Factory.

  • +
  • pipeline_name (str) – The name of the pipeline to execute.

  • +
  • wait_for_termination (bool) – Flag to wait on a pipeline run’s termination. By default, this feature is +enabled but could be disabled to perform an asynchronous wait for a long-running pipeline execution +using the AzureDataFactoryPipelineRunSensor.

  • +
  • resource_group_name (str | None) – The resource group name. If a value is not passed in to the operator, the +AzureDataFactoryHook will attempt to use the resource group name provided in the corresponding +connection.

  • +
  • factory_name (str | None) – The data factory name. If a value is not passed in to the operator, the AzureDataFactoryHook will attempt to use the factory name provided in the corresponding connection.

  • +
  • reference_pipeline_run_id (str | None) – The pipeline run identifier. If this run ID is specified the parameters +of the specified run will be used to create a new run.

  • +
  • is_recovery (bool | None) – Recovery mode flag. If recovery mode is set to True, the specified referenced +pipeline run and the new run will be grouped under the same groupId.

  • +
  • start_activity_name (str | None) – In recovery mode, the rerun will start from this activity. If not specified, +all activities will run.

  • +
  • start_from_failure (bool | None) – In recovery mode, if set to true, the rerun will start from failed activities. +The property will be used only if start_activity_name is not specified.

  • +
  • parameters (dict[str, Any] | None) – Parameters of the pipeline run. These parameters are referenced in a pipeline via +@pipeline().parameters.parameterName and will be used only if the reference_pipeline_run_id is +not specified.

  • +
  • timeout (int) – Time in seconds to wait for a pipeline to reach a terminal status for non-asynchronous +waits. Used only if wait_for_termination is True.

  • +
  • check_interval (int) – Time in seconds to check on a pipeline run’s status for non-asynchronous waits. +Used only if wait_for_termination is True.

  • +
+
+
+
+
+template_fields :Sequence[str] = ['azure_data_factory_conn_id', 'resource_group_name', 'factory_name', 'pipeline_name',...[source]
+
+ +
+
+template_fields_renderers[source]
+
+ +
+
+ui_color = #0678d4[source]
+
+ +
+ +
+ +
+
+execute(context)[source]
+

This is the main method to derive when creating an operator. +Context is the same dictionary used as when rendering jinja templates.

+

Refer to get_template_context for more context.

+
+
+
+ +
+
+on_kill()[source]
+

Override this method to cleanup subprocesses when a task instance +gets killed. Any use of the threading, subprocess or multiprocessing +module within an operator needs to be cleaned up or it will leave +ghost processes behind.

+
+
+
+ +
+ +
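A minimal usage sketch; the task id, pipeline name and parameters are illustrative assumptions. resource_group_name and factory_name are omitted here so that, per the parameter descriptions above, the hook falls back to the values stored in the connection:

from airflow.providers.microsoft.azure.operators.data_factory import AzureDataFactoryRunPipelineOperator

run_pipeline = AzureDataFactoryRunPipelineOperator(
    task_id="run_adf_pipeline",             # assumed task id
    pipeline_name="example-pipeline",       # assumed pipeline name
    parameters={"myParam": "value"},        # assumed pipeline parameters
    wait_for_termination=True,              # block until the run reaches a terminal state
)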
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/index.html
new file mode 100644
index 00000000000..c78ffbd3aac
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/index.html
@@ -0,0 +1,865 @@

airflow.providers.microsoft.azure.operators

\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/synapse/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/synapse/index.html
new file mode 100644
index 00000000000..059c6b6a49e
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/synapse/index.html
@@ -0,0 +1,946 @@

airflow.providers.microsoft.azure.operators.synapse

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

AzureSynapseRunSparkBatchOperator

Executes a Spark job on Azure Synapse.

+
+
+class airflow.providers.microsoft.azure.operators.synapse.AzureSynapseRunSparkBatchOperator(*, azure_synapse_conn_id=AzureSynapseHook.default_conn_name, wait_for_termination=True, spark_pool='', payload, timeout=60 * 60 * 24 * 7, check_interval=60, **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Executes a Spark job on Azure Synapse.

+
+
Parameters
+
    +
  • azure_synapse_conn_id (str) – The connection identifier for connecting to Azure Synapse.

  • +
  • wait_for_termination (bool) – Flag to wait on a job run’s termination.

  • +
  • spark_pool (str) – The target synapse spark pool used to submit the job

  • +
  • payload (azure.synapse.spark.models.SparkBatchJobOptions) – Livy compatible payload which represents the spark job that a user wants to submit

  • +
  • timeout (int) – Time in seconds to wait for a job to reach a terminal status for non-asynchronous +waits. Used only if wait_for_termination is True.

  • +
  • check_interval (int) – Time in seconds to check on a job run’s status for non-asynchronous waits. +Used only if wait_for_termination is True.

  • +
+
+
+
+
+template_fields :Sequence[str] = ['azure_synapse_conn_id', 'spark_pool'][source]
+
+ +
+
+template_fields_renderers[source]
+
+ +
+
+ui_color = #0678d4[source]
+
+ +
+
+execute(context)[source]
+

This is the main method to derive when creating an operator. +Context is the same dictionary used as when rendering jinja templates.

+

Refer to get_template_context for more context.

+
+
+
+ +
+
+on_kill()[source]
+

Override this method to cleanup subprocesses when a task instance +gets killed. Any use of the threading, subprocess or multiprocessing +module within an operator needs to be cleaned up or it will leave +ghost processes behind.

+
+
+
+ +
+ +
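A minimal usage sketch; the task id and Spark pool name are illustrative assumptions, and the payload construction is deliberately left out:

from airflow.providers.microsoft.azure.operators.synapse import AzureSynapseRunSparkBatchOperator

# SPARK_JOB_PAYLOAD is assumed to be an azure.synapse.spark.models.SparkBatchJobOptions
# instance describing the job (see the payload parameter above); its construction is omitted.
run_spark_job = AzureSynapseRunSparkBatchOperator(
    task_id="run_spark_job",                # assumed task id
    spark_pool="examplesparkpool",          # assumed Spark pool name
    payload=SPARK_JOB_PAYLOAD,
    wait_for_termination=True,
)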
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/wasb_delete_blob/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/wasb_delete_blob/index.html
new file mode 100644
index 00000000000..ac399b0da00
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/operators/wasb_delete_blob/index.html
@@ -0,0 +1,919 @@

airflow.providers.microsoft.azure.operators.wasb_delete_blob

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

WasbDeleteBlobOperator

Deletes blob(s) on Azure Blob Storage.

+
+
+class airflow.providers.microsoft.azure.operators.wasb_delete_blob.WasbDeleteBlobOperator(*, container_name, blob_name, wasb_conn_id='wasb_default', check_options=None, is_prefix=False, ignore_if_missing=False, **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Deletes blob(s) on Azure Blob Storage.

+
+
Parameters
+
    +
  • container_name (str) – Name of the container. (templated)

  • +
  • blob_name (str) – Name of the blob. (templated)

  • +
  • wasb_conn_id (str) – Reference to the wasb connection.

  • +
  • check_options (Any) – Optional keyword arguments that +WasbHook.check_for_blob() takes.

  • +
  • is_prefix (bool) – If blob_name is a prefix, delete all files matching prefix.

  • +
  • ignore_if_missing (bool) – if True, then return success even if the +blob does not exist.

  • +
+
+
+
+
+template_fields :Sequence[str] = ['container_name', 'blob_name'][source]
+
+ +
+
+execute(context)[source]
+

This is the main method to derive when creating an operator. +Context is the same dictionary used as when rendering jinja templates.

+

Refer to get_template_context for more context.

+
+
+
+ +
+ +
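A minimal usage sketch deleting every blob under a prefix; the task id, container name and prefix are illustrative assumptions:

from airflow.providers.microsoft.azure.operators.wasb_delete_blob import WasbDeleteBlobOperator

delete_blobs = WasbDeleteBlobOperator(
    task_id="delete_blobs",                 # assumed task id
    container_name="mycontainer",           # assumed container name (templated)
    blob_name="raw/2022-12-14/",            # assumed blob prefix (templated)
    is_prefix=True,                         # treat blob_name as a prefix
    ignore_if_missing=True,                 # succeed even if nothing matches
    wasb_conn_id="wasb_default",
)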
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/secrets/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/secrets/index.html
new file mode 100644
index 00000000000..d6d62835cb3
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/secrets/index.html
@@ -0,0 +1,857 @@

airflow.providers.microsoft.azure.secrets

+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/secrets/key_vault/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/secrets/key_vault/index.html new file mode 100644 index 00000000000..9abe5535f5e --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/secrets/key_vault/index.html @@ -0,0 +1,1019 @@ + + + + + + + + + + + + airflow.providers.microsoft.azure.secrets.key_vault — apache-airflow-providers-microsoft-azure Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +
+

airflow.providers.microsoft.azure.secrets.key_vault

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

AzureKeyVaultBackend

Retrieves Airflow Connections or Variables from Azure Key Vault secrets.

+
+
+class airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend(connections_prefix='airflow-connections', variables_prefix='airflow-variables', config_prefix='airflow-config', vault_url='', sep='-', **kwargs)[source]
+

Bases: airflow.secrets.BaseSecretsBackend, airflow.utils.log.logging_mixin.LoggingMixin

+

Retrieves Airflow Connections or Variables from Azure Key Vault secrets.

+

The Azure Key Vault can be configured as a secrets backend in the airflow.cfg:

+
[secrets]
+backend = airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend
+backend_kwargs = {"connections_prefix": "airflow-connections", "vault_url": "<azure_key_vault_uri>"}
+
+
+

For example, if the secrets prefix is airflow-connections-smtp-default, this would be accessible if you provide {"connections_prefix": "airflow-connections"} and request conn_id smtp-default. And if the variables prefix is airflow-variables-hello, this would be accessible if you provide {"variables_prefix": "airflow-variables"} and request the variable key hello.
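
To illustrate that mapping, a short sketch assuming the backend above is configured and that the secrets airflow-connections-smtp-default and airflow-variables-hello exist in the vault:

from airflow.hooks.base import BaseHook
from airflow.models import Variable

# Both lookups are served by the Key Vault backend before the metastore is consulted.
smtp_conn = BaseHook.get_connection("smtp-default")   # reads secret "airflow-connections-smtp-default"
hello_value = Variable.get("hello")                   # reads secret "airflow-variables-hello"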

+

For client authentication, the DefaultAzureCredential from the Azure Python SDK is used as the credential provider, which supports service principal, managed identity and user credentials.

+

For example, to specify a service principal with secret you can set the environment variables AZURE_TENANT_ID, AZURE_CLIENT_ID and AZURE_CLIENT_SECRET.

+
+

See also

+

For more details on client authentication refer to the DefaultAzureCredential Class reference: +https://docs.microsoft.com/en-us/python/api/azure-identity/azure.identity.defaultazurecredential?view=azure-python

+
+
+
Parameters
+
    +
  • connections_prefix (str) – Specifies the prefix of the secret to read to get Connections +If set to None (null), requests for connections will not be sent to Azure Key Vault

  • +
  • variables_prefix (str) – Specifies the prefix of the secret to read to get Variables +If set to None (null), requests for variables will not be sent to Azure Key Vault

  • +
  • config_prefix (str) – Specifies the prefix of the secret to read to get Configurations. If set to None (null), requests for configurations will not be sent to Azure Key Vault

  • +
  • vault_url (str) – The URL of an Azure Key Vault to use

  • +
  • sep (str) – separator used to concatenate secret_prefix and secret_id. Default: “-“

  • +
+
+
+
+
+client()[source]
+

Create an Azure Key Vault client.

+
+
+
+ +
+
+get_conn_value(conn_id)[source]
+

Get a serialized representation of an Airflow Connection from an Azure Key Vault secret.

+
+
Parameters
+

conn_id (str) – The Airflow connection id to retrieve

+
+
+
+ +
+
+get_conn_uri(conn_id)[source]
+

Return URI representation of Connection conn_id.

+

As of Airflow version 2.3.0 this method is deprecated.

+
+
Parameters
+

conn_id (str) – the connection id

+
+
Returns
+

deserialized Connection

+
+
Return type
+

str | None

+
+
+
+ +
+
+get_variable(key)[source]
+

Get an Airflow Variable from an Azure Key Vault secret.

+
+
Parameters
+

key (str) – Variable Key

+
+
Returns
+

Variable Value

+
+
Return type
+

str | None

+
+
+
+ +
+
+get_config(key)[source]
+

Get an Airflow Configuration option from an Azure Key Vault secret.

+
+
Parameters
+

key (str) – Configuration Option Key

+
+
Returns
+

Configuration Option Value

+
+
Return type
+

str | None

+
+
+
+ +
+
+static build_path(path_prefix, secret_id, sep='-')[source]
+

Given a path_prefix and secret_id, build a valid secret name for the Azure Key Vault Backend. Also replaces underscores in the path with dashes to support easy switching between environment variables, so connection_default becomes connection-default (see the sketch after the parameter list below).

+
+
Parameters
+
    +
  • path_prefix (str) – The path prefix of the secret to retrieve

  • +
  • secret_id (str) – Name of the secret

  • +
  • sep (str) – Separator used to concatenate path_prefix and secret_id
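
A small sketch of the naming rule described above; the prefix and secret id are illustrative:

from airflow.providers.microsoft.azure.secrets.key_vault import AzureKeyVaultBackend

# Underscores are replaced with dashes, so "smtp_default" becomes "smtp-default".
name = AzureKeyVaultBackend.build_path("airflow-connections", "smtp_default")
print(name)  # airflow-connections-smtp-default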

diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/sensors/cosmos/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/sensors/cosmos/index.html
new file mode 100644
index 00000000000..71ee868f325

airflow.providers.microsoft.azure.sensors.cosmos

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

AzureCosmosDocumentSensor

Checks for the existence of a document which matches the given query in CosmosDB.

+
+
+class airflow.providers.microsoft.azure.sensors.cosmos.AzureCosmosDocumentSensor(*, database_name, collection_name, document_id, azure_cosmos_conn_id='azure_cosmos_default', **kwargs)[source]
+

Bases: airflow.sensors.base.BaseSensorOperator

+

Checks for the existence of a document which matches the given query in CosmosDB. Example:

+
azure_cosmos_sensor = AzureCosmosDocumentSensor(
+    database_name="somedatabase_name",
+    collection_name="somecollection_name",
+    document_id="unique-doc-id",
+    azure_cosmos_conn_id="azure_cosmos_default",
+    task_id="azure_cosmos_sensor",
+)
+
+
+
+
Parameters
+
    +
  • database_name (str) – Target CosmosDB database_name.

  • +
  • collection_name (str) – Target CosmosDB collection_name.

  • +
  • document_id (str) – The ID of the target document.

  • +
  • azure_cosmos_conn_id (str) – Reference to the +Azure CosmosDB connection.

  • +
+
+
+
+
+template_fields :Sequence[str] = ['database_name', 'collection_name', 'document_id'][source]
+
+ +
+
+poke(context)[source]
+

Function that sensors deriving this class should override.

diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/sensors/data_factory/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/sensors/data_factory/index.html
new file mode 100644
index 00000000000..9619d84e6ad

airflow.providers.microsoft.azure.sensors.data_factory

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

AzureDataFactoryPipelineRunStatusSensor

Checks the status of a pipeline run.

+
+
+class airflow.providers.microsoft.azure.sensors.data_factory.AzureDataFactoryPipelineRunStatusSensor(*, run_id, azure_data_factory_conn_id=AzureDataFactoryHook.default_conn_name, resource_group_name=None, factory_name=None, **kwargs)[source]
+

Bases: airflow.sensors.base.BaseSensorOperator

+

Checks the status of a pipeline run.

+
+
Parameters
+
    +
  • azure_data_factory_conn_id (str) – The connection identifier for connecting to Azure Data Factory.

  • +
  • run_id (str) – The pipeline run identifier.

  • +
  • resource_group_name (str | None) – The resource group name.

  • +
  • factory_name (str | None) – The data factory name.

  • +
+
+
+
+
+template_fields :Sequence[str] = ['azure_data_factory_conn_id', 'resource_group_name', 'factory_name', 'run_id'][source]
+
+ +
+
+ui_color = #50e6ff[source]
+
+ +
+
+poke(context)[source]
+

Function that sensors deriving this class should override.
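
A hedged usage sketch; the upstream task id and the XCom key holding the pipeline run id are assumptions, and the resource group and factory names are illustrative:

from airflow.providers.microsoft.azure.sensors.data_factory import AzureDataFactoryPipelineRunStatusSensor

# run_id is a templated field, so it can be pulled from an upstream task's XCom at run time.
wait_for_pipeline_run = AzureDataFactoryPipelineRunStatusSensor(
    task_id="wait_for_pipeline_run",
    run_id="{{ ti.xcom_pull(task_ids='run_pipeline', key='run_id') }}",  # assumed upstream task and key
    azure_data_factory_conn_id="azure_data_factory_default",
    resource_group_name="my-resource-group",
    factory_name="my-data-factory",
    poke_interval=60,
)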

diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/sensors/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/sensors/index.html
new file mode 100644
index 00000000000..8ca0826d900

airflow.providers.microsoft.azure.sensors

diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/sensors/wasb/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/sensors/wasb/index.html
new file mode 100644
index 00000000000..e4f3c859534

airflow.providers.microsoft.azure.sensors.wasb

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + + + + +

WasbBlobSensor

Waits for a blob to arrive on Azure Blob Storage.

WasbPrefixSensor

Waits for blobs matching a prefix to arrive on Azure Blob Storage.

+
+
+class airflow.providers.microsoft.azure.sensors.wasb.WasbBlobSensor(*, container_name, blob_name, wasb_conn_id='wasb_default', check_options=None, **kwargs)[source]
+

Bases: airflow.sensors.base.BaseSensorOperator

+

Waits for a blob to arrive on Azure Blob Storage.

+
+
Parameters
+
    +
  • container_name (str) – Name of the container.

  • +
  • blob_name (str) – Name of the blob.

  • +
  • wasb_conn_id (str) – Reference to the wasb connection.

  • +
  • check_options (dict | None) – Optional keyword arguments that +WasbHook.check_for_blob() takes.

  • +
+
+
+
+
+template_fields :Sequence[str] = ['container_name', 'blob_name'][source]
+
+ +
+
+poke(context)[source]
+

Function that sensors deriving this class should override.

+
+
+
+ +
+ +
+
+class airflow.providers.microsoft.azure.sensors.wasb.WasbPrefixSensor(*, container_name, prefix, wasb_conn_id='wasb_default', check_options=None, **kwargs)[source]
+

Bases: airflow.sensors.base.BaseSensorOperator

+

Waits for blobs matching a prefix to arrive on Azure Blob Storage.

+
+
Parameters
+
    +
  • container_name (str) – Name of the container.

  • +
  • prefix (str) – Prefix of the blob.

  • +
  • wasb_conn_id (str) – Reference to the wasb connection.

  • +
  • check_options (dict | None) – Optional keyword arguments that +WasbHook.check_for_prefix() takes.

  • +
+
+
+
+
+template_fields :Sequence[str] = ['container_name', 'prefix'][source]
+
+ +
+
+poke(context)[source]
+

Function that sensors deriving this class should override.
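
Two minimal sketches based on the signatures above; the container, blob and prefix values are illustrative and wasb_default is assumed to be a configured connection:

from airflow.providers.microsoft.azure.sensors.wasb import WasbBlobSensor, WasbPrefixSensor

# Wait for one specific blob to appear.
wait_for_blob = WasbBlobSensor(
    task_id="wait_for_blob",
    container_name="my-container",
    blob_name="data/report.csv",
    poke_interval=60,
)

# Wait for any blob whose name starts with the given prefix.
wait_for_prefix = WasbPrefixSensor(
    task_id="wait_for_prefix",
    container_name="my-container",
    prefix="data/2022-12-",
    poke_interval=60,
)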

diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs/index.html
new file mode 100644
index 00000000000..648d1a7a079

airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

AzureBlobStorageToGCSOperator

Operator transfers data from Azure Blob Storage to specified bucket in Google Cloud Storage

+
+
+class airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs.AzureBlobStorageToGCSOperator(*, wasb_conn_id='wasb_default', gcp_conn_id='google_cloud_default', blob_name, file_path, container_name, bucket_name, object_name, filename, gzip, delegate_to, impersonation_chain=None, **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Operator that transfers data from Azure Blob Storage to a specified bucket in Google Cloud Storage.

+
+

See also

+

For more information on how to use this operator, take a look at the guide: +Transfer Data from Blob Storage to Google Cloud Storage

+
+
+
Parameters
+
    +
  • wasb_conn_id – Reference to the wasb connection.

  • +
  • gcp_conn_id (str) – The connection ID to use when fetching connection info.

  • +
  • blob_name (str) – Name of the blob

  • +
  • file_path (str) – Path to the file to download

  • +
  • container_name (str) – Name of the container

  • +
  • bucket_name (str) – The bucket to upload to

  • +
  • object_name (str) – The object name to set when uploading the file

  • +
  • filename (str) – The local file path to the file to be uploaded

  • +
  • gzip (bool) – Option to compress local file or file data for upload

  • +
  • delegate_to (str | None) – The account to impersonate using domain-wide delegation of authority, +if any. For this to work, the service account making the request must have +domain-wide delegation enabled.

  • +
  • impersonation_chain (str | Sequence[str] | None) – Optional service account to impersonate using short-term +credentials, or chained list of accounts required to get the access_token +of the last account in the list, which will be impersonated in the request. +If set as a string, the account must grant the originating account +the Service Account Token Creator IAM role. +If set as a sequence, the identities from the list must grant +Service Account Token Creator IAM role to the directly preceding identity, with first +account from the list granting this role to the originating account.

  • +
+
+
+
+
+template_fields :Sequence[str] = ['blob_name', 'file_path', 'container_name', 'bucket_name', 'object_name', 'filename'][source]
+
+ +
+
+execute(context)[source]
+

This is the main method to derive when creating an operator. Context is the same dictionary used when rendering Jinja templates.

+

Refer to get_template_context for more context.
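
A hedged sketch of how the parameters above fit together; every value is illustrative, and the same local path is assumed for file_path (the download target) and filename (the file that is then uploaded to GCS):

from airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs import AzureBlobStorageToGCSOperator

transfer_blob_to_gcs = AzureBlobStorageToGCSOperator(
    task_id="transfer_blob_to_gcs",
    wasb_conn_id="wasb_default",
    gcp_conn_id="google_cloud_default",
    container_name="my-container",
    blob_name="data/report.csv",
    file_path="/tmp/report.csv",       # where the blob is downloaded locally
    filename="/tmp/report.csv",        # local file that is then uploaded to GCS
    bucket_name="my-gcs-bucket",
    object_name="data/report.csv",
    gzip=False,
    delegate_to=None,
    impersonation_chain=None,
)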

diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/transfers/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/transfers/index.html
new file mode 100644
index 00000000000..0230c19e2c6

airflow.providers.microsoft.azure.transfers

diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/transfers/local_to_adls/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/transfers/local_to_adls/index.html
new file mode 100644
index 00000000000..d6bb3933dc0

airflow.providers.microsoft.azure.transfers.local_to_adls

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + + + + +

LocalFilesystemToADLSOperator

Upload file(s) to Azure Data Lake

LocalToAzureDataLakeStorageOperator

This class is deprecated.

+
+
+class airflow.providers.microsoft.azure.transfers.local_to_adls.LocalFilesystemToADLSOperator(*, local_path, remote_path, overwrite=True, nthreads=64, buffersize=4194304, blocksize=4194304, extra_upload_options=None, azure_data_lake_conn_id='azure_data_lake_default', **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Upload file(s) to Azure Data Lake

+
+

See also

+

For more information on how to use this operator, take a look at the guide: +LocalFilesystemToADLSOperator

+
+
+
Parameters
+
    +
  • local_path (str) – local path. Can be single file, directory (in which case, +upload recursively) or glob pattern. Recursive glob patterns using ** +are not supported

  • +
  • remote_path (str) – Remote path to upload to; if multiple files, this is the +directory root to write within

  • +
  • nthreads (int) – Number of threads to use. If None, uses the number of cores.

  • +
  • overwrite (bool) – Whether to forcibly overwrite existing files/directories. +If False and remote path is a directory, will quit regardless if any files +would be overwritten or not. If True, only matching filenames are actually +overwritten

  • +
  • buffersize (int) – int [2**22] +Number of bytes for internal buffer. This block cannot be bigger than +a chunk and cannot be smaller than a block

  • +
  • blocksize (int) – int [2**22] +Number of bytes for a block. Within each chunk, we write a smaller +block for each API call. This block cannot be bigger than a chunk

  • +
  • extra_upload_options (dict[str, Any] | None) – Extra upload options to add to the hook upload method

  • +
  • azure_data_lake_conn_id (str) – Reference to the Azure Data Lake connection

  • +
+
+
+
+
+template_fields :Sequence[str] = ['local_path', 'remote_path'][source]
+
+ +
+
+ui_color = #e4f0e8[source]
+
+ +
+
+execute(context)[source]
+

This is the main method to derive when creating an operator. Context is the same dictionary used when rendering Jinja templates.

+

Refer to get_template_context for more context.
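
A minimal sketch based on the parameters above; the paths are illustrative and azure_data_lake_default is assumed to be a configured connection:

from airflow.providers.microsoft.azure.transfers.local_to_adls import LocalFilesystemToADLSOperator

upload_to_adls = LocalFilesystemToADLSOperator(
    task_id="upload_to_adls",
    local_path="/tmp/data/*.csv",      # single file, directory or glob pattern (recursive ** is not supported)
    remote_path="landing/csv",         # directory root to write within on ADLS
    overwrite=True,
    azure_data_lake_conn_id="azure_data_lake_default",
)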

+
+
+
+ +
+ +
+
+class airflow.providers.microsoft.azure.transfers.local_to_adls.LocalToAzureDataLakeStorageOperator(*args, **kwargs)[source]
+

Bases: LocalFilesystemToADLSOperator

+

This class is deprecated. Please use airflow.providers.microsoft.azure.transfers.local_to_adls.LocalFilesystemToADLSOperator.

diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/transfers/local_to_wasb/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/transfers/local_to_wasb/index.html
new file mode 100644
index 00000000000..5665b0e91c0

airflow.providers.microsoft.azure.transfers.local_to_wasb

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

LocalFilesystemToWasbOperator

Uploads a file to Azure Blob Storage.

+
+
+class airflow.providers.microsoft.azure.transfers.local_to_wasb.LocalFilesystemToWasbOperator(*, file_path, container_name, blob_name, wasb_conn_id='wasb_default', create_container=False, load_options=None, **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Uploads a file to Azure Blob Storage.

+
+
Parameters
+
    +
  • file_path (str) – Path to the file to load. (templated)

  • +
  • container_name (str) – Name of the container. (templated)

  • +
  • blob_name (str) – Name of the blob. (templated)

  • +
  • wasb_conn_id (str) – Reference to the wasb connection.

  • +
  • create_container (bool) – Attempt to create the target container prior to uploading the blob. This is +useful if the target container may not exist yet. Defaults to False.

  • +
  • load_options (dict | None) – Optional keyword arguments that +WasbHook.load_file() takes.

  • +
+
+
+
+
+template_fields :Sequence[str] = ['file_path', 'container_name', 'blob_name'][source]
+
+ +
+
+execute(context)[source]
+

Upload a file to Azure Blob Storage.
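
A minimal sketch; the file, container and blob names are illustrative, and the load_options entry assumes that WasbHook.load_file() forwards keyword arguments to the underlying blob upload:

from airflow.providers.microsoft.azure.transfers.local_to_wasb import LocalFilesystemToWasbOperator

upload_to_wasb = LocalFilesystemToWasbOperator(
    task_id="upload_to_wasb",
    file_path="/tmp/report.csv",
    container_name="my-container",
    blob_name="reports/report.csv",
    create_container=True,                  # create the container first if it may not exist
    load_options={"overwrite": True},       # forwarded to WasbHook.load_file()
)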

diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake/index.html
new file mode 100644
index 00000000000..524d49e926a

airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

OracleToAzureDataLakeOperator

Moves data from Oracle to Azure Data Lake. The operator runs the query against

+
+
+class airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake.OracleToAzureDataLakeOperator(*, filename, azure_data_lake_conn_id, azure_data_lake_path, oracle_conn_id, sql, sql_params=None, delimiter=',', encoding='utf-8', quotechar='"', quoting=csv.QUOTE_MINIMAL, **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Moves data from Oracle to Azure Data Lake. The operator runs the query against +Oracle and stores the file locally before loading it into Azure Data Lake.

+
+
Parameters
+
    +
  • filename (str) – file name to be used by the csv file.

  • +
  • azure_data_lake_conn_id (str) – destination azure data lake connection.

  • +
  • azure_data_lake_path (str) – destination path in azure data lake to put the file.

  • +
  • oracle_conn_id (str) – Source Oracle connection.

  • +
  • sql (str) – SQL query to execute against the Oracle database. (templated)

  • +
  • sql_params (dict | None) – Parameters to use in sql query. (templated)

  • +
  • delimiter (str) – field delimiter in the file.

  • +
  • encoding (str) – encoding type for the file.

  • +
  • quotechar (str) – Character to use in quoting.

  • +
  • quoting (str) – Quoting strategy. See unicodecsv quoting for more information.

  • +
+
+
+
+
+template_fields :Sequence[str] = ['filename', 'sql', 'sql_params'][source]
+
+ +
+
+template_fields_renderers[source]
+
+ +
+
+ui_color = #e08c8c[source]
+
+ +
+
+execute(context)[source]
+

This is the main method to derive when creating an operator. Context is the same dictionary used when rendering Jinja templates.

+

Refer to get_template_context for more context.
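
A hedged sketch; the query, bind parameter and paths are illustrative, and oracle_default / azure_data_lake_default are assumed to be configured connections:

from airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake import OracleToAzureDataLakeOperator

oracle_to_adls = OracleToAzureDataLakeOperator(
    task_id="oracle_to_adls",
    oracle_conn_id="oracle_default",
    sql="SELECT * FROM orders WHERE order_date = :run_date",  # sql is templated
    sql_params={"run_date": "{{ ds }}"},                      # sql_params is templated too
    filename="orders_{{ ds }}.csv",
    azure_data_lake_conn_id="azure_data_lake_default",
    azure_data_lake_path="landing/orders",
)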

diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/transfers/sftp_to_wasb/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/transfers/sftp_to_wasb/index.html
new file mode 100644
index 00000000000..b605b710157

airflow.providers.microsoft.azure.transfers.sftp_to_wasb

+

This module contains SFTP to Azure Blob Storage operator.

+
+

Module Contents

+
+

Classes

+ ++++ + + + + + +

SFTPToWasbOperator

Transfer files to Azure Blob Storage from SFTP server.

+
+
+

Attributes

+ ++++ + + + + + + + + +

WILDCARD

SftpFile

+
+
+airflow.providers.microsoft.azure.transfers.sftp_to_wasb.WILDCARD = *[source]
+
+ +
+
+airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SftpFile[source]
+
+ +
+
+class airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SFTPToWasbOperator(*, sftp_source_path, container_name, blob_prefix='', sftp_conn_id='sftp_default', wasb_conn_id='wasb_default', load_options=None, move_object=False, wasb_overwrite_object=False, create_container=False, **kwargs)[source]
+

Bases: airflow.models.BaseOperator

+

Transfer files to Azure Blob Storage from SFTP server.

+
+

See also

+

For more information on how to use this operator, take a look at the guide: +Transfer Data from SFTP Source Path to Blob Storage

+
+
+
Parameters
+
    +
  • sftp_source_path (str) – The sftp remote path. This is the specified file path +for downloading the single file or multiple files from the SFTP server. +You can use only one wildcard within your path. The wildcard can appear +inside the path or at the end of the path.

  • +
  • container_name (str) – Name of the container.

  • +
  • blob_prefix (str) – Prefix to name a blob.

  • +
  • sftp_conn_id (str) – The sftp connection id. The name or identifier for +establishing a connection to the SFTP server.

  • +
  • wasb_conn_id (str) – Reference to the wasb connection.

  • +
  • load_options (dict | None) – Optional keyword arguments that +WasbHook.load_file() takes.

  • +
  • move_object (bool) – When move object is True, the object is moved instead +of copied to the new location. This is the equivalent of a mv command +as opposed to a cp command.

  • +
  • wasb_overwrite_object (bool) – Whether the blob to be uploaded +should overwrite the current data. +When wasb_overwrite_object is True, it will overwrite the existing data. +If set to False, the operation might fail with +ResourceExistsError in case a blob object already exists.

  • +
  • create_container (bool) – Attempt to create the target container prior to uploading the blob. This is +useful if the target container may not exist yet. Defaults to False.

  • +
+
+
+
+
+property source_path_contains_wildcard: bool[source]
+

Checks if the SFTP source path contains a wildcard.

+
+
+
+ +
+
+template_fields :Sequence[str] = ['sftp_source_path', 'container_name', 'blob_prefix'][source]
+
+ +
+
+dry_run()[source]
+

Performs dry run for the operator - just render template fields.

+
+
+
+ +
+
+execute(context)[source]
+

Upload a file from SFTP to Azure Blob Storage.

+
+
+
+ +
+
+get_sftp_files_map()[source]
+

Get SFTP files from the source path; the path may contain a single WILDCARD.

+
+
+
+ +
+
+get_tree_behavior()[source]
+

Extracts the tree behavior from the source path for interacting with the remote folder.

+
+
+
+ +
+
+check_wildcards_limit()[source]
+

Check if there are multiple wildcards used in the SFTP source path.

+
+
+
+ +
+
+sftp_hook()[source]
+

Property of sftp hook to be re-used.

+
+
+
+ +
+
+get_full_path_blob(file)[source]
+

Get a blob name based on the previous name and a blob_prefix variable

+
+
+
+ +
+
+copy_files_to_wasb(sftp_files)[source]
+

Upload a list of files from sftp_files to Azure Blob Storage with a new Blob Name.

+
+
+
+ +
+
+delete_files(uploaded_files)[source]
+

Delete files at SFTP which have been moved to Azure Blob Storage.
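
A minimal sketch based on the parameters above; the SFTP path (with its single wildcard), container and blob prefix are illustrative, and sftp_default / wasb_default are assumed to be configured connections:

from airflow.providers.microsoft.azure.transfers.sftp_to_wasb import SFTPToWasbOperator

sftp_to_wasb = SFTPToWasbOperator(
    task_id="sftp_to_wasb",
    sftp_source_path="/upload/2022-12/*.csv",   # only one wildcard is allowed in the path
    container_name="my-container",
    blob_prefix="sftp/2022-12/",
    sftp_conn_id="sftp_default",
    wasb_conn_id="wasb_default",
    move_object=False,          # copy instead of move
    create_container=True,
)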

diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/utils/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/airflow/providers/microsoft/azure/utils/index.html
new file mode 100644
index 00000000000..9b0e0f3bb18

airflow.providers.microsoft.azure.utils

+
+

Module Contents

+
+

Functions

+ ++++ + + + + + +

get_field(*, conn_id, conn_type, extras, field_name)

Get field from extra, first checking short name, then for backcompat we check for prefixed name.

+
+
+airflow.providers.microsoft.azure.utils.get_field(*, conn_id, conn_type, extras, field_name)[source]
+

Get field from extra, first checking short name, then for backcompat we check for prefixed name.
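
A small sketch of the lookup order described above; the connection id, connection type and extras dictionary are illustrative:

from airflow.providers.microsoft.azure.utils import get_field

# The short key "tenant_id" is checked first, then the prefixed
# "extra__azure_data_factory__tenant_id" key kept for backwards compatibility.
extras = {"extra__azure_data_factory__tenant_id": "my-tenant-id"}
tenant_id = get_field(
    conn_id="azure_data_factory_default",
    conn_type="azure_data_factory",
    extras=extras,
    field_name="tenant_id",
)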

diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_adf_run_pipeline/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_adf_run_pipeline/index.html
new file mode 100644
index 00000000000..76aa57890a0

tests.system.providers.microsoft.azure.example_adf_run_pipeline

+
+

Module Contents

+
+
+tests.system.providers.microsoft.azure.example_adf_run_pipeline.ENV_ID[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_adf_run_pipeline.DAG_ID = example_adf_run_pipeline[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_adf_run_pipeline.begin[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_adf_run_pipeline.test_run[source]
+
+ +
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_adls_delete/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_adls_delete/index.html
new file mode 100644
index 00000000000..d5f0e17638e

tests.system.providers.microsoft.azure.example_adls_delete

+
+

Module Contents

+
+
+tests.system.providers.microsoft.azure.example_adls_delete.LOCAL_FILE_PATH[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_adls_delete.REMOTE_FILE_PATH[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_adls_delete.ENV_ID[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_adls_delete.DAG_ID = example_adls_delete[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_adls_delete.upload_file[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_adls_delete.test_run[source]
+
+ +
+
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_azure_blob_to_gcs/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_azure_blob_to_gcs/index.html
new file mode 100644
index 00000000000..9a0613532bb

tests.system.providers.microsoft.azure.example_azure_blob_to_gcs

+
+

Module Contents

+
+
+tests.system.providers.microsoft.azure.example_azure_blob_to_gcs.BLOB_NAME[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_blob_to_gcs.AZURE_CONTAINER_NAME[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_blob_to_gcs.GCP_BUCKET_FILE_PATH[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_blob_to_gcs.GCP_BUCKET_NAME[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_blob_to_gcs.GCP_OBJECT_NAME[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_blob_to_gcs.ENV_ID[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_blob_to_gcs.DAG_ID = example_azure_blob_to_gcs[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_blob_to_gcs.wait_for_blob[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_blob_to_gcs.test_run[source]
+
+ +
+
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_azure_container_instances/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_azure_container_instances/index.html
new file mode 100644
index 00000000000..e275703350e

tests.system.providers.microsoft.azure.example_azure_container_instances

+

This is an example dag for using the AzureContainerInstancesOperator.

+
+

Module Contents

+
+
+tests.system.providers.microsoft.azure.example_azure_container_instances.ENV_ID[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_container_instances.DAG_ID = aci_example[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_container_instances.t1[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_container_instances.test_run[source]
+
+ +
+
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_azure_cosmosdb/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_azure_cosmosdb/index.html
new file mode 100644
index 00000000000..9e0228737cc

tests.system.providers.microsoft.azure.example_azure_cosmosdb

+

This is only an example DAG to highlight usage of AzureCosmosDocumentSensor to detect +if a document now exists.

+

You can trigger this manually with airflow dags trigger example_cosmosdb_sensor.

+

Note: Make sure that connection `azure_cosmos_default` is properly set before running +this example.

+
+

Module Contents

+
+
+tests.system.providers.microsoft.azure.example_azure_cosmosdb.ENV_ID[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_cosmosdb.DAG_ID = example_azure_cosmosdb_sensor[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_cosmosdb.t1[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_cosmosdb.test_run[source]
+
+ +
+
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_azure_service_bus/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_azure_service_bus/index.html
new file mode 100644
index 00000000000..613686b3673

tests.system.providers.microsoft.azure.example_azure_service_bus

+
+

Module Contents

+
+
+tests.system.providers.microsoft.azure.example_azure_service_bus.EXECUTION_TIMEOUT[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_service_bus.CLIENT_ID[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_service_bus.QUEUE_NAME = sb_mgmt_queue_test[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_service_bus.MESSAGE = Test Message[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_service_bus.MESSAGE_LIST[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_service_bus.TOPIC_NAME = sb_mgmt_topic_test[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_service_bus.SUBSCRIPTION_NAME = sb_mgmt_subscription[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_service_bus.create_service_bus_queue[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_service_bus.test_run[source]
+
+ +
+
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_azure_synapse/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_azure_synapse/index.html
new file mode 100644
index 00000000000..57347856e57

tests.system.providers.microsoft.azure.example_azure_synapse

+
+

Module Contents

+
+
+tests.system.providers.microsoft.azure.example_azure_synapse.AIRFLOW_HOME[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_synapse.EXECUTION_TIMEOUT[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_synapse.default_args[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_synapse.SPARK_JOB_PAYLOAD[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_synapse.run_spark_job[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_azure_synapse.test_run[source]
+
+ +
+
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_fileshare/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_fileshare/index.html
new file mode 100644
index 00000000000..2e3d22c786a

tests.system.providers.microsoft.azure.example_fileshare

+
+

Module Contents

+
+

Functions

+ ++++ + + + + + + + + +

create_fileshare()

Create a fileshare with directory

delete_fileshare()

Delete a fileshare

+
+
+

Attributes

+ ++++ + + + + + + + + + + + + + + + + + +

NAME

DIRECTORY

ENV_ID

DAG_ID

test_run

+
+
+tests.system.providers.microsoft.azure.example_fileshare.NAME = myfileshare[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_fileshare.DIRECTORY = mydirectory[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_fileshare.ENV_ID[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_fileshare.DAG_ID = example_fileshare[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_fileshare.create_fileshare()[source]
+

Create a fileshare with directory

+
+ +
+
+tests.system.providers.microsoft.azure.example_fileshare.delete_fileshare()[source]
+

Delete a fileshare

+
+ +
+
+tests.system.providers.microsoft.azure.example_fileshare.test_run[source]
+
+ +
+
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_local_to_adls/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_local_to_adls/index.html
new file mode 100644
index 00000000000..6f5dea82324

tests.system.providers.microsoft.azure.example_local_to_adls

+
+

Module Contents

+
+
+tests.system.providers.microsoft.azure.example_local_to_adls.LOCAL_FILE_PATH[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_local_to_adls.REMOTE_FILE_PATH[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_local_to_adls.ENV_ID[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_local_to_adls.DAG_ID = example_local_to_adls[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_local_to_adls.upload_file[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_local_to_adls.test_run[source]
+
+ +
+
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_local_to_wasb/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_local_to_wasb/index.html
new file mode 100644
index 00000000000..b06733fb727

tests.system.providers.microsoft.azure.example_local_to_wasb

+
+

Module Contents

+
+
+tests.system.providers.microsoft.azure.example_local_to_wasb.PATH_TO_UPLOAD_FILE[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_local_to_wasb.ENV_ID[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_local_to_wasb.DAG_ID = example_local_to_wasb[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_local_to_wasb.upload[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_local_to_wasb.test_run[source]
+
+ +
+
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_sftp_to_wasb/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/example_sftp_to_wasb/index.html
new file mode 100644
index 00000000000..c280172d7d3

tests.system.providers.microsoft.azure.example_sftp_to_wasb

+
+

Module Contents

+
+

Functions

+ ++++ + + + + + +

delete_sftp_file()

Delete a file at SFTP SERVER

+
+
+

Attributes

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

AZURE_CONTAINER_NAME

BLOB_PREFIX

SFTP_SRC_PATH

LOCAL_FILE_PATH

SAMPLE_FILENAME

FILE_COMPLETE_PATH

SFTP_FILE_COMPLETE_PATH

ENV_ID

DAG_ID

transfer_files_to_sftp_step

test_run

+
+
+tests.system.providers.microsoft.azure.example_sftp_to_wasb.AZURE_CONTAINER_NAME[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_sftp_to_wasb.BLOB_PREFIX[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_sftp_to_wasb.SFTP_SRC_PATH[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_sftp_to_wasb.LOCAL_FILE_PATH[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_sftp_to_wasb.SAMPLE_FILENAME[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_sftp_to_wasb.FILE_COMPLETE_PATH[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_sftp_to_wasb.SFTP_FILE_COMPLETE_PATH[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_sftp_to_wasb.ENV_ID[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_sftp_to_wasb.DAG_ID = example_sftp_to_wasb[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_sftp_to_wasb.delete_sftp_file()[source]
+

Delete a file at SFTP SERVER

+
+ +
+
+tests.system.providers.microsoft.azure.example_sftp_to_wasb.transfer_files_to_sftp_step[source]
+
+ +
+
+tests.system.providers.microsoft.azure.example_sftp_to_wasb.test_run[source]
+
+ +
+
+
+ + + +
+ +
+
+
+
+
+

Was this entry helpful?

+
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+
+ +
+ + + + + + +
+ + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/index.html new file mode 100644 index 00000000000..2660676fb09 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_api/tests/system/providers/microsoft/azure/index.html @@ -0,0 +1,865 @@ + + + + + + + + + + + + tests.system.providers.microsoft.azure — apache-airflow-providers-microsoft-azure Documentation + + + + + + + + + + + + + + + + + + + +
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/adx.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/adx.html
new file mode 100644
index 00000000000..cda12d5bbda
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/adx.html
@@ -0,0 +1,1021 @@
airflow.providers.microsoft.azure.hooks.adx — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.hooks.adx

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+This module contains Azure Data Explorer hook.
+
+.. spelling::
+
+    KustoResponseDataSetV
+    kusto
+"""
+from __future__ import annotations
+
+import warnings
+from typing import Any
+
+from azure.kusto.data.exceptions import KustoServiceError
+from azure.kusto.data.request import ClientRequestProperties, KustoClient, KustoConnectionStringBuilder
+from azure.kusto.data.response import KustoResponseDataSetV2
+
+from airflow.exceptions import AirflowException
+from airflow.hooks.base import BaseHook
+from airflow.providers.microsoft.azure.utils import _ensure_prefixes
+
+
+
[docs]class AzureDataExplorerHook(BaseHook): + """ + Interacts with Azure Data Explorer (Kusto). + + **Cluster**: + + Azure Data Explorer cluster is specified by a URL, for example: "https://help.kusto.windows.net". + The parameter must be provided through the Data Explorer Cluster URL connection detail. + + **Tenant ID**: + + To learn about tenants refer to: https://docs.microsoft.com/en-us/onedrive/find-your-office-365-tenant-id + + **Authentication methods**: + + Available authentication methods are: + + - AAD_APP: Authentication with AAD application certificate. A Tenant ID is required when using this + method. Provide application ID and application key through Username and Password parameters. + + - AAD_APP_CERT: Authentication with AAD application certificate. Tenant ID, Application PEM Certificate, + and Application Certificate Thumbprint are required when using this method. + + - AAD_CREDS: Authentication with AAD username and password. A Tenant ID is required when using this + method. Username and Password parameters are used for authentication with AAD. + + - AAD_DEVICE: Authenticate with AAD device code. Please note that if you choose this option, you'll need + to authenticate for every new instance that is initialized. It is highly recommended to create one + instance and use it for all queries. + + :param azure_data_explorer_conn_id: Reference to the + :ref:`Azure Data Explorer connection<howto/connection:adx>`. + """ + +
[docs] conn_name_attr = "azure_data_explorer_conn_id"
+
[docs] default_conn_name = "azure_data_explorer_default"
+
[docs] conn_type = "azure_data_explorer"
+
[docs] hook_name = "Azure Data Explorer"
+ + @staticmethod +
[docs] def get_connection_form_widgets() -> dict[str, Any]: + """Returns connection widgets to add to connection form""" + from flask_appbuilder.fieldwidgets import BS3PasswordFieldWidget, BS3TextFieldWidget + from flask_babel import lazy_gettext + from wtforms import PasswordField, StringField + + return { + "tenant": StringField(lazy_gettext("Tenant ID"), widget=BS3TextFieldWidget()), + "auth_method": StringField(lazy_gettext("Authentication Method"), widget=BS3TextFieldWidget()), + "certificate": PasswordField( + lazy_gettext("Application PEM Certificate"), widget=BS3PasswordFieldWidget() + ), + "thumbprint": PasswordField( + lazy_gettext("Application Certificate Thumbprint"), widget=BS3PasswordFieldWidget()
+ ), + } + + @staticmethod + @_ensure_prefixes(conn_type="azure_data_explorer") +
[docs] def get_ui_field_behaviour() -> dict[str, Any]: + """Returns custom field behaviour""" + return { + "hidden_fields": ["schema", "port", "extra"], + "relabeling": { + "login": "Username", + "host": "Data Explorer Cluster URL", + }, + "placeholders": { + "login": "Varies with authentication method", + "password": "Varies with authentication method", + "auth_method": "AAD_APP/AAD_APP_CERT/AAD_CREDS/AAD_DEVICE", + "tenant": "Used with AAD_APP/AAD_APP_CERT/AAD_CREDS", + "certificate": "Used with AAD_APP_CERT", + "thumbprint": "Used with AAD_APP_CERT",
+ }, + } + + def __init__(self, azure_data_explorer_conn_id: str = default_conn_name) -> None: + super().__init__() + self.conn_id = azure_data_explorer_conn_id + self.connection = self.get_conn() # todo: make this a property, or just delete + +
[docs] def get_conn(self) -> KustoClient: + """Return a KustoClient object.""" + conn = self.get_connection(self.conn_id) + extras = conn.extra_dejson + cluster = conn.host + if not cluster: + raise AirflowException("Host connection option is required") + + def warn_if_collison(key, backcompat_key): + if backcompat_key in extras: + warnings.warn( + f"Conflicting params `{key}` and `{backcompat_key}` found in extras for conn " + f"{self.conn_id}. Using value for `{key}`. Please ensure this is the correct value " + f"and remove the backcompat key `{backcompat_key}`." + ) + + def get_required_param(name: str) -> str: + """ + Extract required parameter value from connection, raise exception if not found. + + Warns if both ``foo`` and ``extra__azure_data_explorer__foo`` found in conn extra. + + Prefers unprefixed field. + """ + backcompat_prefix = "extra__azure_data_explorer__" + backcompat_key = f"{backcompat_prefix}{name}" + value = extras.get(name) + if value: + warn_if_collison(name, backcompat_key) + if not value: + value = extras.get(backcompat_key) + if not value: + raise AirflowException(f"Required connection parameter is missing: `{name}`") + return value + + auth_method = get_required_param("auth_method") + + if auth_method == "AAD_APP": + tenant = get_required_param("tenant") + kcsb = KustoConnectionStringBuilder.with_aad_application_key_authentication( + cluster, conn.login, conn.password, tenant + ) + elif auth_method == "AAD_APP_CERT": + certificate = get_required_param("certificate") + thumbprint = get_required_param("thumbprint") + tenant = get_required_param("tenant") + kcsb = KustoConnectionStringBuilder.with_aad_application_certificate_authentication( + cluster, + conn.login, + certificate, + thumbprint, + tenant, + ) + elif auth_method == "AAD_CREDS": + tenant = get_required_param("tenant") + kcsb = KustoConnectionStringBuilder.with_aad_user_password_authentication( + cluster, conn.login, conn.password, tenant + ) + elif auth_method == "AAD_DEVICE": + kcsb = KustoConnectionStringBuilder.with_aad_device_authentication(cluster) + else: + raise AirflowException(f"Unknown authentication method: {auth_method}") + + return KustoClient(kcsb)
+ +
[docs] def run_query(self, query: str, database: str, options: dict | None = None) -> KustoResponseDataSetV2: + """ + Run KQL query using provided configuration, and return + `azure.kusto.data.response.KustoResponseDataSet` instance. + If query is unsuccessful AirflowException is raised. + + :param query: KQL query to run + :param database: Database to run the query on. + :param options: Optional query options. See: + https://docs.microsoft.com/en-us/azure/kusto/api/netfx/request-properties#list-of-clientrequestproperties + :return: dict + """ + properties = ClientRequestProperties() + if options: + for k, v in options.items(): + properties.set_option(k, v) + try: + return self.connection.execute(database, query, properties=properties) + except KustoServiceError as error: + raise AirflowException(f"Error running Kusto query: {error}")
+
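A minimal usage sketch for the hook above, assuming an "azure_data_explorer_default" connection with a valid auth_method in its extras; the database name and KQL text are placeholders:

    # Minimal sketch: uses only the hook API shown above.
    from airflow.providers.microsoft.azure.hooks.adx import AzureDataExplorerHook

    hook = AzureDataExplorerHook(azure_data_explorer_conn_id="azure_data_explorer_default")
    response = hook.run_query(
        query="StormEvents | take 10",      # any KQL statement
        database="Samples",                 # placeholder database name
        options={"servertimeout": 60},      # forwarded to ClientRequestProperties.set_option
    )
    for row in response.primary_results[0]:
        print(row)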
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/asb.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/asb.html
new file mode 100644
index 00000000000..2bad0060fd3
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/asb.html
@@ -0,0 +1,1065 @@
airflow.providers.microsoft.azure.hooks.asb — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.hooks.asb

+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import Any
+
+from azure.servicebus import ServiceBusClient, ServiceBusMessage, ServiceBusSender
+from azure.servicebus.management import QueueProperties, ServiceBusAdministrationClient
+
+from airflow.hooks.base import BaseHook
+
+
+
[docs]class BaseAzureServiceBusHook(BaseHook): + """ + BaseAzureServiceBusHook class to create session and create connection using connection string + + :param azure_service_bus_conn_id: Reference to the + :ref:`Azure Service Bus connection<howto/connection:azure_service_bus>`. + """ + +
[docs] conn_name_attr = "azure_service_bus_conn_id"
+
[docs] default_conn_name = "azure_service_bus_default"
+
[docs] conn_type = "azure_service_bus"
+
[docs] hook_name = "Azure Service Bus"
+ + @staticmethod +
[docs] def get_ui_field_behaviour() -> dict[str, Any]: + """Returns custom field behaviour""" + return { + "hidden_fields": ["port", "host", "extra", "login", "password"], + "relabeling": {"schema": "Connection String"}, + "placeholders": { + "schema": "Endpoint=sb://<Resource group>.servicebus.windows.net/;SharedAccessKeyName=<AccessKeyName>;SharedAccessKey=<SharedAccessKey>", # noqa
+ }, + } + + def __init__(self, azure_service_bus_conn_id: str = default_conn_name) -> None: + super().__init__() + self.conn_id = azure_service_bus_conn_id + +
[docs] def get_conn(self): + raise NotImplementedError
+ + +
[docs]class AdminClientHook(BaseAzureServiceBusHook): + """ + Interacts with ServiceBusAdministrationClient client + to create, update, list, and delete resources of a + Service Bus namespace. This hook uses the same Azure Service Bus client connection inherited + from the base class + """ + +
[docs] def get_conn(self) -> ServiceBusAdministrationClient: + """ + Create and returns ServiceBusAdministrationClient by using the connection + string in connection details + """ + conn = self.get_connection(self.conn_id) + + connection_string: str = str(conn.schema) + return ServiceBusAdministrationClient.from_connection_string(connection_string)
+ +
[docs] def create_queue( + self, + queue_name: str, + max_delivery_count: int = 10, + dead_lettering_on_message_expiration: bool = True, + enable_batched_operations: bool = True, + ) -> QueueProperties: + """ + Create Queue by connecting to service Bus Admin client return the QueueProperties + + :param queue_name: The name of the queue or a QueueProperties with name. + :param max_delivery_count: The maximum delivery count. A message is automatically + dead lettered after this number of deliveries. Default value is 10.. + :param dead_lettering_on_message_expiration: A value that indicates whether this subscription has + dead letter support when a message expires. + :param enable_batched_operations: Value that indicates whether server-side batched + operations are enabled. + """ + if queue_name is None: + raise TypeError("Queue name cannot be None.") + + with self.get_conn() as service_mgmt_conn: + queue = service_mgmt_conn.create_queue( + queue_name, + max_delivery_count=max_delivery_count, + dead_lettering_on_message_expiration=dead_lettering_on_message_expiration, + enable_batched_operations=enable_batched_operations, + ) + return queue
+ +
[docs] def delete_queue(self, queue_name: str) -> None: + """ + Delete the queue by queue_name in service bus namespace + + :param queue_name: The name of the queue or a QueueProperties with name. + """ + if queue_name is None: + raise TypeError("Queue name cannot be None.") + + with self.get_conn() as service_mgmt_conn: + service_mgmt_conn.delete_queue(queue_name)
+ +
[docs] def delete_subscription(self, subscription_name: str, topic_name: str) -> None: + """ + Delete a topic subscription entities under a ServiceBus Namespace + + :param subscription_name: The subscription name that will own the rule in topic + :param topic_name: The topic that will own the subscription rule. + """ + if subscription_name is None: + raise TypeError("Subscription name cannot be None.") + if topic_name is None: + raise TypeError("Topic name cannot be None.") + + with self.get_conn() as service_mgmt_conn: + self.log.info("Deleting Subscription %s", subscription_name) + service_mgmt_conn.delete_subscription(topic_name, subscription_name)
+ + +
[docs]class MessageHook(BaseAzureServiceBusHook): + """ + Interacts with ServiceBusClient and acts as a high level interface + for getting ServiceBusSender and ServiceBusReceiver. + """ + +
[docs] def get_conn(self) -> ServiceBusClient: + """Create and returns ServiceBusClient by using the connection string in connection details""" + conn = self.get_connection(self.conn_id) + connection_string: str = str(conn.schema) + + self.log.info("Create and returns ServiceBusClient") + return ServiceBusClient.from_connection_string(conn_str=connection_string, logging_enable=True)
+ +
[docs] def send_message(self, queue_name: str, messages: str | list[str], batch_message_flag: bool = False): + """ + By using ServiceBusClient Send message(s) to a Service Bus Queue. By using + batch_message_flag it enables and send message as batch message + + :param queue_name: The name of the queue or a QueueProperties with name. + :param messages: Message which needs to be sent to the queue. It can be string or list of string. + :param batch_message_flag: bool flag, can be set to True if message needs to be + sent as batch message. + """ + if queue_name is None: + raise TypeError("Queue name cannot be None.") + if not messages: + raise ValueError("Messages list cannot be empty.") + with self.get_conn() as service_bus_client, service_bus_client.get_queue_sender( + queue_name=queue_name + ) as sender: + with sender: + if isinstance(messages, str): + if not batch_message_flag: + msg = ServiceBusMessage(messages) + sender.send_messages(msg) + else: + self.send_batch_message(sender, [messages]) + else: + if not batch_message_flag: + self.send_list_messages(sender, messages) + else: + self.send_batch_message(sender, messages)
+ + @staticmethod +
[docs] def send_list_messages(sender: ServiceBusSender, messages: list[str]): + list_messages = [ServiceBusMessage(message) for message in messages] + sender.send_messages(list_messages) # type: ignore[arg-type]
+ + @staticmethod +
[docs] def send_batch_message(sender: ServiceBusSender, messages: list[str]): + batch_message = sender.create_message_batch() + for message in messages: + batch_message.add_message(ServiceBusMessage(message)) + sender.send_messages(batch_message)
+ +
[docs] def receive_message( + self, queue_name, max_message_count: int | None = 1, max_wait_time: float | None = None + ): + """ + Receive a batch of messages at once in a specified Queue name + + :param queue_name: The name of the queue name or a QueueProperties with name. + :param max_message_count: Maximum number of messages in the batch. + :param max_wait_time: Maximum time to wait in seconds for the first message to arrive. + """ + if queue_name is None: + raise TypeError("Queue name cannot be None.") + + with self.get_conn() as service_bus_client, service_bus_client.get_queue_receiver( + queue_name=queue_name + ) as receiver: + with receiver: + received_msgs = receiver.receive_messages( + max_message_count=max_message_count, max_wait_time=max_wait_time + ) + for msg in received_msgs: + self.log.info(msg) + receiver.complete_message(msg)
+ +
[docs] def receive_subscription_message( + self, + topic_name: str, + subscription_name: str, + max_message_count: int | None, + max_wait_time: float | None, + ): + """ + Receive a batch of subscription message at once. This approach is optimal if you wish + to process multiple messages simultaneously, or perform an ad-hoc receive as a single call. + + :param subscription_name: The subscription name that will own the rule in topic + :param topic_name: The topic that will own the subscription rule. + :param max_message_count: Maximum number of messages in the batch. + Actual number returned will depend on prefetch_count and incoming stream rate. + Setting to None will fully depend on the prefetch config. The default value is 1. + :param max_wait_time: Maximum time to wait in seconds for the first message to arrive. If no + messages arrive, and no timeout is specified, this call will not return until the + connection is closed. If specified, an no messages arrive within the timeout period, + an empty list will be returned. + """ + if subscription_name is None: + raise TypeError("Subscription name cannot be None.") + if topic_name is None: + raise TypeError("Topic name cannot be None.") + with self.get_conn() as service_bus_client, service_bus_client.get_subscription_receiver( + topic_name, subscription_name + ) as subscription_receiver: + with subscription_receiver: + received_msgs = subscription_receiver.receive_messages( + max_message_count=max_message_count, max_wait_time=max_wait_time + ) + for msg in received_msgs: + self.log.info(msg) + subscription_receiver.complete_message(msg)
+
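A minimal sketch of the MessageHook defined above, assuming the queue "my-queue" and the default "azure_service_bus_default" connection (whose schema field holds the namespace connection string) already exist:

    # Minimal sketch: send one message, send a batch, then receive and complete messages.
    from airflow.providers.microsoft.azure.hooks.asb import MessageHook

    hook = MessageHook(azure_service_bus_conn_id="azure_service_bus_default")

    # Send a single message, then several messages as one batch.
    hook.send_message(queue_name="my-queue", messages="hello from airflow")
    hook.send_message(queue_name="my-queue", messages=["a", "b", "c"], batch_message_flag=True)

    # Receive (and complete) up to 10 messages, waiting at most 5 seconds for the first one.
    hook.receive_message("my-queue", max_message_count=10, max_wait_time=5)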
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/base_azure.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/base_azure.html
new file mode 100644
index 00000000000..a75d91607dd
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/base_azure.html
@@ -0,0 +1,935 @@
airflow.providers.microsoft.azure.hooks.base_azure — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.hooks.base_azure

+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import Any
+
+from azure.common.client_factory import get_client_from_auth_file, get_client_from_json_dict
+from azure.common.credentials import ServicePrincipalCredentials
+
+from airflow.exceptions import AirflowException
+from airflow.hooks.base import BaseHook
+
+
+
[docs]class AzureBaseHook(BaseHook): + """ + This hook acts as a base hook for azure services. It offers several authentication mechanisms to + authenticate the client library used for upstream azure hooks. + + :param sdk_client: The SDKClient to use. + :param conn_id: The :ref:`Azure connection id<howto/connection:azure>` + which refers to the information to connect to the service. + """ + +
[docs] conn_name_attr = "azure_conn_id"
+
[docs] default_conn_name = "azure_default"
+
[docs] conn_type = "azure"
+
[docs] hook_name = "Azure"
+ + @staticmethod +
[docs] def get_connection_form_widgets() -> dict[str, Any]: + """Returns connection widgets to add to connection form""" + from flask_appbuilder.fieldwidgets import BS3TextFieldWidget + from flask_babel import lazy_gettext + from wtforms import StringField + + return { + "extra__azure__tenantId": StringField( + lazy_gettext("Azure Tenant ID"), widget=BS3TextFieldWidget() + ), + "extra__azure__subscriptionId": StringField( + lazy_gettext("Azure Subscription ID"), widget=BS3TextFieldWidget()
+ ), + } + + @staticmethod +
[docs] def get_ui_field_behaviour() -> dict[str, Any]: + """Returns custom field behaviour""" + import json + + return { + "hidden_fields": ["schema", "port", "host"], + "relabeling": { + "login": "Azure Client ID", + "password": "Azure Secret", + }, + "placeholders": { + "extra": json.dumps( + { + "key_path": "path to json file for auth", + "key_json": "specifies json dict for auth", + }, + indent=1, + ), + "login": "client_id (token credentials auth)", + "password": "secret (token credentials auth)", + "extra__azure__tenantId": "tenantId (token credentials auth)", + "extra__azure__subscriptionId": "subscriptionId (token credentials auth)",
+ }, + } + + def __init__(self, sdk_client: Any, conn_id: str = "azure_default"): + self.sdk_client = sdk_client + self.conn_id = conn_id + super().__init__() + +
[docs] def get_conn(self) -> Any: + """ + Authenticates the resource using the connection id passed during init. + + :return: the authenticated client. + """ + conn = self.get_connection(self.conn_id) + tenant = conn.extra_dejson.get("extra__azure__tenantId") or conn.extra_dejson.get("tenantId") + subscription_id = conn.extra_dejson.get("extra__azure__subscriptionId") or conn.extra_dejson.get( + "subscriptionId" + ) + + key_path = conn.extra_dejson.get("key_path") + if key_path: + if not key_path.endswith(".json"): + raise AirflowException("Unrecognised extension for key file.") + self.log.info("Getting connection using a JSON key file.") + return get_client_from_auth_file(client_class=self.sdk_client, auth_path=key_path) + + key_json = conn.extra_dejson.get("key_json") + if key_json: + self.log.info("Getting connection using a JSON config.") + return get_client_from_json_dict(client_class=self.sdk_client, config_dict=key_json) + + self.log.info("Getting connection using specific credentials and subscription_id.") + return self.sdk_client( + credentials=ServicePrincipalCredentials( + client_id=conn.login, secret=conn.password, tenant=tenant + ), + subscription_id=subscription_id,
+ ) +
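A short sketch of how AzureBaseHook above is used: an SDK client class is passed in and get_conn() returns an authenticated instance of it. ContainerInstanceManagementClient stands in here only as an example client (it is the one wrapped by the container-instance hook later in this provider); any management SDK client could be supplied:

    # Minimal sketch: the "azure_default" connection must carry either key_path, key_json,
    # or a client id/secret plus tenant and subscription ids, as get_conn() above expects.
    from azure.mgmt.containerinstance import ContainerInstanceManagementClient

    from airflow.providers.microsoft.azure.hooks.base_azure import AzureBaseHook

    hook = AzureBaseHook(sdk_client=ContainerInstanceManagementClient, conn_id="azure_default")
    client = hook.get_conn()                  # authenticated SDK client instance
    for group in client.container_groups.list():
        print(group.name)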
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/batch.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/batch.html
new file mode 100644
index 00000000000..2a6d77e7b57
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/batch.html
@@ -0,0 +1,1206 @@
airflow.providers.microsoft.azure.hooks.batch — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.hooks.batch

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import time
+from datetime import timedelta
+from typing import Any
+
+from azure.batch import BatchServiceClient, batch_auth, models as batch_models
+from azure.batch.models import JobAddParameter, PoolAddParameter, TaskAddParameter
+
+from airflow.exceptions import AirflowException
+from airflow.hooks.base import BaseHook
+from airflow.models import Connection
+from airflow.providers.microsoft.azure.utils import get_field
+from airflow.utils import timezone
+
+
+
[docs]class AzureBatchHook(BaseHook): + """ + Hook for Azure Batch APIs + + :param azure_batch_conn_id: :ref:`Azure Batch connection id<howto/connection:azure_batch>` + of a service principal which will be used to start the container instance. + """ + +
[docs] conn_name_attr = "azure_batch_conn_id"
+
[docs] default_conn_name = "azure_batch_default"
+
[docs] conn_type = "azure_batch"
+
[docs] hook_name = "Azure Batch Service"
+ + def _get_field(self, extras, name): + return get_field( + conn_id=self.conn_id, + conn_type=self.conn_type, + extras=extras, + field_name=name, + ) + + @staticmethod +
[docs] def get_connection_form_widgets() -> dict[str, Any]: + """Returns connection widgets to add to connection form""" + from flask_appbuilder.fieldwidgets import BS3TextFieldWidget + from flask_babel import lazy_gettext + from wtforms import StringField + + return { + "account_url": StringField(lazy_gettext("Batch Account URL"), widget=BS3TextFieldWidget()),
+ } + + @staticmethod +
[docs] def get_ui_field_behaviour() -> dict[str, Any]: + """Returns custom field behaviour""" + return { + "hidden_fields": ["schema", "port", "host", "extra"], + "relabeling": { + "login": "Batch Account Name", + "password": "Batch Account Access Key",
+ }, + } + + def __init__(self, azure_batch_conn_id: str = default_conn_name) -> None: + super().__init__() + self.conn_id = azure_batch_conn_id + self.connection = self.get_conn() + + def _connection(self) -> Connection: + """Get connected to Azure Batch service""" + conn = self.get_connection(self.conn_id) + return conn + +
[docs] def get_conn(self): + """ + Get the Batch client connection + + :return: Azure Batch client + """ + conn = self._connection() + + batch_account_url = self._get_field(conn.extra_dejson, "account_url") + if not batch_account_url: + raise AirflowException("Batch Account URL parameter is missing.") + + credentials = batch_auth.SharedKeyCredentials(conn.login, conn.password) + batch_client = BatchServiceClient(credentials, batch_url=batch_account_url) + return batch_client
+ +
[docs] def configure_pool( + self, + pool_id: str, + vm_size: str, + vm_node_agent_sku_id: str, + vm_publisher: str | None = None, + vm_offer: str | None = None, + sku_starts_with: str | None = None, + vm_sku: str | None = None, + vm_version: str | None = None, + os_family: str | None = None, + os_version: str | None = None, + display_name: str | None = None, + target_dedicated_nodes: int | None = None, + use_latest_image_and_sku: bool = False, + **kwargs, + ) -> PoolAddParameter: + """ + Configures a pool + + :param pool_id: A string that uniquely identifies the Pool within the Account + + :param vm_size: The size of virtual machines in the Pool. + + :param display_name: The display name for the Pool + + :param target_dedicated_nodes: The desired number of dedicated Compute Nodes in the Pool. + + :param use_latest_image_and_sku: Whether to use the latest verified vm image and sku + + :param vm_publisher: The publisher of the Azure Virtual Machines Marketplace Image. + For example, Canonical or MicrosoftWindowsServer. + + :param vm_offer: The offer type of the Azure Virtual Machines Marketplace Image. + For example, UbuntuServer or WindowsServer. + + :param sku_starts_with: The start name of the sku to search + + :param vm_sku: The name of the virtual machine sku to use + + :param vm_version: The version of the virtual machine + :param vm_version: str + + :param vm_node_agent_sku_id: The node agent sku id of the virtual machine + + :param os_family: The Azure Guest OS family to be installed on the virtual machines in the Pool. + + :param os_version: The OS family version + + """ + if use_latest_image_and_sku: + self.log.info("Using latest verified virtual machine image with node agent sku") + sku_to_use, image_ref_to_use = self._get_latest_verified_image_vm_and_sku( + publisher=vm_publisher, offer=vm_offer, sku_starts_with=sku_starts_with + ) + pool = batch_models.PoolAddParameter( + id=pool_id, + vm_size=vm_size, + display_name=display_name, + virtual_machine_configuration=batch_models.VirtualMachineConfiguration( + image_reference=image_ref_to_use, node_agent_sku_id=sku_to_use + ), + target_dedicated_nodes=target_dedicated_nodes, + **kwargs, + ) + + elif os_family: + self.log.info( + "Using cloud service configuration to create pool, virtual machine configuration ignored" + ) + pool = batch_models.PoolAddParameter( + id=pool_id, + vm_size=vm_size, + display_name=display_name, + cloud_service_configuration=batch_models.CloudServiceConfiguration( + os_family=os_family, os_version=os_version + ), + target_dedicated_nodes=target_dedicated_nodes, + **kwargs, + ) + + else: + self.log.info("Using virtual machine configuration to create a pool") + pool = batch_models.PoolAddParameter( + id=pool_id, + vm_size=vm_size, + display_name=display_name, + virtual_machine_configuration=batch_models.VirtualMachineConfiguration( + image_reference=batch_models.ImageReference( + publisher=vm_publisher, + offer=vm_offer, + sku=vm_sku, + version=vm_version, + ), + node_agent_sku_id=vm_node_agent_sku_id, + ), + target_dedicated_nodes=target_dedicated_nodes, + **kwargs, + ) + return pool
+ +
[docs] def create_pool(self, pool: PoolAddParameter) -> None: + """ + Creates a pool if not already existing + + :param pool: the pool object to create + + """ + try: + self.log.info("Attempting to create a pool: %s", pool.id) + self.connection.pool.add(pool) + self.log.info("Created pool: %s", pool.id) + except batch_models.BatchErrorException as err: + if not err.error or err.error.code != "PoolExists": + raise + else: + self.log.info("Pool %s already exists", pool.id)
+ + def _get_latest_verified_image_vm_and_sku( + self, + publisher: str | None = None, + offer: str | None = None, + sku_starts_with: str | None = None, + ) -> tuple: + """ + Get latest verified image vm and sku + + :param publisher: The publisher of the Azure Virtual Machines Marketplace Image. + For example, Canonical or MicrosoftWindowsServer. + :param offer: The offer type of the Azure Virtual Machines Marketplace Image. + For example, UbuntuServer or WindowsServer. + :param sku_starts_with: The start name of the sku to search + """ + options = batch_models.AccountListSupportedImagesOptions(filter="verificationType eq 'verified'") + images = self.connection.account.list_supported_images(account_list_supported_images_options=options) + # pick the latest supported sku + skus_to_use = [ + (image.node_agent_sku_id, image.image_reference) + for image in images + if image.image_reference.publisher.lower() == publisher + and image.image_reference.offer.lower() == offer + and image.image_reference.sku.startswith(sku_starts_with) + ] + + # pick first + agent_sku_id, image_ref_to_use = skus_to_use[0] + return agent_sku_id, image_ref_to_use + +
[docs] def wait_for_all_node_state(self, pool_id: str, node_state: set) -> list: + """ + Wait for all nodes in a pool to reach given states + + :param pool_id: A string that identifies the pool + :param node_state: A set of batch_models.ComputeNodeState + """ + self.log.info("waiting for all nodes in pool %s to reach one of: %s", pool_id, node_state) + while True: + # refresh pool to ensure that there is no resize error + pool = self.connection.pool.get(pool_id) + if pool.resize_errors is not None: + resize_errors = "\n".join(repr(e) for e in pool.resize_errors) + raise RuntimeError(f"resize error encountered for pool {pool.id}:\n{resize_errors}") + nodes = list(self.connection.compute_node.list(pool.id)) + if len(nodes) >= pool.target_dedicated_nodes and all(node.state in node_state for node in nodes): + return nodes + # Allow the timeout to be controlled by the AzureBatchOperator + # specified timeout. This way we don't interrupt a startTask inside + # the pool + time.sleep(10)
+ +
[docs] def configure_job( + self, + job_id: str, + pool_id: str, + display_name: str | None = None, + **kwargs, + ) -> JobAddParameter: + """ + Configures a job for use in the pool + + :param job_id: A string that uniquely identifies the job within the account + :param pool_id: A string that identifies the pool + :param display_name: The display name for the job + """ + job = batch_models.JobAddParameter( + id=job_id, + pool_info=batch_models.PoolInformation(pool_id=pool_id), + display_name=display_name, + **kwargs, + ) + return job
+ +
[docs] def create_job(self, job: JobAddParameter) -> None: + """ + Creates a job in the pool + + :param job: The job object to create + """ + try: + self.connection.job.add(job) + self.log.info("Job %s created", job.id) + except batch_models.BatchErrorException as err: + if not err.error or err.error.code != "JobExists": + raise + else: + self.log.info("Job %s already exists", job.id)
+ +
[docs] def configure_task( + self, + task_id: str, + command_line: str, + display_name: str | None = None, + container_settings=None, + **kwargs, + ) -> TaskAddParameter: + """ + Creates a task + + :param task_id: A string that identifies the task to create + :param command_line: The command line of the Task. + :param display_name: A display name for the Task + :param container_settings: The settings for the container under which the Task runs. + If the Pool that will run this Task has containerConfiguration set, + this must be set as well. If the Pool that will run this Task doesn't have + containerConfiguration set, this must not be set. + """ + task = batch_models.TaskAddParameter( + id=task_id, + command_line=command_line, + display_name=display_name, + container_settings=container_settings, + **kwargs, + ) + self.log.info("Task created: %s", task_id) + return task
+ +
[docs] def add_single_task_to_job(self, job_id: str, task: TaskAddParameter) -> None: + """ + Add a single task to given job if it doesn't exist + + :param job_id: A string that identifies the given job + :param task: The task to add + """ + try: + + self.connection.task.add(job_id=job_id, task=task) + except batch_models.BatchErrorException as err: + if not err.error or err.error.code != "TaskExists": + raise + else: + self.log.info("Task %s already exists", task.id)
+ +
[docs] def wait_for_job_tasks_to_complete(self, job_id: str, timeout: int) -> list[batch_models.CloudTask]: + """ + Wait for tasks in a particular job to complete + + :param job_id: A string that identifies the job + :param timeout: The amount of time to wait before timing out in minutes + """ + timeout_time = timezone.utcnow() + timedelta(minutes=timeout) + while timezone.utcnow() < timeout_time: + tasks = self.connection.task.list(job_id) + + incomplete_tasks = [task for task in tasks if task.state != batch_models.TaskState.completed] + if not incomplete_tasks: + # detect if any task in job has failed + fail_tasks = [ + task + for task in tasks + if task.executionInfo.result == batch_models.TaskExecutionResult.failure + ] + return fail_tasks + for task in incomplete_tasks: + self.log.info("Waiting for %s to complete, currently on %s state", task.id, task.state) + time.sleep(15) + raise TimeoutError("Timed out waiting for tasks to complete")
+ +
[docs] def test_connection(self): + """Test a configured Azure Batch connection.""" + try: + # Attempt to list existing jobs under the configured Batch account and retrieve + # the first in the returned iterator. The Azure Batch API does allow for creation of a + # BatchServiceClient with incorrect values but then will fail properly once items are + # retrieved using the client. We need to _actually_ try to retrieve an object to properly + # test the connection. + next(self.get_conn().job.list(), None) + except Exception as e: + return False, str(e) + return True, "Successfully connected to Azure Batch."
+
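A minimal end-to-end sketch of the AzureBatchHook helpers above, with placeholder pool, job and task identifiers, VM image parameters, and the hook's default connection id:

    # Minimal sketch using only the configure/create/wait helpers shown above.
    from azure.batch import models as batch_models

    from airflow.providers.microsoft.azure.hooks.batch import AzureBatchHook

    hook = AzureBatchHook(azure_batch_conn_id="azure_batch_default")

    pool = hook.configure_pool(
        pool_id="example-pool",
        vm_size="Standard_A1_v2",                        # placeholder VM size
        vm_node_agent_sku_id="batch.node.ubuntu 18.04",  # placeholder node agent sku
        vm_publisher="canonical",
        vm_offer="ubuntuserver",
        vm_sku="18.04-lts",
        vm_version="latest",
        target_dedicated_nodes=1,
    )
    hook.create_pool(pool)
    hook.wait_for_all_node_state("example-pool", {batch_models.ComputeNodeState.idle})

    job = hook.configure_job(job_id="example-job", pool_id="example-pool")
    hook.create_job(job)

    task = hook.configure_task(task_id="example-task", command_line="/bin/bash -c 'echo hello'")
    hook.add_single_task_to_job(job_id="example-job", task=task)
    failed_tasks = hook.wait_for_job_tasks_to_complete(job_id="example-job", timeout=25)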
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/container_instance.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/container_instance.html
new file mode 100644
index 00000000000..60559f83925
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/container_instance.html
@@ -0,0 +1,962 @@
airflow.providers.microsoft.azure.hooks.container_instance — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.hooks.container_instance

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import warnings
+
+from azure.mgmt.containerinstance import ContainerInstanceManagementClient
+from azure.mgmt.containerinstance.models import ContainerGroup
+
+from airflow.providers.microsoft.azure.hooks.base_azure import AzureBaseHook
+
+
+
[docs]class AzureContainerInstanceHook(AzureBaseHook): + """ + A hook to communicate with Azure Container Instances. + + This hook requires a service principal in order to work. + After creating this service principal + (Azure Active Directory/App Registrations), you need to fill in the + client_id (Application ID) as login, the generated password as password, + and tenantId and subscriptionId in the extra's field as a json. + + :param azure_conn_id: :ref:`Azure connection id<howto/connection:azure>` of + a service principal which will be used to start the container instance. + """ + +
[docs] conn_name_attr = "azure_conn_id"
+
[docs] default_conn_name = "azure_default"
+
[docs] conn_type = "azure_container_instance"
+
[docs] hook_name = "Azure Container Instance"
+ + def __init__(self, azure_conn_id: str = default_conn_name) -> None: + super().__init__(sdk_client=ContainerInstanceManagementClient, conn_id=azure_conn_id) + self.connection = self.get_conn() + +
[docs] def create_or_update(self, resource_group: str, name: str, container_group: ContainerGroup) -> None: + """ + Create a new container group + + :param resource_group: the name of the resource group + :param name: the name of the container group + :param container_group: the properties of the container group + """ + self.connection.container_groups.create_or_update(resource_group, name, container_group)
+ +
[docs] def get_state_exitcode_details(self, resource_group: str, name: str) -> tuple: + """ + Get the state and exitcode of a container group + + :param resource_group: the name of the resource group + :param name: the name of the container group + :return: A tuple with the state, exitcode, and details. + If the exitcode is unknown 0 is returned. + """ + warnings.warn( + "get_state_exitcode_details() is deprecated. Related method is get_state()", + DeprecationWarning, + stacklevel=2, + ) + cg_state = self.get_state(resource_group, name) + c_state = cg_state.containers[0].instance_view.current_state + return (c_state.state, c_state.exit_code, c_state.detail_status)
+ +
[docs] def get_messages(self, resource_group: str, name: str) -> list: + """ + Get the messages of a container group + + :param resource_group: the name of the resource group + :param name: the name of the container group + :return: A list of the event messages + """ + warnings.warn( + "get_messages() is deprecated. Related method is get_state()", DeprecationWarning, stacklevel=2 + ) + cg_state = self.get_state(resource_group, name) + instance_view = cg_state.containers[0].instance_view + return [event.message for event in instance_view.events]
+ +
[docs] def get_state(self, resource_group: str, name: str) -> ContainerGroup: + """ + Get the state of a container group + + :param resource_group: the name of the resource group + :param name: the name of the container group + :return: ContainerGroup + """ + return self.connection.container_groups.get(resource_group, name, raw=False)
+ +
[docs] def get_logs(self, resource_group: str, name: str, tail: int = 1000) -> list: + """ + Get the tail from logs of a container group + + :param resource_group: the name of the resource group + :param name: the name of the container group + :param tail: the size of the tail + :return: A list of log messages + """ + logs = self.connection.container.list_logs(resource_group, name, name, tail=tail) + return logs.content.splitlines(True)
+ +
[docs] def delete(self, resource_group: str, name: str) -> None: + """ + Delete a container group + + :param resource_group: the name of the resource group + :param name: the name of the container group + """ + self.connection.container_groups.delete(resource_group, name)
+ +
[docs] def exists(self, resource_group: str, name: str) -> bool: + """ + Test if a container group exists + + :param resource_group: the name of the resource group + :param name: the name of the container group + """ + for container in self.connection.container_groups.list_by_resource_group(resource_group): + if container.name == name: + return True + return False
+ +
[docs] def test_connection(self): + """Test a configured Azure Container Instance connection.""" + try: + # Attempt to list existing container groups under the configured subscription and retrieve the + # first in the returned iterator. We need to _actually_ try to retrieve an object to properly + # test the connection. + next(self.connection.container_groups.list(), None) + except Exception as e: + return False, str(e) + + return True, "Successfully connected to Azure Container Instance."
+
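A brief sketch of the AzureContainerInstanceHook methods above, assuming the resource group and container group already exist; both names are placeholders:

    # Minimal sketch: inspect and then delete an existing container group.
    from airflow.providers.microsoft.azure.hooks.container_instance import AzureContainerInstanceHook

    hook = AzureContainerInstanceHook(azure_conn_id="azure_default")

    if hook.exists("my-rg", "my-aci-group"):
        state = hook.get_state("my-rg", "my-aci-group")
        print(state.provisioning_state)
        for line in hook.get_logs("my-rg", "my-aci-group", tail=100):
            print(line, end="")
        hook.delete("my-rg", "my-aci-group")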
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/container_registry.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/container_registry.html
new file mode 100644
index 00000000000..59f5d5646cd
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/container_registry.html
@@ -0,0 +1,881 @@
airflow.providers.microsoft.azure.hooks.container_registry — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.hooks.container_registry

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Hook for Azure Container Registry"""
+from __future__ import annotations
+
+from typing import Any
+
+from azure.mgmt.containerinstance.models import ImageRegistryCredential
+
+from airflow.hooks.base import BaseHook
+
+
+
[docs]class AzureContainerRegistryHook(BaseHook): + """ + A hook to communicate with a Azure Container Registry. + + :param conn_id: :ref:`Azure Container Registry connection id<howto/connection:acr>` + of a service principal which will be used to start the container instance + + """ + +
[docs] conn_name_attr = "azure_container_registry_conn_id"
+
[docs] default_conn_name = "azure_container_registry_default"
+
[docs] conn_type = "azure_container_registry"
+
[docs] hook_name = "Azure Container Registry"
+ + @staticmethod +
[docs] def get_ui_field_behaviour() -> dict[str, Any]: + """Returns custom field behaviour""" + return { + "hidden_fields": ["schema", "port", "extra"], + "relabeling": { + "login": "Registry Username", + "password": "Registry Password", + "host": "Registry Server", + }, + "placeholders": { + "login": "private registry username", + "password": "private registry password", + "host": "docker image registry server",
+ }, + } + + def __init__(self, conn_id: str = "azure_registry") -> None: + super().__init__() + self.conn_id = conn_id + self.connection = self.get_conn() + +
[docs] def get_conn(self) -> ImageRegistryCredential: + conn = self.get_connection(self.conn_id) + return ImageRegistryCredential(server=conn.host, username=conn.login, password=conn.password)
+
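The hook above simply packages a connection's host, login and password into an ImageRegistryCredential, typically used when a container instance needs to pull private images. A minimal sketch, assuming a connection with the default-style id:

    # Minimal sketch: build the registry credential from the configured connection.
    from airflow.providers.microsoft.azure.hooks.container_registry import AzureContainerRegistryHook

    hook = AzureContainerRegistryHook(conn_id="azure_container_registry_default")
    credential = hook.get_conn()   # ImageRegistryCredential(server=..., username=..., password=...)
    print(credential.server)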
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/container_volume.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/container_volume.html
new file mode 100644
index 00000000000..e00381d92ec
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/container_volume.html
@@ -0,0 +1,923 @@
airflow.providers.microsoft.azure.hooks.container_volume — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.hooks.container_volume

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import Any
+
+from azure.mgmt.containerinstance.models import AzureFileVolume, Volume
+
+from airflow.hooks.base import BaseHook
+from airflow.providers.microsoft.azure.utils import _ensure_prefixes, get_field
+
+
+
[docs]class AzureContainerVolumeHook(BaseHook): + """ + A hook which wraps an Azure Volume. + + :param azure_container_volume_conn_id: Reference to the + :ref:`Azure Container Volume connection id <howto/connection:azure_container_volume>` + of an Azure account of which container volumes should be used. + """ + +
[docs] conn_name_attr = "azure_container_volume_conn_id"
+
[docs] default_conn_name = "azure_container_volume_default"
+
[docs] conn_type = "azure_container_volume"
+
[docs] hook_name = "Azure Container Volume"
+ + def __init__(self, azure_container_volume_conn_id: str = "azure_container_volume_default") -> None: + super().__init__() + self.conn_id = azure_container_volume_conn_id + + def _get_field(self, extras, name): + return get_field( + conn_id=self.conn_id, + conn_type=self.conn_type, + extras=extras, + field_name=name, + ) + + @staticmethod +
[docs] def get_connection_form_widgets() -> dict[str, Any]: + """Returns connection widgets to add to connection form""" + from flask_appbuilder.fieldwidgets import BS3PasswordFieldWidget + from flask_babel import lazy_gettext + from wtforms import PasswordField + + return { + "connection_string": PasswordField( + lazy_gettext("Blob Storage Connection String (optional)"), widget=BS3PasswordFieldWidget()
+ ), + } + + @staticmethod + @_ensure_prefixes(conn_type="azure_container_volume") +
[docs] def get_ui_field_behaviour() -> dict[str, Any]: + """Returns custom field behaviour""" + return { + "hidden_fields": ["schema", "port", "host", "extra"], + "relabeling": { + "login": "Azure Client ID", + "password": "Azure Secret", + }, + "placeholders": { + "login": "client_id (token credentials auth)", + "password": "secret (token credentials auth)", + "connection_string": "connection string auth",
+ }, + } + +
[docs] def get_storagekey(self) -> str: + """Get Azure File Volume storage key""" + conn = self.get_connection(self.conn_id) + extras = conn.extra_dejson + connection_string = self._get_field(extras, "connection_string") + if connection_string: + for keyvalue in connection_string.split(";"): + key, value = keyvalue.split("=", 1) + if key == "AccountKey": + return value + return conn.password
+ +
[docs] def get_file_volume( + self, mount_name: str, share_name: str, storage_account_name: str, read_only: bool = False + ) -> Volume: + """Get Azure File Volume""" + return Volume( + name=mount_name, + azure_file=AzureFileVolume( + share_name=share_name, + storage_account_name=storage_account_name, + read_only=read_only, + storage_account_key=self.get_storagekey(),
+ ), + ) +
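A short sketch of the AzureContainerVolumeHook above, with placeholder share and storage-account names; the connection must carry either an account key as its password or a connection_string extra, as get_storagekey() expects:

    # Minimal sketch: build a Volume model that can be attached to a container group.
    from airflow.providers.microsoft.azure.hooks.container_volume import AzureContainerVolumeHook

    hook = AzureContainerVolumeHook(azure_container_volume_conn_id="azure_container_volume_default")
    volume = hook.get_file_volume(
        mount_name="data",
        share_name="my-file-share",
        storage_account_name="mystorageaccount",
        read_only=True,
    )
    # `volume` is an azure.mgmt.containerinstance.models.Volume instance.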
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/cosmos.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/cosmos.html
new file mode 100644
index 00000000000..3b273cb6831
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/cosmos.html
@@ -0,0 +1,1186 @@
airflow.providers.microsoft.azure.hooks.cosmos — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.hooks.cosmos

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+This module contains integration with Azure CosmosDB.
+
+AzureCosmosDBHook communicates via the Azure Cosmos library. Make sure that a
+Airflow connection of type `azure_cosmos` exists. Authorization can be done by supplying a
+login (=Endpoint uri), password (=secret key) and extra fields database_name and collection_name to specify
+the default database and collection to use (see connection `azure_cosmos_default` for an example).
+"""
+from __future__ import annotations
+
+import json
+import uuid
+from typing import Any
+
+from azure.cosmos.cosmos_client import CosmosClient
+from azure.cosmos.exceptions import CosmosHttpResponseError
+
+from airflow.exceptions import AirflowBadRequest
+from airflow.hooks.base import BaseHook
+from airflow.providers.microsoft.azure.utils import _ensure_prefixes, get_field
+
+
+
[docs]class AzureCosmosDBHook(BaseHook): + """ + Interacts with Azure CosmosDB. + + Login should be the endpoint URI and password should be the master key. + Optionally, you can use the following extras to default these values: + {"database_name": "<DATABASE_NAME>", "collection_name": "<COLLECTION_NAME>"}. + + :param azure_cosmos_conn_id: Reference to the + :ref:`Azure CosmosDB connection<howto/connection:azure_cosmos>`. + """ + +
[docs] conn_name_attr = "azure_cosmos_conn_id"
+
[docs] default_conn_name = "azure_cosmos_default"
+
[docs] conn_type = "azure_cosmos"
+
[docs] hook_name = "Azure CosmosDB"
+ + @staticmethod +
[docs] def get_connection_form_widgets() -> dict[str, Any]: + """Returns connection widgets to add to connection form""" + from flask_appbuilder.fieldwidgets import BS3TextFieldWidget + from flask_babel import lazy_gettext + from wtforms import StringField + + return { + "database_name": StringField( + lazy_gettext("Cosmos Database Name (optional)"), widget=BS3TextFieldWidget() + ), + "collection_name": StringField( + lazy_gettext("Cosmos Collection Name (optional)"), widget=BS3TextFieldWidget()
+ ), + } + + @staticmethod + @_ensure_prefixes(conn_type="azure_cosmos") # todo: remove when min airflow version >= 2.5 +
[docs] def get_ui_field_behaviour() -> dict[str, Any]: + """Returns custom field behaviour""" + return { + "hidden_fields": ["schema", "port", "host", "extra"], + "relabeling": { + "login": "Cosmos Endpoint URI", + "password": "Cosmos Master Key Token", + }, + "placeholders": { + "login": "endpoint uri", + "password": "master key", + "database_name": "database name", + "collection_name": "collection name",
+ }, + } + + def __init__(self, azure_cosmos_conn_id: str = default_conn_name) -> None: + super().__init__() + self.conn_id = azure_cosmos_conn_id + self._conn: CosmosClient | None = None + + self.default_database_name = None + self.default_collection_name = None + + def _get_field(self, extras, name): + return get_field( + conn_id=self.conn_id, + conn_type=self.conn_type, + extras=extras, + field_name=name, + ) + +
[docs] def get_conn(self) -> CosmosClient: + """Return a cosmos db client.""" + if not self._conn: + conn = self.get_connection(self.conn_id) + extras = conn.extra_dejson + endpoint_uri = conn.login + master_key = conn.password + + self.default_database_name = self._get_field(extras, "database_name") + self.default_collection_name = self._get_field(extras, "collection_name") + + # Initialize the Python Azure Cosmos DB client + self._conn = CosmosClient(endpoint_uri, {"masterKey": master_key}) + return self._conn
+ + def __get_database_name(self, database_name: str | None = None) -> str: + self.get_conn() + db_name = database_name + if db_name is None: + db_name = self.default_database_name + + if db_name is None: + raise AirflowBadRequest("Database name must be specified") + + return db_name + + def __get_collection_name(self, collection_name: str | None = None) -> str: + self.get_conn() + coll_name = collection_name + if coll_name is None: + coll_name = self.default_collection_name + + if coll_name is None: + raise AirflowBadRequest("Collection name must be specified") + + return coll_name + +
[docs] def does_collection_exist(self, collection_name: str, database_name: str) -> bool: + """Checks if a collection exists in CosmosDB.""" + if collection_name is None: + raise AirflowBadRequest("Collection name cannot be None.") + + existing_container = list( + self.get_conn() + .get_database_client(self.__get_database_name(database_name)) + .query_containers( + "SELECT * FROM r WHERE r.id=@id", + parameters=[json.dumps({"name": "@id", "value": collection_name})], + ) + ) + if len(existing_container) == 0: + return False + + return True
+ +
[docs] def create_collection( + self, + collection_name: str, + database_name: str | None = None, + partition_key: str | None = None, + ) -> None: + """Creates a new collection in the CosmosDB database.""" + if collection_name is None: + raise AirflowBadRequest("Collection name cannot be None.") + + # We need to check to see if this container already exists so we don't try + # to create it twice + existing_container = list( + self.get_conn() + .get_database_client(self.__get_database_name(database_name)) + .query_containers( + "SELECT * FROM r WHERE r.id=@id", + parameters=[json.dumps({"name": "@id", "value": collection_name})], + ) + ) + + # Only create if we did not find it already existing + if len(existing_container) == 0: + self.get_conn().get_database_client(self.__get_database_name(database_name)).create_container( + collection_name, partition_key=partition_key
+ ) + +
[docs] def does_database_exist(self, database_name: str) -> bool: + """Checks if a database exists in CosmosDB.""" + if database_name is None: + raise AirflowBadRequest("Database name cannot be None.") + + existing_database = list( + self.get_conn().query_databases( + "SELECT * FROM r WHERE r.id=@id", + parameters=[json.dumps({"name": "@id", "value": database_name})], + ) + ) + if len(existing_database) == 0: + return False + + return True
+ +
[docs] def create_database(self, database_name: str) -> None: + """Creates a new database in CosmosDB.""" + if database_name is None: + raise AirflowBadRequest("Database name cannot be None.") + + # We need to check to see if this database already exists so we don't try + # to create it twice + existing_database = list( + self.get_conn().query_databases( + "SELECT * FROM r WHERE r.id=@id", + parameters=[json.dumps({"name": "@id", "value": database_name})], + ) + ) + + # Only create if we did not find it already existing + if len(existing_database) == 0: + self.get_conn().create_database(database_name)
+ +
[docs] def delete_database(self, database_name: str) -> None: + """Deletes an existing database in CosmosDB.""" + if database_name is None: + raise AirflowBadRequest("Database name cannot be None.") + + self.get_conn().delete_database(database_name)
+ +
[docs] def delete_collection(self, collection_name: str, database_name: str | None = None) -> None: + """Deletes an existing collection in the CosmosDB database.""" + if collection_name is None: + raise AirflowBadRequest("Collection name cannot be None.") + + self.get_conn().get_database_client(self.__get_database_name(database_name)).delete_container( + collection_name
+ ) + +
[docs] def upsert_document(self, document, database_name=None, collection_name=None, document_id=None): + """ + Inserts a new document (or updates an existing one) into an existing + collection in the CosmosDB database. + """ + # Assign unique ID if one isn't provided + if document_id is None: + document_id = str(uuid.uuid4()) + + if document is None: + raise AirflowBadRequest("You cannot insert a None document") + + # Add document id if isn't found + if "id" in document: + if document["id"] is None: + document["id"] = document_id + else: + document["id"] = document_id + + created_document = ( + self.get_conn() + .get_database_client(self.__get_database_name(database_name)) + .get_container_client(self.__get_collection_name(collection_name)) + .upsert_item(document) + ) + + return created_document
+ +
[docs] def insert_documents( + self, documents, database_name: str | None = None, collection_name: str | None = None + ) -> list: + """Insert a list of new documents into an existing collection in the CosmosDB database.""" + if documents is None: + raise AirflowBadRequest("You cannot insert empty documents") + + created_documents = [] + for single_document in documents: + created_documents.append( + self.get_conn() + .get_database_client(self.__get_database_name(database_name)) + .get_container_client(self.__get_collection_name(collection_name)) + .create_item(single_document) + ) + + return created_documents
+ +
[docs] def delete_document( + self, + document_id: str, + database_name: str | None = None, + collection_name: str | None = None, + partition_key: str | None = None, + ) -> None: + """Delete an existing document out of a collection in the CosmosDB database.""" + if document_id is None: + raise AirflowBadRequest("Cannot delete a document without an id") + ( + self.get_conn() + .get_database_client(self.__get_database_name(database_name)) + .get_container_client(self.__get_collection_name(collection_name)) + .delete_item(document_id, partition_key=partition_key)
+ ) + +
[docs] def get_document( + self, + document_id: str, + database_name: str | None = None, + collection_name: str | None = None, + partition_key: str | None = None, + ): + """Get a document from an existing collection in the CosmosDB database.""" + if document_id is None: + raise AirflowBadRequest("Cannot get a document without an id") + + try: + return ( + self.get_conn() + .get_database_client(self.__get_database_name(database_name)) + .get_container_client(self.__get_collection_name(collection_name)) + .read_item(document_id, partition_key=partition_key) + ) + except CosmosHttpResponseError: + return None
+ +
[docs] def get_documents( + self, + sql_string: str, + database_name: str | None = None, + collection_name: str | None = None, + partition_key: str | None = None, + ) -> list | None: + """Get a list of documents from an existing collection in the CosmosDB database via SQL query.""" + if sql_string is None: + raise AirflowBadRequest("SQL query string cannot be None") + + try: + result_iterable = ( + self.get_conn() + .get_database_client(self.__get_database_name(database_name)) + .get_container_client(self.__get_collection_name(collection_name)) + .query_items(sql_string, partition_key=partition_key) + ) + return list(result_iterable) + except CosmosHttpResponseError: + return None
+ +
[docs] def test_connection(self): + """Test a configured Azure Cosmos connection.""" + try: + # Attempt to list existing databases under the configured subscription and retrieve the first in + # the returned iterator. The Azure Cosmos API does allow for creation of a + # CosmosClient with incorrect values but then will fail properly once items are + # retrieved using the client. We need to _actually_ try to retrieve an object to properly test the + # connection. + next(iter(self.get_conn().list_databases()), None) + except Exception as e: + return False, str(e) + return True, "Successfully connected to Azure Cosmos."
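A short usage sketch of AzureCosmosDBHook as configured by the connection described in the module docstring (not part of the archived page; the database, collection and document values are hypothetical):

# Hypothetical example; assumes an `azure_cosmos_default` connection whose login is
# the Cosmos endpoint URI and whose password is the master key.
from airflow.providers.microsoft.azure.hooks.cosmos import AzureCosmosDBHook

hook = AzureCosmosDBHook(azure_cosmos_conn_id="azure_cosmos_default")
if not hook.does_database_exist("reporting"):
    hook.create_database("reporting")
# Assumes an "events" collection already exists in the "reporting" database.
hook.upsert_document(
    {"event": "dag_finished", "status": "success"},
    database_name="reporting",
    collection_name="events",
    document_id="run-2022-12-14",  # optional; a uuid4 is generated when omitted
)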
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/data_factory.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/data_factory.html
new file mode 100644
index 00000000000..45182536205
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/data_factory.html
@@ -0,0 +1,1856 @@

Source code for airflow.providers.microsoft.azure.hooks.data_factory

+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+.. spelling::
+
+    CreateRunResponse
+    DatasetResource
+    LinkedServiceResource
+    LROPoller
+    PipelineResource
+    PipelineRun
+    TriggerResource
+    datafactory
+    DataFlow
+    mgmt
+"""
+from __future__ import annotations
+
+import inspect
+import time
+from functools import wraps
+from typing import Any, Callable, Union
+
+from azure.core.polling import LROPoller
+from azure.identity import ClientSecretCredential, DefaultAzureCredential
+from azure.mgmt.datafactory import DataFactoryManagementClient
+from azure.mgmt.datafactory.models import (
+    CreateRunResponse,
+    DataFlow,
+    DatasetResource,
+    Factory,
+    LinkedServiceResource,
+    PipelineResource,
+    PipelineRun,
+    TriggerResource,
+)
+
+from airflow.exceptions import AirflowException
+from airflow.hooks.base import BaseHook
+from airflow.typing_compat import TypedDict
+
+
[docs]Credentials = Union[ClientSecretCredential, DefaultAzureCredential]
+ + +
[docs]def provide_targeted_factory(func: Callable) -> Callable: + """ + Provide the targeted factory to the decorated function in case it isn't specified. + + If ``resource_group_name`` or ``factory_name`` is not provided it defaults to the value specified in + the connection extras. + """ + signature = inspect.signature(func) + + @wraps(func) + def wrapper(*args, **kwargs) -> Callable: + bound_args = signature.bind(*args, **kwargs) + + def bind_argument(arg, default_key): + # Check if arg was not included in the function signature or, if it is, the value is not provided. + if arg not in bound_args.arguments or bound_args.arguments[arg] is None: + self = args[0] + conn = self.get_connection(self.conn_id) + extras = conn.extra_dejson + default_value = extras.get(default_key) or extras.get( + f"extra__azure_data_factory__{default_key}" + ) + if not default_value: + raise AirflowException("Could not determine the targeted data factory.") + + bound_args.arguments[arg] = default_value + + bind_argument("resource_group_name", "resource_group_name") + bind_argument("factory_name", "factory_name") + + return func(*bound_args.args, **bound_args.kwargs) + + return wrapper
+ + +
[docs]class PipelineRunInfo(TypedDict): + """Type class for the pipeline run info dictionary.""" + +
[docs] run_id: str
+
[docs] factory_name: str | None
+
[docs] resource_group_name: str | None
+ + +
[docs]class AzureDataFactoryPipelineRunStatus: + """Azure Data Factory pipeline operation statuses.""" + +
[docs] QUEUED = "Queued"
+
[docs] IN_PROGRESS = "InProgress"
+
[docs] SUCCEEDED = "Succeeded"
+
[docs] FAILED = "Failed"
+
[docs] CANCELING = "Canceling"
+
[docs] CANCELLED = "Cancelled"
+ +
[docs] TERMINAL_STATUSES = {CANCELLED, FAILED, SUCCEEDED}
+ + +
[docs]class AzureDataFactoryPipelineRunException(AirflowException): + """An exception that indicates a pipeline run failed to complete."""
+ + +
[docs]def get_field(extras: dict, field_name: str, strict: bool = False): + """Get field from extra, first checking short name, then for backcompat we check for prefixed name.""" + backcompat_prefix = "extra__azure_data_factory__" + if field_name.startswith("extra__"): + raise ValueError( + f"Got prefixed name {field_name}; please remove the '{backcompat_prefix}' prefix " + "when using this method." + ) + if field_name in extras: + return extras[field_name] or None + prefixed_name = f"{backcompat_prefix}{field_name}" + if prefixed_name in extras: + return extras[prefixed_name] or None + if strict: + raise KeyError(f"Field {field_name} not found in extras")
+ + +
[docs]class AzureDataFactoryHook(BaseHook): + """ + A hook to interact with Azure Data Factory. + + :param azure_data_factory_conn_id: The :ref:`Azure Data Factory connection id<howto/connection:adf>`. + """ + +
[docs] conn_type: str = "azure_data_factory"
+
[docs] conn_name_attr: str = "azure_data_factory_conn_id"
+
[docs] default_conn_name: str = "azure_data_factory_default"
+
[docs] hook_name: str = "Azure Data Factory"
+ + @staticmethod +
[docs] def get_connection_form_widgets() -> dict[str, Any]: + """Returns connection widgets to add to connection form""" + from flask_appbuilder.fieldwidgets import BS3TextFieldWidget + from flask_babel import lazy_gettext + from wtforms import StringField + + return { + "tenantId": StringField(lazy_gettext("Tenant ID"), widget=BS3TextFieldWidget()), + "subscriptionId": StringField(lazy_gettext("Subscription ID"), widget=BS3TextFieldWidget()), + "resource_group_name": StringField( + lazy_gettext("Resource Group Name"), widget=BS3TextFieldWidget() + ), + "factory_name": StringField(lazy_gettext("Factory Name"), widget=BS3TextFieldWidget()),
+ } + + @staticmethod +
[docs] def get_ui_field_behaviour() -> dict[str, Any]: + """Returns custom field behaviour""" + return { + "hidden_fields": ["schema", "port", "host", "extra"], + "relabeling": { + "login": "Client ID", + "password": "Secret",
+ }, + } + + def __init__(self, azure_data_factory_conn_id: str = default_conn_name): + self._conn: DataFactoryManagementClient = None + self.conn_id = azure_data_factory_conn_id + super().__init__() + +
[docs] def get_conn(self) -> DataFactoryManagementClient: + if self._conn is not None: + return self._conn + + conn = self.get_connection(self.conn_id) + extras = conn.extra_dejson + tenant = get_field(extras, "tenantId") + + try: + subscription_id = get_field(extras, "subscriptionId", strict=True) + except KeyError: + raise ValueError("A Subscription ID is required to connect to Azure Data Factory.") + + credential: Credentials + if conn.login is not None and conn.password is not None: + if not tenant: + raise ValueError("A Tenant ID is required when authenticating with Client ID and Secret.") + + credential = ClientSecretCredential( + client_id=conn.login, client_secret=conn.password, tenant_id=tenant + ) + else: + credential = DefaultAzureCredential() + self._conn = self._create_client(credential, subscription_id) + + return self._conn
+ + @provide_targeted_factory +
[docs] def get_factory( + self, resource_group_name: str | None = None, factory_name: str | None = None, **config: Any + ) -> Factory: + """ + Get the factory. + + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The factory. + """ + return self.get_conn().factories.get(resource_group_name, factory_name, **config)
+ + def _factory_exists(self, resource_group_name, factory_name) -> bool: + """Return whether or not the factory already exists.""" + factories = { + factory.name for factory in self.get_conn().factories.list_by_resource_group(resource_group_name) + } + + return factory_name in factories + + @staticmethod + def _create_client(credential: Credentials, subscription_id: str): + return DataFactoryManagementClient( + credential=credential, + subscription_id=subscription_id, + ) + + @provide_targeted_factory +
[docs] def update_factory( + self, + factory: Factory, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> Factory: + """ + Update the factory. + + :param factory: The factory resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the factory does not exist. + :return: The factory. + """ + if not self._factory_exists(resource_group_name, factory_name): + raise AirflowException(f"Factory {factory!r} does not exist.") + + return self.get_conn().factories.create_or_update( + resource_group_name, factory_name, factory, **config
+ ) + + @provide_targeted_factory +
[docs] def create_factory( + self, + factory: Factory, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> Factory: + """ + Create the factory. + + :param factory: The factory resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the factory already exists. + :return: The factory. + """ + if self._factory_exists(resource_group_name, factory_name): + raise AirflowException(f"Factory {factory!r} already exists.") + + return self.get_conn().factories.create_or_update( + resource_group_name, factory_name, factory, **config
+ ) + + @provide_targeted_factory +
[docs] def delete_factory( + self, resource_group_name: str | None = None, factory_name: str | None = None, **config: Any + ) -> None: + """ + Delete the factory. + + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + """ + self.get_conn().factories.delete(resource_group_name, factory_name, **config)
+ + @provide_targeted_factory +
[docs] def get_linked_service( + self, + linked_service_name: str, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> LinkedServiceResource: + """ + Get the linked service. + + :param linked_service_name: The linked service name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The linked service. + """ + return self.get_conn().linked_services.get( + resource_group_name, factory_name, linked_service_name, **config
+ ) + + def _linked_service_exists(self, resource_group_name, factory_name, linked_service_name) -> bool: + """Return whether or not the linked service already exists.""" + linked_services = { + linked_service.name + for linked_service in self.get_conn().linked_services.list_by_factory( + resource_group_name, factory_name + ) + } + + return linked_service_name in linked_services + + @provide_targeted_factory +
[docs] def update_linked_service( + self, + linked_service_name: str, + linked_service: LinkedServiceResource, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> LinkedServiceResource: + """ + Update the linked service. + + :param linked_service_name: The linked service name. + :param linked_service: The linked service resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the linked service does not exist. + :return: The linked service. + """ + if not self._linked_service_exists(resource_group_name, factory_name, linked_service_name): + raise AirflowException(f"Linked service {linked_service_name!r} does not exist.") + + return self.get_conn().linked_services.create_or_update( + resource_group_name, factory_name, linked_service_name, linked_service, **config
+ ) + + @provide_targeted_factory +
[docs] def create_linked_service( + self, + linked_service_name: str, + linked_service: LinkedServiceResource, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> LinkedServiceResource: + """ + Create the linked service. + + :param linked_service_name: The linked service name. + :param linked_service: The linked service resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the linked service already exists. + :return: The linked service. + """ + if self._linked_service_exists(resource_group_name, factory_name, linked_service_name): + raise AirflowException(f"Linked service {linked_service_name!r} already exists.") + + return self.get_conn().linked_services.create_or_update( + resource_group_name, factory_name, linked_service_name, linked_service, **config
+ ) + + @provide_targeted_factory +
[docs] def delete_linked_service( + self, + linked_service_name: str, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> None: + """ + Delete the linked service. + + :param linked_service_name: The linked service name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + """ + self.get_conn().linked_services.delete( + resource_group_name, factory_name, linked_service_name, **config
+ ) + + @provide_targeted_factory +
[docs] def get_dataset( + self, + dataset_name: str, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> DatasetResource: + """ + Get the dataset. + + :param dataset_name: The dataset name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The dataset. + """ + return self.get_conn().datasets.get(resource_group_name, factory_name, dataset_name, **config)
+ + def _dataset_exists(self, resource_group_name, factory_name, dataset_name) -> bool: + """Return whether or not the dataset already exists.""" + datasets = { + dataset.name + for dataset in self.get_conn().datasets.list_by_factory(resource_group_name, factory_name) + } + + return dataset_name in datasets + + @provide_targeted_factory +
[docs] def update_dataset( + self, + dataset_name: str, + dataset: DatasetResource, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> DatasetResource: + """ + Update the dataset. + + :param dataset_name: The dataset name. + :param dataset: The dataset resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the dataset does not exist. + :return: The dataset. + """ + if not self._dataset_exists(resource_group_name, factory_name, dataset_name): + raise AirflowException(f"Dataset {dataset_name!r} does not exist.") + + return self.get_conn().datasets.create_or_update( + resource_group_name, factory_name, dataset_name, dataset, **config
+ ) + + @provide_targeted_factory +
[docs] def create_dataset( + self, + dataset_name: str, + dataset: DatasetResource, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> DatasetResource: + """ + Create the dataset. + + :param dataset_name: The dataset name. + :param dataset: The dataset resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the dataset already exists. + :return: The dataset. + """ + if self._dataset_exists(resource_group_name, factory_name, dataset_name): + raise AirflowException(f"Dataset {dataset_name!r} already exists.") + + return self.get_conn().datasets.create_or_update( + resource_group_name, factory_name, dataset_name, dataset, **config
+ ) + + @provide_targeted_factory +
[docs] def delete_dataset( + self, + dataset_name: str, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> None: + """ + Delete the dataset. + + :param dataset_name: The dataset name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + """ + self.get_conn().datasets.delete(resource_group_name, factory_name, dataset_name, **config)
+ + @provide_targeted_factory +
[docs] def get_dataflow( + self, + dataflow_name: str, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> DataFlow: + """ + Get the dataflow. + + :param dataflow_name: The dataflow name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The dataflow. + """ + return self.get_conn().data_flows.get(resource_group_name, factory_name, dataflow_name, **config)
+ + def _dataflow_exists( + self, + dataflow_name: str, + resource_group_name: str | None = None, + factory_name: str | None = None, + ) -> bool: + """Return whether the dataflow already exists.""" + dataflows = { + dataflow.name + for dataflow in self.get_conn().data_flows.list_by_factory(resource_group_name, factory_name) + } + + return dataflow_name in dataflows + + @provide_targeted_factory +
[docs] def update_dataflow( + self, + dataflow_name: str, + dataflow: DataFlow, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> DataFlow: + """ + Update the dataflow. + + :param dataflow_name: The dataflow name. + :param dataflow: The dataflow resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the dataflow does not exist. + :return: The dataflow. + """ + if not self._dataflow_exists( + dataflow_name, + resource_group_name, + factory_name, + ): + raise AirflowException(f"Dataflow {dataflow_name!r} does not exist.") + + return self.get_conn().data_flows.create_or_update( + resource_group_name, factory_name, dataflow_name, dataflow, **config
+ ) + + @provide_targeted_factory +
[docs] def create_dataflow( + self, + dataflow_name: str, + dataflow: DataFlow, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> DataFlow: + """ + Create the dataflow. + + :param dataflow_name: The dataflow name. + :param dataflow: The dataflow resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the dataflow already exists. + :return: The dataflow. + """ + if self._dataflow_exists(dataflow_name, resource_group_name, factory_name): + raise AirflowException(f"Dataflow {dataflow_name!r} already exists.") + + return self.get_conn().data_flows.create_or_update( + resource_group_name, factory_name, dataflow_name, dataflow, **config
+ ) + + @provide_targeted_factory +
[docs] def delete_dataflow( + self, + dataflow_name: str, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> None: + """ + Delete the dataflow. + + :param dataflow_name: The dataflow name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + """ + self.get_conn().data_flows.delete(resource_group_name, factory_name, dataflow_name, **config)
+ + @provide_targeted_factory +
[docs] def get_pipeline( + self, + pipeline_name: str, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> PipelineResource: + """ + Get the pipeline. + + :param pipeline_name: The pipeline name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The pipeline. + """ + return self.get_conn().pipelines.get(resource_group_name, factory_name, pipeline_name, **config)
+ + def _pipeline_exists(self, resource_group_name, factory_name, pipeline_name) -> bool: + """Return whether or not the pipeline already exists.""" + pipelines = { + pipeline.name + for pipeline in self.get_conn().pipelines.list_by_factory(resource_group_name, factory_name) + } + + return pipeline_name in pipelines + + @provide_targeted_factory +
[docs] def update_pipeline( + self, + pipeline_name: str, + pipeline: PipelineResource, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> PipelineResource: + """ + Update the pipeline. + + :param pipeline_name: The pipeline name. + :param pipeline: The pipeline resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the pipeline does not exist. + :return: The pipeline. + """ + if not self._pipeline_exists(resource_group_name, factory_name, pipeline_name): + raise AirflowException(f"Pipeline {pipeline_name!r} does not exist.") + + return self.get_conn().pipelines.create_or_update( + resource_group_name, factory_name, pipeline_name, pipeline, **config
+ ) + + @provide_targeted_factory +
[docs] def create_pipeline( + self, + pipeline_name: str, + pipeline: PipelineResource, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> PipelineResource: + """ + Create the pipeline. + + :param pipeline_name: The pipeline name. + :param pipeline: The pipeline resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the pipeline already exists. + :return: The pipeline. + """ + if self._pipeline_exists(resource_group_name, factory_name, pipeline_name): + raise AirflowException(f"Pipeline {pipeline_name!r} already exists.") + + return self.get_conn().pipelines.create_or_update( + resource_group_name, factory_name, pipeline_name, pipeline, **config
+ ) + + @provide_targeted_factory +
[docs] def delete_pipeline( + self, + pipeline_name: str, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> None: + """ + Delete the pipeline. + + :param pipeline_name: The pipeline name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + """ + self.get_conn().pipelines.delete(resource_group_name, factory_name, pipeline_name, **config)
+ + @provide_targeted_factory +
[docs] def run_pipeline( + self, + pipeline_name: str, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> CreateRunResponse: + """ + Run a pipeline. + + :param pipeline_name: The pipeline name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The pipeline run. + """ + return self.get_conn().pipelines.create_run( + resource_group_name, factory_name, pipeline_name, **config
+ ) + + @provide_targeted_factory +
[docs] def get_pipeline_run( + self, + run_id: str, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> PipelineRun: + """ + Get the pipeline run. + + :param run_id: The pipeline run identifier. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The pipeline run. + """ + return self.get_conn().pipeline_runs.get(resource_group_name, factory_name, run_id, **config)
+ +
[docs] def get_pipeline_run_status( + self, + run_id: str, + resource_group_name: str | None = None, + factory_name: str | None = None, + ) -> str: + """ + Get a pipeline run's current status. + + :param run_id: The pipeline run identifier. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :return: The status of the pipeline run. + """ + self.log.info("Getting the status of run ID %s.", run_id) + pipeline_run_status = self.get_pipeline_run( + run_id=run_id, + factory_name=factory_name, + resource_group_name=resource_group_name, + ).status + self.log.info("Current status of pipeline run %s: %s", run_id, pipeline_run_status) + + return pipeline_run_status
+ +
[docs] def wait_for_pipeline_run_status( + self, + run_id: str, + expected_statuses: str | set[str], + resource_group_name: str | None = None, + factory_name: str | None = None, + check_interval: int = 60, + timeout: int = 60 * 60 * 24 * 7, + ) -> bool: + """ + Waits for a pipeline run to match an expected status. + + :param run_id: The pipeline run identifier. + :param expected_statuses: The desired status(es) to check against a pipeline run's current status. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param check_interval: Time in seconds to check on a pipeline run's status. + :param timeout: Time in seconds to wait for a pipeline to reach a terminal status or the expected + status. + :return: Boolean indicating if the pipeline run has reached the ``expected_status``. + """ + pipeline_run_info = PipelineRunInfo( + run_id=run_id, + factory_name=factory_name, + resource_group_name=resource_group_name, + ) + pipeline_run_status = self.get_pipeline_run_status(**pipeline_run_info) + + start_time = time.monotonic() + + while ( + pipeline_run_status not in AzureDataFactoryPipelineRunStatus.TERMINAL_STATUSES + and pipeline_run_status not in expected_statuses + ): + # Check if the pipeline-run duration has exceeded the ``timeout`` configured. + if start_time + timeout < time.monotonic(): + raise AzureDataFactoryPipelineRunException( + f"Pipeline run {run_id} has not reached a terminal status after {timeout} seconds." + ) + + # Wait to check the status of the pipeline run based on the ``check_interval`` configured. + time.sleep(check_interval) + + pipeline_run_status = self.get_pipeline_run_status(**pipeline_run_info) + + return pipeline_run_status in expected_statuses
+ + @provide_targeted_factory +
[docs] def cancel_pipeline_run( + self, + run_id: str, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> None: + """ + Cancel the pipeline run. + + :param run_id: The pipeline run identifier. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + """ + self.get_conn().pipeline_runs.cancel(resource_group_name, factory_name, run_id, **config)
+ + @provide_targeted_factory +
[docs] def get_trigger( + self, + trigger_name: str, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> TriggerResource: + """ + Get the trigger. + + :param trigger_name: The trigger name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The trigger. + """ + return self.get_conn().triggers.get(resource_group_name, factory_name, trigger_name, **config)
+ + def _trigger_exists(self, resource_group_name, factory_name, trigger_name) -> bool: + """Return whether or not the trigger already exists.""" + triggers = { + trigger.name + for trigger in self.get_conn().triggers.list_by_factory(resource_group_name, factory_name) + } + + return trigger_name in triggers + + @provide_targeted_factory +
[docs] def update_trigger( + self, + trigger_name: str, + trigger: TriggerResource, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> TriggerResource: + """ + Update the trigger. + + :param trigger_name: The trigger name. + :param trigger: The trigger resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the trigger does not exist. + :return: The trigger. + """ + if not self._trigger_exists(resource_group_name, factory_name, trigger_name): + raise AirflowException(f"Trigger {trigger_name!r} does not exist.") + + return self.get_conn().triggers.create_or_update( + resource_group_name, factory_name, trigger_name, trigger, **config
+ ) + + @provide_targeted_factory +
[docs] def create_trigger( + self, + trigger_name: str, + trigger: TriggerResource, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> TriggerResource: + """ + Create the trigger. + + :param trigger_name: The trigger name. + :param trigger: The trigger resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the trigger already exists. + :return: The trigger. + """ + if self._trigger_exists(resource_group_name, factory_name, trigger_name): + raise AirflowException(f"Trigger {trigger_name!r} already exists.") + + return self.get_conn().triggers.create_or_update( + resource_group_name, factory_name, trigger_name, trigger, **config
+ ) + + @provide_targeted_factory +
[docs] def delete_trigger( + self, + trigger_name: str, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> None: + """ + Delete the trigger. + + :param trigger_name: The trigger name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + """ + self.get_conn().triggers.delete(resource_group_name, factory_name, trigger_name, **config)
+ + @provide_targeted_factory +
[docs] def start_trigger( + self, + trigger_name: str, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> LROPoller: + """ + Start the trigger. + + :param trigger_name: The trigger name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: An Azure operation poller. + """ + return self.get_conn().triggers.begin_start(resource_group_name, factory_name, trigger_name, **config)
+ + @provide_targeted_factory +
[docs] def stop_trigger( + self, + trigger_name: str, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> LROPoller: + """ + Stop the trigger. + + :param trigger_name: The trigger name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: An Azure operation poller. + """ + return self.get_conn().triggers.begin_stop(resource_group_name, factory_name, trigger_name, **config)
+ + @provide_targeted_factory +
[docs] def rerun_trigger( + self, + trigger_name: str, + run_id: str, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> None: + """ + Rerun the trigger. + + :param trigger_name: The trigger name. + :param run_id: The trigger run identifier. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + """ + return self.get_conn().trigger_runs.rerun( + resource_group_name, factory_name, trigger_name, run_id, **config
+ ) + + @provide_targeted_factory +
[docs] def cancel_trigger( + self, + trigger_name: str, + run_id: str, + resource_group_name: str | None = None, + factory_name: str | None = None, + **config: Any, + ) -> None: + """ + Cancel the trigger. + + :param trigger_name: The trigger name. + :param run_id: The trigger run identifier. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + """ + self.get_conn().trigger_runs.cancel(resource_group_name, factory_name, trigger_name, run_id, **config)
+ +
[docs] def test_connection(self) -> tuple[bool, str]: + """Test a configured Azure Data Factory connection.""" + success = (True, "Successfully connected to Azure Data Factory.") + + try: + # Attempt to list existing factories under the configured subscription and retrieve the first in + # the returned iterator. The Azure Data Factory API does allow for creation of a + # DataFactoryManagementClient with incorrect values but then will fail properly once items are + # retrieved using the client. We need to _actually_ try to retrieve an object to properly test the + # connection. + next(self.get_conn().factories.list()) + return success + except StopIteration: + # If the iterator returned is empty it should still be considered a successful connection since + # it's possible to create a Data Factory via the ``AzureDataFactoryHook`` and none could + # legitimately exist yet. + return success + except Exception as e: + return False, str(e)
+
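A usage sketch tying together the run_pipeline and wait_for_pipeline_run_status methods above (not part of the archived page; the connection id, resource group, factory and pipeline names are hypothetical):

# Hypothetical example; assumes an `azure_data_factory_default` connection whose extras
# carry subscriptionId (and optionally resource_group_name / factory_name).
from airflow.providers.microsoft.azure.hooks.data_factory import (
    AzureDataFactoryHook,
    AzureDataFactoryPipelineRunStatus,
)

hook = AzureDataFactoryHook(azure_data_factory_conn_id="azure_data_factory_default")
run = hook.run_pipeline(
    pipeline_name="copy_sales_data",
    resource_group_name="analytics-rg",  # both fall back to the connection extras when
    factory_name="analytics-adf",        # omitted (see provide_targeted_factory above)
)
finished_ok = hook.wait_for_pipeline_run_status(
    run_id=run.run_id,
    expected_statuses=AzureDataFactoryPipelineRunStatus.SUCCEEDED,
    resource_group_name="analytics-rg",
    factory_name="analytics-adf",
    check_interval=30,  # poll every 30 seconds
    timeout=60 * 60,    # give up after one hour
)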
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/data_lake.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/data_lake.html
new file mode 100644
index 00000000000..70ade2d330f
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/data_lake.html
@@ -0,0 +1,1047 @@

Source code for airflow.providers.microsoft.azure.hooks.data_lake

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+This module contains integration with Azure Data Lake.
+
+AzureDataLakeHook communicates via a REST API compatible with WebHDFS. Make sure that an
+Airflow connection of type `azure_data_lake` exists. Authorization can be done by supplying a
+login (=Client ID), password (=Client Secret) and extra fields tenant (Tenant) and account_name (Account Name)
+(see connection `azure_data_lake_default` for an example).
+"""
+from __future__ import annotations
+
+from typing import Any
+
+from azure.datalake.store import core, lib, multithread
+
+from airflow.exceptions import AirflowException
+from airflow.hooks.base import BaseHook
+from airflow.providers.microsoft.azure.utils import _ensure_prefixes, get_field
+
+
+
[docs]class AzureDataLakeHook(BaseHook): + """ + Interacts with Azure Data Lake. + + Client ID and client secret should be in user and password parameters. + Tenant and account name should be set in the extra fields as + {"tenant": "<TENANT>", "account_name": "<ACCOUNT_NAME>"}. + + :param azure_data_lake_conn_id: Reference to the :ref:`Azure Data Lake connection<howto/connection:adl>`. + """ + +
[docs] conn_name_attr = "azure_data_lake_conn_id"
+
[docs] default_conn_name = "azure_data_lake_default"
+
[docs] conn_type = "azure_data_lake"
+
[docs] hook_name = "Azure Data Lake"
+ + @staticmethod +
[docs] def get_connection_form_widgets() -> dict[str, Any]: + """Returns connection widgets to add to connection form""" + from flask_appbuilder.fieldwidgets import BS3TextFieldWidget + from flask_babel import lazy_gettext + from wtforms import StringField + + return { + "tenant": StringField(lazy_gettext("Azure Tenant ID"), widget=BS3TextFieldWidget()), + "account_name": StringField( + lazy_gettext("Azure DataLake Store Name"), widget=BS3TextFieldWidget()
+ ), + } + + @staticmethod + @_ensure_prefixes(conn_type="azure_data_lake") +
[docs] def get_ui_field_behaviour() -> dict[str, Any]: + """Returns custom field behaviour""" + return { + "hidden_fields": ["schema", "port", "host", "extra"], + "relabeling": { + "login": "Azure Client ID", + "password": "Azure Client Secret", + }, + "placeholders": { + "login": "client id", + "password": "secret", + "tenant": "tenant id", + "account_name": "datalake store",
+ }, + } + + def __init__(self, azure_data_lake_conn_id: str = default_conn_name) -> None: + super().__init__() + self.conn_id = azure_data_lake_conn_id + self._conn: core.AzureDLFileSystem | None = None + self.account_name: str | None = None + + def _get_field(self, extras, name): + return get_field( + conn_id=self.conn_id, + conn_type=self.conn_type, + extras=extras, + field_name=name, + ) + +
[docs] def get_conn(self) -> core.AzureDLFileSystem: + """Return an AzureDLFileSystem object.""" + if not self._conn: + conn = self.get_connection(self.conn_id) + extras = conn.extra_dejson + self.account_name = self._get_field(extras, "account_name") + tenant = self._get_field(extras, "tenant") + adl_creds = lib.auth(tenant_id=tenant, client_secret=conn.password, client_id=conn.login) + self._conn = core.AzureDLFileSystem(adl_creds, store_name=self.account_name) + self._conn.connect() + return self._conn
+ +
[docs] def check_for_file(self, file_path: str) -> bool: + """ + Check if a file exists on Azure Data Lake. + + :param file_path: Path and name of the file. + :return: True if the file exists, False otherwise. + """ + try: + files = self.get_conn().glob(file_path, details=False, invalidate_cache=True) + return len(files) == 1 + except FileNotFoundError: + return False
+ +
[docs] def upload_file( + self, + local_path: str, + remote_path: str, + nthreads: int = 64, + overwrite: bool = True, + buffersize: int = 4194304, + blocksize: int = 4194304, + **kwargs, + ) -> None: + """ + Upload a file to Azure Data Lake. + + :param local_path: local path. Can be single file, directory (in which case, + upload recursively) or glob pattern. Recursive glob patterns using `**` + are not supported. + :param remote_path: Remote path to upload to; if multiple files, this is the + directory root to write within. + :param nthreads: Number of threads to use. If None, uses the number of cores. + :param overwrite: Whether to forcibly overwrite existing files/directories. + If False and remote path is a directory, will quit regardless if any files + would be overwritten or not. If True, only matching filenames are actually + overwritten. + :param buffersize: int [2**22] + Number of bytes for internal buffer. This block cannot be bigger than + a chunk and cannot be smaller than a block. + :param blocksize: int [2**22] + Number of bytes for a block. Within each chunk, we write a smaller + block for each API call. This block cannot be bigger than a chunk. + """ + multithread.ADLUploader( + self.get_conn(), + lpath=local_path, + rpath=remote_path, + nthreads=nthreads, + overwrite=overwrite, + buffersize=buffersize, + blocksize=blocksize, + **kwargs,
+ ) + +
[docs] def download_file( + self, + local_path: str, + remote_path: str, + nthreads: int = 64, + overwrite: bool = True, + buffersize: int = 4194304, + blocksize: int = 4194304, + **kwargs, + ) -> None: + """ + Download a file from Azure Blob Storage. + + :param local_path: local path. If downloading a single file, will write to this + specific file, unless it is an existing directory, in which case a file is + created within it. If downloading multiple files, this is the root + directory to write within. Will create directories as required. + :param remote_path: remote path/globstring to use to find remote files. + Recursive glob patterns using `**` are not supported. + :param nthreads: Number of threads to use. If None, uses the number of cores. + :param overwrite: Whether to forcibly overwrite existing files/directories. + If False and remote path is a directory, will quit regardless if any files + would be overwritten or not. If True, only matching filenames are actually + overwritten. + :param buffersize: int [2**22] + Number of bytes for internal buffer. This block cannot be bigger than + a chunk and cannot be smaller than a block. + :param blocksize: int [2**22] + Number of bytes for a block. Within each chunk, we write a smaller + block for each API call. This block cannot be bigger than a chunk. + """ + multithread.ADLDownloader( + self.get_conn(), + lpath=local_path, + rpath=remote_path, + nthreads=nthreads, + overwrite=overwrite, + buffersize=buffersize, + blocksize=blocksize, + **kwargs,
+ ) + +
[docs] def list(self, path: str) -> list: + """ + List files in Azure Data Lake Storage + + :param path: full path/globstring to use to list files in ADLS + """ + if "*" in path: + return self.get_conn().glob(path) + else: + return self.get_conn().walk(path)
+ +
[docs] def remove(self, path: str, recursive: bool = False, ignore_not_found: bool = True) -> None: + """ + Remove files in Azure Data Lake Storage + + :param path: A directory or file to remove in ADLS + :param recursive: Whether to loop into directories in the location and remove the files + :param ignore_not_found: Whether to raise error if file to delete is not found + """ + try: + self.get_conn().remove(path=path, recursive=recursive) + except FileNotFoundError: + if ignore_not_found: + self.log.info("File %s not found", path) + else: + raise AirflowException(f"File {path} not found")
+
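A small usage sketch of AzureDataLakeHook using the upload and list helpers above (not part of the archived page; the paths and connection id are hypothetical):

# Hypothetical example; assumes an `azure_data_lake_default` connection with the client id
# and secret as login/password and `tenant` / `account_name` in the extras.
from airflow.providers.microsoft.azure.hooks.data_lake import AzureDataLakeHook

hook = AzureDataLakeHook(azure_data_lake_conn_id="azure_data_lake_default")
hook.upload_file(
    local_path="/tmp/report.csv",           # a single file, a directory, or a glob
    remote_path="raw/reports/report.csv",
    overwrite=True,
)
if hook.check_for_file("raw/reports/report.csv"):
    print(hook.list("raw/reports/*"))       # glob -> list of matching remote paths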
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/fileshare.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/fileshare.html
new file mode 100644
index 00000000000..285b04f2a6b
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/fileshare.html
@@ -0,0 +1,1142 @@

Source code for airflow.providers.microsoft.azure.hooks.fileshare

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import warnings
+from functools import wraps
+from typing import IO, Any
+
+from azure.storage.file import File, FileService
+
+from airflow.hooks.base import BaseHook
+
+
+def _ensure_prefixes(conn_type):
+    """
+    Remove when provider min airflow version >= 2.5.0 since this is handled by
+    provider manager from that version.
+    """
+
+    def dec(func):
+        @wraps(func)
+        def inner():
+            field_behaviors = func()
+            conn_attrs = {"host", "schema", "login", "password", "port", "extra"}
+
+            def _ensure_prefix(field):
+                if field not in conn_attrs and not field.startswith("extra__"):
+                    return f"extra__{conn_type}__{field}"
+                else:
+                    return field
+
+            if "placeholders" in field_behaviors:
+                placeholders = field_behaviors["placeholders"]
+                field_behaviors["placeholders"] = {_ensure_prefix(k): v for k, v in placeholders.items()}
+            return field_behaviors
+
+        return inner
+
+    return dec
+
+
+
[docs]class AzureFileShareHook(BaseHook): + """ + Interacts with Azure FileShare Storage. + + :param azure_fileshare_conn_id: Reference to the + :ref:`Azure FileShare connection id<howto/connection:azure_fileshare>` + for the Azure account whose file shares should be used. + + """ + +
[docs] conn_name_attr = "azure_fileshare_conn_id"
+
[docs] default_conn_name = "azure_fileshare_default"
+
[docs] conn_type = "azure_fileshare"
+
[docs] hook_name = "Azure FileShare"
+ + def __init__(self, azure_fileshare_conn_id: str = "azure_fileshare_default") -> None: + super().__init__() + self.conn_id = azure_fileshare_conn_id + self._conn = None + + @staticmethod +
[docs] def get_connection_form_widgets() -> dict[str, Any]: + """Returns connection widgets to add to connection form""" + from flask_appbuilder.fieldwidgets import BS3PasswordFieldWidget, BS3TextFieldWidget + from flask_babel import lazy_gettext + from wtforms import PasswordField, StringField + + return { + "sas_token": PasswordField(lazy_gettext("SAS Token (optional)"), widget=BS3PasswordFieldWidget()), + "connection_string": StringField( + lazy_gettext("Connection String (optional)"), widget=BS3TextFieldWidget() + ), + "protocol": StringField( + lazy_gettext("Account URL or token (optional)"), widget=BS3TextFieldWidget()
+ ), + } + + @staticmethod + @_ensure_prefixes(conn_type="azure_fileshare") +
[docs] def get_ui_field_behaviour() -> dict[str, Any]: + """Returns custom field behaviour""" + return { + "hidden_fields": ["schema", "port", "host", "extra"], + "relabeling": { + "login": "Blob Storage Login (optional)", + "password": "Blob Storage Key (optional)", + }, + "placeholders": { + "login": "account name", + "password": "secret", + "sas_token": "account url or token (optional)", + "connection_string": "account url or token (optional)", + "protocol": "account url or token (optional)",
+ }, + } + +
[docs] def get_conn(self) -> FileService: + """Return the FileService object.""" + + def check_for_conflict(key): + backcompat_key = f"{backcompat_prefix}{key}" + if backcompat_key in extras: + warnings.warn( + f"Conflicting params `{key}` and `{backcompat_key}` found in extras for conn " + f"{self.conn_id}. Using value for `{key}`. Please ensure this is the correct value " + f"and remove the backcompat key `{backcompat_key}`." + ) + + backcompat_prefix = "extra__azure_fileshare__" + if self._conn: + return self._conn + conn = self.get_connection(self.conn_id) + extras = conn.extra_dejson + service_options = {} + for key, value in extras.items(): + if value == "": + continue + if not key.startswith("extra__"): + service_options[key] = value + check_for_conflict(key) + elif key.startswith(backcompat_prefix): + short_name = key[len(backcompat_prefix) :] + if short_name not in service_options: # prefer values provided with short name + service_options[short_name] = value + else: + warnings.warn(f"Extra param `{key}` not recognized; ignoring.") + self._conn = FileService(account_name=conn.login, account_key=conn.password, **service_options) + return self._conn
+ +
[docs] def check_for_directory(self, share_name: str, directory_name: str, **kwargs) -> bool: + """ + Check if a directory exists on Azure File Share. + + :param share_name: Name of the share. + :param directory_name: Name of the directory. + :param kwargs: Optional keyword arguments that + `FileService.exists()` takes. + :return: True if the directory exists, False otherwise. + """ + return self.get_conn().exists(share_name, directory_name, **kwargs)
+ +
[docs] def check_for_file(self, share_name: str, directory_name: str, file_name: str, **kwargs) -> bool: + """ + Check if a file exists on Azure File Share. + + :param share_name: Name of the share. + :param directory_name: Name of the directory. + :param file_name: Name of the file. + :param kwargs: Optional keyword arguments that + `FileService.exists()` takes. + :return: True if the file exists, False otherwise. + """ + return self.get_conn().exists(share_name, directory_name, file_name, **kwargs)
+ +
[docs] def list_directories_and_files( + self, share_name: str, directory_name: str | None = None, **kwargs + ) -> list: + """ + Return the list of directories and files stored on an Azure File Share. + + :param share_name: Name of the share. + :param directory_name: Name of the directory. + :param kwargs: Optional keyword arguments that + `FileService.list_directories_and_files()` takes. + :return: A list of files and directories + """ + return self.get_conn().list_directories_and_files(share_name, directory_name, **kwargs)
+ +
[docs] def list_files(self, share_name: str, directory_name: str | None = None, **kwargs) -> list[str]: + """ + Return the list of files stored on an Azure File Share. + + :param share_name: Name of the share. + :param directory_name: Name of the directory. + :param kwargs: Optional keyword arguments that + `FileService.list_directories_and_files()` takes. + :return: A list of files + """ + return [ + obj.name + for obj in self.list_directories_and_files(share_name, directory_name, **kwargs) + if isinstance(obj, File)
+ ] + +
[docs] def create_share(self, share_name: str, **kwargs) -> bool: + """ + Create new Azure File Share. + + :param share_name: Name of the share. + :param kwargs: Optional keyword arguments that + `FileService.create_share()` takes. + :return: True if share is created, False if share already exists. + """ + return self.get_conn().create_share(share_name, **kwargs)
+ +
[docs] def delete_share(self, share_name: str, **kwargs) -> bool: + """ + Delete existing Azure File Share. + + :param share_name: Name of the share. + :param kwargs: Optional keyword arguments that + `FileService.delete_share()` takes. + :return: True if share is deleted, False if share does not exist. + """ + return self.get_conn().delete_share(share_name, **kwargs)
+ +
[docs] def create_directory(self, share_name: str, directory_name: str, **kwargs) -> list: + """ + Create a new directory on an Azure File Share. + + :param share_name: Name of the share. + :param directory_name: Name of the directory. + :param kwargs: Optional keyword arguments that + `FileService.create_directory()` takes. + :return: A list of files and directories + """ + return self.get_conn().create_directory(share_name, directory_name, **kwargs)
+ +
[docs] def get_file( + self, file_path: str, share_name: str, directory_name: str, file_name: str, **kwargs + ) -> None: + """ + Download a file from Azure File Share. + + :param file_path: Where to store the file. + :param share_name: Name of the share. + :param directory_name: Name of the directory. + :param file_name: Name of the file. + :param kwargs: Optional keyword arguments that + `FileService.get_file_to_path()` takes. + """ + self.get_conn().get_file_to_path(share_name, directory_name, file_name, file_path, **kwargs)
+ +
[docs] def get_file_to_stream( + self, stream: IO, share_name: str, directory_name: str, file_name: str, **kwargs + ) -> None: + """ + Download a file from Azure File Share. + + :param stream: A filehandle to store the file to. + :param share_name: Name of the share. + :param directory_name: Name of the directory. + :param file_name: Name of the file. + :param kwargs: Optional keyword arguments that + `FileService.get_file_to_stream()` takes. + """ + self.get_conn().get_file_to_stream(share_name, directory_name, file_name, stream, **kwargs)
+ +
[docs] def load_file( + self, file_path: str, share_name: str, directory_name: str, file_name: str, **kwargs + ) -> None: + """ + Upload a file to Azure File Share. + + :param file_path: Path to the file to load. + :param share_name: Name of the share. + :param directory_name: Name of the directory. + :param file_name: Name of the file. + :param kwargs: Optional keyword arguments that + `FileService.create_file_from_path()` takes. + """ + self.get_conn().create_file_from_path(share_name, directory_name, file_name, file_path, **kwargs)
+ +
[docs] def load_string( + self, string_data: str, share_name: str, directory_name: str, file_name: str, **kwargs + ) -> None: + """ + Upload a string to Azure File Share. + + :param string_data: String to load. + :param share_name: Name of the share. + :param directory_name: Name of the directory. + :param file_name: Name of the file. + :param kwargs: Optional keyword arguments that + `FileService.create_file_from_text()` takes. + """ + self.get_conn().create_file_from_text(share_name, directory_name, file_name, string_data, **kwargs)
+ +
[docs] def load_stream( + self, stream: str, share_name: str, directory_name: str, file_name: str, count: str, **kwargs + ) -> None: + """ + Upload a stream to Azure File Share. + + :param stream: Opened file/stream to upload as the file content. + :param share_name: Name of the share. + :param directory_name: Name of the directory. + :param file_name: Name of the file. + :param count: Size of the stream in bytes + :param kwargs: Optional keyword arguments that + `FileService.create_file_from_stream()` takes. + """ + self.get_conn().create_file_from_stream( + share_name, directory_name, file_name, stream, count, **kwargs
+ ) + +
[docs] def test_connection(self): + """Test Azure FileShare connection.""" + success = (True, "Successfully connected to Azure File Share.") + + try: + # Attempt to retrieve file share information + next(iter(self.get_conn().list_shares())) + return success + except StopIteration: + # If the iterator returned is empty it should still be considered a successful connection since + # it's possible to create a storage account without any file share and none could + # legitimately exist yet. + return success + except Exception as e: + return False, str(e)
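A minimal usage sketch of ``AzureFileShareHook``, assuming a configured ``azure_fileshare_default`` connection and an already existing share; the share, directory and file names are placeholders::

    from airflow.providers.microsoft.azure.hooks.fileshare import AzureFileShareHook

    hook = AzureFileShareHook(azure_fileshare_conn_id="azure_fileshare_default")
    # Create a directory in the placeholder share, upload a local file into it,
    # then list the files it now contains.
    hook.create_directory(share_name="myshare", directory_name="reports")
    hook.load_file(
        file_path="/tmp/report.csv",
        share_name="myshare",
        directory_name="reports",
        file_name="report.csv",
    )
    print(hook.list_files(share_name="myshare", directory_name="reports"))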
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/synapse.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/synapse.html
new file mode 100644
index 00000000000..38a64a6d6af
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/synapse.html
@@ -0,0 +1,1015 @@
+airflow.providers.microsoft.azure.hooks.synapse — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.hooks.synapse

+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import time
+from typing import Any, Union
+
+from azure.identity import ClientSecretCredential, DefaultAzureCredential
+from azure.synapse.spark import SparkClient
+from azure.synapse.spark.models import SparkBatchJobOptions
+
+from airflow.exceptions import AirflowTaskTimeout
+from airflow.hooks.base import BaseHook
+from airflow.providers.microsoft.azure.utils import get_field
+
+
[docs]Credentials = Union[ClientSecretCredential, DefaultAzureCredential]
+ + +
[docs]class AzureSynapseSparkBatchRunStatus: + """Azure Synapse Spark Job operation statuses.""" + +
[docs] NOT_STARTED = "not_started"
+
[docs] STARTING = "starting"
+
[docs] RUNNING = "running"
+
[docs] IDLE = "idle"
+
[docs] BUSY = "busy"
+
[docs] SHUTTING_DOWN = "shutting_down"
+
[docs] ERROR = "error"
+
[docs] DEAD = "dead"
+
[docs] KILLED = "killed"
+
[docs] SUCCESS = "success"
+ +
[docs] TERMINAL_STATUSES = {SUCCESS, DEAD, KILLED, ERROR}
+ + +
[docs]class AzureSynapseHook(BaseHook): + """ + A hook to interact with Azure Synapse. + :param azure_synapse_conn_id: The :ref:`Azure Synapse connection id<howto/connection:synapse>`. + :param spark_pool: The Apache Spark pool used to submit the job + """ + +
[docs] conn_type: str = "azure_synapse"
+
[docs] conn_name_attr: str = "azure_synapse_conn_id"
+
[docs] default_conn_name: str = "azure_synapse_default"
+
[docs] hook_name: str = "Azure Synapse"
+ + @staticmethod +
[docs] def get_connection_form_widgets() -> dict[str, Any]: + """Returns connection widgets to add to connection form""" + from flask_appbuilder.fieldwidgets import BS3TextFieldWidget + from flask_babel import lazy_gettext + from wtforms import StringField + + return { + "tenantId": StringField(lazy_gettext("Tenant ID"), widget=BS3TextFieldWidget()), + "subscriptionId": StringField(lazy_gettext("Subscription ID"), widget=BS3TextFieldWidget()),
+ } + + @staticmethod +
[docs] def get_ui_field_behaviour() -> dict[str, Any]: + """Returns custom field behaviour""" + return { + "hidden_fields": ["schema", "port", "extra"], + "relabeling": {"login": "Client ID", "password": "Secret", "host": "Synapse Workspace URL"},
+ } + + def __init__(self, azure_synapse_conn_id: str = default_conn_name, spark_pool: str = ""): + self.job_id: int | None = None + self._conn: SparkClient | None = None + self.conn_id = azure_synapse_conn_id + self.spark_pool = spark_pool + super().__init__() + + def _get_field(self, extras, name): + return get_field( + conn_id=self.conn_id, + conn_type=self.conn_type, + extras=extras, + field_name=name, + ) + +
[docs] def get_conn(self) -> SparkClient: + if self._conn is not None: + return self._conn + + conn = self.get_connection(self.conn_id) + extras = conn.extra_dejson + tenant = self._get_field(extras, "tenantId") + spark_pool = self.spark_pool + livy_api_version = "2022-02-22-preview" + + subscription_id = self._get_field(extras, "subscriptionId") + if not subscription_id: + raise ValueError("A Subscription ID is required to connect to Azure Synapse.") + + credential: Credentials + if conn.login is not None and conn.password is not None: + if not tenant: + raise ValueError("A Tenant ID is required when authenticating with Client ID and Secret.") + + credential = ClientSecretCredential( + client_id=conn.login, client_secret=conn.password, tenant_id=tenant + ) + else: + credential = DefaultAzureCredential() + + self._conn = self._create_client(credential, conn.host, spark_pool, livy_api_version, subscription_id) + + return self._conn
+ + @staticmethod + def _create_client(credential: Credentials, host, spark_pool, livy_api_version, subscription_id: str): + return SparkClient( + credential=credential, + endpoint=host, + spark_pool_name=spark_pool, + livy_api_version=livy_api_version, + subscription_id=subscription_id, + ) + +
[docs] def run_spark_job( + self, + payload: SparkBatchJobOptions, + ): + """ + Run a job in an Apache Spark pool. + :param payload: Livy compatible payload which represents the spark job that a user wants to submit. + """ + job = self.get_conn().spark_batch.create_spark_batch_job(payload) + self.job_id = job.id + return job
+ +
[docs] def get_job_run_status(self): + """Get the job run status.""" + job_run_status = self.get_conn().spark_batch.get_spark_batch_job(batch_id=self.job_id).state + return job_run_status
+ +
[docs] def wait_for_job_run_status( + self, + job_id: int | None, + expected_statuses: str | set[str], + check_interval: int = 60, + timeout: int = 60 * 60 * 24 * 7, + ) -> bool: + """ + Waits for a job run to match an expected status. + + :param job_id: The job run identifier. + :param expected_statuses: The desired status(es) to check against a job run's current status. + :param check_interval: Time in seconds to check on a job run's status. + :param timeout: Time in seconds to wait for a job to reach a terminal status or the expected + status. + + """ + job_run_status = self.get_job_run_status() + start_time = time.monotonic() + + while ( + job_run_status not in AzureSynapseSparkBatchRunStatus.TERMINAL_STATUSES + and job_run_status not in expected_statuses + ): + # Check if the job-run duration has exceeded the ``timeout`` configured. + if start_time + timeout < time.monotonic(): + raise AirflowTaskTimeout( + f"Job {job_id} has not reached a terminal status after {timeout} seconds." + ) + + # Wait to check the status of the job run based on the ``check_interval`` configured. + self.log.info("Sleeping for %s seconds", str(check_interval)) + time.sleep(check_interval) + + job_run_status = self.get_job_run_status() + self.log.info("Current spark job run status is %s", job_run_status) + + return job_run_status in expected_statuses
+ +
[docs] def cancel_job_run( + self, + job_id: int, + ) -> None: + """ + Cancel the spark job run. + :param job_id: The synapse spark job identifier. + """ + self.get_conn().spark_batch.cancel_spark_batch_job(job_id)
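A minimal usage sketch of ``AzureSynapseHook``, assuming a configured ``azure_synapse_default`` connection; the Spark pool name, the file URI and the ``SparkBatchJobOptions`` values are placeholders::

    from azure.synapse.spark.models import SparkBatchJobOptions

    from airflow.providers.microsoft.azure.hooks.synapse import (
        AzureSynapseHook,
        AzureSynapseSparkBatchRunStatus,
    )

    hook = AzureSynapseHook(azure_synapse_conn_id="azure_synapse_default", spark_pool="mysparkpool")
    # Livy-style batch job settings; adjust sizes and the script location to your workspace.
    payload = SparkBatchJobOptions(
        name="wordcount",
        file="abfss://jobs@mystorageaccount.dfs.core.windows.net/wordcount.py",
        driver_memory="4g",
        driver_cores=2,
        executor_memory="4g",
        executor_cores=2,
        executor_count=2,
    )
    job = hook.run_spark_job(payload)
    # Block until the batch reaches SUCCESS (or another terminal state), polling every 30 seconds.
    hook.wait_for_job_run_status(
        job_id=job.id,
        expected_statuses={AzureSynapseSparkBatchRunStatus.SUCCESS},
        check_interval=30,
    )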
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/wasb.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/wasb.html
new file mode 100644
index 00000000000..f7ba72452e4
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/hooks/wasb.html
@@ -0,0 +1,1316 @@
+airflow.providers.microsoft.azure.hooks.wasb — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.hooks.wasb

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+This module contains integration with Azure Blob Storage.
+
+It communicates via the Windows Azure Storage Blob protocol. Make sure that an
+Airflow connection of type `wasb` exists. Authorization can be done by supplying a
+login (=Storage account name) and password (=KEY), or login and SAS token in the extra
+field (see connection `wasb_default` for an example).
+"""
+from __future__ import annotations
+
+import logging
+import os
+from functools import wraps
+from typing import Any
+
+from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError
+from azure.identity import ClientSecretCredential, DefaultAzureCredential
+from azure.storage.blob import BlobClient, BlobServiceClient, ContainerClient, StorageStreamDownloader
+
+from airflow.exceptions import AirflowException
+from airflow.hooks.base import BaseHook
+
+
+def _ensure_prefixes(conn_type):
+    """
+    Remove when provider min airflow version >= 2.5.0 since this is handled by
+    provider manager from that version.
+    """
+
+    def dec(func):
+        @wraps(func)
+        def inner():
+            field_behaviors = func()
+            conn_attrs = {"host", "schema", "login", "password", "port", "extra"}
+
+            def _ensure_prefix(field):
+                if field not in conn_attrs and not field.startswith("extra__"):
+                    return f"extra__{conn_type}__{field}"
+                else:
+                    return field
+
+            if "placeholders" in field_behaviors:
+                placeholders = field_behaviors["placeholders"]
+                field_behaviors["placeholders"] = {_ensure_prefix(k): v for k, v in placeholders.items()}
+            return field_behaviors
+
+        return inner
+
+    return dec
+
+
+
[docs]class WasbHook(BaseHook): + """ + Interacts with Azure Blob Storage through the ``wasb://`` protocol. + + The storage account name and account key have to be stored on the Airflow connection + (as login and password, respectively). + + Additional options passed in the 'extra' field of the connection will be + passed to the `BlobServiceClient()` constructor. For example, authenticate + using a SAS token by adding {"sas_token": "YOUR_TOKEN"}. + + If no authentication configuration is provided, DefaultAzureCredential will be used (applicable + when using Azure compute infrastructure). + + :param wasb_conn_id: Reference to the :ref:`wasb connection <howto/connection:wasb>`. + :param public_read: Whether anonymous public read access should be used. Default is False. + """ + +
[docs] conn_name_attr = "wasb_conn_id"
+
[docs] default_conn_name = "wasb_default"
+
[docs] conn_type = "wasb"
+
[docs] hook_name = "Azure Blob Storage"
+ + @staticmethod +
[docs] def get_connection_form_widgets() -> dict[str, Any]: + """Returns connection widgets to add to connection form""" + from flask_appbuilder.fieldwidgets import BS3PasswordFieldWidget, BS3TextFieldWidget + from flask_babel import lazy_gettext + from wtforms import PasswordField, StringField + + return { + "connection_string": PasswordField( + lazy_gettext("Blob Storage Connection String (optional)"), widget=BS3PasswordFieldWidget() + ), + "shared_access_key": PasswordField( + lazy_gettext("Blob Storage Shared Access Key (optional)"), widget=BS3PasswordFieldWidget() + ), + "tenant_id": StringField( + lazy_gettext("Tenant Id (Active Directory Auth)"), widget=BS3TextFieldWidget() + ), + "sas_token": PasswordField(lazy_gettext("SAS Token (optional)"), widget=BS3PasswordFieldWidget()),
+ } + + @staticmethod + @_ensure_prefixes(conn_type="wasb") +
[docs] def get_ui_field_behaviour() -> dict[str, Any]: + """Returns custom field behaviour""" + return { + "hidden_fields": ["schema", "port"], + "relabeling": { + "login": "Blob Storage Login (optional)", + "password": "Blob Storage Key (optional)", + "host": "Account Name (Active Directory Auth)", + }, + "placeholders": { + "extra": "additional options for use with FileService and AzureFileVolume", + "login": "account name", + "password": "secret", + "host": "account url", + "connection_string": "connection string auth", + "tenant_id": "tenant", + "shared_access_key": "shared access key", + "sas_token": "account url or token",
+ }, + } + + def __init__( + self, + wasb_conn_id: str = default_conn_name, + public_read: bool = False, + ) -> None: + super().__init__() + self.conn_id = wasb_conn_id + self.public_read = public_read + self.blob_service_client = self.get_conn() + + logger = logging.getLogger("azure.core.pipeline.policies.http_logging_policy") + try: + logger.setLevel(os.environ.get("AZURE_HTTP_LOGGING_LEVEL", logging.WARNING)) + except ValueError: + logger.setLevel(logging.WARNING) + + def _get_field(self, extra_dict, field_name): + prefix = "extra__wasb__" + if field_name.startswith("extra__"): + raise ValueError( + f"Got prefixed name {field_name}; please remove the '{prefix}' prefix " + f"when using this method." + ) + if field_name in extra_dict: + return extra_dict[field_name] or None + return extra_dict.get(f"{prefix}{field_name}") or None + +
[docs] def get_conn(self) -> BlobServiceClient: + """Return the BlobServiceClient object.""" + conn = self.get_connection(self.conn_id) + extra = conn.extra_dejson or {} + + if self.public_read: + # Here we use anonymous public read + # more info + # https://docs.microsoft.com/en-us/azure/storage/blobs/storage-manage-access-to-resources + return BlobServiceClient(account_url=conn.host, **extra) + + connection_string = self._get_field(extra, "connection_string") + if connection_string: + # connection_string auth takes priority + return BlobServiceClient.from_connection_string(connection_string, **extra) + + shared_access_key = self._get_field(extra, "shared_access_key") + if shared_access_key: + # using shared access key + return BlobServiceClient(account_url=conn.host, credential=shared_access_key, **extra) + + tenant = self._get_field(extra, "tenant_id") + if tenant: + # use Active Directory auth + app_id = conn.login + app_secret = conn.password + token_credential = ClientSecretCredential(tenant, app_id, app_secret) + return BlobServiceClient(account_url=conn.host, credential=token_credential, **extra) + + sas_token = self._get_field(extra, "sas_token") + if sas_token: + if sas_token.startswith("https"): + return BlobServiceClient(account_url=sas_token, **extra) + else: + return BlobServiceClient( + account_url=f"https://{conn.login}.blob.core.windows.net/{sas_token}", **extra + ) + + # Fall back to old auth (password) or use managed identity if not provided. + credential = conn.password + if not credential: + credential = DefaultAzureCredential() + self.log.info("Using DefaultAzureCredential as credential") + return BlobServiceClient( + account_url=f"https://{conn.login}.blob.core.windows.net/", + credential=credential, + **extra,
+ ) + + def _get_container_client(self, container_name: str) -> ContainerClient: + """ + Instantiates a container client + + :param container_name: The name of the container + :return: ContainerClient + """ + return self.blob_service_client.get_container_client(container_name) + + def _get_blob_client(self, container_name: str, blob_name: str) -> BlobClient: + """ + Instantiates a blob client + + :param container_name: The name of the blob container + :param blob_name: The name of the blob. This needs not be existing + """ + return self.blob_service_client.get_blob_client(container=container_name, blob=blob_name) + +
[docs] def check_for_blob(self, container_name: str, blob_name: str, **kwargs) -> bool: + """ + Check if a blob exists on Azure Blob Storage. + + :param container_name: Name of the container. + :param blob_name: Name of the blob. + :param kwargs: Optional keyword arguments for ``BlobClient.get_blob_properties`` takes. + :return: True if the blob exists, False otherwise. + """ + try: + self._get_blob_client(container_name, blob_name).get_blob_properties(**kwargs) + except ResourceNotFoundError: + return False + return True
+ +
[docs] def check_for_prefix(self, container_name: str, prefix: str, **kwargs) -> bool: + """ + Check if a prefix exists on Azure Blob storage. + + :param container_name: Name of the container. + :param prefix: Prefix of the blob. + :param kwargs: Optional keyword arguments that ``ContainerClient.walk_blobs`` takes + :return: True if blobs matching the prefix exist, False otherwise. + """ + blobs = self.get_blobs_list(container_name=container_name, prefix=prefix, **kwargs) + return len(blobs) > 0
+ +
[docs] def get_blobs_list( + self, + container_name: str, + prefix: str | None = None, + include: list[str] | None = None, + delimiter: str = "/", + **kwargs, + ) -> list: + """ + List blobs in a given container + + :param container_name: The name of the container + :param prefix: Filters the results to return only blobs whose names + begin with the specified prefix. + :param include: Specifies one or more additional datasets to include in the + response. Options include: ``snapshots``, ``metadata``, ``uncommittedblobs``, + ``copy`, ``deleted``. + :param delimiter: filters objects based on the delimiter (for e.g '.csv') + """ + container = self._get_container_client(container_name) + blob_list = [] + blobs = container.walk_blobs(name_starts_with=prefix, include=include, delimiter=delimiter, **kwargs) + for blob in blobs: + blob_list.append(blob.name) + return blob_list
+ +
[docs] def load_file( + self, + file_path: str, + container_name: str, + blob_name: str, + create_container: bool = False, + **kwargs, + ) -> None: + """ + Upload a file to Azure Blob Storage. + + :param file_path: Path to the file to load. + :param container_name: Name of the container. + :param blob_name: Name of the blob. + :param create_container: Attempt to create the target container prior to uploading the blob. This is + useful if the target container may not exist yet. Defaults to False. + :param kwargs: Optional keyword arguments that ``BlobClient.upload_blob()`` takes. + """ + with open(file_path, "rb") as data: + self.upload( + container_name=container_name, + blob_name=blob_name, + data=data, + create_container=create_container, + **kwargs,
+ ) + +
[docs] def load_string( + self, + string_data: str, + container_name: str, + blob_name: str, + create_container: bool = False, + **kwargs, + ) -> None: + """ + Upload a string to Azure Blob Storage. + + :param string_data: String to load. + :param container_name: Name of the container. + :param blob_name: Name of the blob. + :param create_container: Attempt to create the target container prior to uploading the blob. This is + useful if the target container may not exist yet. Defaults to False. + :param kwargs: Optional keyword arguments that ``BlobClient.upload()`` takes. + """ + # Reorder the argument order from airflow.providers.amazon.aws.hooks.s3.load_string. + self.upload( + container_name=container_name, + blob_name=blob_name, + data=string_data, + create_container=create_container, + **kwargs,
+ ) + +
[docs] def get_file(self, file_path: str, container_name: str, blob_name: str, **kwargs): + """ + Download a file from Azure Blob Storage. + + :param file_path: Path to the file to download. + :param container_name: Name of the container. + :param blob_name: Name of the blob. + :param kwargs: Optional keyword arguments that `BlobClient.download_blob()` takes. + """ + with open(file_path, "wb") as fileblob: + stream = self.download(container_name=container_name, blob_name=blob_name, **kwargs) + fileblob.write(stream.readall())
+ +
[docs] def read_file(self, container_name: str, blob_name: str, **kwargs): + """ + Read a file from Azure Blob Storage and return as a string. + + :param container_name: Name of the container. + :param blob_name: Name of the blob. + :param kwargs: Optional keyword arguments that `BlobClient.download_blob` takes. + """ + return self.download(container_name, blob_name, **kwargs).content_as_text()
+ +
[docs] def upload( + self, + container_name: str, + blob_name: str, + data: Any, + blob_type: str = "BlockBlob", + length: int | None = None, + create_container: bool = False, + **kwargs, + ) -> dict[str, Any]: + """ + Creates a new blob from a data source with automatic chunking. + + :param container_name: The name of the container to upload data + :param blob_name: The name of the blob to upload. This need not exist in the container + :param data: The blob data to upload + :param blob_type: The type of the blob. This can be either ``BlockBlob``, + ``PageBlob`` or ``AppendBlob``. The default value is ``BlockBlob``. + :param length: Number of bytes to read from the stream. This is optional, + but should be supplied for optimal performance. + :param create_container: Attempt to create the target container prior to uploading the blob. This is + useful if the target container may not exist yet. Defaults to False. + """ + if create_container: + self.create_container(container_name) + + blob_client = self._get_blob_client(container_name, blob_name) + return blob_client.upload_blob(data, blob_type, length=length, **kwargs)
+ +
[docs] def download( + self, container_name, blob_name, offset: int | None = None, length: int | None = None, **kwargs + ) -> StorageStreamDownloader: + """ + Downloads a blob to the StorageStreamDownloader + + :param container_name: The name of the container containing the blob + :param blob_name: The name of the blob to download + :param offset: Start of byte range to use for downloading a section of the blob. + Must be set if length is provided. + :param length: Number of bytes to read from the stream. + """ + blob_client = self._get_blob_client(container_name, blob_name) + return blob_client.download_blob(offset=offset, length=length, **kwargs)
+ +
[docs] def create_container(self, container_name: str) -> None: + """ + Create container object if not already existing + + :param container_name: The name of the container to create + """ + container_client = self._get_container_client(container_name) + try: + self.log.debug("Attempting to create container: %s", container_name) + container_client.create_container() + self.log.info("Created container: %s", container_name) + except ResourceExistsError: + self.log.info( + "Attempted to create container %r but it already exists. If it is expected that this " + "container will always exist, consider setting create_container to False.", + container_name, + ) + except HttpResponseError as e: + self.log.info( + "Received an HTTP response error while attempting to creating container %r: %s" + "\nIf the error is related to missing permissions to create containers, please consider " + "setting create_container to False or supplying connection credentials with the " + "appropriate permission for connection ID %r.", + container_name, + e.response, + self.conn_id, + ) + except Exception as e: + self.log.info("Error while attempting to create container %r: %s", container_name, e) + raise
+ +
[docs] def delete_container(self, container_name: str) -> None: + """ + Delete a container object + + :param container_name: The name of the container + """ + try: + self.log.debug("Attempting to delete container: %s", container_name) + self._get_container_client(container_name).delete_container() + self.log.info("Deleted container: %s", container_name) + except ResourceNotFoundError: + self.log.info("Unable to delete container %s (not found)", container_name) + except: # noqa: E722 + self.log.info("Error deleting container: %s", container_name) + raise
+ +
[docs] def delete_blobs(self, container_name: str, *blobs, **kwargs) -> None: + """ + Marks the specified blobs or snapshots for deletion. + + :param container_name: The name of the container containing the blobs + :param blobs: The blobs to delete. This can be a single blob, or multiple values + can be supplied, where each value is either the name of the blob (str) or BlobProperties. + """ + self._get_container_client(container_name).delete_blobs(*blobs, **kwargs) + self.log.info("Deleted blobs: %s", blobs)
+ +
[docs] def delete_file( + self, + container_name: str, + blob_name: str, + is_prefix: bool = False, + ignore_if_missing: bool = False, + delimiter: str = "", + **kwargs, + ) -> None: + """ + Delete a file from Azure Blob Storage. + + :param container_name: Name of the container. + :param blob_name: Name of the blob. + :param is_prefix: If blob_name is a prefix, delete all matching files + :param ignore_if_missing: if True, then return success even if the + blob does not exist. + :param kwargs: Optional keyword arguments that ``ContainerClient.delete_blobs()`` takes. + """ + if is_prefix: + blobs_to_delete = self.get_blobs_list( + container_name, prefix=blob_name, delimiter=delimiter, **kwargs + ) + elif self.check_for_blob(container_name, blob_name): + blobs_to_delete = [blob_name] + else: + blobs_to_delete = [] + if not ignore_if_missing and len(blobs_to_delete) == 0: + raise AirflowException(f"Blob(s) not found: {blob_name}") + + self.delete_blobs(container_name, *blobs_to_delete, **kwargs)
+ +
[docs] def test_connection(self): + """Test Azure Blob Storage connection.""" + success = (True, "Successfully connected to Azure Blob Storage.") + + try: + # Attempt to retrieve storage account information + self.get_conn().get_account_information() + return success + except Exception as e: + return False, str(e)
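A minimal usage sketch of ``WasbHook``, assuming a configured ``wasb_default`` connection; the container and blob names are placeholders::

    from airflow.providers.microsoft.azure.hooks.wasb import WasbHook

    hook = WasbHook(wasb_conn_id="wasb_default")
    # Upload a short string, creating the placeholder container if it does not exist yet.
    hook.load_string(
        string_data="hello",
        container_name="mycontainer",
        blob_name="greetings/hello.txt",
        create_container=True,
    )
    if hook.check_for_blob("mycontainer", "greetings/hello.txt"):
        print(hook.read_file("mycontainer", "greetings/hello.txt"))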
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/log/wasb_task_handler.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/log/wasb_task_handler.html
new file mode 100644
index 00000000000..a291d2e1d8d
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/log/wasb_task_handler.html
@@ -0,0 +1,1000 @@
+airflow.providers.microsoft.azure.log.wasb_task_handler — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.log.wasb_task_handler

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import os
+import shutil
+from typing import Any
+
+from airflow.compat.functools import cached_property
+from airflow.configuration import conf
+from airflow.utils.log.file_task_handler import FileTaskHandler
+from airflow.utils.log.logging_mixin import LoggingMixin
+
+
+
[docs]class WasbTaskHandler(FileTaskHandler, LoggingMixin): + """ + WasbTaskHandler is a python log handler that handles and reads + task instance logs. It extends airflow FileTaskHandler and + uploads to and reads from Wasb remote storage. + """ + + def __init__( + self, + base_log_folder: str, + wasb_log_folder: str, + wasb_container: str, + delete_local_copy: str, + *, + filename_template: str | None = None, + ) -> None: + super().__init__(base_log_folder, filename_template) + self.wasb_container = wasb_container + self.remote_base = wasb_log_folder + self.log_relative_path = "" + self._hook = None + self.closed = False + self.upload_on_close = True + self.delete_local_copy = delete_local_copy + + @cached_property +
[docs] def hook(self): + """Returns WasbHook.""" + remote_conn_id = conf.get("logging", "REMOTE_LOG_CONN_ID") + try: + from airflow.providers.microsoft.azure.hooks.wasb import WasbHook + + return WasbHook(remote_conn_id) + except Exception: + self.log.exception( + "Could not create a WasbHook with connection id '%s'. " + "Do you have apache-airflow[azure] installed? " + "Does the connection exist, and is it " + "configured properly?", + remote_conn_id, + ) + return None
+ +
[docs] def set_context(self, ti) -> None: + super().set_context(ti) + # Local location and remote location is needed to open and + # upload local log file to Wasb remote storage. + self.log_relative_path = self._render_filename(ti, ti.try_number) + self.upload_on_close = not ti.raw
+ +
[docs] def close(self) -> None: + """Close and upload local log file to remote storage Wasb.""" + # When application exit, system shuts down all handlers by + # calling close method. Here we check if logger is already + # closed to prevent uploading the log to remote storage multiple + # times when `logging.shutdown` is called. + if self.closed: + return + + super().close() + + if not self.upload_on_close: + return + + local_loc = os.path.join(self.local_base, self.log_relative_path) + remote_loc = os.path.join(self.remote_base, self.log_relative_path) + if os.path.exists(local_loc): + # read log and remove old logs to get just the latest additions + with open(local_loc) as logfile: + log = logfile.read() + self.wasb_write(log, remote_loc, append=True) + + if self.delete_local_copy: + shutil.rmtree(os.path.dirname(local_loc)) + # Mark closed so we don't double write if close is called twice + self.closed = True
+ + def _read( + self, ti, try_number: int, metadata: dict[str, Any] | None = None + ) -> tuple[str, dict[str, bool]]: + """ + Read logs of given task instance and try_number from Wasb remote storage. + If failed, read the log from task instance host machine. + + :param ti: task instance object + :param try_number: task instance try_number to read logs from + :param metadata: log metadata, + can be used for steaming log reading and auto-tailing. + """ + # Explicitly getting log relative path is necessary as the given + # task instance might be different than task instance passed in + # in set_context method. + log_relative_path = self._render_filename(ti, try_number) + remote_loc = os.path.join(self.remote_base, log_relative_path) + + if self.wasb_log_exists(remote_loc): + # If Wasb remote file exists, we do not fetch logs from task instance + # local machine even if there are errors reading remote logs, as + # returned remote_log will contain error messages. + remote_log = self.wasb_read(remote_loc, return_error=True) + log = f"*** Reading remote log from {remote_loc}.\n{remote_log}\n" + return log, {"end_of_log": True} + else: + return super()._read(ti, try_number, metadata) + +
[docs] def wasb_log_exists(self, remote_log_location: str) -> bool: + """ + Check if remote_log_location exists in remote storage + + :param remote_log_location: log's location in remote storage + :return: True if location exists else False + """ + try: + return self.hook.check_for_blob(self.wasb_container, remote_log_location) + + except Exception as e: + self.log.debug('Exception when trying to check remote location: "%s"', e) + return False
+ +
[docs] def wasb_read(self, remote_log_location: str, return_error: bool = False): + """ + Returns the log found at the remote_log_location. Returns '' if no + logs are found or there is an error. + + :param remote_log_location: the log's location in remote storage + :param return_error: if True, returns a string error message if an + error occurs. Otherwise returns '' when an error occurs. + """ + try: + return self.hook.read_file(self.wasb_container, remote_log_location) + except Exception: + msg = f"Could not read logs from {remote_log_location}" + self.log.exception(msg) + # return error if needed + if return_error: + return msg + return ""
+ +
[docs] def wasb_write(self, log: str, remote_log_location: str, append: bool = True) -> None: + """ + Writes the log to the remote_log_location. Fails silently if no hook + was created. + + :param log: the log to write to the remote_log_location + :param remote_log_location: the log's location in remote storage + :param append: if False, any existing log file is overwritten. If True, + the new log is appended to any existing logs. + """ + if append and self.wasb_log_exists(remote_log_location): + old_log = self.wasb_read(remote_log_location) + log = "\n".join([old_log, log]) if old_log else log + + try: + self.hook.load_string(log, self.wasb_container, remote_log_location, overwrite=True) + except Exception: + self.log.exception("Could not write logs to %s", remote_log_location)
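A sketch of the handler entry such a setup needs, assuming you override the ``task`` handler in a copy of Airflow's default logging config; the keys mirror the ``WasbTaskHandler`` constructor above, the folder and container values are placeholders, and the connection id itself is read from ``[logging] remote_log_conn_id``::

    WASB_TASK_HANDLER = {
        "task": {
            "class": "airflow.providers.microsoft.azure.log.wasb_task_handler.WasbTaskHandler",
            "formatter": "airflow",
            "base_log_folder": "/opt/airflow/logs",
            "wasb_log_folder": "wasb-airflow-logs",  # placeholder remote base folder
            "wasb_container": "airflow-logs",  # placeholder container name
            "delete_local_copy": False,
        }
    }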
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/adls.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/adls.html
new file mode 100644
index 00000000000..08f17c258ba
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/adls.html
@@ -0,0 +1,915 @@
+airflow.providers.microsoft.azure.operators.adls — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.operators.adls

+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Sequence
+
+from airflow.models import BaseOperator
+from airflow.providers.microsoft.azure.hooks.data_lake import AzureDataLakeHook
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class ADLSDeleteOperator(BaseOperator): + """ + Delete files in the specified path. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:ADLSDeleteOperator` + + :param path: A directory or file to remove + :param recursive: Whether to loop into directories in the location and remove the files + :param ignore_not_found: Whether to raise error if file to delete is not found + :param azure_data_lake_conn_id: Reference to the :ref:`Azure Data Lake connection<howto/connection:adl>`. + """ + +
[docs] template_fields: Sequence[str] = ("path",)
+
[docs] ui_color = "#901dd2"
+ + def __init__( + self, + *, + path: str, + recursive: bool = False, + ignore_not_found: bool = True, + azure_data_lake_conn_id: str = "azure_data_lake_default", + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.path = path + self.recursive = recursive + self.ignore_not_found = ignore_not_found + self.azure_data_lake_conn_id = azure_data_lake_conn_id + +
[docs] def execute(self, context: Context) -> Any: + hook = AzureDataLakeHook(azure_data_lake_conn_id=self.azure_data_lake_conn_id) + return hook.remove(path=self.path, recursive=self.recursive, ignore_not_found=self.ignore_not_found)
+ + +
[docs]class ADLSListOperator(BaseOperator): + """ + List all files from the specified path + + This operator returns a python list with the names of files which can be used by + `xcom` in the downstream tasks. + + :param path: The Azure Data Lake path to find the objects. Supports glob + strings (templated) + :param azure_data_lake_conn_id: Reference to the :ref:`Azure Data Lake connection<howto/connection:adl>`. + + **Example**: + The following Operator would list all the Parquet files from ``folder/output/`` + folder in the specified ADLS account :: + + adls_files = ADLSListOperator( + task_id='adls_files', + path='folder/output/*.parquet', + azure_data_lake_conn_id='azure_data_lake_default' + ) + """ + +
[docs] template_fields: Sequence[str] = ("path",)
+
[docs] ui_color = "#901dd2"
+ + def __init__( + self, *, path: str, azure_data_lake_conn_id: str = "azure_data_lake_default", **kwargs + ) -> None: + super().__init__(**kwargs) + self.path = path + self.azure_data_lake_conn_id = azure_data_lake_conn_id + +
[docs] def execute(self, context: Context) -> list: + hook = AzureDataLakeHook(azure_data_lake_conn_id=self.azure_data_lake_conn_id) + self.log.info("Getting list of ADLS files in path: %s", self.path) + return hook.list(path=self.path)
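A minimal usage sketch of ``ADLSDeleteOperator`` inside a DAG definition, assuming a configured ``azure_data_lake_default`` connection; the task id and path are placeholders::

    from airflow.providers.microsoft.azure.operators.adls import ADLSDeleteOperator

    # Recursively delete the placeholder staging directory, tolerating an already-missing path.
    delete_staging = ADLSDeleteOperator(
        task_id="delete_staging",
        path="staging/output",
        recursive=True,
        ignore_not_found=True,
        azure_data_lake_conn_id="azure_data_lake_default",
    )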
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/adx.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/adx.html
new file mode 100644
index 00000000000..b23a49ca1d0
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/adx.html
@@ -0,0 +1,894 @@
+airflow.providers.microsoft.azure.operators.adx — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.operators.adx

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""This module contains Azure Data Explorer operators"""
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
+
+from azure.kusto.data._models import KustoResultTable
+
+from airflow.configuration import conf
+from airflow.models import BaseOperator
+from airflow.providers.microsoft.azure.hooks.adx import AzureDataExplorerHook
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class AzureDataExplorerQueryOperator(BaseOperator): + """ + Operator for querying Azure Data Explorer (Kusto). + + :param query: KQL query to run (templated). + :param database: Database to run the query on (templated). + :param options: Optional query options. See: + https://docs.microsoft.com/en-us/azure/kusto/api/netfx/request-properties#list-of-clientrequestproperties + :param azure_data_explorer_conn_id: Reference to the + :ref:`Azure Data Explorer connection<howto/connection:adx>`. + """ + +
[docs] ui_color = "#00a1f2"
+
[docs] template_fields: Sequence[str] = ("query", "database")
+
[docs] template_ext: Sequence[str] = (".kql",)
+ + def __init__( + self, + *, + query: str, + database: str, + options: dict | None = None, + azure_data_explorer_conn_id: str = "azure_data_explorer_default", + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.query = query + self.database = database + self.options = options + self.azure_data_explorer_conn_id = azure_data_explorer_conn_id + +
[docs] def get_hook(self) -> AzureDataExplorerHook: + """Returns new instance of AzureDataExplorerHook""" + return AzureDataExplorerHook(self.azure_data_explorer_conn_id)
+ +
[docs] def execute(self, context: Context) -> KustoResultTable | str: + """ + Run KQL Query on Azure Data Explorer (Kusto). + Returns `PrimaryResult` of Query v2 HTTP response contents + (https://docs.microsoft.com/en-us/azure/kusto/api/rest/response2) + """ + hook = self.get_hook() + response = hook.run_query(self.query, self.database, self.options) + if conf.getboolean("core", "enable_xcom_pickling"): + return response.primary_results[0] + else: + return str(response.primary_results[0])
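A minimal usage sketch of ``AzureDataExplorerQueryOperator`` inside a DAG definition, assuming a configured ``azure_data_explorer_default`` connection; the query, database and task id are placeholders::

    from airflow.providers.microsoft.azure.operators.adx import AzureDataExplorerQueryOperator

    adx_query = AzureDataExplorerQueryOperator(
        task_id="adx_query",
        query="StormEvents | take 10",
        database="Samples",
        azure_data_explorer_conn_id="azure_data_explorer_default",
    )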
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/asb.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/asb.html
new file mode 100644
index 00000000000..cd697c15003
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/asb.html
@@ -0,0 +1,1449 @@
+airflow.providers.microsoft.azure.operators.asb — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.operators.asb

+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import datetime
+from typing import TYPE_CHECKING, Any, Sequence
+
+from airflow.models import BaseOperator
+from airflow.providers.microsoft.azure.hooks.asb import AdminClientHook, MessageHook
+
+if TYPE_CHECKING:
+    from azure.servicebus.management._models import AuthorizationRule
+
+    from airflow.utils.context import Context
+
+
+
[docs]class AzureServiceBusCreateQueueOperator(BaseOperator): + """ + Creates an Azure Service Bus queue under a Service Bus Namespace by using ServiceBusAdministrationClient. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureServiceBusCreateQueueOperator` + + :param queue_name: The name of the queue; should be unique. + :param max_delivery_count: The maximum delivery count. A message is automatically + dead lettered after this number of deliveries. Default value is 10. + :param dead_lettering_on_message_expiration: A value that indicates whether this subscription has + dead letter support when a message expires. + :param enable_batched_operations: Value that indicates whether server-side batched + operations are enabled. + :param azure_service_bus_conn_id: Reference to the + :ref:`Azure Service Bus connection<howto/connection:azure_service_bus>`. + """ + +
[docs] template_fields: Sequence[str] = ("queue_name",)
+
[docs] ui_color = "#e4f0e8"
+ + def __init__( + self, + *, + queue_name: str, + max_delivery_count: int = 10, + dead_lettering_on_message_expiration: bool = True, + enable_batched_operations: bool = True, + azure_service_bus_conn_id: str = "azure_service_bus_default", + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.queue_name = queue_name + self.max_delivery_count = max_delivery_count + self.dead_lettering_on_message_expiration = dead_lettering_on_message_expiration + self.enable_batched_operations = enable_batched_operations + self.azure_service_bus_conn_id = azure_service_bus_conn_id + +
[docs] def execute(self, context: Context) -> None: + """Creates Queue in Azure Service Bus namespace, by connecting to Service Bus Admin client in hook""" + hook = AdminClientHook(azure_service_bus_conn_id=self.azure_service_bus_conn_id) + + # create queue with name + queue = hook.create_queue( + self.queue_name, + self.max_delivery_count, + self.dead_lettering_on_message_expiration, + self.enable_batched_operations, + ) + self.log.info("Created Queue %s", queue.name)
+ + +
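A rough usage sketch (illustrative only, not part of the archived source): the DAG id, queue name, and connection id are placeholders, and the DAG constructor uses the Airflow 2.4+ ``schedule`` argument.

# Hypothetical DAG wiring for AzureServiceBusCreateQueueOperator; all names are placeholders.
from datetime import datetime

from airflow import DAG
from airflow.providers.microsoft.azure.operators.asb import AzureServiceBusCreateQueueOperator

with DAG(dag_id="example_asb_queue", start_date=datetime(2022, 12, 1), schedule=None) as dag:
    create_queue = AzureServiceBusCreateQueueOperator(
        task_id="create_queue",
        queue_name="sb-queue-demo",                      # placeholder queue name
        max_delivery_count=10,                           # dead-letter after 10 failed deliveries
        dead_lettering_on_message_expiration=True,
        azure_service_bus_conn_id="azure_service_bus_default",
    )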
[docs]class AzureServiceBusSendMessageOperator(BaseOperator): + """ + Send a message or a batch of messages to the Service Bus queue + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureServiceBusSendMessageOperator` + + :param queue_name: The name of the queue; it should be unique. + :param message: Message which needs to be sent to the queue. It can be a string or a list of strings. + :param batch: Boolean flag, set to False by default. Set it to True if the message should be sent + as a batch message. + :param azure_service_bus_conn_id: Reference to the + :ref:`Azure Service Bus connection<howto/connection:azure_service_bus>`. + """ + +
[docs] template_fields: Sequence[str] = ("queue_name",)
+
[docs] ui_color = "#e4f0e8"
+ + def __init__( + self, + *, + queue_name: str, + message: str | list[str], + batch: bool = False, + azure_service_bus_conn_id: str = "azure_service_bus_default", + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.queue_name = queue_name + self.batch = batch + self.message = message + self.azure_service_bus_conn_id = azure_service_bus_conn_id + +
[docs] def execute(self, context: Context) -> None: + """ + Sends Message to the specific queue in Service Bus namespace, by + connecting to Service Bus client + """ + # Create the hook + hook = MessageHook(azure_service_bus_conn_id=self.azure_service_bus_conn_id) + + # send message + hook.send_message(self.queue_name, self.message, self.batch)
+ + +
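Continuing the hedged sketch above (inside the same DAG block), sending a small batch to the placeholder queue could look like this:

from airflow.providers.microsoft.azure.operators.asb import AzureServiceBusSendMessageOperator

send_message = AzureServiceBusSendMessageOperator(
    task_id="send_message",
    queue_name="sb-queue-demo",          # placeholder queue name
    message=["msg-1", "msg-2"],          # a single string is also accepted
    batch=True,                          # send the list as one batch message
    azure_service_bus_conn_id="azure_service_bus_default",
)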
[docs]class AzureServiceBusReceiveMessageOperator(BaseOperator): + """ + Receive a batch of messages at once from a specified queue + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureServiceBusReceiveMessageOperator` + + :param queue_name: The name of the queue, or a QueueProperties instance with a name. + :param max_message_count: Maximum number of messages in the batch. + :param max_wait_time: Maximum time to wait in seconds for the first message to arrive. + :param azure_service_bus_conn_id: Reference to the + :ref:`Azure Service Bus connection <howto/connection:azure_service_bus>`. + """ + +
[docs] template_fields: Sequence[str] = ("queue_name",)
+
[docs] ui_color = "#e4f0e8"
+ + def __init__( + self, + *, + queue_name: str, + azure_service_bus_conn_id: str = "azure_service_bus_default", + max_message_count: int = 10, + max_wait_time: float = 5, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.queue_name = queue_name + self.azure_service_bus_conn_id = azure_service_bus_conn_id + self.max_message_count = max_message_count + self.max_wait_time = max_wait_time + +
[docs] def execute(self, context: Context) -> None: + """ + Receive Message in specific queue in Service Bus namespace, + by connecting to Service Bus client + """ + # Create the hook + hook = MessageHook(azure_service_bus_conn_id=self.azure_service_bus_conn_id) + + # Receive message + hook.receive_message( + self.queue_name, max_message_count=self.max_message_count, max_wait_time=self.max_wait_time
+ ) + + +
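A hedged sketch of receiving from the same placeholder queue; note that, as written above, ``execute`` does not return the received messages:

from airflow.providers.microsoft.azure.operators.asb import AzureServiceBusReceiveMessageOperator

receive_message = AzureServiceBusReceiveMessageOperator(
    task_id="receive_message",
    queue_name="sb-queue-demo",   # placeholder queue name
    max_message_count=20,         # up to 20 messages per batch
    max_wait_time=5,              # wait at most 5 seconds for the first message
)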
[docs]class AzureServiceBusDeleteQueueOperator(BaseOperator): + """ + Deletes the queue in the Azure Service Bus namespace + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureServiceBusDeleteQueueOperator` + + :param queue_name: The name of the queue in the Service Bus namespace. + :param azure_service_bus_conn_id: Reference to the + :ref:`Azure Service Bus connection <howto/connection:azure_service_bus>`. + """ + +
[docs] template_fields: Sequence[str] = ("queue_name",)
+
[docs] ui_color = "#e4f0e8"
+ + def __init__( + self, + *, + queue_name: str, + azure_service_bus_conn_id: str = "azure_service_bus_default", + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.queue_name = queue_name + self.azure_service_bus_conn_id = azure_service_bus_conn_id + +
[docs] def execute(self, context: Context) -> None: + """Delete Queue in Service Bus namespace, by connecting to Service Bus Admin client""" + # Create the hook + hook = AdminClientHook(azure_service_bus_conn_id=self.azure_service_bus_conn_id) + + # delete queue with name + hook.delete_queue(self.queue_name)
+ + +
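To round off the queue sketch, a teardown task can delete the placeholder queue and the earlier tasks can be chained in order (still inside the hypothetical DAG):

from airflow.providers.microsoft.azure.operators.asb import AzureServiceBusDeleteQueueOperator

delete_queue = AzureServiceBusDeleteQueueOperator(
    task_id="delete_queue",
    queue_name="sb-queue-demo",   # placeholder queue name
)

create_queue >> send_message >> receive_message >> delete_queue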
[docs]class AzureServiceBusTopicCreateOperator(BaseOperator): + """ + Create an Azure Service Bus Topic under a Service Bus Namespace by using ServiceBusAdministrationClient + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureServiceBusTopicCreateOperator` + + :param topic_name: Name of the topic. + :param default_message_time_to_live: ISO 8601 default message time span to live value. This is + the duration after which the message expires, starting from when the message is sent to Service + Bus. This is the default value used when TimeToLive is not set on a message itself. + Input value of either type ~datetime.timedelta or string in ISO 8601 duration format + like "PT300S" is accepted. + :param max_size_in_megabytes: The maximum size of the topic in megabytes, which is the size of + memory allocated for the topic. + :param requires_duplicate_detection: A value indicating if this topic requires duplicate + detection. + :param duplicate_detection_history_time_window: ISO 8601 time span structure that defines the + duration of the duplicate detection history. The default value is 10 minutes. + Input value of either type ~datetime.timedelta or string in ISO 8601 duration format + like "PT300S" is accepted. + :param enable_batched_operations: Value that indicates whether server-side batched operations + are enabled. + :param size_in_bytes: The size of the topic, in bytes. + :param filtering_messages_before_publishing: Filter messages before publishing. + :param authorization_rules: List of Authorization rules for resource. + :param support_ordering: A value that indicates whether the topic supports ordering. + :param auto_delete_on_idle: ISO 8601 time span idle interval after which the topic is + automatically deleted. The minimum duration is 5 minutes. + Input value of either type ~datetime.timedelta or string in ISO 8601 duration format + like "PT300S" is accepted. + :param enable_partitioning: A value that indicates whether the topic is to be partitioned + across multiple message brokers. + :param enable_express: A value that indicates whether Express Entities are enabled. An express + queue holds a message in memory temporarily before writing it to persistent storage. + :param user_metadata: Metadata associated with the topic. + :param max_message_size_in_kilobytes: The maximum size in kilobytes of message payload that + can be accepted by the queue. This feature is only available when using a Premium namespace + and Service Bus API version "2021-05" or higher. + The minimum allowed value is 1024 while the maximum allowed value is 102400. Default value is 1024. + """ + +
[docs] template_fields: Sequence[str] = ("topic_name",)
+
[docs] ui_color = "#e4f0e8"
+ + def __init__( + self, + *, + topic_name: str, + azure_service_bus_conn_id: str = "azure_service_bus_default", + default_message_time_to_live: datetime.timedelta | str | None = None, + max_size_in_megabytes: int | None = None, + requires_duplicate_detection: bool | None = None, + duplicate_detection_history_time_window: datetime.timedelta | str | None = None, + enable_batched_operations: bool | None = None, + size_in_bytes: int | None = None, + filtering_messages_before_publishing: bool | None = None, + authorization_rules: list[AuthorizationRule] | None = None, + support_ordering: bool | None = None, + auto_delete_on_idle: datetime.timedelta | str | None = None, + enable_partitioning: bool | None = None, + enable_express: bool | None = None, + user_metadata: str | None = None, + max_message_size_in_kilobytes: int | None = None, + **kwargs: Any, + ) -> None: + super().__init__(**kwargs) + self.topic_name = topic_name + self.azure_service_bus_conn_id = azure_service_bus_conn_id + self.default_message_time_to_live = default_message_time_to_live + self.max_size_in_megabytes = max_size_in_megabytes + self.requires_duplicate_detection = requires_duplicate_detection + self.duplicate_detection_history_time_window = duplicate_detection_history_time_window + self.enable_batched_operations = enable_batched_operations + self.size_in_bytes = size_in_bytes + self.filtering_messages_before_publishing = filtering_messages_before_publishing + self.authorization_rules = authorization_rules + self.support_ordering = support_ordering + self.auto_delete_on_idle = auto_delete_on_idle + self.enable_partitioning = enable_partitioning + self.enable_express = enable_express + self.user_metadata = user_metadata + self.max_message_size_in_kilobytes = max_message_size_in_kilobytes + +
[docs] def execute(self, context: Context) -> str: + """Creates Topic in Service Bus namespace, by connecting to Service Bus Admin client""" + if self.topic_name is None: + raise TypeError("Topic name cannot be None.") + + # Create the hook + hook = AdminClientHook(azure_service_bus_conn_id=self.azure_service_bus_conn_id) + + with hook.get_conn() as service_mgmt_conn: + topic_properties = service_mgmt_conn.get_topic(self.topic_name) + if topic_properties and topic_properties.name == self.topic_name: + self.log.info("Topic name already exists") + return topic_properties.name + topic = service_mgmt_conn.create_topic( + topic_name=self.topic_name, + default_message_time_to_live=self.default_message_time_to_live, + max_size_in_megabytes=self.max_size_in_megabytes, + requires_duplicate_detection=self.requires_duplicate_detection, + duplicate_detection_history_time_window=self.duplicate_detection_history_time_window, + enable_batched_operations=self.enable_batched_operations, + size_in_bytes=self.size_in_bytes, + filtering_messages_before_publishing=self.filtering_messages_before_publishing, + authorization_rules=self.authorization_rules, + support_ordering=self.support_ordering, + auto_delete_on_idle=self.auto_delete_on_idle, + enable_partitioning=self.enable_partitioning, + enable_express=self.enable_express, + user_metadata=self.user_metadata, + max_message_size_in_kilobytes=self.max_message_size_in_kilobytes, + ) + self.log.info("Created Topic %s", topic.name) + return topic.name
+ + +
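An illustrative, hedged instantiation with a placeholder topic name and an ISO 8601 duration. Because ``execute`` returns the topic name, it is available to downstream tasks via XCom under the default ``return_value`` key:

from airflow.providers.microsoft.azure.operators.asb import AzureServiceBusTopicCreateOperator

create_topic = AzureServiceBusTopicCreateOperator(
    task_id="create_topic",
    topic_name="sb-topic-demo",               # placeholder topic name
    default_message_time_to_live="PT1H",      # messages expire after one hour
    enable_batched_operations=True,
)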
[docs]class AzureServiceBusSubscriptionCreateOperator(BaseOperator): + """ + Create an Azure Service Bus Topic Subscription under a Service Bus Namespace + by using ServiceBusAdministrationClient + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureServiceBusSubscriptionCreateOperator` + + :param topic_name: The topic that will own the to-be-created subscription. + :param subscription_name: Name of the subscription that need to be created + :param lock_duration: ISO 8601 time span duration of a peek-lock; that is, the amount of time that + the message is locked for other receivers. The maximum value for LockDuration is 5 minutes; the + default value is 1 minute. Input value of either type ~datetime.timedelta or string in ISO 8601 + duration format like "PT300S" is accepted. + :param requires_session: A value that indicates whether the queue supports the concept of sessions. + :param default_message_time_to_live: ISO 8601 default message time span to live value. This is the + duration after which the message expires, starting from when the message is sent to + Service Bus. This is the default value used when TimeToLive is not set on a message itself. + Input value of either type ~datetime.timedelta or string in ISO 8601 duration + format like "PT300S" is accepted. + :param dead_lettering_on_message_expiration: A value that indicates whether this subscription has + dead letter support when a message expires. + :param dead_lettering_on_filter_evaluation_exceptions: A value that indicates whether this + subscription has dead letter support when a message expires. + :param max_delivery_count: The maximum delivery count. A message is automatically dead lettered + after this number of deliveries. Default value is 10. + :param enable_batched_operations: Value that indicates whether server-side batched + operations are enabled. + :param forward_to: The name of the recipient entity to which all the messages sent to the + subscription are forwarded to. + :param user_metadata: Metadata associated with the subscription. Maximum number of characters is 1024. + :param forward_dead_lettered_messages_to: The name of the recipient entity to which all the + messages sent to the subscription are forwarded to. + :param auto_delete_on_idle: ISO 8601 time Span idle interval after which the subscription is + automatically deleted. The minimum duration is 5 minutes. Input value of either + type ~datetime.timedelta or string in ISO 8601 duration format like "PT300S" is accepted. + :param azure_service_bus_conn_id: Reference to the + :ref:`Azure Service Bus connection<howto/connection:azure_service_bus>`. + """ + +
[docs] template_fields: Sequence[str] = ("topic_name", "subscription_name")
+
[docs] ui_color = "#e4f0e8"
+ + def __init__( + self, + *, + topic_name: str, + subscription_name: str, + azure_service_bus_conn_id: str = "azure_service_bus_default", + lock_duration: datetime.timedelta | str | None = None, + requires_session: bool | None = None, + default_message_time_to_live: datetime.timedelta | str | None = None, + dead_lettering_on_message_expiration: bool | None = True, + dead_lettering_on_filter_evaluation_exceptions: bool | None = None, + max_delivery_count: int | None = 10, + enable_batched_operations: bool | None = True, + forward_to: str | None = None, + user_metadata: str | None = None, + forward_dead_lettered_messages_to: str | None = None, + auto_delete_on_idle: datetime.timedelta | str | None = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.topic_name = topic_name + self.subscription_name = subscription_name + self.lock_duration = lock_duration + self.requires_session = requires_session + self.default_message_time_to_live = default_message_time_to_live + self.dl_on_message_expiration = dead_lettering_on_message_expiration + self.dl_on_filter_evaluation_exceptions = dead_lettering_on_filter_evaluation_exceptions + self.max_delivery_count = max_delivery_count + self.enable_batched_operations = enable_batched_operations + self.forward_to = forward_to + self.user_metadata = user_metadata + self.forward_dead_lettered_messages_to = forward_dead_lettered_messages_to + self.auto_delete_on_idle = auto_delete_on_idle + self.azure_service_bus_conn_id = azure_service_bus_conn_id + +
[docs] def execute(self, context: Context) -> None: + """Creates Subscription in Service Bus namespace, by connecting to Service Bus Admin client""" + if self.subscription_name is None: + raise TypeError("Subscription name cannot be None.") + if self.topic_name is None: + raise TypeError("Topic name cannot be None.") + # Create the hook + hook = AdminClientHook(azure_service_bus_conn_id=self.azure_service_bus_conn_id) + + with hook.get_conn() as service_mgmt_conn: + # create subscription with name + subscription = service_mgmt_conn.create_subscription( + topic_name=self.topic_name, + subscription_name=self.subscription_name, + lock_duration=self.lock_duration, + requires_session=self.requires_session, + default_message_time_to_live=self.default_message_time_to_live, + dead_lettering_on_message_expiration=self.dl_on_message_expiration, + dead_lettering_on_filter_evaluation_exceptions=self.dl_on_filter_evaluation_exceptions, + max_delivery_count=self.max_delivery_count, + enable_batched_operations=self.enable_batched_operations, + forward_to=self.forward_to, + user_metadata=self.user_metadata, + forward_dead_lettered_messages_to=self.forward_dead_lettered_messages_to, + auto_delete_on_idle=self.auto_delete_on_idle, + ) + self.log.info("Created subscription %s", subscription.name)
+ + +
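A minimal, hedged example of creating a subscription under the placeholder topic from the previous sketch:

from airflow.providers.microsoft.azure.operators.asb import AzureServiceBusSubscriptionCreateOperator

create_subscription = AzureServiceBusSubscriptionCreateOperator(
    task_id="create_subscription",
    topic_name="sb-topic-demo",                  # placeholder topic name
    subscription_name="sb-subscription-demo",    # placeholder subscription name
    max_delivery_count=10,
    dead_lettering_on_message_expiration=True,
)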
[docs]class AzureServiceBusUpdateSubscriptionOperator(BaseOperator): + """ + Update an Azure ServiceBus Topic Subscription under a ServiceBus Namespace + by using ServiceBusAdministrationClient + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureServiceBusUpdateSubscriptionOperator` + + :param topic_name: The topic that owns the subscription to be updated. + :param subscription_name: Name of the subscription to be updated. + :param max_delivery_count: The maximum delivery count. A message is automatically dead lettered + after this number of deliveries. Default value is 10. + :param dead_lettering_on_message_expiration: A value that indicates whether this subscription + has dead letter support when a message expires. + :param enable_batched_operations: Value that indicates whether server-side batched + operations are enabled. + :param azure_service_bus_conn_id: Reference to the + :ref:`Azure Service Bus connection<howto/connection:azure_service_bus>`. + """ + +
[docs] template_fields: Sequence[str] = ("topic_name", "subscription_name")
+
[docs] ui_color = "#e4f0e8"
+ + def __init__( + self, + *, + topic_name: str, + subscription_name: str, + max_delivery_count: int | None = None, + dead_lettering_on_message_expiration: bool | None = None, + enable_batched_operations: bool | None = None, + azure_service_bus_conn_id: str = "azure_service_bus_default", + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.topic_name = topic_name + self.subscription_name = subscription_name + self.max_delivery_count = max_delivery_count + self.dl_on_message_expiration = dead_lettering_on_message_expiration + self.enable_batched_operations = enable_batched_operations + self.azure_service_bus_conn_id = azure_service_bus_conn_id + +
[docs] def execute(self, context: Context) -> None: + """Updates Subscription properties, by connecting to Service Bus Admin client""" + hook = AdminClientHook(azure_service_bus_conn_id=self.azure_service_bus_conn_id) + + with hook.get_conn() as service_mgmt_conn: + subscription_prop = service_mgmt_conn.get_subscription(self.topic_name, self.subscription_name) + if self.max_delivery_count: + subscription_prop.max_delivery_count = self.max_delivery_count + if self.dl_on_message_expiration is not None: + subscription_prop.dead_lettering_on_message_expiration = self.dl_on_message_expiration + if self.enable_batched_operations is not None: + subscription_prop.enable_batched_operations = self.enable_batched_operations + # update by updating the properties in the model + service_mgmt_conn.update_subscription(self.topic_name, subscription_prop) + updated_subscription = service_mgmt_conn.get_subscription(self.topic_name, self.subscription_name) + self.log.info("Subscription Updated successfully %s", updated_subscription)
+ + +
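A hedged sketch of tightening the delivery count on the placeholder subscription; only the properties passed explicitly are changed:

from airflow.providers.microsoft.azure.operators.asb import AzureServiceBusUpdateSubscriptionOperator

update_subscription = AzureServiceBusUpdateSubscriptionOperator(
    task_id="update_subscription",
    topic_name="sb-topic-demo",                  # placeholder topic name
    subscription_name="sb-subscription-demo",    # placeholder subscription name
    max_delivery_count=5,                        # lower the dead-letter threshold
    enable_batched_operations=False,
)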
[docs]class ASBReceiveSubscriptionMessageOperator(BaseOperator): + """ + Receive a batch of messages from a Service Bus Subscription under a specific Topic. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:ASBReceiveSubscriptionMessageOperator` + + :param subscription_name: The name of the subscription from which messages are received. + :param topic_name: The topic under which the subscription exists. + :param max_message_count: Maximum number of messages in the batch. + Actual number returned will depend on prefetch_count and incoming stream rate. + Setting to None will fully depend on the prefetch config. The default value is 1. + :param max_wait_time: Maximum time to wait in seconds for the first message to arrive. If no + messages arrive, and no timeout is specified, this call will not return until the + connection is closed. If specified, and no messages arrive within the timeout period, + an empty list will be returned. + :param azure_service_bus_conn_id: Reference to the + :ref:`Azure Service Bus connection <howto/connection:azure_service_bus>`. + """ + +
[docs] template_fields: Sequence[str] = ("topic_name", "subscription_name")
+
[docs] ui_color = "#e4f0e8"
+ + def __init__( + self, + *, + topic_name: str, + subscription_name: str, + max_message_count: int | None = 1, + max_wait_time: float | None = 5, + azure_service_bus_conn_id: str = "azure_service_bus_default", + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.topic_name = topic_name + self.subscription_name = subscription_name + self.max_message_count = max_message_count + self.max_wait_time = max_wait_time + self.azure_service_bus_conn_id = azure_service_bus_conn_id + +
[docs] def execute(self, context: Context) -> None: + """ + Receive Message in specific queue in Service Bus namespace, + by connecting to Service Bus client + """ + # Create the hook + hook = MessageHook(azure_service_bus_conn_id=self.azure_service_bus_conn_id) + + # Receive message + hook.receive_subscription_message( + self.topic_name, self.subscription_name, self.max_message_count, self.max_wait_time
+ ) + + +
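An illustrative instantiation against the placeholder topic and subscription; as with the queue receiver, ``execute`` does not return the messages:

from airflow.providers.microsoft.azure.operators.asb import ASBReceiveSubscriptionMessageOperator

receive_subscription_message = ASBReceiveSubscriptionMessageOperator(
    task_id="receive_subscription_message",
    topic_name="sb-topic-demo",                  # placeholder topic name
    subscription_name="sb-subscription-demo",    # placeholder subscription name
    max_message_count=10,
    max_wait_time=5,
)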
[docs]class AzureServiceBusSubscriptionDeleteOperator(BaseOperator): + """ + Deletes the topic subscription in the Azure ServiceBus namespace + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureServiceBusSubscriptionDeleteOperator` + + :param topic_name: The topic that owns the subscription to be deleted. + :param subscription_name: Name of the subscription to be deleted + :param azure_service_bus_conn_id: Reference to the + :ref:`Azure Service Bus connection <howto/connection:azure_service_bus>`. + """ + +
[docs] template_fields: Sequence[str] = ("topic_name", "subscription_name")
+
[docs] ui_color = "#e4f0e8"
+ + def __init__( + self, + *, + topic_name: str, + subscription_name: str, + azure_service_bus_conn_id: str = "azure_service_bus_default", + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.topic_name = topic_name + self.subscription_name = subscription_name + self.azure_service_bus_conn_id = azure_service_bus_conn_id + +
[docs] def execute(self, context: Context) -> None: + """Delete topic subscription in Service Bus namespace, by connecting to Service Bus Admin client""" + # Create the hook + hook = AdminClientHook(azure_service_bus_conn_id=self.azure_service_bus_conn_id) + + # delete subscription with name + hook.delete_subscription(self.subscription_name, self.topic_name)
+ + +
[docs]class AzureServiceBusTopicDeleteOperator(BaseOperator): + """ + Deletes the topic in the Azure Service Bus namespace + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureServiceBusTopicDeleteOperator` + + :param topic_name: Name of the topic to be deleted. + :param azure_service_bus_conn_id: Reference to the + :ref:`Azure Service Bus connection <howto/connection:azure_service_bus>`. + """ + +
[docs] template_fields: Sequence[str] = ("topic_name",)
+
[docs] ui_color = "#e4f0e8"
+ + def __init__( + self, + *, + topic_name: str, + azure_service_bus_conn_id: str = "azure_service_bus_default", + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.topic_name = topic_name + self.azure_service_bus_conn_id = azure_service_bus_conn_id + +
[docs] def execute(self, context: Context) -> None: + """Delete topic in Service Bus namespace, by connecting to Service Bus Admin client""" + if self.topic_name is None: + raise TypeError("Topic name cannot be None.") + hook = AdminClientHook(azure_service_bus_conn_id=self.azure_service_bus_conn_id) + + with hook.get_conn() as service_mgmt_conn: + topic_properties = service_mgmt_conn.get_topic(self.topic_name) + if topic_properties and topic_properties.name == self.topic_name: + service_mgmt_conn.delete_topic(self.topic_name) + self.log.info("Topic %s deleted.", self.topic_name) + else: + self.log.info("Topic %s does not exist.", self.topic_name)
+
+ + +
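A hedged teardown sketch for the topic examples above: delete the placeholder subscription first, then the topic itself.

from airflow.providers.microsoft.azure.operators.asb import (
    AzureServiceBusSubscriptionDeleteOperator,
    AzureServiceBusTopicDeleteOperator,
)

delete_subscription = AzureServiceBusSubscriptionDeleteOperator(
    task_id="delete_subscription",
    topic_name="sb-topic-demo",                  # placeholder topic name
    subscription_name="sb-subscription-demo",    # placeholder subscription name
)

delete_topic = AzureServiceBusTopicDeleteOperator(
    task_id="delete_topic",
    topic_name="sb-topic-demo",                  # placeholder topic name
)

delete_subscription >> delete_topic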
+ +
+
+
+
+
+


+
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+
+ +
+ + + + + + +
+ + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/batch.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/batch.html new file mode 100644 index 00000000000..b0dd0746d40 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/batch.html @@ -0,0 +1,1145 @@ + + + + + + + + + + + + airflow.providers.microsoft.azure.operators.batch — apache-airflow-providers-microsoft-azure Documentation + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +

Source code for airflow.providers.microsoft.azure.operators.batch

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Sequence
+
+from azure.batch import models as batch_models
+
+from airflow.exceptions import AirflowException
+from airflow.models import BaseOperator
+from airflow.providers.microsoft.azure.hooks.batch import AzureBatchHook
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class AzureBatchOperator(BaseOperator): + """ + Executes a job on Azure Batch Service + + :param batch_pool_id: A string that uniquely identifies the Pool within the Account. + :param batch_pool_vm_size: The size of virtual machines in the Pool + :param batch_job_id: A string that uniquely identifies the Job within the Account. + :param batch_task_command_line: The command line of the Task + :param batch_task_id: A string that uniquely identifies the task within the Job. + :param batch_pool_display_name: The display name for the Pool. + The display name need not be unique + :param batch_job_display_name: The display name for the Job. + The display name need not be unique + :param batch_job_manager_task: Details of a Job Manager Task to be launched when the Job is started. + :param batch_job_preparation_task: The Job Preparation Task. If set, the Batch service will + run the Job Preparation Task on a Node before starting any Tasks of that + Job on that Compute Node. Required if batch_job_release_task is set. + :param batch_job_release_task: The Job Release Task. Use to undo changes to Compute Nodes + made by the Job Preparation Task + :param batch_task_display_name: The display name for the task. + The display name need not be unique + :param batch_task_container_settings: The settings for the container under which the Task runs + :param batch_start_task: A Task specified to run on each Compute Node as it joins the Pool. + The Task runs when the Compute Node is added to the Pool or + when the Compute Node is restarted. + :param batch_max_retries: The number of times to retry this batch operation before it's + considered a failed operation. Default is 3 + :param batch_task_resource_files: A list of files that the Batch service will + download to the Compute Node before running the command line. + :param batch_task_output_files: A list of files that the Batch service will upload + from the Compute Node after running the command line. + :param batch_task_user_identity: The user identity under which the Task runs. + If omitted, the Task runs as a non-administrative user unique to the Task. + :param target_low_priority_nodes: The desired number of low-priority Compute Nodes in the Pool. + This property must not be specified if enable_auto_scale is set to true. + :param target_dedicated_nodes: The desired number of dedicated Compute Nodes in the Pool. + This property must not be specified if enable_auto_scale is set to true. + :param enable_auto_scale: Whether the Pool size should automatically adjust over time. Default is false + :param auto_scale_formula: A formula for the desired number of Compute Nodes in the Pool. + This property must not be specified if enableAutoScale is set to false. + It is required if enableAutoScale is set to true. + :param azure_batch_conn_id: The :ref:`Azure Batch connection id<howto/connection:azure_batch>` + :param use_latest_verified_vm_image_and_sku: Whether to use the latest verified virtual + machine image and sku in the batch account. Default is false. + :param vm_publisher: The publisher of the Azure Virtual Machines Marketplace Image. + For example, Canonical or MicrosoftWindowsServer. Required if + use_latest_image_and_sku is set to True + :param vm_offer: The offer type of the Azure Virtual Machines Marketplace Image. + For example, UbuntuServer or WindowsServer. Required if + use_latest_image_and_sku is set to True + :param sku_starts_with: The starting string of the Virtual Machine SKU. 
Required if + use_latest_image_and_sku is set to True + :param vm_sku: The name of the virtual machine sku to use + :param vm_version: The version of the virtual machine + :param vm_node_agent_sku_id: The node agent sku id of the virtual machine + :param os_family: The Azure Guest OS family to be installed on the virtual machines in the Pool. + :param os_version: The OS family version + :param timeout: The amount of time to wait for the job to complete in minutes. Default is 25 + :param should_delete_job: Whether to delete job after execution. Default is False + :param should_delete_pool: Whether to delete pool after execution of jobs. Default is False + """ + +
[docs] template_fields: Sequence[str] = ( + "batch_pool_id", + "batch_pool_vm_size", + "batch_job_id", + "batch_task_id", + "batch_task_command_line",
+ ) +
[docs] ui_color = "#f0f0e4"
+ + def __init__( + self, + *, + batch_pool_id: str, + batch_pool_vm_size: str, + batch_job_id: str, + batch_task_command_line: str, + batch_task_id: str, + vm_node_agent_sku_id: str, + vm_publisher: str | None = None, + vm_offer: str | None = None, + sku_starts_with: str | None = None, + vm_sku: str | None = None, + vm_version: str | None = None, + os_family: str | None = None, + os_version: str | None = None, + batch_pool_display_name: str | None = None, + batch_job_display_name: str | None = None, + batch_job_manager_task: batch_models.JobManagerTask | None = None, + batch_job_preparation_task: batch_models.JobPreparationTask | None = None, + batch_job_release_task: batch_models.JobReleaseTask | None = None, + batch_task_display_name: str | None = None, + batch_task_container_settings: batch_models.TaskContainerSettings | None = None, + batch_start_task: batch_models.StartTask | None = None, + batch_max_retries: int = 3, + batch_task_resource_files: list[batch_models.ResourceFile] | None = None, + batch_task_output_files: list[batch_models.OutputFile] | None = None, + batch_task_user_identity: batch_models.UserIdentity | None = None, + target_low_priority_nodes: int | None = None, + target_dedicated_nodes: int | None = None, + enable_auto_scale: bool = False, + auto_scale_formula: str | None = None, + azure_batch_conn_id="azure_batch_default", + use_latest_verified_vm_image_and_sku: bool = False, + timeout: int = 25, + should_delete_job: bool = False, + should_delete_pool: bool = False, + **kwargs, + ) -> None: + + super().__init__(**kwargs) + self.batch_pool_id = batch_pool_id + self.batch_pool_vm_size = batch_pool_vm_size + self.batch_job_id = batch_job_id + self.batch_task_id = batch_task_id + self.batch_task_command_line = batch_task_command_line + self.batch_pool_display_name = batch_pool_display_name + self.batch_job_display_name = batch_job_display_name + self.batch_job_manager_task = batch_job_manager_task + self.batch_job_preparation_task = batch_job_preparation_task + self.batch_job_release_task = batch_job_release_task + self.batch_task_display_name = batch_task_display_name + self.batch_task_container_settings = batch_task_container_settings + self.batch_start_task = batch_start_task + self.batch_max_retries = batch_max_retries + self.batch_task_resource_files = batch_task_resource_files + self.batch_task_output_files = batch_task_output_files + self.batch_task_user_identity = batch_task_user_identity + self.target_low_priority_nodes = target_low_priority_nodes + self.target_dedicated_nodes = target_dedicated_nodes + self.enable_auto_scale = enable_auto_scale + self.auto_scale_formula = auto_scale_formula + self.azure_batch_conn_id = azure_batch_conn_id + self.use_latest_image = use_latest_verified_vm_image_and_sku + self.vm_publisher = vm_publisher + self.vm_offer = vm_offer + self.sku_starts_with = sku_starts_with + self.vm_sku = vm_sku + self.vm_version = vm_version + self.vm_node_agent_sku_id = vm_node_agent_sku_id + self.os_family = os_family + self.os_version = os_version + self.timeout = timeout + self.should_delete_job = should_delete_job + self.should_delete_pool = should_delete_pool + self.hook = self.get_hook() + + def _check_inputs(self) -> Any: + if not self.os_family and not self.vm_publisher: + raise AirflowException("You must specify either vm_publisher or os_family") + if self.os_family and self.vm_publisher: + raise AirflowException( + "Cloud service configuration and virtual machine configuration " + "are mutually exclusive. 
You must specify either of os_family and" + " vm_publisher" + ) + + if self.use_latest_image: + if not all(elem for elem in [self.vm_publisher, self.vm_offer]): + raise AirflowException( + f"If use_latest_image_and_sku is set to True then the parameters vm_publisher, " + f"vm_offer, must all be set. " + f"Found vm_publisher={self.vm_publisher}, vm_offer={self.vm_offer}" + ) + if self.vm_publisher: + if not all([self.vm_sku, self.vm_offer, self.vm_node_agent_sku_id]): + raise AirflowException( + "If vm_publisher is set, then the parameters vm_sku, vm_offer," + "vm_node_agent_sku_id must be set. Found " + f"vm_publisher={self.vm_publisher}, vm_offer={self.vm_offer} " + f"vm_node_agent_sku_id={self.vm_node_agent_sku_id}, " + f"vm_version={self.vm_version}" + ) + + if not self.target_dedicated_nodes and not self.enable_auto_scale: + raise AirflowException( + "Either target_dedicated_nodes or enable_auto_scale must be set. None was set" + ) + if self.enable_auto_scale: + if self.target_dedicated_nodes or self.target_low_priority_nodes: + raise AirflowException( + f"If enable_auto_scale is set, then the parameters target_dedicated_nodes and " + f"target_low_priority_nodes must not be set. Found " + f"target_dedicated_nodes={self.target_dedicated_nodes}, " + f"target_low_priority_nodes={self.target_low_priority_nodes}" + ) + if not self.auto_scale_formula: + raise AirflowException("The auto_scale_formula is required when enable_auto_scale is set") + if self.batch_job_release_task and not self.batch_job_preparation_task: + raise AirflowException( + "A batch_job_release_task cannot be specified without also " + " specifying a batch_job_preparation_task for the Job." + ) + if not all( + [ + self.batch_pool_id, + self.batch_job_id, + self.batch_pool_vm_size, + self.batch_task_id, + self.batch_task_command_line, + ] + ): + raise AirflowException( + "Some required parameters are missing.Please you must set all the required parameters. " + ) + +
[docs] def execute(self, context: Context) -> None: + self._check_inputs() + self.hook.connection.config.retry_policy = self.batch_max_retries + + pool = self.hook.configure_pool( + pool_id=self.batch_pool_id, + vm_size=self.batch_pool_vm_size, + display_name=self.batch_pool_display_name, + target_dedicated_nodes=self.target_dedicated_nodes, + use_latest_image_and_sku=self.use_latest_image, + vm_publisher=self.vm_publisher, + vm_offer=self.vm_offer, + sku_starts_with=self.sku_starts_with, + vm_sku=self.vm_sku, + vm_version=self.vm_version, + vm_node_agent_sku_id=self.vm_node_agent_sku_id, + os_family=self.os_family, + os_version=self.os_version, + target_low_priority_nodes=self.target_low_priority_nodes, + enable_auto_scale=self.enable_auto_scale, + auto_scale_formula=self.auto_scale_formula, + start_task=self.batch_start_task, + ) + self.hook.create_pool(pool) + # Wait for nodes to reach complete state + self.hook.wait_for_all_node_state( + self.batch_pool_id, + { + batch_models.ComputeNodeState.start_task_failed, + batch_models.ComputeNodeState.unusable, + batch_models.ComputeNodeState.idle, + }, + ) + # Create job if not already exist + job = self.hook.configure_job( + job_id=self.batch_job_id, + pool_id=self.batch_pool_id, + display_name=self.batch_job_display_name, + job_manager_task=self.batch_job_manager_task, + job_preparation_task=self.batch_job_preparation_task, + job_release_task=self.batch_job_release_task, + ) + self.hook.create_job(job) + # Create task + task = self.hook.configure_task( + task_id=self.batch_task_id, + command_line=self.batch_task_command_line, + display_name=self.batch_task_display_name, + container_settings=self.batch_task_container_settings, + resource_files=self.batch_task_resource_files, + output_files=self.batch_task_output_files, + user_identity=self.batch_task_user_identity, + ) + # Add task to job + self.hook.add_single_task_to_job(job_id=self.batch_job_id, task=task) + # Wait for tasks to complete + fail_tasks = self.hook.wait_for_job_tasks_to_complete(job_id=self.batch_job_id, timeout=self.timeout) + # Clean up + if self.should_delete_job: + # delete job first + self.clean_up(job_id=self.batch_job_id) + if self.should_delete_pool: + self.clean_up(self.batch_pool_id) + # raise exception if any task fail + if fail_tasks: + raise AirflowException(f"Job fail. The failed task are: {fail_tasks}")
+ +
[docs] def on_kill(self) -> None: + response = self.hook.connection.job.terminate( + job_id=self.batch_job_id, terminate_reason="Job killed by user" + ) + self.log.info("Azure Batch job (%s) terminated: %s", self.batch_job_id, response)
+ +
[docs] def get_hook(self) -> AzureBatchHook: + """Create and return an AzureBatchHook.""" + return AzureBatchHook(azure_batch_conn_id=self.azure_batch_conn_id)
+ +
[docs] def clean_up(self, pool_id: str | None = None, job_id: str | None = None) -> None: + """ + Delete the given pool and job in the batch account + + :param pool_id: The id of the pool to delete + :param job_id: The id of the job to delete + + """ + if job_id: + self.log.info("Deleting job: %s", job_id) + self.hook.connection.job.delete(job_id) + if pool_id: + self.log.info("Deleting pool: %s", pool_id) + self.hook.connection.pool.delete(pool_id)
+
+ + +
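A minimal, hedged instantiation of the operator above. Every value is a placeholder; the VM publisher/offer/SKU path is used, which (per ``_check_inputs``) also requires ``vm_node_agent_sku_id``, and a single dedicated node is requested instead of autoscaling:

from airflow.providers.microsoft.azure.operators.batch import AzureBatchOperator

run_batch_task = AzureBatchOperator(
    task_id="run_batch_task",
    batch_pool_id="demo-pool",                            # placeholder pool id
    batch_pool_vm_size="Standard_A1_v2",                  # placeholder VM size
    batch_job_id="demo-job",
    batch_task_id="demo-task",
    batch_task_command_line="/bin/bash -c 'echo hello'",  # command run by the task
    vm_publisher="Canonical",                             # placeholder marketplace image
    vm_offer="UbuntuServer",
    vm_sku="18.04-lts",
    vm_node_agent_sku_id="batch.node.ubuntu 18.04",
    target_dedicated_nodes=1,                             # fixed pool size, no autoscale
    should_delete_job=True,                               # clean up the job after the run
    should_delete_pool=True,                              # clean up the pool after the run
    azure_batch_conn_id="azure_batch_default",
)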
+ +
+
+
+
+
+


+
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+
+ +
+ + + + + + +
+ + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/container_instances.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/container_instances.html new file mode 100644 index 00000000000..698f1210ffc --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/container_instances.html @@ -0,0 +1,1196 @@ + + + + + + + + + + + + airflow.providers.microsoft.azure.operators.container_instances — apache-airflow-providers-microsoft-azure Documentation + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +

Source code for airflow.providers.microsoft.azure.operators.container_instances

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import re
+from collections import namedtuple
+from time import sleep
+from typing import TYPE_CHECKING, Any, Sequence
+
+from azure.mgmt.containerinstance.models import (
+    Container,
+    ContainerGroup,
+    ContainerGroupNetworkProfile,
+    ContainerPort,
+    EnvironmentVariable,
+    IpAddress,
+    ResourceRequests,
+    ResourceRequirements,
+    VolumeMount,
+)
+from msrestazure.azure_exceptions import CloudError
+
+from airflow.exceptions import AirflowException, AirflowTaskTimeout
+from airflow.models import BaseOperator
+from airflow.providers.microsoft.azure.hooks.container_instance import AzureContainerInstanceHook
+from airflow.providers.microsoft.azure.hooks.container_registry import AzureContainerRegistryHook
+from airflow.providers.microsoft.azure.hooks.container_volume import AzureContainerVolumeHook
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]Volume = namedtuple( + "Volume", + ["conn_id", "account_name", "share_name", "mount_path", "read_only"],
+) + + +
[docs]DEFAULT_ENVIRONMENT_VARIABLES: dict[str, str] = {}
+
[docs]DEFAULT_SECURED_VARIABLES: Sequence[str] = []
+
[docs]DEFAULT_VOLUMES: Sequence[Volume] = []
+
[docs]DEFAULT_MEMORY_IN_GB = 2.0
+
[docs]DEFAULT_CPU = 1.0
+ + +
[docs]class AzureContainerInstancesOperator(BaseOperator): + """ + Start a container on Azure Container Instances + + :param ci_conn_id: connection id of a service principal which will be used + to start the container instance + :param registry_conn_id: connection id of a user which can login to a + private docker registry. For Azure use :ref:`Azure connection id<howto/connection:azure>` + :param resource_group: name of the resource group wherein this container + instance should be started + :param name: name of this container instance. Please note this name has + to be unique in order to run containers in parallel. + :param image: the docker image to be used + :param region: the region wherein this container instance should be started + :param environment_variables: key,value pairs containing environment + variables which will be passed to the running container + :param secured_variables: names of environmental variables that should not + be exposed outside the container (typically passwords). + :param volumes: list of ``Volume`` tuples to be mounted to the container. + Currently only Azure Fileshares are supported. + :param memory_in_gb: the amount of memory to allocate to this container + :param cpu: the number of cpus to allocate to this container + :param gpu: GPU Resource for the container. + :param command: the command to run inside the container + :param container_timeout: max time allowed for the execution of + the container instance. + :param tags: azure tags as dict of str:str + :param os_type: The operating system type required by the containers + in the container group. Possible values include: 'Windows', 'Linux' + :param restart_policy: Restart policy for all containers within the container group. + Possible values include: 'Always', 'OnFailure', 'Never' + :param ip_address: The IP address type of the container group. + :param network_profile: The network profile information for a container group. + + **Example**:: + + AzureContainerInstancesOperator( + ci_conn_id = "azure_service_principal", + registry_conn_id = "azure_registry_user", + resource_group = "my-resource-group", + name = "my-container-name-{{ ds }}", + image = "myprivateregistry.azurecr.io/my_container:latest", + region = "westeurope", + environment_variables = {"MODEL_PATH": "my_value", + "POSTGRES_LOGIN": "{{ macros.connection('postgres_default').login }}", + "POSTGRES_PASSWORD": "{{ macros.connection('postgres_default').password }}", + "JOB_GUID": "{{ ti.xcom_pull(task_ids='task1', key='guid') }}" }, + secured_variables = ['POSTGRES_PASSWORD'], + volumes = [("azure_container_instance_conn_id", + "my_storage_container", + "my_fileshare", + "/input-data", + True),], + memory_in_gb=14.0, + cpu=4.0, + gpu=GpuResource(count=1, sku='K80'), + command=["/bin/echo", "world"], + task_id="start_container" + ) + """ + +
[docs] template_fields: Sequence[str] = ("name", "image", "command", "environment_variables")
+
[docs] template_fields_renderers = {"command": "bash", "environment_variables": "json"}
+ + def __init__( + self, + *, + ci_conn_id: str, + registry_conn_id: str | None, + resource_group: str, + name: str, + image: str, + region: str, + environment_variables: dict | None = None, + secured_variables: str | None = None, + volumes: list | None = None, + memory_in_gb: Any | None = None, + cpu: Any | None = None, + gpu: Any | None = None, + command: list[str] | None = None, + remove_on_error: bool = True, + fail_if_exists: bool = True, + tags: dict[str, str] | None = None, + os_type: str = "Linux", + restart_policy: str = "Never", + ip_address: IpAddress | None = None, + ports: list[ContainerPort] | None = None, + network_profile: ContainerGroupNetworkProfile | None = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + + self.ci_conn_id = ci_conn_id + self.resource_group = resource_group + self.name = self._check_name(name) + self.image = image + self.region = region + self.registry_conn_id = registry_conn_id + self.environment_variables = environment_variables or DEFAULT_ENVIRONMENT_VARIABLES + self.secured_variables = secured_variables or DEFAULT_SECURED_VARIABLES + self.volumes = volumes or DEFAULT_VOLUMES + self.memory_in_gb = memory_in_gb or DEFAULT_MEMORY_IN_GB + self.cpu = cpu or DEFAULT_CPU + self.gpu = gpu + self.command = command + self.remove_on_error = remove_on_error + self.fail_if_exists = fail_if_exists + self._ci_hook: Any = None + self.tags = tags + self.os_type = os_type + if self.os_type not in ["Linux", "Windows"]: + raise AirflowException( + "Invalid value for the os_type argument. " + "Please set 'Linux' or 'Windows' as the os_type. " + f"Found `{self.os_type}`." + ) + self.restart_policy = restart_policy + if self.restart_policy not in ["Always", "OnFailure", "Never"]: + raise AirflowException( + "Invalid value for the restart_policy argument. " + "Please set one of 'Always', 'OnFailure','Never' as the restart_policy. " + f"Found `{self.restart_policy}`" + ) + self.ip_address = ip_address + self.ports = ports + self.network_profile = network_profile + +
[docs] def execute(self, context: Context) -> int: + # Check name again in case it was templated. + self._check_name(self.name) + + self._ci_hook = AzureContainerInstanceHook(azure_conn_id=self.ci_conn_id) + + if self.fail_if_exists: + self.log.info("Testing if container group already exists") + if self._ci_hook.exists(self.resource_group, self.name): + raise AirflowException("Container group exists") + + if self.registry_conn_id: + registry_hook = AzureContainerRegistryHook(self.registry_conn_id) + image_registry_credentials: list | None = [ + registry_hook.connection, + ] + else: + image_registry_credentials = None + + environment_variables = [] + for key, value in self.environment_variables.items(): + if key in self.secured_variables: + e = EnvironmentVariable(name=key, secure_value=value) + else: + e = EnvironmentVariable(name=key, value=value) + environment_variables.append(e) + + volumes: list[Volume | Volume] = [] + volume_mounts: list[VolumeMount | VolumeMount] = [] + for conn_id, account_name, share_name, mount_path, read_only in self.volumes: + hook = AzureContainerVolumeHook(conn_id) + + mount_name = f"mount-{len(volumes)}" + volumes.append(hook.get_file_volume(mount_name, share_name, account_name, read_only)) + volume_mounts.append(VolumeMount(name=mount_name, mount_path=mount_path, read_only=read_only)) + + exit_code = 1 + try: + self.log.info("Starting container group with %.1f cpu %.1f mem", self.cpu, self.memory_in_gb) + if self.gpu: + self.log.info("GPU count: %.1f, GPU SKU: %s", self.gpu.count, self.gpu.sku) + + resources = ResourceRequirements( + requests=ResourceRequests(memory_in_gb=self.memory_in_gb, cpu=self.cpu, gpu=self.gpu) + ) + + if self.ip_address and not self.ports: + self.ports = [ContainerPort(port=80)] + self.log.info("Default port set. Container will listen on port 80") + + container = Container( + name=self.name, + image=self.image, + resources=resources, + command=self.command, + environment_variables=environment_variables, + volume_mounts=volume_mounts, + ports=self.ports, + ) + + container_group = ContainerGroup( + location=self.region, + containers=[ + container, + ], + image_registry_credentials=image_registry_credentials, + volumes=volumes, + restart_policy=self.restart_policy, + os_type=self.os_type, + tags=self.tags, + ip_address=self.ip_address, + network_profile=self.network_profile, + ) + + self._ci_hook.create_or_update(self.resource_group, self.name, container_group) + + self.log.info("Container group started %s/%s", self.resource_group, self.name) + + exit_code = self._monitor_logging(self.resource_group, self.name) + + self.log.info("Container had exit code: %s", exit_code) + if exit_code != 0: + raise AirflowException(f"Container had a non-zero exit code, {exit_code}") + return exit_code + + except CloudError: + self.log.exception("Could not start container group") + raise AirflowException("Could not start container group") + + finally: + if exit_code == 0 or self.remove_on_error: + self.on_kill()
+ +
[docs] def on_kill(self) -> None: + if self.remove_on_error: + self.log.info("Deleting container group") + try: + self._ci_hook.delete(self.resource_group, self.name) + except Exception: + self.log.exception("Could not delete container group")
+ + def _monitor_logging(self, resource_group: str, name: str) -> int: + last_state = None + last_message_logged = None + last_line_logged = None + + while True: + try: + cg_state = self._ci_hook.get_state(resource_group, name) + instance_view = cg_state.containers[0].instance_view + + # If there is no instance view, we show the provisioning state + if instance_view is not None: + c_state = instance_view.current_state + state, exit_code, detail_status = ( + c_state.state, + c_state.exit_code, + c_state.detail_status, + ) + else: + state = cg_state.provisioning_state + exit_code = 0 + detail_status = "Provisioning" + + if instance_view is not None and instance_view.events is not None: + messages = [event.message for event in instance_view.events] + last_message_logged = self._log_last(messages, last_message_logged) + + if state != last_state: + self.log.info("Container group state changed to %s", state) + last_state = state + + if state in ["Running", "Terminated", "Succeeded"]: + try: + logs = self._ci_hook.get_logs(resource_group, name) + last_line_logged = self._log_last(logs, last_line_logged) + except CloudError: + self.log.exception( + "Exception while getting logs from container instance, retrying..." + ) + + if state == "Terminated": + self.log.info("Container exited with detail_status %s", detail_status) + return exit_code + + if state == "Failed": + self.log.error("Azure provision failure") + return 1 + + except AirflowTaskTimeout: + raise + except CloudError as err: + if "ResourceNotFound" in str(err): + self.log.warning( + "ResourceNotFound, container is probably removed " + "by another process " + "(make sure that the name is unique)." + ) + return 1 + else: + self.log.exception("Exception while getting container groups") + except Exception: + self.log.exception("Exception while getting container groups") + + sleep(1) + + def _log_last(self, logs: list | None, last_line_logged: Any) -> Any | None: + if logs: + # determine the last line which was logged before + last_line_index = 0 + for i in range(len(logs) - 1, -1, -1): + if logs[i] == last_line_logged: + # this line is the same, hence print from i+1 + last_line_index = i + 1 + break + + # log all new ones + for line in logs[last_line_index:]: + self.log.info(line.rstrip()) + + return logs[-1] + return None + + @staticmethod + def _check_name(name: str) -> str: + if "{{" in name: + # Let macros pass as they cannot be checked at construction time + return name + regex_check = re.match("[a-z0-9]([-a-z0-9]*[a-z0-9])?", name) + if regex_check is None or regex_check.group() != name: + raise AirflowException('ACI name must match regex [a-z0-9]([-a-z0-9]*[a-z0-9])? (like "my-name")') + if len(name) > 63: + raise AirflowException("ACI name cannot be longer than 63 characters") + return name
+
+ + +
+ +
+
+
+
+
+


+
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+
+ +
+ + + + + + +
+ + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/cosmos.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/cosmos.html new file mode 100644 index 00000000000..89dbe412712 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/cosmos.html @@ -0,0 +1,887 @@ + + + + + + + + + + + + airflow.providers.microsoft.azure.operators.cosmos — apache-airflow-providers-microsoft-azure Documentation + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +

Source code for airflow.providers.microsoft.azure.operators.cosmos

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
+
+from airflow.models import BaseOperator
+from airflow.providers.microsoft.azure.hooks.cosmos import AzureCosmosDBHook
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class AzureCosmosInsertDocumentOperator(BaseOperator): + """ + Inserts a new document into the specified Cosmos database and collection. + It will create both the database and collection if they do not already exist. + + :param database_name: The name of the database. (templated) + :param collection_name: The name of the collection. (templated) + :param document: The document to insert. + :param azure_cosmos_conn_id: Reference to the + :ref:`Azure CosmosDB connection<howto/connection:azure_cosmos>`. + """ + +
[docs] template_fields: Sequence[str] = ("database_name", "collection_name")
+
[docs] ui_color = "#e4f0e8"
+ + def __init__( + self, + *, + database_name: str, + collection_name: str, + document: dict, + azure_cosmos_conn_id: str = "azure_cosmos_default", + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.database_name = database_name + self.collection_name = collection_name + self.document = document + self.azure_cosmos_conn_id = azure_cosmos_conn_id + +
[docs] def execute(self, context: Context) -> None: + # Create the hook + hook = AzureCosmosDBHook(azure_cosmos_conn_id=self.azure_cosmos_conn_id) + + # Create the DB if it doesn't already exist + if not hook.does_database_exist(self.database_name): + hook.create_database(self.database_name) + + # Create the collection as well + if not hook.does_collection_exist(self.collection_name, self.database_name): + hook.create_collection(self.collection_name, self.database_name) + + # finally insert the document + hook.upsert_document(self.document, self.database_name, self.collection_name)
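A minimal usage sketch for ``AzureCosmosInsertDocumentOperator``; the DAG id, database, collection, and document below are illustrative assumptions, not values taken from this page:

import pendulum

from airflow import DAG
from airflow.providers.microsoft.azure.operators.cosmos import AzureCosmosInsertDocumentOperator

with DAG(
    dag_id="example_cosmos_insert",  # hypothetical DAG id
    start_date=pendulum.datetime(2022, 1, 1, tz="UTC"),
    schedule=None,
    catchup=False,
) as dag:
    # Creates the database and collection if they do not exist, then upserts the document.
    insert_document = AzureCosmosInsertDocumentOperator(
        task_id="insert_cosmos_document",
        database_name="my-database",        # hypothetical names
        collection_name="my-collection",
        document={"id": "unique-doc-id", "payload": "hello"},
        azure_cosmos_conn_id="azure_cosmos_default",
    )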
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/data_factory.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/data_factory.html
new file mode 100644
index 00000000000..86ad27d222a
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/data_factory.html
@@ -0,0 +1,1022 @@
+ airflow.providers.microsoft.azure.operators.data_factory — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.operators.data_factory

+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Sequence
+
+from airflow.hooks.base import BaseHook
+from airflow.models import BaseOperator, BaseOperatorLink, XCom
+from airflow.providers.microsoft.azure.hooks.data_factory import (
+    AzureDataFactoryHook,
+    AzureDataFactoryPipelineRunException,
+    AzureDataFactoryPipelineRunStatus,
+    get_field,
+)
+from airflow.utils.log.logging_mixin import LoggingMixin
+
+if TYPE_CHECKING:
+    from airflow.models.taskinstance import TaskInstanceKey
+    from airflow.utils.context import Context
+
+
+
+
+
+
[docs]class AzureDataFactoryRunPipelineOperator(BaseOperator): + """ + Executes a data factory pipeline. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureDataFactoryRunPipelineOperator` + + :param azure_data_factory_conn_id: The connection identifier for connecting to Azure Data Factory. + :param pipeline_name: The name of the pipeline to execute. + :param wait_for_termination: Flag to wait on a pipeline run's termination. By default, this feature is + enabled but could be disabled to perform an asynchronous wait for a long-running pipeline execution + using the ``AzureDataFactoryPipelineRunSensor``. + :param resource_group_name: The resource group name. If a value is not passed in to the operator, the + ``AzureDataFactoryHook`` will attempt to use the resource group name provided in the corresponding + connection. + :param factory_name: The data factory name. If a value is not passed in to the operator, the + ``AzureDataFactoryHook`` will attempt to use the factory name name provided in the corresponding + connection. + :param reference_pipeline_run_id: The pipeline run identifier. If this run ID is specified the parameters + of the specified run will be used to create a new run. + :param is_recovery: Recovery mode flag. If recovery mode is set to `True`, the specified referenced + pipeline run and the new run will be grouped under the same ``groupId``. + :param start_activity_name: In recovery mode, the rerun will start from this activity. If not specified, + all activities will run. + :param start_from_failure: In recovery mode, if set to true, the rerun will start from failed activities. + The property will be used only if ``start_activity_name`` is not specified. + :param parameters: Parameters of the pipeline run. These parameters are referenced in a pipeline via + ``@pipeline().parameters.parameterName`` and will be used only if the ``reference_pipeline_run_id`` is + not specified. + :param timeout: Time in seconds to wait for a pipeline to reach a terminal status for non-asynchronous + waits. Used only if ``wait_for_termination`` is True. + :param check_interval: Time in seconds to check on a pipeline run's status for non-asynchronous waits. + Used only if ``wait_for_termination`` is True. + """ + +
[docs] template_fields: Sequence[str] = ( + "azure_data_factory_conn_id", + "resource_group_name", + "factory_name", + "pipeline_name", + "reference_pipeline_run_id", + "parameters",
+ ) +
[docs] template_fields_renderers = {"parameters": "json"}
+ +
[docs] ui_color = "#0678d4"
+ + + + def __init__( + self, + *, + pipeline_name: str, + azure_data_factory_conn_id: str = AzureDataFactoryHook.default_conn_name, + wait_for_termination: bool = True, + resource_group_name: str | None = None, + factory_name: str | None = None, + reference_pipeline_run_id: str | None = None, + is_recovery: bool | None = None, + start_activity_name: str | None = None, + start_from_failure: bool | None = None, + parameters: dict[str, Any] | None = None, + timeout: int = 60 * 60 * 24 * 7, + check_interval: int = 60, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.azure_data_factory_conn_id = azure_data_factory_conn_id + self.pipeline_name = pipeline_name + self.wait_for_termination = wait_for_termination + self.resource_group_name = resource_group_name + self.factory_name = factory_name + self.reference_pipeline_run_id = reference_pipeline_run_id + self.is_recovery = is_recovery + self.start_activity_name = start_activity_name + self.start_from_failure = start_from_failure + self.parameters = parameters + self.timeout = timeout + self.check_interval = check_interval + +
[docs] def execute(self, context: Context) -> None: + self.hook = AzureDataFactoryHook(azure_data_factory_conn_id=self.azure_data_factory_conn_id) + self.log.info("Executing the %s pipeline.", self.pipeline_name) + response = self.hook.run_pipeline( + pipeline_name=self.pipeline_name, + resource_group_name=self.resource_group_name, + factory_name=self.factory_name, + reference_pipeline_run_id=self.reference_pipeline_run_id, + is_recovery=self.is_recovery, + start_activity_name=self.start_activity_name, + start_from_failure=self.start_from_failure, + parameters=self.parameters, + ) + self.run_id = vars(response)["run_id"] + # Push the ``run_id`` value to XCom regardless of what happens during execution. This allows for + # retrieval the executed pipeline's ``run_id`` for downstream tasks especially if performing an + # asynchronous wait. + context["ti"].xcom_push(key="run_id", value=self.run_id) + + if self.wait_for_termination: + self.log.info("Waiting for pipeline run %s to terminate.", self.run_id) + + if self.hook.wait_for_pipeline_run_status( + run_id=self.run_id, + expected_statuses=AzureDataFactoryPipelineRunStatus.SUCCEEDED, + check_interval=self.check_interval, + timeout=self.timeout, + resource_group_name=self.resource_group_name, + factory_name=self.factory_name, + ): + self.log.info("Pipeline run %s has completed successfully.", self.run_id) + else: + raise AzureDataFactoryPipelineRunException( + f"Pipeline run {self.run_id} has failed or has been cancelled."
+ ) + +
[docs] def on_kill(self) -> None: + if self.run_id: + self.hook.cancel_pipeline_run( + run_id=self.run_id, + resource_group_name=self.resource_group_name, + factory_name=self.factory_name, + ) + + # Check to ensure the pipeline run was cancelled as expected. + if self.hook.wait_for_pipeline_run_status( + run_id=self.run_id, + expected_statuses=AzureDataFactoryPipelineRunStatus.CANCELLED, + check_interval=self.check_interval, + timeout=self.timeout, + resource_group_name=self.resource_group_name, + factory_name=self.factory_name, + ): + self.log.info("Pipeline run %s has been cancelled successfully.", self.run_id) + else: + raise AzureDataFactoryPipelineRunException(f"Pipeline run {self.run_id} was not cancelled.")
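A minimal usage sketch for ``AzureDataFactoryRunPipelineOperator`` with the default blocking wait; the DAG id, pipeline name, and parameters are illustrative assumptions:

import pendulum

from airflow import DAG
from airflow.providers.microsoft.azure.operators.data_factory import AzureDataFactoryRunPipelineOperator

with DAG(
    dag_id="example_adf_run_pipeline",  # hypothetical DAG id
    start_date=pendulum.datetime(2022, 1, 1, tz="UTC"),
    schedule=None,
    catchup=False,
) as dag:
    # Runs the pipeline and blocks until it reaches a terminal state (wait_for_termination=True by default).
    run_pipeline = AzureDataFactoryRunPipelineOperator(
        task_id="run_adf_pipeline",
        pipeline_name="my-pipeline",        # hypothetical pipeline
        parameters={"myParam": "value"},    # referenced as @pipeline().parameters.myParam
        timeout=60 * 60,                    # give up after an hour
        check_interval=30,                  # poll every 30 seconds
    )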
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/synapse.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/synapse.html
new file mode 100644
index 00000000000..61611f068a7
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/synapse.html
@@ -0,0 +1,922 @@
+ airflow.providers.microsoft.azure.operators.synapse — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.operators.synapse

+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
+
+from azure.synapse.spark.models import SparkBatchJobOptions
+
+from airflow.models import BaseOperator
+from airflow.providers.microsoft.azure.hooks.synapse import AzureSynapseHook, AzureSynapseSparkBatchRunStatus
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class AzureSynapseRunSparkBatchOperator(BaseOperator): + """ + Executes a Spark job on Azure Synapse. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureSynapseRunSparkBatchOperator` + + :param azure_synapse_conn_id: The connection identifier for connecting to Azure Synapse. + :param wait_for_termination: Flag to wait on a job run's termination. + :param spark_pool: The target Synapse Spark pool used to submit the job. + :param payload: Livy-compatible payload which represents the Spark job that a user wants to submit. + :param timeout: Time in seconds to wait for a job to reach a terminal status for non-asynchronous + waits. Used only if ``wait_for_termination`` is True. + :param check_interval: Time in seconds to check on a job run's status for non-asynchronous waits. + Used only if ``wait_for_termination`` is True. + """ + +
[docs] template_fields: Sequence[str] = ( + "azure_synapse_conn_id", + "spark_pool",
+ ) +
[docs] template_fields_renderers = {"parameters": "json"}
+ +
[docs] ui_color = "#0678d4"
+ + def __init__( + self, + *, + azure_synapse_conn_id: str = AzureSynapseHook.default_conn_name, + wait_for_termination: bool = True, + spark_pool: str = "", + payload: SparkBatchJobOptions, + timeout: int = 60 * 60 * 24 * 7, + check_interval: int = 60, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.job_id = None + self.azure_synapse_conn_id = azure_synapse_conn_id + self.wait_for_termination = wait_for_termination + self.spark_pool = spark_pool + self.payload = payload + self.timeout = timeout + self.check_interval = check_interval + +
[docs] def execute(self, context: Context) -> None: + self.hook = AzureSynapseHook( + azure_synapse_conn_id=self.azure_synapse_conn_id, spark_pool=self.spark_pool + ) + self.log.info("Executing the Synapse spark job.") + response = self.hook.run_spark_job(payload=self.payload) + self.log.info(response) + self.job_id = vars(response)["id"] + # Push the ``job_id`` value to XCom regardless of what happens during execution. This allows for + # retrieval the executed job's ``id`` for downstream tasks especially if performing an + # asynchronous wait. + context["ti"].xcom_push(key="job_id", value=self.job_id) + + if self.wait_for_termination: + self.log.info("Waiting for job run %s to terminate.", self.job_id) + + if self.hook.wait_for_job_run_status( + job_id=self.job_id, + expected_statuses=AzureSynapseSparkBatchRunStatus.SUCCESS, + check_interval=self.check_interval, + timeout=self.timeout, + ): + self.log.info("Job run %s has completed successfully.", self.job_id) + else: + raise Exception(f"Job run {self.job_id} has failed or has been cancelled.")
+ +
[docs] def on_kill(self) -> None: + if self.job_id: + self.hook.cancel_job_run( + job_id=self.job_id, + ) + self.log.info("Job run %s has been cancelled successfully.", self.job_id)
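A minimal usage sketch for ``AzureSynapseRunSparkBatchOperator``. The ``SparkBatchJobOptions`` fields shown are assumed Livy-style batch options; the storage path, pool name, and resource sizes are illustrative assumptions, not values from this page:

import pendulum
from azure.synapse.spark.models import SparkBatchJobOptions

from airflow import DAG
from airflow.providers.microsoft.azure.operators.synapse import AzureSynapseRunSparkBatchOperator

with DAG(
    dag_id="example_synapse_spark_job",  # hypothetical DAG id
    start_date=pendulum.datetime(2022, 1, 1, tz="UTC"),
    schedule=None,
    catchup=False,
) as dag:
    # Livy-compatible job definition; the file location and sizing below are illustrative.
    job_options = SparkBatchJobOptions(
        name="wordcount",
        file="abfss://data@mystorageaccount.dfs.core.windows.net/jobs/wordcount.py",
        driver_memory="4g",
        driver_cores=2,
        executor_memory="4g",
        executor_cores=2,
        executor_count=2,
    )

    run_spark_job = AzureSynapseRunSparkBatchOperator(
        task_id="run_spark_job",
        spark_pool="mysparkpool",  # hypothetical Spark pool
        payload=job_options,
    )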
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/wasb_delete_blob.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/wasb_delete_blob.html
new file mode 100644
index 00000000000..2300fdf3e01
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/operators/wasb_delete_blob.html
@@ -0,0 +1,887 @@
+ airflow.providers.microsoft.azure.operators.wasb_delete_blob — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.operators.wasb_delete_blob

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Sequence
+
+from airflow.models import BaseOperator
+from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class WasbDeleteBlobOperator(BaseOperator): + """ + Deletes blob(s) on Azure Blob Storage. + + :param container_name: Name of the container. (templated) + :param blob_name: Name of the blob. (templated) + :param wasb_conn_id: Reference to the :ref:`wasb connection <howto/connection:wasb>`. + :param check_options: Optional keyword arguments that + `WasbHook.check_for_blob()` takes. + :param is_prefix: If blob_name is a prefix, delete all files matching prefix. + :param ignore_if_missing: if True, then return success even if the + blob does not exist. + """ + +
[docs] template_fields: Sequence[str] = ("container_name", "blob_name")
+ + def __init__( + self, + *, + container_name: str, + blob_name: str, + wasb_conn_id: str = "wasb_default", + check_options: Any = None, + is_prefix: bool = False, + ignore_if_missing: bool = False, + **kwargs, + ) -> None: + super().__init__(**kwargs) + if check_options is None: + check_options = {} + self.wasb_conn_id = wasb_conn_id + self.container_name = container_name + self.blob_name = blob_name + self.check_options = check_options + self.is_prefix = is_prefix + self.ignore_if_missing = ignore_if_missing + +
[docs] def execute(self, context: Context) -> None: + self.log.info("Deleting blob: %s\n in wasb://%s", self.blob_name, self.container_name) + hook = WasbHook(wasb_conn_id=self.wasb_conn_id) + + hook.delete_file( + self.container_name, self.blob_name, self.is_prefix, self.ignore_if_missing, **self.check_options
+ ) +
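A minimal usage sketch for ``WasbDeleteBlobOperator``, deleting by prefix and tolerating a missing blob; the container and prefix are illustrative assumptions:

import pendulum

from airflow import DAG
from airflow.providers.microsoft.azure.operators.wasb_delete_blob import WasbDeleteBlobOperator

with DAG(
    dag_id="example_wasb_delete_blob",  # hypothetical DAG id
    start_date=pendulum.datetime(2022, 1, 1, tz="UTC"),
    schedule=None,
    catchup=False,
) as dag:
    # Deletes every blob under the given prefix and still succeeds if nothing matches.
    delete_blobs = WasbDeleteBlobOperator(
        task_id="delete_staging_blobs",
        container_name="my-container",  # hypothetical container
        blob_name="staging/",           # treated as a prefix because is_prefix=True
        is_prefix=True,
        ignore_if_missing=True,
    )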
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/secrets/key_vault.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/secrets/key_vault.html
new file mode 100644
index 00000000000..f197c84929e
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/secrets/key_vault.html
@@ -0,0 +1,1006 @@
+ airflow.providers.microsoft.azure.secrets.key_vault — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.secrets.key_vault

+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import re
+import warnings
+
+from azure.core.exceptions import ResourceNotFoundError
+from azure.identity import DefaultAzureCredential
+from azure.keyvault.secrets import SecretClient
+
+from airflow.compat.functools import cached_property
+from airflow.secrets import BaseSecretsBackend
+from airflow.utils.log.logging_mixin import LoggingMixin
+from airflow.version import version as airflow_version
+
+
+def _parse_version(val):
+    val = re.sub(r"(\d+\.\d+\.\d+).*", lambda x: x.group(1), val)
+    return tuple(int(x) for x in val.split("."))
+
+
+
[docs]class AzureKeyVaultBackend(BaseSecretsBackend, LoggingMixin): + """ + Retrieves Airflow Connections or Variables from Azure Key Vault secrets. + + The Azure Key Vault can be configured as a secrets backend in the ``airflow.cfg``: + + .. code-block:: ini + + [secrets] + backend = airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend + backend_kwargs = {"connections_prefix": "airflow-connections", "vault_url": "<azure_key_vault_uri>"} + + For example, if the secrets prefix is ``airflow-connections-smtp-default``, this would be accessible + if you provide ``{"connections_prefix": "airflow-connections"}`` and request conn_id ``smtp-default``. + And if variables prefix is ``airflow-variables-hello``, this would be accessible + if you provide ``{"variables_prefix": "airflow-variables"}`` and request variable key ``hello``. + + For client authentication, the ``DefaultAzureCredential`` from the Azure Python SDK is used as + credential provider, which supports service principal, managed identity and user credentials + + For example, to specify a service principal with secret you can set the environment variables + ``AZURE_TENANT_ID``, ``AZURE_CLIENT_ID`` and ``AZURE_CLIENT_SECRET``. + + .. seealso:: + For more details on client authentication refer to the ``DefaultAzureCredential`` Class reference: + https://docs.microsoft.com/en-us/python/api/azure-identity/azure.identity.defaultazurecredential?view=azure-python + + :param connections_prefix: Specifies the prefix of the secret to read to get Connections + If set to None (null), requests for connections will not be sent to Azure Key Vault + :param variables_prefix: Specifies the prefix of the secret to read to get Variables + If set to None (null), requests for variables will not be sent to Azure Key Vault + :param config_prefix: Specifies the prefix of the secret to read to get Variables. + If set to None (null), requests for configurations will not be sent to Azure Key Vault + :param vault_url: The URL of an Azure Key Vault to use + :param sep: separator used to concatenate secret_prefix and secret_id. Default: "-" + """ + + def __init__( + self, + connections_prefix: str = "airflow-connections", + variables_prefix: str = "airflow-variables", + config_prefix: str = "airflow-config", + vault_url: str = "", + sep: str = "-", + **kwargs, + ) -> None: + super().__init__() + self.vault_url = vault_url + if connections_prefix is not None: + self.connections_prefix = connections_prefix.rstrip(sep) + else: + self.connections_prefix = connections_prefix + if variables_prefix is not None: + self.variables_prefix = variables_prefix.rstrip(sep) + else: + self.variables_prefix = variables_prefix + if config_prefix is not None: + self.config_prefix = config_prefix.rstrip(sep) + else: + self.config_prefix = config_prefix + self.sep = sep + self.kwargs = kwargs + + @cached_property +
[docs] def client(self) -> SecretClient: + """Create an Azure Key Vault client.""" + credential = DefaultAzureCredential() + client = SecretClient(vault_url=self.vault_url, credential=credential, **self.kwargs) + return client
+ +
[docs] def get_conn_value(self, conn_id: str) -> str | None: + """ + Get a serialized representation of Airflow Connection from an Azure Key Vault secret + + :param conn_id: The Airflow connection id to retrieve + """ + if self.connections_prefix is None: + return None + + return self._get_secret(self.connections_prefix, conn_id)
+ +
[docs] def get_conn_uri(self, conn_id: str) -> str | None: + """ + Return URI representation of Connection conn_id. + + As of Airflow version 2.3.0 this method is deprecated. + + :param conn_id: the connection id + :return: deserialized Connection + """ + if _parse_version(airflow_version) >= (2, 3): + warnings.warn( + f"Method `{self.__class__.__name__}.get_conn_uri` is deprecated and will be removed " + "in a future release. Please use method `get_conn_value` instead.", + DeprecationWarning, + stacklevel=2, + ) + return self.get_conn_value(conn_id)
+ +
[docs] def get_variable(self, key: str) -> str | None: + """ + Get an Airflow Variable from an Azure Key Vault secret. + + :param key: Variable Key + :return: Variable Value + """ + if self.variables_prefix is None: + return None + + return self._get_secret(self.variables_prefix, key)
+ +
[docs] def get_config(self, key: str) -> str | None: + """ + Get Airflow Configuration + + :param key: Configuration Option Key + :return: Configuration Option Value + """ + if self.config_prefix is None: + return None + + return self._get_secret(self.config_prefix, key)
+ + @staticmethod +
[docs] def build_path(path_prefix: str, secret_id: str, sep: str = "-") -> str: + """ + Given a path_prefix and secret_id, build a valid secret name for the Azure Key Vault Backend. + Also replaces underscore in the path with dashes to support easy switching between + environment variables, so ``connection_default`` becomes ``connection-default``. + + :param path_prefix: The path prefix of the secret to retrieve + :param secret_id: Name of the secret + :param sep: Separator used to concatenate path_prefix and secret_id + """ + # When an empty prefix is given, do not add a separator to the secret name + if path_prefix == "": + path = f"{secret_id}" + else: + path = f"{path_prefix}{sep}{secret_id}" + return path.replace("_", sep)
+ + def _get_secret(self, path_prefix: str, secret_id: str) -> str | None: + """ + Get an Azure Key Vault secret value + + :param path_prefix: Prefix for the Path to get Secret + :param secret_id: Secret Key + """ + name = self.build_path(path_prefix, secret_id, self.sep) + try: + secret = self.client.get_secret(name=name) + return secret.value + except ResourceNotFoundError as ex: + self.log.debug("Secret %s not found: %s", name, ex) + return None
+
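Given the ``build_path`` logic above (prefix, separator, secret id, with underscores replaced by the separator), the secret names the backend looks up in Key Vault can be previewed directly; the connection and variable ids below are illustrative:

from airflow.providers.microsoft.azure.secrets.key_vault import AzureKeyVaultBackend

# build_path is a staticmethod, so it can be called without configuring the backend.
print(AzureKeyVaultBackend.build_path("airflow-connections", "smtp_default"))
# -> airflow-connections-smtp-default  (the underscore becomes the separator)

print(AzureKeyVaultBackend.build_path("airflow-variables", "hello"))
# -> airflow-variables-hello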
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/sensors/cosmos.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/sensors/cosmos.html
new file mode 100644
index 00000000000..d3bf121b309
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/sensors/cosmos.html
@@ -0,0 +1,886 @@
+ airflow.providers.microsoft.azure.sensors.cosmos — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.sensors.cosmos

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
+
+from airflow.providers.microsoft.azure.hooks.cosmos import AzureCosmosDBHook
+from airflow.sensors.base import BaseSensorOperator
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class AzureCosmosDocumentSensor(BaseSensorOperator): + """ + Checks for the existence of a document which + matches the given query in CosmosDB. Example: + + .. code-block:: + + azure_cosmos_sensor = AzureCosmosDocumentSensor( + database_name="somedatabase_name", + collection_name="somecollection_name", + document_id="unique-doc-id", + azure_cosmos_conn_id="azure_cosmos_default", + task_id="azure_cosmos_sensor", + ) + + :param database_name: Target CosmosDB database_name. + :param collection_name: Target CosmosDB collection_name. + :param document_id: The ID of the target document. + :param azure_cosmos_conn_id: Reference to the + :ref:`Azure CosmosDB connection<howto/connection:azure_cosmos>`. + """ + +
[docs] template_fields: Sequence[str] = ("database_name", "collection_name", "document_id")
+ + def __init__( + self, + *, + database_name: str, + collection_name: str, + document_id: str, + azure_cosmos_conn_id: str = "azure_cosmos_default", + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.azure_cosmos_conn_id = azure_cosmos_conn_id + self.database_name = database_name + self.collection_name = collection_name + self.document_id = document_id + +
[docs] def poke(self, context: Context) -> bool: + self.log.info("*** Entering poke") + hook = AzureCosmosDBHook(self.azure_cosmos_conn_id) + return hook.get_document(self.document_id, self.database_name, self.collection_name) is not None
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/sensors/data_factory.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/sensors/data_factory.html
new file mode 100644
index 00000000000..b0729e3d904
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/sensors/data_factory.html
@@ -0,0 +1,895 @@
+ airflow.providers.microsoft.azure.sensors.data_factory — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.sensors.data_factory

+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
+
+from airflow.providers.microsoft.azure.hooks.data_factory import (
+    AzureDataFactoryHook,
+    AzureDataFactoryPipelineRunException,
+    AzureDataFactoryPipelineRunStatus,
+)
+from airflow.sensors.base import BaseSensorOperator
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class AzureDataFactoryPipelineRunStatusSensor(BaseSensorOperator): + """ + Checks the status of a pipeline run. + + :param azure_data_factory_conn_id: The connection identifier for connecting to Azure Data Factory. + :param run_id: The pipeline run identifier. + :param resource_group_name: The resource group name. + :param factory_name: The data factory name. + """ + +
[docs] template_fields: Sequence[str] = ( + "azure_data_factory_conn_id", + "resource_group_name", + "factory_name", + "run_id",
+ ) + +
[docs] ui_color = "#50e6ff"
+ + def __init__( + self, + *, + run_id: str, + azure_data_factory_conn_id: str = AzureDataFactoryHook.default_conn_name, + resource_group_name: str | None = None, + factory_name: str | None = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.azure_data_factory_conn_id = azure_data_factory_conn_id + self.run_id = run_id + self.resource_group_name = resource_group_name + self.factory_name = factory_name + +
[docs] def poke(self, context: Context) -> bool: + self.hook = AzureDataFactoryHook(azure_data_factory_conn_id=self.azure_data_factory_conn_id) + pipeline_run_status = self.hook.get_pipeline_run_status( + run_id=self.run_id, + resource_group_name=self.resource_group_name, + factory_name=self.factory_name, + ) + + if pipeline_run_status == AzureDataFactoryPipelineRunStatus.FAILED: + raise AzureDataFactoryPipelineRunException(f"Pipeline run {self.run_id} has failed.") + + if pipeline_run_status == AzureDataFactoryPipelineRunStatus.CANCELLED: + raise AzureDataFactoryPipelineRunException(f"Pipeline run {self.run_id} has been cancelled.") + + return pipeline_run_status == AzureDataFactoryPipelineRunStatus.SUCCEEDED
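A minimal usage sketch pairing this sensor with ``AzureDataFactoryRunPipelineOperator`` in asynchronous mode: the operator pushes ``run_id`` to XCom (as shown in its ``execute`` above), and the sensor pulls it via a template, since ``run_id`` is a templated field. Names below are illustrative assumptions:

import pendulum

from airflow import DAG
from airflow.providers.microsoft.azure.operators.data_factory import AzureDataFactoryRunPipelineOperator
from airflow.providers.microsoft.azure.sensors.data_factory import AzureDataFactoryPipelineRunStatusSensor

with DAG(
    dag_id="example_adf_async_wait",  # hypothetical DAG id
    start_date=pendulum.datetime(2022, 1, 1, tz="UTC"),
    schedule=None,
    catchup=False,
) as dag:
    # Trigger the pipeline without blocking; the run_id ends up in XCom under key "run_id".
    run_pipeline = AzureDataFactoryRunPipelineOperator(
        task_id="run_pipeline",
        pipeline_name="my-pipeline",  # hypothetical pipeline
        wait_for_termination=False,
    )

    # Poll the run until it succeeds; raises if the run is Failed or Cancelled.
    wait_for_pipeline = AzureDataFactoryPipelineRunStatusSensor(
        task_id="wait_for_pipeline",
        run_id="{{ task_instance.xcom_pull(task_ids='run_pipeline', key='run_id') }}",
        poke_interval=30,
    )

    run_pipeline >> wait_for_pipeline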
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/sensors/wasb.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/sensors/wasb.html
new file mode 100644
index 00000000000..336ecc8f658
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/sensors/wasb.html
@@ -0,0 +1,913 @@
+ airflow.providers.microsoft.azure.sensors.wasb — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.sensors.wasb

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
+
+from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
+from airflow.sensors.base import BaseSensorOperator
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class WasbBlobSensor(BaseSensorOperator): + """ + Waits for a blob to arrive on Azure Blob Storage. + + :param container_name: Name of the container. + :param blob_name: Name of the blob. + :param wasb_conn_id: Reference to the :ref:`wasb connection <howto/connection:wasb>`. + :param check_options: Optional keyword arguments that + `WasbHook.check_for_blob()` takes. + """ + +
[docs] template_fields: Sequence[str] = ("container_name", "blob_name")
+ + def __init__( + self, + *, + container_name: str, + blob_name: str, + wasb_conn_id: str = "wasb_default", + check_options: dict | None = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + if check_options is None: + check_options = {} + self.wasb_conn_id = wasb_conn_id + self.container_name = container_name + self.blob_name = blob_name + self.check_options = check_options + +
[docs] def poke(self, context: Context): + self.log.info("Poking for blob: %s\n in wasb://%s", self.blob_name, self.container_name) + hook = WasbHook(wasb_conn_id=self.wasb_conn_id) + return hook.check_for_blob(self.container_name, self.blob_name, **self.check_options)
+ + +
[docs]class WasbPrefixSensor(BaseSensorOperator): + """ + Waits for blobs matching a prefix to arrive on Azure Blob Storage. + + :param container_name: Name of the container. + :param prefix: Prefix of the blob. + :param wasb_conn_id: Reference to the wasb connection. + :param check_options: Optional keyword arguments that + `WasbHook.check_for_prefix()` takes. + """ + +
[docs] template_fields: Sequence[str] = ("container_name", "prefix")
+ + def __init__( + self, + *, + container_name: str, + prefix: str, + wasb_conn_id: str = "wasb_default", + check_options: dict | None = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + if check_options is None: + check_options = {} + self.wasb_conn_id = wasb_conn_id + self.container_name = container_name + self.prefix = prefix + self.check_options = check_options + +
[docs] def poke(self, context: Context) -> bool: + self.log.info("Poking for prefix: %s in wasb://%s", self.prefix, self.container_name) + hook = WasbHook(wasb_conn_id=self.wasb_conn_id) + return hook.check_for_prefix(self.container_name, self.prefix, **self.check_options)
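A minimal usage sketch for both sensors; the container, blob name, and prefix are illustrative assumptions:

import pendulum

from airflow import DAG
from airflow.providers.microsoft.azure.sensors.wasb import WasbBlobSensor, WasbPrefixSensor

with DAG(
    dag_id="example_wasb_sensors",  # hypothetical DAG id
    start_date=pendulum.datetime(2022, 1, 1, tz="UTC"),
    schedule=None,
    catchup=False,
) as dag:
    # Wait for one specific blob to appear.
    wait_for_blob = WasbBlobSensor(
        task_id="wait_for_blob",
        container_name="my-container",  # hypothetical container
        blob_name="incoming/data.csv",
        poke_interval=60,
    )

    # Wait for any blob whose name starts with the given prefix.
    wait_for_prefix = WasbPrefixSensor(
        task_id="wait_for_prefix",
        container_name="my-container",
        prefix="incoming/",
        poke_interval=60,
    )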
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.html
new file mode 100644
index 00000000000..3903a956432
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.html
@@ -0,0 +1,943 @@
+ airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import tempfile
+from typing import TYPE_CHECKING, Sequence
+
+from airflow.models import BaseOperator
+from airflow.providers.google.cloud.hooks.gcs import GCSHook
+from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class AzureBlobStorageToGCSOperator(BaseOperator): + """ + Operator transfers data from Azure Blob Storage to specified bucket in Google Cloud Storage + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureBlobStorageToGCSOperator` + + :param wasb_conn_id: Reference to the wasb connection. + :param gcp_conn_id: The connection ID to use when fetching connection info. + :param blob_name: Name of the blob + :param file_path: Path to the file to download + :param container_name: Name of the container + :param bucket_name: The bucket to upload to + :param object_name: The object name to set when uploading the file + :param filename: The local file path to the file to be uploaded + :param gzip: Option to compress local file or file data for upload + :param delegate_to: The account to impersonate using domain-wide delegation of authority, + if any. For this to work, the service account making the request must have + domain-wide delegation enabled. + :param impersonation_chain: Optional service account to impersonate using short-term + credentials, or chained list of accounts required to get the access_token + of the last account in the list, which will be impersonated in the request. + If set as a string, the account must grant the originating account + the Service Account Token Creator IAM role. + If set as a sequence, the identities from the list must grant + Service Account Token Creator IAM role to the directly preceding identity, with first + account from the list granting this role to the originating account. + """ + + def __init__( + self, + *, + wasb_conn_id="wasb_default", + gcp_conn_id: str = "google_cloud_default", + blob_name: str, + file_path: str, + container_name: str, + bucket_name: str, + object_name: str, + filename: str, + gzip: bool, + delegate_to: str | None, + impersonation_chain: str | Sequence[str] | None = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.wasb_conn_id = wasb_conn_id + self.gcp_conn_id = gcp_conn_id + self.blob_name = blob_name + self.file_path = file_path + self.container_name = container_name + self.bucket_name = bucket_name + self.object_name = object_name + self.filename = filename + self.gzip = gzip + self.delegate_to = delegate_to + self.impersonation_chain = impersonation_chain + +
[docs] template_fields: Sequence[str] = ( + "blob_name", + "file_path", + "container_name", + "bucket_name", + "object_name", + "filename",
+ ) + +
[docs] def execute(self, context: Context) -> str: + azure_hook = WasbHook(wasb_conn_id=self.wasb_conn_id) + gcs_hook = GCSHook( + gcp_conn_id=self.gcp_conn_id, + delegate_to=self.delegate_to, + impersonation_chain=self.impersonation_chain, + ) + + with tempfile.NamedTemporaryFile() as temp_file: + self.log.info("Downloading data from blob: %s", self.blob_name) + azure_hook.get_file( + file_path=temp_file.name, + container_name=self.container_name, + blob_name=self.blob_name, + ) + self.log.info( + "Uploading data from blob's: %s into GCP bucket: %s", self.object_name, self.bucket_name + ) + gcs_hook.upload( + bucket_name=self.bucket_name, + object_name=self.object_name, + filename=temp_file.name, + gzip=self.gzip, + ) + self.log.info( + "Resources have been uploaded from blob: %s to GCS bucket:%s", + self.blob_name, + self.bucket_name, + ) + return f"gs://{self.bucket_name}/{self.object_name}"
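A minimal usage sketch for ``AzureBlobStorageToGCSOperator``; every name below is an illustrative assumption. Note that, per the constructor shown above, ``file_path``, ``filename``, ``gzip``, and ``delegate_to`` must be passed explicitly:

import pendulum

from airflow import DAG
from airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs import AzureBlobStorageToGCSOperator

with DAG(
    dag_id="example_blob_to_gcs",  # hypothetical DAG id
    start_date=pendulum.datetime(2022, 1, 1, tz="UTC"),
    schedule=None,
    catchup=False,
) as dag:
    # Downloads the blob to a temporary file and re-uploads it to the GCS bucket.
    copy_blob_to_gcs = AzureBlobStorageToGCSOperator(
        task_id="copy_blob_to_gcs",
        container_name="my-container",  # hypothetical Azure container
        blob_name="incoming/data.csv",
        file_path="incoming/data.csv",
        bucket_name="my-gcs-bucket",    # hypothetical GCS bucket
        object_name="landing/data.csv",
        filename="data.csv",
        gzip=False,
        delegate_to=None,
    )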
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/transfers/local_to_adls.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/transfers/local_to_adls.html
new file mode 100644
index 00000000000..d0b98bc1469
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/transfers/local_to_adls.html
@@ -0,0 +1,931 @@
+ airflow.providers.microsoft.azure.transfers.local_to_adls — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.transfers.local_to_adls

+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import warnings
+from typing import TYPE_CHECKING, Any, Sequence
+
+from airflow.exceptions import AirflowException
+from airflow.models import BaseOperator
+from airflow.providers.microsoft.azure.hooks.data_lake import AzureDataLakeHook
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class LocalFilesystemToADLSOperator(BaseOperator): + """ + Upload file(s) to Azure Data Lake + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:LocalFilesystemToADLSOperator` + + :param local_path: local path. Can be single file, directory (in which case, + upload recursively) or glob pattern. Recursive glob patterns using `**` + are not supported + :param remote_path: Remote path to upload to; if multiple files, this is the + directory root to write within + :param nthreads: Number of threads to use. If None, uses the number of cores. + :param overwrite: Whether to forcibly overwrite existing files/directories. + If False and remote path is a directory, will quit regardless if any files + would be overwritten or not. If True, only matching filenames are actually + overwritten + :param buffersize: int [2**22] + Number of bytes for internal buffer. This block cannot be bigger than + a chunk and cannot be smaller than a block + :param blocksize: int [2**22] + Number of bytes for a block. Within each chunk, we write a smaller + block for each API call. This block cannot be bigger than a chunk + :param extra_upload_options: Extra upload options to add to the hook upload method + :param azure_data_lake_conn_id: Reference to the Azure Data Lake connection + """ + +
[docs] template_fields: Sequence[str] = ("local_path", "remote_path")
+
[docs] ui_color = "#e4f0e8"
+ + def __init__( + self, + *, + local_path: str, + remote_path: str, + overwrite: bool = True, + nthreads: int = 64, + buffersize: int = 4194304, + blocksize: int = 4194304, + extra_upload_options: dict[str, Any] | None = None, + azure_data_lake_conn_id: str = "azure_data_lake_default", + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.local_path = local_path + self.remote_path = remote_path + self.overwrite = overwrite + self.nthreads = nthreads + self.buffersize = buffersize + self.blocksize = blocksize + self.extra_upload_options = extra_upload_options + self.azure_data_lake_conn_id = azure_data_lake_conn_id + +
[docs] def execute(self, context: Context) -> None: + if "**" in self.local_path: + raise AirflowException("Recursive glob patterns using `**` are not supported") + if not self.extra_upload_options: + self.extra_upload_options = {} + hook = AzureDataLakeHook(azure_data_lake_conn_id=self.azure_data_lake_conn_id) + self.log.info("Uploading %s to %s", self.local_path, self.remote_path) + return hook.upload_file( + local_path=self.local_path, + remote_path=self.remote_path, + nthreads=self.nthreads, + overwrite=self.overwrite, + buffersize=self.buffersize, + blocksize=self.blocksize, + **self.extra_upload_options,
+ ) + + +
[docs]class LocalToAzureDataLakeStorageOperator(LocalFilesystemToADLSOperator): + """ + This class is deprecated. + Please use `airflow.providers.microsoft.azure.transfers.local_to_adls.LocalFilesystemToADLSOperator`. + """ + + def __init__(self, *args, **kwargs): + warnings.warn( + """This class is deprecated. + Please use + `airflow.providers.microsoft.azure.transfers.local_to_adls.LocalFilesystemToADLSOperator`.""", + DeprecationWarning, + stacklevel=3, + ) + super().__init__(*args, **kwargs)
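A minimal usage sketch for ``LocalFilesystemToADLSOperator`` uploading a glob of local files; the paths are illustrative assumptions (recursive ``**`` globs are rejected by ``execute``, as shown above):

import pendulum

from airflow import DAG
from airflow.providers.microsoft.azure.transfers.local_to_adls import LocalFilesystemToADLSOperator

with DAG(
    dag_id="example_local_to_adls",  # hypothetical DAG id
    start_date=pendulum.datetime(2022, 1, 1, tz="UTC"),
    schedule=None,
    catchup=False,
) as dag:
    # Uploads every file matching the glob into the remote directory, overwriting existing files.
    upload_reports = LocalFilesystemToADLSOperator(
        task_id="upload_reports",
        local_path="/tmp/reports/*.csv",  # hypothetical local glob
        remote_path="landing/reports",    # hypothetical ADLS directory
        overwrite=True,
    )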
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/transfers/local_to_wasb.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/transfers/local_to_wasb.html
new file mode 100644
index 00000000000..2897acc55b2
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/transfers/local_to_wasb.html
@@ -0,0 +1,896 @@
+ airflow.providers.microsoft.azure.transfers.local_to_wasb — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.transfers.local_to_wasb

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence
+
+from airflow.models import BaseOperator
+from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class LocalFilesystemToWasbOperator(BaseOperator): + """ + Uploads a file to Azure Blob Storage. + + :param file_path: Path to the file to load. (templated) + :param container_name: Name of the container. (templated) + :param blob_name: Name of the blob. (templated) + :param wasb_conn_id: Reference to the wasb connection. + :param create_container: Attempt to create the target container prior to uploading the blob. This is + useful if the target container may not exist yet. Defaults to False. + :param load_options: Optional keyword arguments that + `WasbHook.load_file()` takes. + """ + +
[docs] template_fields: Sequence[str] = ("file_path", "container_name", "blob_name")
+ + def __init__( + self, + *, + file_path: str, + container_name: str, + blob_name: str, + wasb_conn_id: str = "wasb_default", + create_container: bool = False, + load_options: dict | None = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + if load_options is None: + load_options = {} + self.file_path = file_path + self.container_name = container_name + self.blob_name = blob_name + self.wasb_conn_id = wasb_conn_id + self.create_container = create_container + self.load_options = load_options + +
[docs] def execute(self, context: Context) -> None: + """Upload a file to Azure Blob Storage.""" + hook = WasbHook(wasb_conn_id=self.wasb_conn_id) + self.log.info( + "Uploading %s to wasb://%s as %s", + self.file_path, + self.container_name, + self.blob_name, + ) + hook.load_file( + file_path=self.file_path, + container_name=self.container_name, + blob_name=self.blob_name, + create_container=self.create_container, + **self.load_options,
+ ) +
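A minimal usage sketch for ``LocalFilesystemToWasbOperator``; the local path, container, and blob name are illustrative assumptions:

import pendulum

from airflow import DAG
from airflow.providers.microsoft.azure.transfers.local_to_wasb import LocalFilesystemToWasbOperator

with DAG(
    dag_id="example_local_to_wasb",  # hypothetical DAG id
    start_date=pendulum.datetime(2022, 1, 1, tz="UTC"),
    schedule=None,
    catchup=False,
) as dag:
    # create_container=True lets the upload succeed even if the target container does not exist yet.
    upload_file = LocalFilesystemToWasbOperator(
        task_id="upload_file",
        file_path="/tmp/reports/daily.csv",  # hypothetical local file
        container_name="my-container",
        blob_name="reports/daily.csv",
        create_container=True,
    )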
\ No newline at end of file
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.html
new file mode 100644
index 00000000000..b8d3fb4e28a
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.html
@@ -0,0 +1,929 @@
+ airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake — apache-airflow-providers-microsoft-azure Documentation

Source code for airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import os
+from tempfile import TemporaryDirectory
+from typing import TYPE_CHECKING, Any, Sequence
+
+import unicodecsv as csv
+
+from airflow.models import BaseOperator
+from airflow.providers.microsoft.azure.hooks.data_lake import AzureDataLakeHook
+from airflow.providers.oracle.hooks.oracle import OracleHook
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+
+
[docs]class OracleToAzureDataLakeOperator(BaseOperator): + """ + Moves data from Oracle to Azure Data Lake. The operator runs the query against + Oracle and stores the file locally before loading it into Azure Data Lake. + + + :param filename: file name to be used by the csv file. + :param azure_data_lake_conn_id: destination azure data lake connection. + :param azure_data_lake_path: destination path in azure data lake to put the file. + :param oracle_conn_id: :ref:`Source Oracle connection <howto/connection:oracle>`. + :param sql: SQL query to execute against the Oracle database. (templated) + :param sql_params: Parameters to use in sql query. (templated) + :param delimiter: field delimiter in the file. + :param encoding: encoding type for the file. + :param quotechar: Character to use in quoting. + :param quoting: Quoting strategy. See unicodecsv quoting for more information. + """ + +
[docs] template_fields: Sequence[str] = ("filename", "sql", "sql_params")
+
[docs] template_fields_renderers = {"sql_params": "py"}
+
[docs] ui_color = "#e08c8c"
+ + def __init__( + self, + *, + filename: str, + azure_data_lake_conn_id: str, + azure_data_lake_path: str, + oracle_conn_id: str, + sql: str, + sql_params: dict | None = None, + delimiter: str = ",", + encoding: str = "utf-8", + quotechar: str = '"', + quoting: str = csv.QUOTE_MINIMAL, + **kwargs, + ) -> None: + super().__init__(**kwargs) + if sql_params is None: + sql_params = {} + self.filename = filename + self.oracle_conn_id = oracle_conn_id + self.sql = sql + self.sql_params = sql_params + self.azure_data_lake_conn_id = azure_data_lake_conn_id + self.azure_data_lake_path = azure_data_lake_path + self.delimiter = delimiter + self.encoding = encoding + self.quotechar = quotechar + self.quoting = quoting + + def _write_temp_file(self, cursor: Any, path_to_save: str | bytes | int) -> None: + with open(path_to_save, "wb") as csvfile: + csv_writer = csv.writer( + csvfile, + delimiter=self.delimiter, + encoding=self.encoding, + quotechar=self.quotechar, + quoting=self.quoting, + ) + csv_writer.writerow(map(lambda field: field[0], cursor.description)) + csv_writer.writerows(cursor) + csvfile.flush() + +
[docs] def execute(self, context: Context) -> None: + oracle_hook = OracleHook(oracle_conn_id=self.oracle_conn_id) + azure_data_lake_hook = AzureDataLakeHook(azure_data_lake_conn_id=self.azure_data_lake_conn_id) + + self.log.info("Dumping Oracle query results to local file") + conn = oracle_hook.get_conn() + cursor = conn.cursor() # type: ignore[attr-defined] + cursor.execute(self.sql, self.sql_params) + + with TemporaryDirectory(prefix="airflow_oracle_to_azure_op_") as temp: + self._write_temp_file(cursor, os.path.join(temp, self.filename)) + self.log.info("Uploading local file to Azure Data Lake") + azure_data_lake_hook.upload_file( + os.path.join(temp, self.filename), os.path.join(self.azure_data_lake_path, self.filename) + ) + cursor.close() + conn.close() # type: ignore[attr-defined]
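A minimal usage sketch of the operator above inside a DAG. The DAG id, task id, connection ids, query, and paths below are hypothetical placeholders:

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake import (
        OracleToAzureDataLakeOperator,
    )

    with DAG("oracle_to_adl_example", start_date=datetime(2021, 1, 1), schedule=None) as dag:
        export_orders = OracleToAzureDataLakeOperator(
            task_id="export_orders",
            oracle_conn_id="oracle_default",
            sql="SELECT * FROM orders WHERE order_date = :run_date",  # query is templated
            sql_params={"run_date": "{{ ds }}"},
            filename="orders_{{ ds }}.csv",  # CSV written locally, then uploaded
            azure_data_lake_conn_id="azure_data_lake_default",
            azure_data_lake_path="raw/orders",  # destination folder in ADLS
        )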
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.html
new file mode 100644
index 00000000000..34375eac396
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.html
@@ -0,0 +1,1017 @@
+ +

Source code for airflow.providers.microsoft.azure.transfers.sftp_to_wasb

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""This module contains SFTP to Azure Blob Storage operator."""
+from __future__ import annotations
+
+import os
+from collections import namedtuple
+from tempfile import NamedTemporaryFile
+from typing import TYPE_CHECKING, Sequence
+
+if TYPE_CHECKING:
+    from airflow.utils.context import Context
+
+from airflow.compat.functools import cached_property
+from airflow.exceptions import AirflowException
+from airflow.models import BaseOperator
+from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
+from airflow.providers.sftp.hooks.sftp import SFTPHook
+
+
[docs]WILDCARD = "*"
+
[docs]SftpFile = namedtuple("SftpFile", "sftp_file_path, blob_name")
+ + +
[docs]class SFTPToWasbOperator(BaseOperator): + """ + Transfer files to Azure Blob Storage from SFTP server. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:SFTPToWasbOperator` + + :param sftp_source_path: The sftp remote path. This is the specified file path + for downloading the single file or multiple files from the SFTP server. + You can use only one wildcard within your path. The wildcard can appear + inside the path or at the end of the path. + :param container_name: Name of the container. + :param blob_prefix: Prefix to name a blob. + :param sftp_conn_id: The sftp connection id. The name or identifier for + establishing a connection to the SFTP server. + :param wasb_conn_id: Reference to the wasb connection. + :param load_options: Optional keyword arguments that + ``WasbHook.load_file()`` takes. + :param move_object: When move object is True, the object is moved instead + of copied to the new location. This is the equivalent of a mv command + as opposed to a cp command. + :param wasb_overwrite_object: Whether the blob to be uploaded + should overwrite the current data. + When wasb_overwrite_object is True, it will overwrite the existing data. + If set to False, the operation might fail with + ResourceExistsError in case a blob object already exists. + :param create_container: Attempt to create the target container prior to uploading the blob. This is + useful if the target container may not exist yet. Defaults to False. + """ + +
[docs] template_fields: Sequence[str] = ("sftp_source_path", "container_name", "blob_prefix")
+
+    def __init__(
+        self,
+        *,
+        sftp_source_path: str,
+        container_name: str,
+        blob_prefix: str = "",
+        sftp_conn_id: str = "sftp_default",
+        wasb_conn_id: str = "wasb_default",
+        load_options: dict | None = None,
+        move_object: bool = False,
+        wasb_overwrite_object: bool = False,
+        create_container: bool = False,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+
+        self.sftp_source_path = sftp_source_path
+        self.blob_prefix = blob_prefix
+        self.sftp_conn_id = sftp_conn_id
+        self.wasb_conn_id = wasb_conn_id
+        self.container_name = container_name
+        self.load_options = load_options or {"overwrite": wasb_overwrite_object}
+        self.move_object = move_object
+        self.create_container = create_container
+
+
[docs] def dry_run(self) -> None: + super().dry_run() + sftp_files: list[SftpFile] = self.get_sftp_files_map() + for file in sftp_files: + self.log.info( + "Process will upload file from (SFTP) %s to wasb://%s as %s", + file.sftp_file_path, + self.container_name, + file.blob_name, + ) + if self.move_object: + self.log.info("Executing delete of %s", file)
+ +
[docs] def execute(self, context: Context) -> None: + """Upload a file from SFTP to Azure Blob Storage.""" + sftp_files: list[SftpFile] = self.get_sftp_files_map() + uploaded_files = self.copy_files_to_wasb(sftp_files) + if self.move_object: + self.delete_files(uploaded_files)
+ +
[docs] def get_sftp_files_map(self) -> list[SftpFile]: + """Get SFTP files from the source path, it may use a WILDCARD to this end.""" + sftp_files = [] + + sftp_complete_path, prefix, delimiter = self.get_tree_behavior() + + found_files, _, _ = self.sftp_hook.get_tree_map( + sftp_complete_path, prefix=prefix, delimiter=delimiter + ) + + self.log.info("Found %s files at sftp source path: %s", str(len(found_files)), self.sftp_source_path) + + for file in found_files: + future_blob_name = self.get_full_path_blob(file) + sftp_files.append(SftpFile(file, future_blob_name)) + + return sftp_files
+ +
[docs] def get_tree_behavior(self) -> tuple[str, str | None, str | None]: + """Extracts from source path the tree behavior to interact with the remote folder""" + self.check_wildcards_limit() + + if self.source_path_contains_wildcard: + + prefix, delimiter = self.sftp_source_path.split(WILDCARD, 1) + + sftp_complete_path = os.path.dirname(prefix) + + return sftp_complete_path, prefix, delimiter + + return self.sftp_source_path, None, None
+ +
[docs] def check_wildcards_limit(self) -> None: + """Check if there are multiple wildcards used in the SFTP source path.""" + total_wildcards = self.sftp_source_path.count(WILDCARD) + if total_wildcards > 1: + raise AirflowException( + "Only one wildcard '*' is allowed in sftp_source_path parameter. "
+ f"Found {total_wildcards} in {self.sftp_source_path}." + ) + + @property +
[docs] def source_path_contains_wildcard(self) -> bool: + """Checks if the SFTP source path contains a wildcard.""" + return WILDCARD in self.sftp_source_path
+ + @cached_property +
[docs] def sftp_hook(self) -> SFTPHook: + """Property of sftp hook to be re-used.""" + return SFTPHook(self.sftp_conn_id)
+ +
[docs] def get_full_path_blob(self, file: str) -> str: + """Get a blob name based on the previous name and a blob_prefix variable""" + return self.blob_prefix + os.path.basename(file)
+ +
[docs] def copy_files_to_wasb(self, sftp_files: list[SftpFile]) -> list[str]: + """Upload a list of files from sftp_files to Azure Blob Storage with a new Blob Name.""" + uploaded_files = [] + wasb_hook = WasbHook(wasb_conn_id=self.wasb_conn_id) + for file in sftp_files: + with NamedTemporaryFile("w") as tmp: + self.sftp_hook.retrieve_file(file.sftp_file_path, tmp.name) + self.log.info( + "Uploading %s to wasb://%s as %s", + file.sftp_file_path, + self.container_name, + file.blob_name, + ) + wasb_hook.load_file( + tmp.name, + self.container_name, + file.blob_name, + self.create_container, + **self.load_options, + ) + + uploaded_files.append(file.sftp_file_path) + + return uploaded_files
+ +
[docs] def delete_files(self, uploaded_files: list[str]) -> None: + """Delete files at SFTP which have been moved to Azure Blob Storage.""" + for sftp_file_path in uploaded_files: + self.log.info("Executing delete of %s", sftp_file_path) + self.sftp_hook.delete_file(sftp_file_path)
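A short sketch of the wildcard behaviour described in the docstring above; the task id, connection ids, paths, and container below are hypothetical placeholders:

    from airflow.providers.microsoft.azure.transfers.sftp_to_wasb import SFTPToWasbOperator

    # Upload every CSV under /data/incoming on the SFTP server into the "landing"
    # container, prefix the blob names, and delete the source files afterwards.
    sync_reports = SFTPToWasbOperator(
        task_id="sync_reports",
        sftp_source_path="/data/incoming/*.csv",  # exactly one wildcard is allowed
        container_name="landing",
        blob_prefix="reports/",
        sftp_conn_id="sftp_default",
        wasb_conn_id="wasb_default",
        move_object=True,  # mv semantics: delete from SFTP after upload
        create_container=True,
    )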
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/utils.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/utils.html
new file mode 100644
index 00000000000..610add94ddd
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/airflow/providers/microsoft/azure/utils.html
@@ -0,0 +1,889 @@

Source code for airflow.providers.microsoft.azure.utils

+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from __future__ import annotations
+
+import warnings
+from functools import wraps
+
+
+def _ensure_prefixes(conn_type):
+    """
+    Remove this decorator when the provider's minimum Airflow version is >= 2.5.0,
+    since the provider manager handles the prefixes from that version onwards.
+    """
+
+    def dec(func):
+        @wraps(func)
+        def inner():
+            field_behaviors = func()
+            conn_attrs = {"host", "schema", "login", "password", "port", "extra"}
+
+            def _ensure_prefix(field):
+                if field not in conn_attrs and not field.startswith("extra__"):
+                    return f"extra__{conn_type}__{field}"
+                else:
+                    return field
+
+            if "placeholders" in field_behaviors:
+                placeholders = field_behaviors["placeholders"]
+                field_behaviors["placeholders"] = {_ensure_prefix(k): v for k, v in placeholders.items()}
+            return field_behaviors
+
+        return inner
+
+    return dec
+
+
+
[docs]def get_field(*, conn_id: str, conn_type: str, extras: dict, field_name: str): + """Get field from extra, first checking short name, then for backcompat we check for prefixed name.""" + backcompat_prefix = f"extra__{conn_type}__" + backcompat_key = f"{backcompat_prefix}{field_name}" + ret = None + if field_name.startswith("extra__"): + raise ValueError( + f"Got prefixed name {field_name}; please remove the '{backcompat_prefix}' prefix " + "when using this method." + ) + if field_name in extras: + if backcompat_key in extras: + warnings.warn( + f"Conflicting params `{field_name}` and `{backcompat_key}` found in extras for conn " + f"{conn_id}. Using value for `{field_name}`. Please ensure this is the correct " + f"value and remove the backcompat key `{backcompat_key}`." + ) + ret = extras[field_name] + elif backcompat_key in extras: + ret = extras.get(backcompat_key) + if ret == "": + return None + return ret
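A minimal sketch of how get_field resolves the short and backcompat-prefixed extra names; the connection id and extras below are hypothetical:

    from airflow.providers.microsoft.azure.utils import get_field

    extras = {"tenant_id": "xyz", "extra__wasb__sas_token": "abc"}

    # The short name wins when present; otherwise the "extra__<conn_type>__" fallback is used.
    get_field(conn_id="wasb_default", conn_type="wasb", extras=extras, field_name="tenant_id")   # -> "xyz"
    get_field(conn_id="wasb_default", conn_type="wasb", extras=extras, field_name="sas_token")   # -> "abc"

    # Passing an already-prefixed name raises ValueError:
    # get_field(conn_id="wasb_default", conn_type="wasb", extras=extras, field_name="extra__wasb__sas_token")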
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/index.html
new file mode 100644
index 00000000000..5ac4866496a
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/index.html
@@ -0,0 +1,857 @@

All modules for which code is available

diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_adf_run_pipeline.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_adf_run_pipeline.html
new file mode 100644
index 00000000000..5ac32780ae0
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_adf_run_pipeline.html
@@ -0,0 +1,907 @@

Source code for tests.system.providers.microsoft.azure.example_adf_run_pipeline

+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import os
+from datetime import datetime, timedelta
+from typing import cast
+
+from airflow.models import DAG
+from airflow.models.xcom_arg import XComArg
+
+try:
+    from airflow.operators.empty import EmptyOperator
+except ModuleNotFoundError:
+    from airflow.operators.dummy import DummyOperator as EmptyOperator  # type: ignore
+
+from airflow.providers.microsoft.azure.operators.data_factory import AzureDataFactoryRunPipelineOperator
+from airflow.providers.microsoft.azure.sensors.data_factory import AzureDataFactoryPipelineRunStatusSensor
+from airflow.utils.edgemodifier import Label
+
+
[docs]ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+
[docs]DAG_ID = "example_adf_run_pipeline"
+ +with DAG( + dag_id=DAG_ID, + start_date=datetime(2021, 8, 13), + schedule="@daily", + catchup=False, + default_args={ + "retries": 1, + "retry_delay": timedelta(minutes=3), + "azure_data_factory_conn_id": "azure_data_factory", + "factory_name": "my-data-factory", # This can also be specified in the ADF connection. + "resource_group_name": "my-resource-group", # This can also be specified in the ADF connection. + }, + default_view="graph", +) as dag: +
[docs] begin = EmptyOperator(task_id="begin")
+ end = EmptyOperator(task_id="end") + + # [START howto_operator_adf_run_pipeline] + run_pipeline1 = AzureDataFactoryRunPipelineOperator( + task_id="run_pipeline1", + pipeline_name="pipeline1", + parameters={"myParam": "value"}, + ) + # [END howto_operator_adf_run_pipeline] + + # [START howto_operator_adf_run_pipeline_async] + run_pipeline2 = AzureDataFactoryRunPipelineOperator( + task_id="run_pipeline2", + pipeline_name="pipeline2", + wait_for_termination=False, + ) + + pipeline_run_sensor = AzureDataFactoryPipelineRunStatusSensor( + task_id="pipeline_run_sensor", + run_id=cast(str, XComArg(run_pipeline2, key="run_id")), + ) + # [END howto_operator_adf_run_pipeline_async] + + begin >> Label("No async wait") >> run_pipeline1 + begin >> Label("Do async wait with sensor") >> run_pipeline2 + [run_pipeline1, pipeline_run_sensor] >> end + + # Task dependency created via `XComArgs`: + # run_pipeline2 >> pipeline_run_sensor + + from tests.system.utils.watcher import watcher + + # This test needs watcher in order to properly mark success/failure + # when "tearDown" task with trigger rule is part of the DAG + list(dag.tasks) >> watcher() + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +
[docs]test_run = get_test_run(dag)
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_adls_delete.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_adls_delete.html
new file mode 100644
index 00000000000..a63ac8fbdfe
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_adls_delete.html
@@ -0,0 +1,874 @@

Source code for tests.system.providers.microsoft.azure.example_adls_delete

+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import os
+from datetime import datetime
+
+from airflow import models
+from airflow.providers.microsoft.azure.operators.adls import ADLSDeleteOperator
+from airflow.providers.microsoft.azure.transfers.local_to_adls import LocalFilesystemToADLSOperator
+
+
[docs]LOCAL_FILE_PATH = os.environ.get("LOCAL_FILE_PATH", "localfile.txt")
+
[docs]REMOTE_FILE_PATH = os.environ.get("REMOTE_LOCAL_PATH", "remote.txt")
+ +
[docs]ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+
[docs]DAG_ID = "example_adls_delete"
+ +with models.DAG( + DAG_ID, + start_date=datetime(2021, 1, 1), + schedule=None, + tags=["example"], +) as dag: + +
[docs] upload_file = LocalFilesystemToADLSOperator( + task_id="upload_task", + local_path=LOCAL_FILE_PATH, + remote_path=REMOTE_FILE_PATH,
+ ) + # [START howto_operator_adls_delete] + remove_file = ADLSDeleteOperator(task_id="delete_task", path=REMOTE_FILE_PATH, recursive=True) + # [END howto_operator_adls_delete] + + upload_file >> remove_file + + from tests.system.utils.watcher import watcher + + # This test needs watcher in order to properly mark success/failure + # when "tearDown" task with trigger rule is part of the DAG + list(dag.tasks) >> watcher() + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +
[docs]test_run = get_test_run(dag)
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_azure_blob_to_gcs.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_azure_blob_to_gcs.html
new file mode 100644
index 00000000000..7539fcb8720
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_azure_blob_to_gcs.html
@@ -0,0 +1,889 @@

Source code for tests.system.providers.microsoft.azure.example_azure_blob_to_gcs

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import os
+from datetime import datetime
+
+from airflow import DAG
+from airflow.providers.microsoft.azure.sensors.wasb import WasbBlobSensor
+from airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs import AzureBlobStorageToGCSOperator
+
+# Ignore missing args provided by default_args
+# type: ignore[call-arg]
+
+
+
[docs]BLOB_NAME = os.environ.get("AZURE_BLOB_NAME", "file.txt")
+
[docs]AZURE_CONTAINER_NAME = os.environ.get("AZURE_CONTAINER_NAME", "airflow")
+
[docs]GCP_BUCKET_FILE_PATH = os.environ.get("GCP_BUCKET_FILE_PATH", "file.txt")
+
[docs]GCP_BUCKET_NAME = os.environ.get("GCP_BUCKET_NAME", "INVALID BUCKET NAME")
+
[docs]GCP_OBJECT_NAME = os.environ.get("GCP_OBJECT_NAME", "file.txt")
+
[docs]ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+
[docs]DAG_ID = "example_azure_blob_to_gcs"
+ +# [START how_to_azure_blob_to_gcs] +with DAG( + DAG_ID, + schedule=None, + start_date=datetime(2021, 1, 1), # Override to match your needs + default_args={"container_name": AZURE_CONTAINER_NAME, "blob_name": BLOB_NAME}, +) as dag: + +
[docs] wait_for_blob = WasbBlobSensor(task_id="wait_for_blob")
+ + transfer_files_to_gcs = AzureBlobStorageToGCSOperator( + task_id="transfer_files_to_gcs", + # AZURE arg + file_path=GCP_OBJECT_NAME, + # GCP args + bucket_name=GCP_BUCKET_NAME, + object_name=GCP_OBJECT_NAME, + filename=GCP_BUCKET_FILE_PATH, + gzip=False, + delegate_to=None, + impersonation_chain=None, + ) + # [END how_to_azure_blob_to_gcs] + + wait_for_blob >> transfer_files_to_gcs + + from tests.system.utils.watcher import watcher + + # This test needs watcher in order to properly mark success/failure + # when "tearDown" task with trigger rule is part of the DAG + list(dag.tasks) >> watcher() + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +
[docs]test_run = get_test_run(dag)
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_azure_container_instances.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_azure_container_instances.html
new file mode 100644
index 00000000000..17c0ee3d4b8
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_azure_container_instances.html
@@ -0,0 +1,874 @@

Source code for tests.system.providers.microsoft.azure.example_azure_container_instances

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+This is an example DAG for using the AzureContainerInstancesOperator.
+"""
+from __future__ import annotations
+
+import os
+from datetime import datetime, timedelta
+
+from airflow import DAG
+from airflow.providers.microsoft.azure.operators.container_instances import AzureContainerInstancesOperator
+
+
[docs]ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+
[docs]DAG_ID = "aci_example"
+ +with DAG( + dag_id=DAG_ID, + default_args={"retries": 1}, + schedule=timedelta(days=1), + start_date=datetime(2018, 11, 1), + catchup=False, + tags=["example"], +) as dag: + +
[docs] t1 = AzureContainerInstancesOperator( + ci_conn_id="azure_default", + registry_conn_id=None, + resource_group="resource-group", + name="aci-test-{{ ds }}", + image="hello-world", + region="WestUS2", + environment_variables={}, + volumes=[], + memory_in_gb=4.0, + cpu=1.0, + task_id="start_container",
+ ) + + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +
[docs]test_run = get_test_run(dag)
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_azure_cosmosdb.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_azure_cosmosdb.html
new file mode 100644
index 00000000000..64271f8ff2b
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_azure_cosmosdb.html
@@ -0,0 +1,890 @@

Source code for tests.system.providers.microsoft.azure.example_azure_cosmosdb

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""
+This is only an example DAG to highlight usage of AzureCosmosDocumentSensor to detect
+if a document now exists.
+
+You can trigger this manually with `airflow dags trigger example_azure_cosmosdb_sensor`.
+
+*Note: Make sure that connection `azure_cosmos_default` is properly set before running
+this example.*
+"""
+from __future__ import annotations
+
+import os
+from datetime import datetime
+
+from airflow import DAG
+from airflow.providers.microsoft.azure.operators.cosmos import AzureCosmosInsertDocumentOperator
+from airflow.providers.microsoft.azure.sensors.cosmos import AzureCosmosDocumentSensor
+
+# Ignore missing args provided by default_args
+# type: ignore[call-arg]
+
+
+
[docs]ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+
[docs]DAG_ID = "example_azure_cosmosdb_sensor"
+ +with DAG( + dag_id=DAG_ID, + default_args={"database_name": "airflow_example_db"}, + start_date=datetime(2021, 1, 1), + catchup=False, + doc_md=__doc__, + tags=["example"], +) as dag: + +
[docs] t1 = AzureCosmosDocumentSensor( + task_id="check_cosmos_file", + collection_name="airflow_example_coll", + document_id="airflow_checkid",
+ ) + + t2 = AzureCosmosInsertDocumentOperator( + task_id="insert_cosmos_file", + collection_name="new-collection", + document={"id": "someuniqueid", "param1": "value1", "param2": "value2"}, + ) + + t1 >> t2 + + from tests.system.utils.watcher import watcher + + # This test needs watcher in order to properly mark success/failure + # when "tearDown" task with trigger rule is part of the DAG + list(dag.tasks) >> watcher() + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +
[docs]test_run = get_test_run(dag)
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_azure_service_bus.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_azure_service_bus.html
new file mode 100644
index 00000000000..ead5ee59317
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_azure_service_bus.html
@@ -0,0 +1,993 @@

Source code for tests.system.providers.microsoft.azure.example_azure_service_bus

+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import os
+from datetime import datetime, timedelta
+
+from airflow import DAG
+from airflow.models.baseoperator import chain
+from airflow.providers.microsoft.azure.operators.asb import (
+    ASBReceiveSubscriptionMessageOperator,
+    AzureServiceBusCreateQueueOperator,
+    AzureServiceBusDeleteQueueOperator,
+    AzureServiceBusReceiveMessageOperator,
+    AzureServiceBusSendMessageOperator,
+    AzureServiceBusSubscriptionCreateOperator,
+    AzureServiceBusSubscriptionDeleteOperator,
+    AzureServiceBusTopicCreateOperator,
+    AzureServiceBusTopicDeleteOperator,
+    AzureServiceBusUpdateSubscriptionOperator,
+)
+
+
[docs]EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", 6))
+ +
[docs]CLIENT_ID = os.getenv("CLIENT_ID", "")
+
[docs]QUEUE_NAME = "sb_mgmt_queue_test"
+
[docs]MESSAGE = "Test Message"
+
[docs]MESSAGE_LIST = [MESSAGE + " " + str(n) for n in range(0, 10)]
+
[docs]TOPIC_NAME = "sb_mgmt_topic_test"
+
[docs]SUBSCRIPTION_NAME = "sb_mgmt_subscription"
+ +with DAG( + dag_id="example_azure_service_bus", + start_date=datetime(2021, 8, 13), + schedule=None, + catchup=False, + default_args={ + "execution_timeout": timedelta(hours=EXECUTION_TIMEOUT), + "azure_service_bus_conn_id": "azure_service_bus_default", + }, + tags=["example", "Azure service bus"], +) as dag: + # [START howto_operator_create_service_bus_queue] +
[docs] create_service_bus_queue = AzureServiceBusCreateQueueOperator( + task_id="create_service_bus_queue", + queue_name=QUEUE_NAME,
+ ) + # [END howto_operator_create_service_bus_queue] + + # [START howto_operator_send_message_to_service_bus_queue] + send_message_to_service_bus_queue = AzureServiceBusSendMessageOperator( + task_id="send_message_to_service_bus_queue", + message=MESSAGE, + queue_name=QUEUE_NAME, + batch=False, + ) + # [END howto_operator_send_message_to_service_bus_queue] + + # [START howto_operator_send_list_message_to_service_bus_queue] + send_list_message_to_service_bus_queue = AzureServiceBusSendMessageOperator( + task_id="send_list_message_to_service_bus_queue", + message=MESSAGE_LIST, + queue_name=QUEUE_NAME, + batch=False, + ) + # [END howto_operator_send_list_message_to_service_bus_queue] + + # [START howto_operator_send_batch_message_to_service_bus_queue] + send_batch_message_to_service_bus_queue = AzureServiceBusSendMessageOperator( + task_id="send_batch_message_to_service_bus_queue", + message=MESSAGE_LIST, + queue_name=QUEUE_NAME, + batch=True, + ) + # [END howto_operator_send_batch_message_to_service_bus_queue] + + # [START howto_operator_receive_message_service_bus_queue] + receive_message_service_bus_queue = AzureServiceBusReceiveMessageOperator( + task_id="receive_message_service_bus_queue", + queue_name=QUEUE_NAME, + max_message_count=20, + max_wait_time=5, + ) + # [END howto_operator_receive_message_service_bus_queue] + + # [START howto_operator_create_service_bus_topic] + create_service_bus_topic = AzureServiceBusTopicCreateOperator( + task_id="create_service_bus_topic", topic_name=TOPIC_NAME + ) + # [END howto_operator_create_service_bus_topic] + + # [START howto_operator_create_service_bus_subscription] + create_service_bus_subscription = AzureServiceBusSubscriptionCreateOperator( + task_id="create_service_bus_subscription", + topic_name=TOPIC_NAME, + subscription_name=SUBSCRIPTION_NAME, + ) + # [END howto_operator_create_service_bus_subscription] + + # [START howto_operator_update_service_bus_subscription] + update_service_bus_subscription = AzureServiceBusUpdateSubscriptionOperator( + task_id="update_service_bus_subscription", + topic_name=TOPIC_NAME, + subscription_name=SUBSCRIPTION_NAME, + max_delivery_count=5, + ) + # [END howto_operator_update_service_bus_subscription] + + # [START howto_operator_receive_message_service_bus_subscription] + receive_message_service_bus_subscription = ASBReceiveSubscriptionMessageOperator( + task_id="receive_message_service_bus_subscription", + topic_name=TOPIC_NAME, + subscription_name=SUBSCRIPTION_NAME, + max_message_count=10, + ) + # [END howto_operator_receive_message_service_bus_subscription] + + # [START howto_operator_delete_service_bus_subscription] + delete_service_bus_subscription = AzureServiceBusSubscriptionDeleteOperator( + task_id="delete_service_bus_subscription", + topic_name=TOPIC_NAME, + subscription_name=SUBSCRIPTION_NAME, + trigger_rule="all_done", + ) + # [END howto_operator_delete_service_bus_subscription] + + # [START howto_operator_delete_service_bus_topic] + delete_asb_topic = AzureServiceBusTopicDeleteOperator( + task_id="delete_asb_topic", + topic_name=TOPIC_NAME, + ) + # [END howto_operator_delete_service_bus_topic] + + # [START howto_operator_delete_service_bus_queue] + delete_service_bus_queue = AzureServiceBusDeleteQueueOperator( + task_id="delete_service_bus_queue", queue_name=QUEUE_NAME, trigger_rule="all_done" + ) + # [END howto_operator_delete_service_bus_queue] + + chain( + create_service_bus_queue, + create_service_bus_topic, + create_service_bus_subscription, + send_message_to_service_bus_queue, + 
send_list_message_to_service_bus_queue, + send_batch_message_to_service_bus_queue, + receive_message_service_bus_queue, + update_service_bus_subscription, + receive_message_service_bus_subscription, + delete_service_bus_subscription, + delete_asb_topic, + delete_service_bus_queue, + ) + + from tests.system.utils.watcher import watcher + + # This test needs watcher in order to properly mark success/failure + # when "tearDown" task with trigger rule is part of the DAG + list(dag.tasks) >> watcher() + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +
[docs]test_run = get_test_run(dag)
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_azure_synapse.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_azure_synapse.html
new file mode 100644
index 00000000000..703bf5870f1
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_azure_synapse.html
@@ -0,0 +1,888 @@

Source code for tests.system.providers.microsoft.azure.example_azure_synapse

+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import os
+from datetime import datetime, timedelta
+
+from airflow import DAG
+from airflow.providers.microsoft.azure.operators.synapse import AzureSynapseRunSparkBatchOperator
+
+
[docs]AIRFLOW_HOME = os.getenv("AIRFLOW_HOME", "/usr/local/airflow")
+
[docs]EXECUTION_TIMEOUT = int(os.getenv("EXECUTION_TIMEOUT", 6))
+ +
[docs]default_args = { + "execution_timeout": timedelta(hours=EXECUTION_TIMEOUT), + "retries": int(os.getenv("DEFAULT_TASK_RETRIES", 2)), + "retry_delay": timedelta(seconds=int(os.getenv("DEFAULT_RETRY_DELAY_SECONDS", 60))),
+} + +
[docs]SPARK_JOB_PAYLOAD = { + "name": "SparkJob", + "file": "abfss://spark@providersstorageaccgen2.dfs.core.windows.net/wordcount.py", + "args": [ + "abfss://spark@providersstorageaccgen2.dfs.core.windows.net/shakespeare.txt", + "abfss://spark@providersstorageaccgen2.dfs.core.windows.net/results/", + ], + "jars": [], + "pyFiles": [], + "files": [], + "conf": { + "spark.dynamicAllocation.enabled": "false", + "spark.dynamicAllocation.minExecutors": "1", + "spark.dynamicAllocation.maxExecutors": "2", + }, + "numExecutors": 2, + "executorCores": 4, + "executorMemory": "28g", + "driverCores": 4, + "driverMemory": "28g",
+} + +with DAG( + dag_id="example_synapse_spark_job", + start_date=datetime(2022, 1, 1), + schedule=None, + catchup=False, + default_args=default_args, + tags=["example", "synapse"], +) as dag: + # [START howto_operator_azure_synapse] +
[docs] run_spark_job = AzureSynapseRunSparkBatchOperator( + task_id="run_spark_job", spark_pool="provsparkpool", payload=SPARK_JOB_PAYLOAD # type: ignore
+ ) + # [END howto_operator_azure_synapse] + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +
[docs]test_run = get_test_run(dag)
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_fileshare.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_fileshare.html
new file mode 100644
index 00000000000..9a1e1238f96
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_fileshare.html
@@ -0,0 +1,882 @@

Source code for tests.system.providers.microsoft.azure.example_fileshare

+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import os
+from datetime import datetime
+
+from airflow.decorators import task
+from airflow.models import DAG
+from airflow.providers.microsoft.azure.hooks.fileshare import AzureFileShareHook
+
+
[docs]NAME = "myfileshare"
+
[docs]DIRECTORY = "mydirectory"
+
[docs]ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+
[docs]DAG_ID = "example_fileshare"
+ + +@task +
[docs]def create_fileshare(): + """Create a fileshare with directory""" + hook = AzureFileShareHook() + hook.create_share(NAME) + hook.create_directory(share_name=NAME, directory_name=DIRECTORY) + exists = hook.check_for_directory(share_name=NAME, directory_name=DIRECTORY) + if not exists: + raise Exception
+ + +@task +
[docs]def delete_fileshare(): + """Delete a fileshare""" + hook = AzureFileShareHook() + hook.delete_share(NAME)
+ + +with DAG( + DAG_ID, + schedule="@once", + start_date=datetime(2021, 1, 1), + catchup=False, +) as dag: + create_fileshare() >> delete_fileshare() + + from tests.system.utils.watcher import watcher + + # This test needs watcher in order to properly mark success/failure + # when "tearDown" task with trigger rule is part of the DAG + list(dag.tasks) >> watcher() + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +
[docs]test_run = get_test_run(dag)
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_local_to_adls.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_local_to_adls.html
new file mode 100644
index 00000000000..e742fd02a38
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_local_to_adls.html
@@ -0,0 +1,874 @@

Source code for tests.system.providers.microsoft.azure.example_local_to_adls

+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import os
+from datetime import datetime
+
+from airflow import models
+from airflow.providers.microsoft.azure.operators.adls import ADLSDeleteOperator
+from airflow.providers.microsoft.azure.transfers.local_to_adls import LocalFilesystemToADLSOperator
+
+
[docs]LOCAL_FILE_PATH = os.environ.get("LOCAL_FILE_PATH", "localfile.txt")
+
[docs]REMOTE_FILE_PATH = os.environ.get("REMOTE_LOCAL_PATH", "remote.txt")
+
[docs]ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+
[docs]DAG_ID = "example_local_to_adls"
+ +with models.DAG( + DAG_ID, + start_date=datetime(2021, 1, 1), + catchup=False, + schedule=None, + tags=["example"], +) as dag: + # [START howto_operator_local_to_adls] +
[docs] upload_file = LocalFilesystemToADLSOperator( + task_id="upload_task", + local_path=LOCAL_FILE_PATH, + remote_path=REMOTE_FILE_PATH,
+ ) + # [END howto_operator_local_to_adls] + + delete_file = ADLSDeleteOperator(task_id="remove_task", path=REMOTE_FILE_PATH, recursive=True) + + upload_file >> delete_file + + from tests.system.utils.watcher import watcher + + # This test needs watcher in order to properly mark success/failure + # when "tearDown" task with trigger rule is part of the DAG + list(dag.tasks) >> watcher() + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +
[docs]test_run = get_test_run(dag)
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_local_to_wasb.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_local_to_wasb.html
new file mode 100644
index 00000000000..b22bd4d23c0
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_local_to_wasb.html
@@ -0,0 +1,870 @@

Source code for tests.system.providers.microsoft.azure.example_local_to_wasb

+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import os
+from datetime import datetime
+
+from airflow.models import DAG
+from airflow.providers.microsoft.azure.operators.wasb_delete_blob import WasbDeleteBlobOperator
+from airflow.providers.microsoft.azure.transfers.local_to_wasb import LocalFilesystemToWasbOperator
+
+# Ignore missing args provided by default_args
+# type: ignore[call-arg]
+
+
+
[docs]PATH_TO_UPLOAD_FILE = os.environ.get("AZURE_PATH_TO_UPLOAD_FILE", "example-text.txt")
+
[docs]ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+
[docs]DAG_ID = "example_local_to_wasb"
+ +with DAG( + DAG_ID, + schedule="@once", + start_date=datetime(2021, 1, 1), + catchup=False, + default_args={"container_name": "mycontainer", "blob_name": "myblob"}, +) as dag: +
[docs] upload = LocalFilesystemToWasbOperator(task_id="upload_file", file_path=PATH_TO_UPLOAD_FILE)
+ delete = WasbDeleteBlobOperator(task_id="delete_file") + + upload >> delete + + from tests.system.utils.watcher import watcher + + # This test needs watcher in order to properly mark success/failure + # when "tearDown" task with trigger rule is part of the DAG + list(dag.tasks) >> watcher() + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +
[docs]test_run = get_test_run(dag)
+
+ + +
+ +
+
+
+
+
+

Was this entry helpful?

+
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+
+ +
+ + + + + + +
+ + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_sftp_to_wasb.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_sftp_to_wasb.html new file mode 100644 index 00000000000..ec9ad4eacae --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_modules/tests/system/providers/microsoft/azure/example_sftp_to_wasb.html @@ -0,0 +1,902 @@ + + + + + + + + + + + + tests.system.providers.microsoft.azure.example_sftp_to_wasb — apache-airflow-providers-microsoft-azure Documentation + + + + + + + + + + + + + + + + + +

Source code for tests.system.providers.microsoft.azure.example_sftp_to_wasb

+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from __future__ import annotations
+
+import os
+from datetime import datetime
+
+from airflow import DAG
+from airflow.decorators import task
+from airflow.providers.microsoft.azure.operators.wasb_delete_blob import WasbDeleteBlobOperator
+from airflow.providers.microsoft.azure.transfers.sftp_to_wasb import SFTPToWasbOperator
+from airflow.providers.sftp.hooks.sftp import SFTPHook
+from airflow.providers.sftp.operators.sftp import SFTPOperator
+
+
[docs]AZURE_CONTAINER_NAME = os.environ.get("AZURE_CONTAINER_NAME", "airflow")
+
[docs]BLOB_PREFIX = os.environ.get("AZURE_BLOB_PREFIX", "airflow")
+
[docs]SFTP_SRC_PATH = os.environ.get("SFTP_SRC_PATH", "/sftp")
+
[docs]LOCAL_FILE_PATH = os.environ.get("LOCAL_SRC_PATH", "/tmp")
+
[docs]SAMPLE_FILENAME = os.environ.get("SFTP_SAMPLE_FILENAME", "sftp_to_wasb_test.txt")
+
[docs]FILE_COMPLETE_PATH = os.path.join(LOCAL_FILE_PATH, SAMPLE_FILENAME)
+
[docs]SFTP_FILE_COMPLETE_PATH = os.path.join(SFTP_SRC_PATH, SAMPLE_FILENAME)
+
[docs]ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+
[docs]DAG_ID = "example_sftp_to_wasb"
+ + +@task +
[docs]def delete_sftp_file(): + """Delete a file at SFTP SERVER""" + SFTPHook().delete_file(SFTP_FILE_COMPLETE_PATH)
+ + +with DAG( + DAG_ID, + schedule=None, + catchup=False, + start_date=datetime(2021, 1, 1), # Override to match your needs +) as dag: +
[docs] transfer_files_to_sftp_step = SFTPOperator( + task_id="transfer_files_from_local_to_sftp", + local_filepath=FILE_COMPLETE_PATH, + remote_filepath=SFTP_FILE_COMPLETE_PATH,
+ ) + + # [START how_to_sftp_to_wasb] + transfer_files_to_azure = SFTPToWasbOperator( + task_id="transfer_files_from_sftp_to_wasb", + # SFTP args + sftp_source_path=SFTP_SRC_PATH, + # AZURE args + container_name=AZURE_CONTAINER_NAME, + blob_prefix=BLOB_PREFIX, + ) + # [END how_to_sftp_to_wasb] + + delete_blob_file_step = WasbDeleteBlobOperator( + task_id="delete_blob_files", + container_name=AZURE_CONTAINER_NAME, + blob_name=BLOB_PREFIX + SAMPLE_FILENAME, + ) + + transfer_files_to_sftp_step >> transfer_files_to_azure >> delete_blob_file_step >> delete_sftp_file() + + from tests.system.utils.watcher import watcher + + # This test needs watcher in order to properly mark success/failure + # when "tearDown" task with trigger rule is part of the DAG + list(dag.tasks) >> watcher() + +from tests.system.utils import get_test_run # noqa: E402 + +# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest) +
[docs]test_run = get_test_run(dag)
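As with the previous example, a hedged sketch of the core transfer step on its own; the ``sftp_default`` and ``wasb_default`` connection ids and the literal source path are assumptions for illustration, not values taken from this page:

.. code-block:: python

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.microsoft.azure.transfers.sftp_to_wasb import SFTPToWasbOperator

    with DAG(
        "sftp_to_wasb_minimal",  # hypothetical DAG id
        schedule=None,
        start_date=datetime(2021, 1, 1),
        catchup=False,
    ):
        SFTPToWasbOperator(
            task_id="transfer_files_from_sftp_to_wasb",
            sftp_conn_id="sftp_default",  # assumed default connection id
            wasb_conn_id="wasb_default",  # assumed default connection id
            sftp_source_path="/sftp",  # illustrative source directory
            container_name="airflow",
            blob_prefix="airflow",
        )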
+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/adx/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/adx/index.rst.txt new file mode 100644 index 00000000000..108234e8153 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/adx/index.rst.txt @@ -0,0 +1,115 @@ +:py:mod:`airflow.providers.microsoft.azure.hooks.adx` +===================================================== + +.. py:module:: airflow.providers.microsoft.azure.hooks.adx + +.. autoapi-nested-parse:: + + This module contains Azure Data Explorer hook. + + .. spelling:: + + KustoResponseDataSetV + kusto + + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.hooks.adx.AzureDataExplorerHook + + + + +.. py:class:: AzureDataExplorerHook(azure_data_explorer_conn_id = default_conn_name) + + Bases: :py:obj:`airflow.hooks.base.BaseHook` + + Interacts with Azure Data Explorer (Kusto). + + **Cluster**: + + Azure Data Explorer cluster is specified by a URL, for example: "https://help.kusto.windows.net". + The parameter must be provided through the Data Explorer Cluster URL connection detail. + + **Tenant ID**: + + To learn about tenants refer to: https://docs.microsoft.com/en-us/onedrive/find-your-office-365-tenant-id + + **Authentication methods**: + + Available authentication methods are: + + - AAD_APP: Authentication with AAD application certificate. A Tenant ID is required when using this + method. Provide application ID and application key through Username and Password parameters. + + - AAD_APP_CERT: Authentication with AAD application certificate. Tenant ID, Application PEM Certificate, + and Application Certificate Thumbprint are required when using this method. + + - AAD_CREDS: Authentication with AAD username and password. A Tenant ID is required when using this + method. Username and Password parameters are used for authentication with AAD. + + - AAD_DEVICE: Authenticate with AAD device code. Please note that if you choose this option, you'll need + to authenticate for every new instance that is initialized. It is highly recommended to create one + instance and use it for all queries. + + :param azure_data_explorer_conn_id: Reference to the + :ref:`Azure Data Explorer connection`. + + .. py:attribute:: conn_name_attr + :annotation: = azure_data_explorer_conn_id + + + + .. py:attribute:: default_conn_name + :annotation: = azure_data_explorer_default + + + + .. py:attribute:: conn_type + :annotation: = azure_data_explorer + + + + .. py:attribute:: hook_name + :annotation: = Azure Data Explorer + + + + .. py:method:: get_connection_form_widgets() + :staticmethod: + + Returns connection widgets to add to connection form + + + .. py:method:: get_ui_field_behaviour() + :staticmethod: + + Returns custom field behaviour + + + .. py:method:: get_conn() + + Return a KustoClient object. + + + .. py:method:: run_query(query, database, options = None) + + Run KQL query using provided configuration, and return + `azure.kusto.data.response.KustoResponseDataSet` instance. + If query is unsuccessful AirflowException is raised. + + :param query: KQL query to run + :param database: Database to run the query on. + :param options: Optional query options. 
See: + https://docs.microsoft.com/en-us/azure/kusto/api/netfx/request-properties#list-of-clientrequestproperties + :return: dict + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/asb/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/asb/index.rst.txt new file mode 100644 index 00000000000..cd430cf2405 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/asb/index.rst.txt @@ -0,0 +1,164 @@ +:py:mod:`airflow.providers.microsoft.azure.hooks.asb` +===================================================== + +.. py:module:: airflow.providers.microsoft.azure.hooks.asb + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.hooks.asb.BaseAzureServiceBusHook + airflow.providers.microsoft.azure.hooks.asb.AdminClientHook + airflow.providers.microsoft.azure.hooks.asb.MessageHook + + + + +.. py:class:: BaseAzureServiceBusHook(azure_service_bus_conn_id = default_conn_name) + + Bases: :py:obj:`airflow.hooks.base.BaseHook` + + BaseAzureServiceBusHook class to create session and create connection using connection string + + :param azure_service_bus_conn_id: Reference to the + :ref:`Azure Service Bus connection`. + + .. py:attribute:: conn_name_attr + :annotation: = azure_service_bus_conn_id + + + + .. py:attribute:: default_conn_name + :annotation: = azure_service_bus_default + + + + .. py:attribute:: conn_type + :annotation: = azure_service_bus + + + + .. py:attribute:: hook_name + :annotation: = Azure Service Bus + + + + .. py:method:: get_ui_field_behaviour() + :staticmethod: + + Returns custom field behaviour + + + .. py:method:: get_conn() + :abstractmethod: + + Returns connection for the hook. + + + +.. py:class:: AdminClientHook(azure_service_bus_conn_id = default_conn_name) + + Bases: :py:obj:`BaseAzureServiceBusHook` + + Interacts with ServiceBusAdministrationClient client + to create, update, list, and delete resources of a + Service Bus namespace. This hook uses the same Azure Service Bus client connection inherited + from the base class + + .. py:method:: get_conn() + + Create and returns ServiceBusAdministrationClient by using the connection + string in connection details + + + .. py:method:: create_queue(queue_name, max_delivery_count = 10, dead_lettering_on_message_expiration = True, enable_batched_operations = True) + + Create Queue by connecting to service Bus Admin client return the QueueProperties + + :param queue_name: The name of the queue or a QueueProperties with name. + :param max_delivery_count: The maximum delivery count. A message is automatically + dead lettered after this number of deliveries. Default value is 10.. + :param dead_lettering_on_message_expiration: A value that indicates whether this subscription has + dead letter support when a message expires. + :param enable_batched_operations: Value that indicates whether server-side batched + operations are enabled. + + + .. py:method:: delete_queue(queue_name) + + Delete the queue by queue_name in service bus namespace + + :param queue_name: The name of the queue or a QueueProperties with name. + + + .. 
py:method:: delete_subscription(subscription_name, topic_name) + + Delete a topic subscription entities under a ServiceBus Namespace + + :param subscription_name: The subscription name that will own the rule in topic + :param topic_name: The topic that will own the subscription rule. + + + +.. py:class:: MessageHook(azure_service_bus_conn_id = default_conn_name) + + Bases: :py:obj:`BaseAzureServiceBusHook` + + Interacts with ServiceBusClient and acts as a high level interface + for getting ServiceBusSender and ServiceBusReceiver. + + .. py:method:: get_conn() + + Create and returns ServiceBusClient by using the connection string in connection details + + + .. py:method:: send_message(queue_name, messages, batch_message_flag = False) + + By using ServiceBusClient Send message(s) to a Service Bus Queue. By using + batch_message_flag it enables and send message as batch message + + :param queue_name: The name of the queue or a QueueProperties with name. + :param messages: Message which needs to be sent to the queue. It can be string or list of string. + :param batch_message_flag: bool flag, can be set to True if message needs to be + sent as batch message. + + + .. py:method:: send_list_messages(sender, messages) + :staticmethod: + + + .. py:method:: send_batch_message(sender, messages) + :staticmethod: + + + .. py:method:: receive_message(queue_name, max_message_count = 1, max_wait_time = None) + + Receive a batch of messages at once in a specified Queue name + + :param queue_name: The name of the queue name or a QueueProperties with name. + :param max_message_count: Maximum number of messages in the batch. + :param max_wait_time: Maximum time to wait in seconds for the first message to arrive. + + + .. py:method:: receive_subscription_message(topic_name, subscription_name, max_message_count, max_wait_time) + + Receive a batch of subscription message at once. This approach is optimal if you wish + to process multiple messages simultaneously, or perform an ad-hoc receive as a single call. + + :param subscription_name: The subscription name that will own the rule in topic + :param topic_name: The topic that will own the subscription rule. + :param max_message_count: Maximum number of messages in the batch. + Actual number returned will depend on prefetch_count and incoming stream rate. + Setting to None will fully depend on the prefetch config. The default value is 1. + :param max_wait_time: Maximum time to wait in seconds for the first message to arrive. If no + messages arrive, and no timeout is specified, this call will not return until the + connection is closed. If specified, an no messages arrive within the timeout period, + an empty list will be returned. + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/base_azure/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/base_azure/index.rst.txt new file mode 100644 index 00000000000..6564db24ec3 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/base_azure/index.rst.txt @@ -0,0 +1,70 @@ +:py:mod:`airflow.providers.microsoft.azure.hooks.base_azure` +============================================================ + +.. py:module:: airflow.providers.microsoft.azure.hooks.base_azure + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. 
autoapisummary:: + + airflow.providers.microsoft.azure.hooks.base_azure.AzureBaseHook + + + + +.. py:class:: AzureBaseHook(sdk_client, conn_id = 'azure_default') + + Bases: :py:obj:`airflow.hooks.base.BaseHook` + + This hook acts as a base hook for azure services. It offers several authentication mechanisms to + authenticate the client library used for upstream azure hooks. + + :param sdk_client: The SDKClient to use. + :param conn_id: The :ref:`Azure connection id` + which refers to the information to connect to the service. + + .. py:attribute:: conn_name_attr + :annotation: = azure_conn_id + + + + .. py:attribute:: default_conn_name + :annotation: = azure_default + + + + .. py:attribute:: conn_type + :annotation: = azure + + + + .. py:attribute:: hook_name + :annotation: = Azure + + + + .. py:method:: get_connection_form_widgets() + :staticmethod: + + Returns connection widgets to add to connection form + + + .. py:method:: get_ui_field_behaviour() + :staticmethod: + + Returns custom field behaviour + + + .. py:method:: get_conn() + + Authenticates the resource using the connection id passed during init. + + :return: the authenticated client. + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/batch/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/batch/index.rst.txt new file mode 100644 index 00000000000..ffb43c1be83 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/batch/index.rst.txt @@ -0,0 +1,169 @@ +:py:mod:`airflow.providers.microsoft.azure.hooks.batch` +======================================================= + +.. py:module:: airflow.providers.microsoft.azure.hooks.batch + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.hooks.batch.AzureBatchHook + + + + +.. py:class:: AzureBatchHook(azure_batch_conn_id = default_conn_name) + + Bases: :py:obj:`airflow.hooks.base.BaseHook` + + Hook for Azure Batch APIs + + :param azure_batch_conn_id: :ref:`Azure Batch connection id` + of a service principal which will be used to start the container instance. + + .. py:attribute:: conn_name_attr + :annotation: = azure_batch_conn_id + + + + .. py:attribute:: default_conn_name + :annotation: = azure_batch_default + + + + .. py:attribute:: conn_type + :annotation: = azure_batch + + + + .. py:attribute:: hook_name + :annotation: = Azure Batch Service + + + + .. py:method:: get_connection_form_widgets() + :staticmethod: + + Returns connection widgets to add to connection form + + + .. py:method:: get_ui_field_behaviour() + :staticmethod: + + Returns custom field behaviour + + + .. py:method:: get_conn() + + Get the Batch client connection + + :return: Azure Batch client + + + .. py:method:: configure_pool(pool_id, vm_size, vm_node_agent_sku_id, vm_publisher = None, vm_offer = None, sku_starts_with = None, vm_sku = None, vm_version = None, os_family = None, os_version = None, display_name = None, target_dedicated_nodes = None, use_latest_image_and_sku = False, **kwargs) + + Configures a pool + + :param pool_id: A string that uniquely identifies the Pool within the Account + + :param vm_size: The size of virtual machines in the Pool. + + :param display_name: The display name for the Pool + + :param target_dedicated_nodes: The desired number of dedicated Compute Nodes in the Pool. 
+ + :param use_latest_image_and_sku: Whether to use the latest verified vm image and sku + + :param vm_publisher: The publisher of the Azure Virtual Machines Marketplace Image. + For example, Canonical or MicrosoftWindowsServer. + + :param vm_offer: The offer type of the Azure Virtual Machines Marketplace Image. + For example, UbuntuServer or WindowsServer. + + :param sku_starts_with: The start name of the sku to search + + :param vm_sku: The name of the virtual machine sku to use + + :param vm_version: The version of the virtual machine + :param vm_version: str + + :param vm_node_agent_sku_id: The node agent sku id of the virtual machine + + :param os_family: The Azure Guest OS family to be installed on the virtual machines in the Pool. + + :param os_version: The OS family version + + + + .. py:method:: create_pool(pool) + + Creates a pool if not already existing + + :param pool: the pool object to create + + + + .. py:method:: wait_for_all_node_state(pool_id, node_state) + + Wait for all nodes in a pool to reach given states + + :param pool_id: A string that identifies the pool + :param node_state: A set of batch_models.ComputeNodeState + + + .. py:method:: configure_job(job_id, pool_id, display_name = None, **kwargs) + + Configures a job for use in the pool + + :param job_id: A string that uniquely identifies the job within the account + :param pool_id: A string that identifies the pool + :param display_name: The display name for the job + + + .. py:method:: create_job(job) + + Creates a job in the pool + + :param job: The job object to create + + + .. py:method:: configure_task(task_id, command_line, display_name = None, container_settings=None, **kwargs) + + Creates a task + + :param task_id: A string that identifies the task to create + :param command_line: The command line of the Task. + :param display_name: A display name for the Task + :param container_settings: The settings for the container under which the Task runs. + If the Pool that will run this Task has containerConfiguration set, + this must be set as well. If the Pool that will run this Task doesn't have + containerConfiguration set, this must not be set. + + + .. py:method:: add_single_task_to_job(job_id, task) + + Add a single task to given job if it doesn't exist + + :param job_id: A string that identifies the given job + :param task: The task to add + + + .. py:method:: wait_for_job_tasks_to_complete(job_id, timeout) + + Wait for tasks in a particular job to complete + + :param job_id: A string that identifies the job + :param timeout: The amount of time to wait before timing out in minutes + + + .. py:method:: test_connection() + + Test a configured Azure Batch connection. + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/container_instance/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/container_instance/index.rst.txt new file mode 100644 index 00000000000..86aba2eb738 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/container_instance/index.rst.txt @@ -0,0 +1,123 @@ +:py:mod:`airflow.providers.microsoft.azure.hooks.container_instance` +==================================================================== + +.. py:module:: airflow.providers.microsoft.azure.hooks.container_instance + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. 
autoapisummary:: + + airflow.providers.microsoft.azure.hooks.container_instance.AzureContainerInstanceHook + + + + +.. py:class:: AzureContainerInstanceHook(azure_conn_id = default_conn_name) + + Bases: :py:obj:`airflow.providers.microsoft.azure.hooks.base_azure.AzureBaseHook` + + A hook to communicate with Azure Container Instances. + + This hook requires a service principal in order to work. + After creating this service principal + (Azure Active Directory/App Registrations), you need to fill in the + client_id (Application ID) as login, the generated password as password, + and tenantId and subscriptionId in the extra's field as a json. + + :param azure_conn_id: :ref:`Azure connection id` of + a service principal which will be used to start the container instance. + + .. py:attribute:: conn_name_attr + :annotation: = azure_conn_id + + + + .. py:attribute:: default_conn_name + :annotation: = azure_default + + + + .. py:attribute:: conn_type + :annotation: = azure_container_instance + + + + .. py:attribute:: hook_name + :annotation: = Azure Container Instance + + + + .. py:method:: create_or_update(resource_group, name, container_group) + + Create a new container group + + :param resource_group: the name of the resource group + :param name: the name of the container group + :param container_group: the properties of the container group + + + .. py:method:: get_state_exitcode_details(resource_group, name) + + Get the state and exitcode of a container group + + :param resource_group: the name of the resource group + :param name: the name of the container group + :return: A tuple with the state, exitcode, and details. + If the exitcode is unknown 0 is returned. + + + .. py:method:: get_messages(resource_group, name) + + Get the messages of a container group + + :param resource_group: the name of the resource group + :param name: the name of the container group + :return: A list of the event messages + + + .. py:method:: get_state(resource_group, name) + + Get the state of a container group + + :param resource_group: the name of the resource group + :param name: the name of the container group + :return: ContainerGroup + + + .. py:method:: get_logs(resource_group, name, tail = 1000) + + Get the tail from logs of a container group + + :param resource_group: the name of the resource group + :param name: the name of the container group + :param tail: the size of the tail + :return: A list of log messages + + + .. py:method:: delete(resource_group, name) + + Delete a container group + + :param resource_group: the name of the resource group + :param name: the name of the container group + + + .. py:method:: exists(resource_group, name) + + Test if a container group exists + + :param resource_group: the name of the resource group + :param name: the name of the container group + + + .. py:method:: test_connection() + + Test a configured Azure Container Instance connection. 
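A hedged usage sketch (not part of the generated reference) showing how the methods above can be combined to inspect and clean up a container group; the resource group and container group names are placeholders:

.. code-block:: python

    from airflow.providers.microsoft.azure.hooks.container_instance import AzureContainerInstanceHook

    hook = AzureContainerInstanceHook(azure_conn_id="azure_default")

    resource_group, name = "my-resource-group", "my-container-group"  # placeholder names
    if hook.exists(resource_group, name):
        # get_logs returns a list of log messages for the container group
        for line in hook.get_logs(resource_group, name, tail=100):
            print(line, end="")
        hook.delete(resource_group, name)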
+ + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/container_registry/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/container_registry/index.rst.txt new file mode 100644 index 00000000000..e594722f6f6 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/container_registry/index.rst.txt @@ -0,0 +1,66 @@ +:py:mod:`airflow.providers.microsoft.azure.hooks.container_registry` +==================================================================== + +.. py:module:: airflow.providers.microsoft.azure.hooks.container_registry + +.. autoapi-nested-parse:: + + Hook for Azure Container Registry + + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.hooks.container_registry.AzureContainerRegistryHook + + + + +.. py:class:: AzureContainerRegistryHook(conn_id = 'azure_registry') + + Bases: :py:obj:`airflow.hooks.base.BaseHook` + + A hook to communicate with a Azure Container Registry. + + :param conn_id: :ref:`Azure Container Registry connection id` + of a service principal which will be used to start the container instance + + + .. py:attribute:: conn_name_attr + :annotation: = azure_container_registry_conn_id + + + + .. py:attribute:: default_conn_name + :annotation: = azure_container_registry_default + + + + .. py:attribute:: conn_type + :annotation: = azure_container_registry + + + + .. py:attribute:: hook_name + :annotation: = Azure Container Registry + + + + .. py:method:: get_ui_field_behaviour() + :staticmethod: + + Returns custom field behaviour + + + .. py:method:: get_conn() + + Returns connection for the hook. + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/container_volume/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/container_volume/index.rst.txt new file mode 100644 index 00000000000..676c090a846 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/container_volume/index.rst.txt @@ -0,0 +1,72 @@ +:py:mod:`airflow.providers.microsoft.azure.hooks.container_volume` +================================================================== + +.. py:module:: airflow.providers.microsoft.azure.hooks.container_volume + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.hooks.container_volume.AzureContainerVolumeHook + + + + +.. py:class:: AzureContainerVolumeHook(azure_container_volume_conn_id = 'azure_container_volume_default') + + Bases: :py:obj:`airflow.hooks.base.BaseHook` + + A hook which wraps an Azure Volume. + + :param azure_container_volume_conn_id: Reference to the + :ref:`Azure Container Volume connection id ` + of an Azure account of which container volumes should be used. + + .. py:attribute:: conn_name_attr + :annotation: = azure_container_volume_conn_id + + + + .. py:attribute:: default_conn_name + :annotation: = azure_container_volume_default + + + + .. py:attribute:: conn_type + :annotation: = azure_container_volume + + + + .. py:attribute:: hook_name + :annotation: = Azure Container Volume + + + + .. 
py:method:: get_connection_form_widgets() + :staticmethod: + + Returns connection widgets to add to connection form + + + .. py:method:: get_ui_field_behaviour() + :staticmethod: + + Returns custom field behaviour + + + .. py:method:: get_storagekey() + + Get Azure File Volume storage key + + + .. py:method:: get_file_volume(mount_name, share_name, storage_account_name, read_only = False) + + Get Azure File Volume + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/cosmos/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/cosmos/index.rst.txt new file mode 100644 index 00000000000..037c1262320 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/cosmos/index.rst.txt @@ -0,0 +1,166 @@ +:py:mod:`airflow.providers.microsoft.azure.hooks.cosmos` +======================================================== + +.. py:module:: airflow.providers.microsoft.azure.hooks.cosmos + +.. autoapi-nested-parse:: + + This module contains integration with Azure CosmosDB. + + AzureCosmosDBHook communicates via the Azure Cosmos library. Make sure that a + Airflow connection of type `azure_cosmos` exists. Authorization can be done by supplying a + login (=Endpoint uri), password (=secret key) and extra fields database_name and collection_name to specify + the default database and collection to use (see connection `azure_cosmos_default` for an example). + + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook + + + +Functions +~~~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.hooks.cosmos.get_database_link + airflow.providers.microsoft.azure.hooks.cosmos.get_collection_link + airflow.providers.microsoft.azure.hooks.cosmos.get_document_link + + + +.. py:class:: AzureCosmosDBHook(azure_cosmos_conn_id = default_conn_name) + + Bases: :py:obj:`airflow.hooks.base.BaseHook` + + Interacts with Azure CosmosDB. + + login should be the endpoint uri, password should be the master key + optionally, you can use the following extras to default these values + {"database_name": "", "collection_name": "COLLECTION_NAME"}. + + :param azure_cosmos_conn_id: Reference to the + :ref:`Azure CosmosDB connection`. + + .. py:attribute:: conn_name_attr + :annotation: = azure_cosmos_conn_id + + + + .. py:attribute:: default_conn_name + :annotation: = azure_cosmos_default + + + + .. py:attribute:: conn_type + :annotation: = azure_cosmos + + + + .. py:attribute:: hook_name + :annotation: = Azure CosmosDB + + + + .. py:method:: get_connection_form_widgets() + :staticmethod: + + Returns connection widgets to add to connection form + + + .. py:method:: get_ui_field_behaviour() + :staticmethod: + + Returns custom field behaviour + + + .. py:method:: get_conn() + + Return a cosmos db client. + + + .. py:method:: does_collection_exist(collection_name, database_name) + + Checks if a collection exists in CosmosDB. + + + .. py:method:: create_collection(collection_name, database_name = None, partition_key = None) + + Creates a new collection in the CosmosDB database. + + + .. py:method:: does_database_exist(database_name) + + Checks if a database exists in CosmosDB. + + + .. py:method:: create_database(database_name) + + Creates a new database in CosmosDB. + + + .. 
py:method:: delete_database(database_name) + + Deletes an existing database in CosmosDB. + + + .. py:method:: delete_collection(collection_name, database_name = None) + + Deletes an existing collection in the CosmosDB database. + + + .. py:method:: upsert_document(document, database_name=None, collection_name=None, document_id=None) + + Inserts a new document (or updates an existing one) into an existing + collection in the CosmosDB database. + + + .. py:method:: insert_documents(documents, database_name = None, collection_name = None) + + Insert a list of new documents into an existing collection in the CosmosDB database. + + + .. py:method:: delete_document(document_id, database_name = None, collection_name = None, partition_key = None) + + Delete an existing document out of a collection in the CosmosDB database. + + + .. py:method:: get_document(document_id, database_name = None, collection_name = None, partition_key = None) + + Get a document from an existing collection in the CosmosDB database. + + + .. py:method:: get_documents(sql_string, database_name = None, collection_name = None, partition_key = None) + + Get a list of documents from an existing collection in the CosmosDB database via SQL query. + + + .. py:method:: test_connection() + + Test a configured Azure Cosmos connection. + + + +.. py:function:: get_database_link(database_id) + + Get Azure CosmosDB database link + + +.. py:function:: get_collection_link(database_id, collection_id) + + Get Azure CosmosDB collection link + + +.. py:function:: get_document_link(database_id, collection_id, document_id) + + Get Azure CosmosDB document link + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/data_factory/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/data_factory/index.rst.txt new file mode 100644 index 00000000000..4edcedb9df0 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/data_factory/index.rst.txt @@ -0,0 +1,570 @@ +:py:mod:`airflow.providers.microsoft.azure.hooks.data_factory` +============================================================== + +.. py:module:: airflow.providers.microsoft.azure.hooks.data_factory + +.. autoapi-nested-parse:: + + .. spelling:: + + CreateRunResponse + DatasetResource + LinkedServiceResource + LROPoller + PipelineResource + PipelineRun + TriggerResource + datafactory + DataFlow + mgmt + + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.hooks.data_factory.PipelineRunInfo + airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryPipelineRunStatus + airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook + + + +Functions +~~~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.hooks.data_factory.provide_targeted_factory + airflow.providers.microsoft.azure.hooks.data_factory.get_field + + + +Attributes +~~~~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.hooks.data_factory.Credentials + + +.. py:data:: Credentials + + + + +.. py:function:: provide_targeted_factory(func) + + Provide the targeted factory to the decorated function in case it isn't specified. + + If ``resource_group_name`` or ``factory_name`` is not provided it defaults to the value specified in + the connection extras. + + +.. 
py:class:: PipelineRunInfo + + Bases: :py:obj:`airflow.typing_compat.TypedDict` + + Type class for the pipeline run info dictionary. + + .. py:attribute:: run_id + :annotation: :str + + + + .. py:attribute:: factory_name + :annotation: :str | None + + + + .. py:attribute:: resource_group_name + :annotation: :str | None + + + + +.. py:class:: AzureDataFactoryPipelineRunStatus + + Azure Data Factory pipeline operation statuses. + + .. py:attribute:: QUEUED + :annotation: = Queued + + + + .. py:attribute:: IN_PROGRESS + :annotation: = InProgress + + + + .. py:attribute:: SUCCEEDED + :annotation: = Succeeded + + + + .. py:attribute:: FAILED + :annotation: = Failed + + + + .. py:attribute:: CANCELING + :annotation: = Canceling + + + + .. py:attribute:: CANCELLED + :annotation: = Cancelled + + + + .. py:attribute:: TERMINAL_STATUSES + + + + + +.. py:exception:: AzureDataFactoryPipelineRunException + + Bases: :py:obj:`airflow.exceptions.AirflowException` + + An exception that indicates a pipeline run failed to complete. + + +.. py:function:: get_field(extras, field_name, strict = False) + + Get field from extra, first checking short name, then for backcompat we check for prefixed name. + + +.. py:class:: AzureDataFactoryHook(azure_data_factory_conn_id = default_conn_name) + + Bases: :py:obj:`airflow.hooks.base.BaseHook` + + A hook to interact with Azure Data Factory. + + :param azure_data_factory_conn_id: The :ref:`Azure Data Factory connection id`. + + .. py:attribute:: conn_type + :annotation: :str = azure_data_factory + + + + .. py:attribute:: conn_name_attr + :annotation: :str = azure_data_factory_conn_id + + + + .. py:attribute:: default_conn_name + :annotation: :str = azure_data_factory_default + + + + .. py:attribute:: hook_name + :annotation: :str = Azure Data Factory + + + + .. py:method:: get_connection_form_widgets() + :staticmethod: + + Returns connection widgets to add to connection form + + + .. py:method:: get_ui_field_behaviour() + :staticmethod: + + Returns custom field behaviour + + + .. py:method:: get_conn() + + Returns connection for the hook. + + + .. py:method:: get_factory(resource_group_name = None, factory_name = None, **config) + + Get the factory. + + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The factory. + + + .. py:method:: update_factory(factory, resource_group_name = None, factory_name = None, **config) + + Update the factory. + + :param factory: The factory resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the factory does not exist. + :return: The factory. + + + .. py:method:: create_factory(factory, resource_group_name = None, factory_name = None, **config) + + Create the factory. + + :param factory: The factory resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the factory already exists. + :return: The factory. + + + .. py:method:: delete_factory(resource_group_name = None, factory_name = None, **config) + + Delete the factory. + + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + + + .. 
py:method:: get_linked_service(linked_service_name, resource_group_name = None, factory_name = None, **config) + + Get the linked service. + + :param linked_service_name: The linked service name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The linked service. + + + .. py:method:: update_linked_service(linked_service_name, linked_service, resource_group_name = None, factory_name = None, **config) + + Update the linked service. + + :param linked_service_name: The linked service name. + :param linked_service: The linked service resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the linked service does not exist. + :return: The linked service. + + + .. py:method:: create_linked_service(linked_service_name, linked_service, resource_group_name = None, factory_name = None, **config) + + Create the linked service. + + :param linked_service_name: The linked service name. + :param linked_service: The linked service resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the linked service already exists. + :return: The linked service. + + + .. py:method:: delete_linked_service(linked_service_name, resource_group_name = None, factory_name = None, **config) + + Delete the linked service. + + :param linked_service_name: The linked service name. + :param resource_group_name: The linked service name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + + + .. py:method:: get_dataset(dataset_name, resource_group_name = None, factory_name = None, **config) + + Get the dataset. + + :param dataset_name: The dataset name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The dataset. + + + .. py:method:: update_dataset(dataset_name, dataset, resource_group_name = None, factory_name = None, **config) + + Update the dataset. + + :param dataset_name: The dataset name. + :param dataset: The dataset resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the dataset does not exist. + :return: The dataset. + + + .. py:method:: create_dataset(dataset_name, dataset, resource_group_name = None, factory_name = None, **config) + + Create the dataset. + + :param dataset_name: The dataset name. + :param dataset: The dataset resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the dataset already exists. + :return: The dataset. + + + .. py:method:: delete_dataset(dataset_name, resource_group_name = None, factory_name = None, **config) + + Delete the dataset. + + :param dataset_name: The dataset name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + + + .. py:method:: get_dataflow(dataflow_name, resource_group_name = None, factory_name = None, **config) + + Get the dataflow. 
+ + :param dataflow_name: The dataflow name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The dataflow. + + + .. py:method:: update_dataflow(dataflow_name, dataflow, resource_group_name = None, factory_name = None, **config) + + Update the dataflow. + + :param dataflow_name: The dataflow name. + :param dataflow: The dataflow resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the dataset does not exist. + :return: The dataflow. + + + .. py:method:: create_dataflow(dataflow_name, dataflow, resource_group_name = None, factory_name = None, **config) + + Create the dataflow. + + :param dataflow_name: The dataflow name. + :param dataflow: The dataflow resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the dataset already exists. + :return: The dataset. + + + .. py:method:: delete_dataflow(dataflow_name, resource_group_name = None, factory_name = None, **config) + + Delete the dataflow. + + :param dataflow_name: The dataflow name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + + + .. py:method:: get_pipeline(pipeline_name, resource_group_name = None, factory_name = None, **config) + + Get the pipeline. + + :param pipeline_name: The pipeline name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The pipeline. + + + .. py:method:: update_pipeline(pipeline_name, pipeline, resource_group_name = None, factory_name = None, **config) + + Update the pipeline. + + :param pipeline_name: The pipeline name. + :param pipeline: The pipeline resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the pipeline does not exist. + :return: The pipeline. + + + .. py:method:: create_pipeline(pipeline_name, pipeline, resource_group_name = None, factory_name = None, **config) + + Create the pipeline. + + :param pipeline_name: The pipeline name. + :param pipeline: The pipeline resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the pipeline already exists. + :return: The pipeline. + + + .. py:method:: delete_pipeline(pipeline_name, resource_group_name = None, factory_name = None, **config) + + Delete the pipeline. + + :param pipeline_name: The pipeline name. + :param resource_group_name: The pipeline name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + + + .. py:method:: run_pipeline(pipeline_name, resource_group_name = None, factory_name = None, **config) + + Run a pipeline. + + :param pipeline_name: The pipeline name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The pipeline run. + + + .. 
py:method:: get_pipeline_run(run_id, resource_group_name = None, factory_name = None, **config) + + Get the pipeline run. + + :param run_id: The pipeline run identifier. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The pipeline run. + + + .. py:method:: get_pipeline_run_status(run_id, resource_group_name = None, factory_name = None) + + Get a pipeline run's current status. + + :param run_id: The pipeline run identifier. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :return: The status of the pipeline run. + + + .. py:method:: wait_for_pipeline_run_status(run_id, expected_statuses, resource_group_name = None, factory_name = None, check_interval = 60, timeout = 60 * 60 * 24 * 7) + + Waits for a pipeline run to match an expected status. + + :param run_id: The pipeline run identifier. + :param expected_statuses: The desired status(es) to check against a pipeline run's current status. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param check_interval: Time in seconds to check on a pipeline run's status. + :param timeout: Time in seconds to wait for a pipeline to reach a terminal status or the expected + status. + :return: Boolean indicating if the pipeline run has reached the ``expected_status``. + + + .. py:method:: cancel_pipeline_run(run_id, resource_group_name = None, factory_name = None, **config) + + Cancel the pipeline run. + + :param run_id: The pipeline run identifier. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + + + .. py:method:: get_trigger(trigger_name, resource_group_name = None, factory_name = None, **config) + + Get the trigger. + + :param trigger_name: The trigger name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The trigger. + + + .. py:method:: update_trigger(trigger_name, trigger, resource_group_name = None, factory_name = None, **config) + + Update the trigger. + + :param trigger_name: The trigger name. + :param trigger: The trigger resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the trigger does not exist. + :return: The trigger. + + + .. py:method:: create_trigger(trigger_name, trigger, resource_group_name = None, factory_name = None, **config) + + Create the trigger. + + :param trigger_name: The trigger name. + :param trigger: The trigger resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the trigger already exists. + :return: The trigger. + + + .. py:method:: delete_trigger(trigger_name, resource_group_name = None, factory_name = None, **config) + + Delete the trigger. + + :param trigger_name: The trigger name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + + + .. py:method:: start_trigger(trigger_name, resource_group_name = None, factory_name = None, **config) + + Start the trigger. + + :param trigger_name: The trigger name. 
+ :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: An Azure operation poller. + + + .. py:method:: stop_trigger(trigger_name, resource_group_name = None, factory_name = None, **config) + + Stop the trigger. + + :param trigger_name: The trigger name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: An Azure operation poller. + + + .. py:method:: rerun_trigger(trigger_name, run_id, resource_group_name = None, factory_name = None, **config) + + Rerun the trigger. + + :param trigger_name: The trigger name. + :param run_id: The trigger run identifier. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + + + .. py:method:: cancel_trigger(trigger_name, run_id, resource_group_name = None, factory_name = None, **config) + + Cancel the trigger. + + :param trigger_name: The trigger name. + :param run_id: The trigger run identifier. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + + + .. py:method:: test_connection() + + Test a configured Azure Data Factory connection. + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/data_lake/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/data_lake/index.rst.txt new file mode 100644 index 00000000000..3aae33549e5 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/data_lake/index.rst.txt @@ -0,0 +1,148 @@ +:py:mod:`airflow.providers.microsoft.azure.hooks.data_lake` +=========================================================== + +.. py:module:: airflow.providers.microsoft.azure.hooks.data_lake + +.. autoapi-nested-parse:: + + This module contains integration with Azure Data Lake. + + AzureDataLakeHook communicates via a REST API compatible with WebHDFS. Make sure that a + Airflow connection of type `azure_data_lake` exists. Authorization can be done by supplying a + login (=Client ID), password (=Client Secret) and extra fields tenant (Tenant) and account_name (Account Name) + (see connection `azure_data_lake_default` for an example). + + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.hooks.data_lake.AzureDataLakeHook + + + + +.. py:class:: AzureDataLakeHook(azure_data_lake_conn_id = default_conn_name) + + Bases: :py:obj:`airflow.hooks.base.BaseHook` + + Interacts with Azure Data Lake. + + Client ID and client secret should be in user and password parameters. + Tenant and account name should be extra field as + {"tenant": "", "account_name": "ACCOUNT_NAME"}. + + :param azure_data_lake_conn_id: Reference to the :ref:`Azure Data Lake connection`. + + .. py:attribute:: conn_name_attr + :annotation: = azure_data_lake_conn_id + + + + .. py:attribute:: default_conn_name + :annotation: = azure_data_lake_default + + + + .. py:attribute:: conn_type + :annotation: = azure_data_lake + + + + .. py:attribute:: hook_name + :annotation: = Azure Data Lake + + + + .. 
py:method:: get_connection_form_widgets() + :staticmethod: + + Returns connection widgets to add to connection form + + + .. py:method:: get_ui_field_behaviour() + :staticmethod: + + Returns custom field behaviour + + + .. py:method:: get_conn() + + Return a AzureDLFileSystem object. + + + .. py:method:: check_for_file(file_path) + + Check if a file exists on Azure Data Lake. + + :param file_path: Path and name of the file. + :return: True if the file exists, False otherwise. + + + .. py:method:: upload_file(local_path, remote_path, nthreads = 64, overwrite = True, buffersize = 4194304, blocksize = 4194304, **kwargs) + + Upload a file to Azure Data Lake. + + :param local_path: local path. Can be single file, directory (in which case, + upload recursively) or glob pattern. Recursive glob patterns using `**` + are not supported. + :param remote_path: Remote path to upload to; if multiple files, this is the + directory root to write within. + :param nthreads: Number of threads to use. If None, uses the number of cores. + :param overwrite: Whether to forcibly overwrite existing files/directories. + If False and remote path is a directory, will quit regardless if any files + would be overwritten or not. If True, only matching filenames are actually + overwritten. + :param buffersize: int [2**22] + Number of bytes for internal buffer. This block cannot be bigger than + a chunk and cannot be smaller than a block. + :param blocksize: int [2**22] + Number of bytes for a block. Within each chunk, we write a smaller + block for each API call. This block cannot be bigger than a chunk. + + + .. py:method:: download_file(local_path, remote_path, nthreads = 64, overwrite = True, buffersize = 4194304, blocksize = 4194304, **kwargs) + + Download a file from Azure Blob Storage. + + :param local_path: local path. If downloading a single file, will write to this + specific file, unless it is an existing directory, in which case a file is + created within it. If downloading multiple files, this is the root + directory to write within. Will create directories as required. + :param remote_path: remote path/globstring to use to find remote files. + Recursive glob patterns using `**` are not supported. + :param nthreads: Number of threads to use. If None, uses the number of cores. + :param overwrite: Whether to forcibly overwrite existing files/directories. + If False and remote path is a directory, will quit regardless if any files + would be overwritten or not. If True, only matching filenames are actually + overwritten. + :param buffersize: int [2**22] + Number of bytes for internal buffer. This block cannot be bigger than + a chunk and cannot be smaller than a block. + :param blocksize: int [2**22] + Number of bytes for a block. Within each chunk, we write a smaller + block for each API call. This block cannot be bigger than a chunk. + + + .. py:method:: list(path) + + List files in Azure Data Lake Storage + + :param path: full path/globstring to use to list files in ADLS + + + .. 
py:method:: remove(path, recursive = False, ignore_not_found = True) + + Remove files in Azure Data Lake Storage + + :param path: A directory or file to remove in ADLS + :param recursive: Whether to loop into directories in the location and remove the files + :param ignore_not_found: Whether to raise error if file to delete is not found + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/fileshare/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/fileshare/index.rst.txt new file mode 100644 index 00000000000..53410d2a832 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/fileshare/index.rst.txt @@ -0,0 +1,210 @@ +:py:mod:`airflow.providers.microsoft.azure.hooks.fileshare` +=========================================================== + +.. py:module:: airflow.providers.microsoft.azure.hooks.fileshare + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook + + + + +.. py:class:: AzureFileShareHook(azure_fileshare_conn_id = 'azure_fileshare_default') + + Bases: :py:obj:`airflow.hooks.base.BaseHook` + + Interacts with Azure FileShare Storage. + + :param azure_fileshare_conn_id: Reference to the + :ref:`Azure Container Volume connection id` + of an Azure account of which container volumes should be used. + + + .. py:attribute:: conn_name_attr + :annotation: = azure_fileshare_conn_id + + + + .. py:attribute:: default_conn_name + :annotation: = azure_fileshare_default + + + + .. py:attribute:: conn_type + :annotation: = azure_fileshare + + + + .. py:attribute:: hook_name + :annotation: = Azure FileShare + + + + .. py:method:: get_connection_form_widgets() + :staticmethod: + + Returns connection widgets to add to connection form + + + .. py:method:: get_ui_field_behaviour() + :staticmethod: + + Returns custom field behaviour + + + .. py:method:: get_conn() + + Return the FileService object. + + + .. py:method:: check_for_directory(share_name, directory_name, **kwargs) + + Check if a directory exists on Azure File Share. + + :param share_name: Name of the share. + :param directory_name: Name of the directory. + :param kwargs: Optional keyword arguments that + `FileService.exists()` takes. + :return: True if the file exists, False otherwise. + + + .. py:method:: check_for_file(share_name, directory_name, file_name, **kwargs) + + Check if a file exists on Azure File Share. + + :param share_name: Name of the share. + :param directory_name: Name of the directory. + :param file_name: Name of the file. + :param kwargs: Optional keyword arguments that + `FileService.exists()` takes. + :return: True if the file exists, False otherwise. + + + .. py:method:: list_directories_and_files(share_name, directory_name = None, **kwargs) + + Return the list of directories and files stored on a Azure File Share. + + :param share_name: Name of the share. + :param directory_name: Name of the directory. + :param kwargs: Optional keyword arguments that + `FileService.list_directories_and_files()` takes. + :return: A list of files and directories + + + .. py:method:: list_files(share_name, directory_name = None, **kwargs) + + Return the list of files stored on a Azure File Share. + + :param share_name: Name of the share. + :param directory_name: Name of the directory. 
+ :param kwargs: Optional keyword arguments that + `FileService.list_directories_and_files()` takes. + :return: A list of files. + + + .. py:method:: create_share(share_name, **kwargs) + + Create a new Azure File Share. + + :param share_name: Name of the share. + :param kwargs: Optional keyword arguments that + `FileService.create_share()` takes. + :return: True if share is created, False if share already exists. + + + .. py:method:: delete_share(share_name, **kwargs) + + Delete an existing Azure File Share. + + :param share_name: Name of the share. + :param kwargs: Optional keyword arguments that + `FileService.delete_share()` takes. + :return: True if share is deleted, False if share does not exist. + + + .. py:method:: create_directory(share_name, directory_name, **kwargs) + + Create a new directory on an Azure File Share. + + :param share_name: Name of the share. + :param directory_name: Name of the directory. + :param kwargs: Optional keyword arguments that + `FileService.create_directory()` takes. + :return: A list of files and directories + + + .. py:method:: get_file(file_path, share_name, directory_name, file_name, **kwargs) + + Download a file from Azure File Share. + + :param file_path: Where to store the file. + :param share_name: Name of the share. + :param directory_name: Name of the directory. + :param file_name: Name of the file. + :param kwargs: Optional keyword arguments that + `FileService.get_file_to_path()` takes. + + + .. py:method:: get_file_to_stream(stream, share_name, directory_name, file_name, **kwargs) + + Download a file from Azure File Share. + + :param stream: A filehandle to store the file to. + :param share_name: Name of the share. + :param directory_name: Name of the directory. + :param file_name: Name of the file. + :param kwargs: Optional keyword arguments that + `FileService.get_file_to_stream()` takes. + + + .. py:method:: load_file(file_path, share_name, directory_name, file_name, **kwargs) + + Upload a file to Azure File Share. + + :param file_path: Path to the file to load. + :param share_name: Name of the share. + :param directory_name: Name of the directory. + :param file_name: Name of the file. + :param kwargs: Optional keyword arguments that + `FileService.create_file_from_path()` takes. + + + .. py:method:: load_string(string_data, share_name, directory_name, file_name, **kwargs) + + Upload a string to Azure File Share. + + :param string_data: String to load. + :param share_name: Name of the share. + :param directory_name: Name of the directory. + :param file_name: Name of the file. + :param kwargs: Optional keyword arguments that + `FileService.create_file_from_text()` takes. + + + .. py:method:: load_stream(stream, share_name, directory_name, file_name, count, **kwargs) + + Upload a stream to Azure File Share. + + :param stream: Opened file/stream to upload as the file content. + :param share_name: Name of the share. + :param directory_name: Name of the directory. + :param file_name: Name of the file. + :param count: Size of the stream in bytes. + :param kwargs: Optional keyword arguments that + `FileService.create_file_from_stream()` takes. + + + .. py:method:: test_connection() + + Test Azure FileShare connection. 
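+ **Example usage** (a minimal sketch, not taken from the upstream docstrings; it assumes a configured ``azure_fileshare_default`` connection, and the share, directory, and file names are hypothetical)::
+
+     from airflow.providers.microsoft.azure.hooks.fileshare import AzureFileShareHook
+
+     # Create a directory on a hypothetical share and upload a local file into it.
+     hook = AzureFileShareHook(azure_fileshare_conn_id="azure_fileshare_default")
+     hook.create_directory(share_name="myshare", directory_name="reports")
+     hook.load_file(
+         file_path="/tmp/report.csv",
+         share_name="myshare",
+         directory_name="reports",
+         file_name="report.csv",
+     )
+     # Verify the upload and list what is stored next to it.
+     if hook.check_for_file(share_name="myshare", directory_name="reports", file_name="report.csv"):
+         print(hook.list_directories_and_files(share_name="myshare", directory_name="reports"))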
+ + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/index.rst.txt new file mode 100644 index 00000000000..411a159fe2a --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/index.rst.txt @@ -0,0 +1,27 @@ +:py:mod:`airflow.providers.microsoft.azure.hooks` +================================================= + +.. py:module:: airflow.providers.microsoft.azure.hooks + + +Submodules +---------- +.. toctree:: + :titlesonly: + :maxdepth: 1 + + adx/index.rst + asb/index.rst + base_azure/index.rst + batch/index.rst + container_instance/index.rst + container_registry/index.rst + container_volume/index.rst + cosmos/index.rst + data_factory/index.rst + data_lake/index.rst + fileshare/index.rst + synapse/index.rst + wasb/index.rst + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/synapse/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/synapse/index.rst.txt new file mode 100644 index 00000000000..c87ab0aac49 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/synapse/index.rst.txt @@ -0,0 +1,168 @@ +:py:mod:`airflow.providers.microsoft.azure.hooks.synapse` +========================================================= + +.. py:module:: airflow.providers.microsoft.azure.hooks.synapse + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseSparkBatchRunStatus + airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseHook + + + + +Attributes +~~~~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.hooks.synapse.Credentials + + +.. py:data:: Credentials + + + + +.. py:class:: AzureSynapseSparkBatchRunStatus + + Azure Synapse Spark Job operation statuses. + + .. py:attribute:: NOT_STARTED + :annotation: = not_started + + + + .. py:attribute:: STARTING + :annotation: = starting + + + + .. py:attribute:: RUNNING + :annotation: = running + + + + .. py:attribute:: IDLE + :annotation: = idle + + + + .. py:attribute:: BUSY + :annotation: = busy + + + + .. py:attribute:: SHUTTING_DOWN + :annotation: = shutting_down + + + + .. py:attribute:: ERROR + :annotation: = error + + + + .. py:attribute:: DEAD + :annotation: = dead + + + + .. py:attribute:: KILLED + :annotation: = killed + + + + .. py:attribute:: SUCCESS + :annotation: = success + + + + .. py:attribute:: TERMINAL_STATUSES + + + + + +.. py:class:: AzureSynapseHook(azure_synapse_conn_id = default_conn_name, spark_pool = '') + + Bases: :py:obj:`airflow.hooks.base.BaseHook` + + A hook to interact with Azure Synapse. + :param azure_synapse_conn_id: The :ref:`Azure Synapse connection id`. + :param spark_pool: The Apache Spark pool used to submit the job + + .. py:attribute:: conn_type + :annotation: :str = azure_synapse + + + + .. py:attribute:: conn_name_attr + :annotation: :str = azure_synapse_conn_id + + + + .. py:attribute:: default_conn_name + :annotation: :str = azure_synapse_default + + + + .. py:attribute:: hook_name + :annotation: :str = Azure Synapse + + + + .. 
py:method:: get_connection_form_widgets() + :staticmethod: + + Returns connection widgets to add to the connection form. + + + .. py:method:: get_ui_field_behaviour() + :staticmethod: + + Returns custom field behaviour. + + + .. py:method:: get_conn() + + Returns the connection for the hook. + + + .. py:method:: run_spark_job(payload) + + Run a job in an Apache Spark pool. + :param payload: Livy-compatible payload which represents the Spark job that a user wants to submit. + + + .. py:method:: get_job_run_status() + + Get the job run status. + + + .. py:method:: wait_for_job_run_status(job_id, expected_statuses, check_interval = 60, timeout = 60 * 60 * 24 * 7) + + Waits for a job run to match an expected status. + + :param job_id: The job run identifier. + :param expected_statuses: The desired status(es) to check against a job run's current status. + :param check_interval: Time in seconds to check on a job run's status. + :param timeout: Time in seconds to wait for a job to reach a terminal status or the expected + status. + + + + .. py:method:: cancel_job_run(job_id) + + Cancel the Spark job run. + :param job_id: The Synapse Spark job identifier. + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/wasb/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/wasb/index.rst.txt new file mode 100644 index 00000000000..ed6a4d90706 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/hooks/wasb/index.rst.txt @@ -0,0 +1,227 @@ +:py:mod:`airflow.providers.microsoft.azure.hooks.wasb` +====================================================== + +.. py:module:: airflow.providers.microsoft.azure.hooks.wasb + +.. autoapi-nested-parse:: + + This module contains integration with Azure Blob Storage. + + It communicates via the Windows Azure Storage Blob protocol. Make sure that an + Airflow connection of type `wasb` exists. Authorization can be done by supplying a + login (=Storage account name) and password (=KEY), or login and SAS token in the extra + field (see connection `wasb_default` for an example). + + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.hooks.wasb.WasbHook + + + + +.. py:class:: WasbHook(wasb_conn_id = default_conn_name, public_read = False) + + Bases: :py:obj:`airflow.hooks.base.BaseHook` + + Interacts with Azure Blob Storage through the ``wasb://`` protocol. + + These parameters have to be passed in via the Airflow connection: account_name and account_key. + + Additional options passed in the 'extra' field of the connection will be + passed to the `BlockBlobService()` constructor. For example, authenticate + using a SAS token by adding {"sas_token": "YOUR_TOKEN"}. + + If no authentication configuration is provided, DefaultAzureCredential will be used (applicable + when using Azure compute infrastructure). + + :param wasb_conn_id: Reference to the :ref:`wasb connection `. + :param public_read: Whether an anonymous public read access should be used. Default is False. + + .. py:attribute:: conn_name_attr + :annotation: = wasb_conn_id + + + + .. py:attribute:: default_conn_name + :annotation: = wasb_default + + + + .. py:attribute:: conn_type + :annotation: = wasb + + + + .. py:attribute:: hook_name + :annotation: = Azure Blob Storage + + + + .. 
py:method:: get_connection_form_widgets() + :staticmethod: + + Returns connection widgets to add to the connection form. + + + .. py:method:: get_ui_field_behaviour() + :staticmethod: + + Returns custom field behaviour. + + + .. py:method:: get_conn() + + Return the BlobServiceClient object. + + + .. py:method:: check_for_blob(container_name, blob_name, **kwargs) + + Check if a blob exists on Azure Blob Storage. + + :param container_name: Name of the container. + :param blob_name: Name of the blob. + :param kwargs: Optional keyword arguments that ``BlobClient.get_blob_properties`` takes. + :return: True if the blob exists, False otherwise. + + + .. py:method:: check_for_prefix(container_name, prefix, **kwargs) + + Check if a prefix exists on Azure Blob Storage. + + :param container_name: Name of the container. + :param prefix: Prefix of the blob. + :param kwargs: Optional keyword arguments that ``ContainerClient.walk_blobs`` takes. + :return: True if blobs matching the prefix exist, False otherwise. + + + .. py:method:: get_blobs_list(container_name, prefix = None, include = None, delimiter = '/', **kwargs) + + List blobs in a given container. + + :param container_name: The name of the container. + :param prefix: Filters the results to return only blobs whose names + begin with the specified prefix. + :param include: Specifies one or more additional datasets to include in the + response. Options include: ``snapshots``, ``metadata``, ``uncommittedblobs``, + ``copy``, ``deleted``. + :param delimiter: Filters objects based on the delimiter (e.g. '.csv'). + + + .. py:method:: load_file(file_path, container_name, blob_name, create_container = False, **kwargs) + + Upload a file to Azure Blob Storage. + + :param file_path: Path to the file to load. + :param container_name: Name of the container. + :param blob_name: Name of the blob. + :param create_container: Attempt to create the target container prior to uploading the blob. This is + useful if the target container may not exist yet. Defaults to False. + :param kwargs: Optional keyword arguments that ``BlobClient.upload_blob()`` takes. + + + .. py:method:: load_string(string_data, container_name, blob_name, create_container = False, **kwargs) + + Upload a string to Azure Blob Storage. + + :param string_data: String to load. + :param container_name: Name of the container. + :param blob_name: Name of the blob. + :param create_container: Attempt to create the target container prior to uploading the blob. This is + useful if the target container may not exist yet. Defaults to False. + :param kwargs: Optional keyword arguments that ``BlobClient.upload()`` takes. + + + .. py:method:: get_file(file_path, container_name, blob_name, **kwargs) + + Download a file from Azure Blob Storage. + + :param file_path: Path to the file to download. + :param container_name: Name of the container. + :param blob_name: Name of the blob. + :param kwargs: Optional keyword arguments that `BlobClient.download_blob()` takes. + + + .. py:method:: read_file(container_name, blob_name, **kwargs) + + Read a file from Azure Blob Storage and return as a string. + + :param container_name: Name of the container. + :param blob_name: Name of the blob. + :param kwargs: Optional keyword arguments that `BlobClient.download_blob` takes. + + + .. py:method:: upload(container_name, blob_name, data, blob_type = 'BlockBlob', length = None, create_container = False, **kwargs) + + Creates a new blob from a data source with automatic chunking. 
+ + :param container_name: The name of the container to upload data + :param blob_name: The name of the blob to upload. This need not exist in the container + :param data: The blob data to upload + :param blob_type: The type of the blob. This can be either ``BlockBlob``, + ``PageBlob`` or ``AppendBlob``. The default value is ``BlockBlob``. + :param length: Number of bytes to read from the stream. This is optional, + but should be supplied for optimal performance. + :param create_container: Attempt to create the target container prior to uploading the blob. This is + useful if the target container may not exist yet. Defaults to False. + + + .. py:method:: download(container_name, blob_name, offset = None, length = None, **kwargs) + + Downloads a blob to the StorageStreamDownloader + + :param container_name: The name of the container containing the blob + :param blob_name: The name of the blob to download + :param offset: Start of byte range to use for downloading a section of the blob. + Must be set if length is provided. + :param length: Number of bytes to read from the stream. + + + .. py:method:: create_container(container_name) + + Create container object if not already existing + + :param container_name: The name of the container to create + + + .. py:method:: delete_container(container_name) + + Delete a container object + + :param container_name: The name of the container + + + .. py:method:: delete_blobs(container_name, *blobs, **kwargs) + + Marks the specified blobs or snapshots for deletion. + + :param container_name: The name of the container containing the blobs + :param blobs: The blobs to delete. This can be a single blob, or multiple values + can be supplied, where each value is either the name of the blob (str) or BlobProperties. + + + .. py:method:: delete_file(container_name, blob_name, is_prefix = False, ignore_if_missing = False, delimiter = '', **kwargs) + + Delete a file from Azure Blob Storage. + + :param container_name: Name of the container. + :param blob_name: Name of the blob. + :param is_prefix: If blob_name is a prefix, delete all matching files + :param ignore_if_missing: if True, then return success even if the + blob does not exist. + :param kwargs: Optional keyword arguments that ``ContainerClient.delete_blobs()`` takes. + + + .. py:method:: test_connection() + + Test Azure Blob Storage connection. + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/index.rst.txt new file mode 100644 index 00000000000..ef3f121206c --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/index.rst.txt @@ -0,0 +1,29 @@ +:py:mod:`airflow.providers.microsoft.azure` +=========================================== + +.. py:module:: airflow.providers.microsoft.azure + + +Subpackages +----------- +.. toctree:: + :titlesonly: + :maxdepth: 3 + + hooks/index.rst + log/index.rst + operators/index.rst + secrets/index.rst + sensors/index.rst + transfers/index.rst + + +Submodules +---------- +.. 
toctree:: + :titlesonly: + :maxdepth: 1 + + utils/index.rst + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/log/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/log/index.rst.txt new file mode 100644 index 00000000000..fcc54938a78 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/log/index.rst.txt @@ -0,0 +1,15 @@ +:py:mod:`airflow.providers.microsoft.azure.log` +=============================================== + +.. py:module:: airflow.providers.microsoft.azure.log + + +Submodules +---------- +.. toctree:: + :titlesonly: + :maxdepth: 1 + + wasb_task_handler/index.rst + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/log/wasb_task_handler/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/log/wasb_task_handler/index.rst.txt new file mode 100644 index 00000000000..08cd2a1da4b --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/log/wasb_task_handler/index.rst.txt @@ -0,0 +1,74 @@ +:py:mod:`airflow.providers.microsoft.azure.log.wasb_task_handler` +================================================================= + +.. py:module:: airflow.providers.microsoft.azure.log.wasb_task_handler + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.log.wasb_task_handler.WasbTaskHandler + + + + +.. py:class:: WasbTaskHandler(base_log_folder, wasb_log_folder, wasb_container, delete_local_copy, *, filename_template = None) + + Bases: :py:obj:`airflow.utils.log.file_task_handler.FileTaskHandler`, :py:obj:`airflow.utils.log.logging_mixin.LoggingMixin` + + WasbTaskHandler is a python log handler that handles and reads + task instance logs. It extends airflow FileTaskHandler and + uploads to and reads from Wasb remote storage. + + .. py:method:: hook() + + Returns WasbHook. + + + .. py:method:: set_context(ti) + + Provide task_instance context to airflow task handler. + + :param ti: task instance object + + + .. py:method:: close() + + Close and upload local log file to remote storage Wasb. + + + .. py:method:: wasb_log_exists(remote_log_location) + + Check if remote_log_location exists in remote storage + + :param remote_log_location: log's location in remote storage + :return: True if location exists else False + + + .. py:method:: wasb_read(remote_log_location, return_error = False) + + Returns the log found at the remote_log_location. Returns '' if no + logs are found or there is an error. + + :param remote_log_location: the log's location in remote storage + :param return_error: if True, returns a string error message if an + error occurs. Otherwise returns '' when an error occurs. + + + .. py:method:: wasb_write(log, remote_log_location, append = True) + + Writes the log to the remote_log_location. Fails silently if no hook + was created. + + :param log: the log to write to the remote_log_location + :param remote_log_location: the log's location in remote storage + :param append: if False, any existing log file is overwritten. If True, + the new log is appended to any existing logs. 
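+ In a deployment this handler is normally wired in through Airflow's remote-logging configuration rather than instantiated by hand; the snippet below is only a hedged sketch of the methods documented above, with hypothetical folder, container, and log paths::
+
+     from airflow.providers.microsoft.azure.log.wasb_task_handler import WasbTaskHandler
+
+     # Constructor arguments follow the signature documented above; the values are hypothetical.
+     handler = WasbTaskHandler(
+         base_log_folder="/opt/airflow/logs",
+         wasb_log_folder="wasb-airflow-logs",
+         wasb_container="airflow-logs",
+         delete_local_copy=True,
+     )
+     # Check whether a task log already exists remotely, and read it back if so.
+     if handler.wasb_log_exists("my_dag/my_task/2022-12-14/1.log"):
+         print(handler.wasb_read("my_dag/my_task/2022-12-14/1.log", return_error=True))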
+ + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/adls/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/adls/index.rst.txt new file mode 100644 index 00000000000..8083e55c792 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/adls/index.rst.txt @@ -0,0 +1,96 @@ +:py:mod:`airflow.providers.microsoft.azure.operators.adls` +========================================================== + +.. py:module:: airflow.providers.microsoft.azure.operators.adls + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.operators.adls.ADLSDeleteOperator + airflow.providers.microsoft.azure.operators.adls.ADLSListOperator + + + + +.. py:class:: ADLSDeleteOperator(*, path, recursive = False, ignore_not_found = True, azure_data_lake_conn_id = 'azure_data_lake_default', **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Delete files in the specified path. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:ADLSDeleteOperator` + + :param path: A directory or file to remove + :param recursive: Whether to loop into directories in the location and remove the files + :param ignore_not_found: Whether to raise error if file to delete is not found + :param azure_data_lake_conn_id: Reference to the :ref:`Azure Data Lake connection`. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['path'] + + + + .. py:attribute:: ui_color + :annotation: = #901dd2 + + + + .. py:method:: execute(context) + + This is the main method to derive when creating an operator. + Context is the same dictionary used as when rendering jinja templates. + + Refer to get_template_context for more context. + + + +.. py:class:: ADLSListOperator(*, path, azure_data_lake_conn_id = 'azure_data_lake_default', **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + List all files from the specified path + + This operator returns a python list with the names of files which can be used by + `xcom` in the downstream tasks. + + :param path: The Azure Data Lake path to find the objects. Supports glob + strings (templated) + :param azure_data_lake_conn_id: Reference to the :ref:`Azure Data Lake connection`. + + **Example**: + The following Operator would list all the Parquet files from ``folder/output/`` + folder in the specified ADLS account :: + + adls_files = ADLSListOperator( + task_id='adls_files', + path='folder/output/*.parquet', + azure_data_lake_conn_id='azure_data_lake_default' + ) + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['path'] + + + + .. py:attribute:: ui_color + :annotation: = #901dd2 + + + + .. py:method:: execute(context) + + This is the main method to derive when creating an operator. + Context is the same dictionary used as when rendering jinja templates. + + Refer to get_template_context for more context. 
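+ A deletion counterpart to the listing example above (a minimal sketch; the task id and glob are hypothetical and assume the default ``azure_data_lake_default`` connection)::
+
+     from airflow.providers.microsoft.azure.operators.adls import ADLSDeleteOperator
+
+     remove_run_output = ADLSDeleteOperator(
+         task_id="remove_run_output",
+         path="folder/output/*.parquet",  # glob of the files to remove
+         recursive=False,
+         ignore_not_found=True,
+         azure_data_lake_conn_id="azure_data_lake_default",
+     )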
+ + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/adx/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/adx/index.rst.txt new file mode 100644 index 00000000000..5ed578c4e77 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/adx/index.rst.txt @@ -0,0 +1,65 @@ +:py:mod:`airflow.providers.microsoft.azure.operators.adx` +========================================================= + +.. py:module:: airflow.providers.microsoft.azure.operators.adx + +.. autoapi-nested-parse:: + + This module contains Azure Data Explorer operators + + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.operators.adx.AzureDataExplorerQueryOperator + + + + +.. py:class:: AzureDataExplorerQueryOperator(*, query, database, options = None, azure_data_explorer_conn_id = 'azure_data_explorer_default', **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Operator for querying Azure Data Explorer (Kusto). + + :param query: KQL query to run (templated). + :param database: Database to run the query on (templated). + :param options: Optional query options. See: + https://docs.microsoft.com/en-us/azure/kusto/api/netfx/request-properties#list-of-clientrequestproperties + :param azure_data_explorer_conn_id: Reference to the + :ref:`Azure Data Explorer connection`. + + .. py:attribute:: ui_color + :annotation: = #00a1f2 + + + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['query', 'database'] + + + + .. py:attribute:: template_ext + :annotation: :Sequence[str] = ['.kql'] + + + + .. py:method:: get_hook() + + Returns new instance of AzureDataExplorerHook + + + .. py:method:: execute(context) + + Run KQL Query on Azure Data Explorer (Kusto). + Returns `PrimaryResult` of Query v2 HTTP response contents + (https://docs.microsoft.com/en-us/azure/kusto/api/rest/response2) + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/asb/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/asb/index.rst.txt new file mode 100644 index 00000000000..44eba28f4d5 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/asb/index.rst.txt @@ -0,0 +1,417 @@ +:py:mod:`airflow.providers.microsoft.azure.operators.asb` +========================================================= + +.. py:module:: airflow.providers.microsoft.azure.operators.asb + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. 
autoapisummary:: + + airflow.providers.microsoft.azure.operators.asb.AzureServiceBusCreateQueueOperator + airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSendMessageOperator + airflow.providers.microsoft.azure.operators.asb.AzureServiceBusReceiveMessageOperator + airflow.providers.microsoft.azure.operators.asb.AzureServiceBusDeleteQueueOperator + airflow.providers.microsoft.azure.operators.asb.AzureServiceBusTopicCreateOperator + airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSubscriptionCreateOperator + airflow.providers.microsoft.azure.operators.asb.AzureServiceBusUpdateSubscriptionOperator + airflow.providers.microsoft.azure.operators.asb.ASBReceiveSubscriptionMessageOperator + airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSubscriptionDeleteOperator + airflow.providers.microsoft.azure.operators.asb.AzureServiceBusTopicDeleteOperator + + + + +.. py:class:: AzureServiceBusCreateQueueOperator(*, queue_name, max_delivery_count = 10, dead_lettering_on_message_expiration = True, enable_batched_operations = True, azure_service_bus_conn_id = 'azure_service_bus_default', **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Creates an Azure Service Bus queue under a Service Bus Namespace by using ServiceBusAdministrationClient. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureServiceBusCreateQueueOperator` + + :param queue_name: The name of the queue; should be unique. + :param max_delivery_count: The maximum delivery count. A message is automatically + dead lettered after this number of deliveries. Default value is 10. + :param dead_lettering_on_message_expiration: A value that indicates whether this queue has + dead letter support when a message expires. + :param enable_batched_operations: Value that indicates whether server-side batched + operations are enabled. + :param azure_service_bus_conn_id: Reference to the + :ref:`Azure Service Bus connection`. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['queue_name'] + + + + .. py:attribute:: ui_color + :annotation: = #e4f0e8 + + + + .. py:method:: execute(context) + + Creates a queue in the Azure Service Bus namespace, by connecting to the Service Bus Admin client in the hook. + + + +.. py:class:: AzureServiceBusSendMessageOperator(*, queue_name, message, batch = False, azure_service_bus_conn_id = 'azure_service_bus_default', **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Send a message or a batch of messages to the Service Bus queue. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureServiceBusSendMessageOperator` + + :param queue_name: The name of the queue; should be unique. + :param message: Message which needs to be sent to the queue. It can be a string or a list of strings. + :param batch: A boolean flag; by default it is set to False. Set it to True if the message needs to be sent + as a batch message. + :param azure_service_bus_conn_id: Reference to the + :ref:`Azure Service Bus connection`. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['queue_name'] + + + + .. py:attribute:: ui_color + :annotation: = #e4f0e8 + + + + .. py:method:: execute(context) + + Sends the message to the specified queue in the Service Bus namespace, by + connecting to the Service Bus client. + + + +.. 
py:class:: AzureServiceBusReceiveMessageOperator(*, queue_name, azure_service_bus_conn_id = 'azure_service_bus_default', max_message_count = 10, max_wait_time = 5, **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Receive a batch of messages at once from a specified queue. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureServiceBusReceiveMessageOperator` + + :param queue_name: The name of the queue, or a QueueProperties instance with a name. + :param max_message_count: Maximum number of messages in the batch. + :param max_wait_time: Maximum time to wait in seconds for the first message to arrive. + :param azure_service_bus_conn_id: Reference to the + :ref:`Azure Service Bus connection`. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['queue_name'] + + + + .. py:attribute:: ui_color + :annotation: = #e4f0e8 + + + + .. py:method:: execute(context) + + Receives messages from the specified queue in the Service Bus namespace, + by connecting to the Service Bus client. + + + +.. py:class:: AzureServiceBusDeleteQueueOperator(*, queue_name, azure_service_bus_conn_id = 'azure_service_bus_default', **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Deletes the queue in the Azure Service Bus namespace. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureServiceBusDeleteQueueOperator` + + :param queue_name: The name of the queue in the Service Bus namespace. + :param azure_service_bus_conn_id: Reference to the + :ref:`Azure Service Bus connection`. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['queue_name'] + + + + .. py:attribute:: ui_color + :annotation: = #e4f0e8 + + + + .. py:method:: execute(context) + + Deletes the queue in the Service Bus namespace, by connecting to the Service Bus Admin client. + + + +.. py:class:: AzureServiceBusTopicCreateOperator(*, topic_name, azure_service_bus_conn_id = 'azure_service_bus_default', default_message_time_to_live = None, max_size_in_megabytes = None, requires_duplicate_detection = None, duplicate_detection_history_time_window = None, enable_batched_operations = None, size_in_bytes = None, filtering_messages_before_publishing = None, authorization_rules = None, support_ordering = None, auto_delete_on_idle = None, enable_partitioning = None, enable_express = None, user_metadata = None, max_message_size_in_kilobytes = None, **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Create an Azure Service Bus Topic under a Service Bus Namespace by using ServiceBusAdministrationClient. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureServiceBusTopicCreateOperator` + + :param topic_name: Name of the topic. + :param default_message_time_to_live: ISO 8601 default message time span to live value. This is + the duration after which the message expires, starting from when the message is sent to Service + Bus. This is the default value used when TimeToLive is not set on a message itself. + Input value of either type ~datetime.timedelta or string in ISO 8601 duration format + like "PT300S" is accepted. + :param max_size_in_megabytes: The maximum size of the topic in megabytes, which is the size of + memory allocated for the topic. + :param requires_duplicate_detection: A value indicating if this topic requires duplicate + detection. 
+ :param duplicate_detection_history_time_window: ISO 8601 time span structure that defines the + duration of the duplicate detection history. The default value is 10 minutes. + Input value of either type ~datetime.timedelta or string in ISO 8601 duration format + like "PT300S" is accepted. + :param enable_batched_operations: Value that indicates whether server-side batched operations + are enabled. + :param size_in_bytes: The size of the topic, in bytes. + :param filtering_messages_before_publishing: Filter messages before publishing. + :param authorization_rules: List of Authorization rules for resource. + :param support_ordering: A value that indicates whether the topic supports ordering. + :param auto_delete_on_idle: ISO 8601 time span idle interval after which the topic is + automatically deleted. The minimum duration is 5 minutes. + Input value of either type ~datetime.timedelta or string in ISO 8601 duration format + like "PT300S" is accepted. + :param enable_partitioning: A value that indicates whether the topic is to be partitioned + across multiple message brokers. + :param enable_express: A value that indicates whether Express Entities are enabled. An express + queue holds a message in memory temporarily before writing it to persistent storage. + :param user_metadata: Metadata associated with the topic. + :param max_message_size_in_kilobytes: The maximum size in kilobytes of message payload that + can be accepted by the queue. This feature is only available when using a Premium namespace + and Service Bus API version "2021-05" or higher. + The minimum allowed value is 1024 while the maximum allowed value is 102400. Default value is 1024. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['topic_name'] + + + + .. py:attribute:: ui_color + :annotation: = #e4f0e8 + + + + .. py:method:: execute(context) + + Creates Topic in Service Bus namespace, by connecting to Service Bus Admin client + + + +.. py:class:: AzureServiceBusSubscriptionCreateOperator(*, topic_name, subscription_name, azure_service_bus_conn_id = 'azure_service_bus_default', lock_duration = None, requires_session = None, default_message_time_to_live = None, dead_lettering_on_message_expiration = True, dead_lettering_on_filter_evaluation_exceptions = None, max_delivery_count = 10, enable_batched_operations = True, forward_to = None, user_metadata = None, forward_dead_lettered_messages_to = None, auto_delete_on_idle = None, **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Create an Azure Service Bus Topic Subscription under a Service Bus Namespace + by using ServiceBusAdministrationClient + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureServiceBusSubscriptionCreateOperator` + + :param topic_name: The topic that will own the to-be-created subscription. + :param subscription_name: Name of the subscription that need to be created + :param lock_duration: ISO 8601 time span duration of a peek-lock; that is, the amount of time that + the message is locked for other receivers. The maximum value for LockDuration is 5 minutes; the + default value is 1 minute. Input value of either type ~datetime.timedelta or string in ISO 8601 + duration format like "PT300S" is accepted. + :param requires_session: A value that indicates whether the queue supports the concept of sessions. + :param default_message_time_to_live: ISO 8601 default message time span to live value. 
This is the + duration after which the message expires, starting from when the message is sent to + Service Bus. This is the default value used when TimeToLive is not set on a message itself. + Input value of either type ~datetime.timedelta or string in ISO 8601 duration + format like "PT300S" is accepted. + :param dead_lettering_on_message_expiration: A value that indicates whether this subscription has + dead letter support when a message expires. + :param dead_lettering_on_filter_evaluation_exceptions: A value that indicates whether this + subscription has dead letter support when a message expires. + :param max_delivery_count: The maximum delivery count. A message is automatically dead lettered + after this number of deliveries. Default value is 10. + :param enable_batched_operations: Value that indicates whether server-side batched + operations are enabled. + :param forward_to: The name of the recipient entity to which all the messages sent to the + subscription are forwarded to. + :param user_metadata: Metadata associated with the subscription. Maximum number of characters is 1024. + :param forward_dead_lettered_messages_to: The name of the recipient entity to which all the + messages sent to the subscription are forwarded to. + :param auto_delete_on_idle: ISO 8601 time Span idle interval after which the subscription is + automatically deleted. The minimum duration is 5 minutes. Input value of either + type ~datetime.timedelta or string in ISO 8601 duration format like "PT300S" is accepted. + :param azure_service_bus_conn_id: Reference to the + :ref:`Azure Service Bus connection`. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['topic_name', 'subscription_name'] + + + + .. py:attribute:: ui_color + :annotation: = #e4f0e8 + + + + .. py:method:: execute(context) + + Creates Subscription in Service Bus namespace, by connecting to Service Bus Admin client + + + +.. py:class:: AzureServiceBusUpdateSubscriptionOperator(*, topic_name, subscription_name, max_delivery_count = None, dead_lettering_on_message_expiration = None, enable_batched_operations = None, azure_service_bus_conn_id = 'azure_service_bus_default', **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Update an Azure ServiceBus Topic Subscription under a ServiceBus Namespace + by using ServiceBusAdministrationClient + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureServiceBusUpdateSubscriptionOperator` + + :param topic_name: The topic that will own the to-be-created subscription. + :param subscription_name: Name of the subscription that need to be created. + :param max_delivery_count: The maximum delivery count. A message is automatically dead lettered + after this number of deliveries. Default value is 10. + :param dead_lettering_on_message_expiration: A value that indicates whether this subscription + has dead letter support when a message expires. + :param enable_batched_operations: Value that indicates whether server-side batched + operations are enabled. + :param azure_service_bus_conn_id: Reference to the + :ref:`Azure Service Bus connection`. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['topic_name', 'subscription_name'] + + + + .. py:attribute:: ui_color + :annotation: = #e4f0e8 + + + + .. py:method:: execute(context) + + Updates Subscription properties, by connecting to Service Bus Admin client + + + +.. 
py:class:: ASBReceiveSubscriptionMessageOperator(*, topic_name, subscription_name, max_message_count = 1, max_wait_time = 5, azure_service_bus_conn_id = 'azure_service_bus_default', **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Receive a batch of messages from a Service Bus Subscription under a specific Topic. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:ASBReceiveSubscriptionMessageOperator` + + :param subscription_name: The subscription name that will own the rule in the topic. + :param topic_name: The topic that will own the subscription rule. + :param max_message_count: Maximum number of messages in the batch. + Actual number returned will depend on prefetch_count and incoming stream rate. + Setting to None will fully depend on the prefetch config. The default value is 1. + :param max_wait_time: Maximum time to wait in seconds for the first message to arrive. If no + messages arrive, and no timeout is specified, this call will not return until the + connection is closed. If specified, and no messages arrive within the timeout period, + an empty list will be returned. + :param azure_service_bus_conn_id: Reference to the + :ref:`Azure Service Bus connection`. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['topic_name', 'subscription_name'] + + + + .. py:attribute:: ui_color + :annotation: = #e4f0e8 + + + + .. py:method:: execute(context) + + Receives messages from the specified topic subscription in the Service Bus namespace, + by connecting to the Service Bus client. + + + +.. py:class:: AzureServiceBusSubscriptionDeleteOperator(*, topic_name, subscription_name, azure_service_bus_conn_id = 'azure_service_bus_default', **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Deletes the topic subscription in the Azure Service Bus namespace. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureServiceBusSubscriptionDeleteOperator` + + :param topic_name: The topic that owns the to-be-deleted subscription. + :param subscription_name: Name of the subscription to be deleted. + :param azure_service_bus_conn_id: Reference to the + :ref:`Azure Service Bus connection`. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['topic_name', 'subscription_name'] + + + + .. py:attribute:: ui_color + :annotation: = #e4f0e8 + + + + .. py:method:: execute(context) + + Deletes the topic subscription in the Service Bus namespace, by connecting to the Service Bus Admin client. + + + +.. py:class:: AzureServiceBusTopicDeleteOperator(*, topic_name, azure_service_bus_conn_id = 'azure_service_bus_default', **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Deletes the topic in the Azure Service Bus namespace. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureServiceBusTopicDeleteOperator` + + :param topic_name: Name of the topic to be deleted. + :param azure_service_bus_conn_id: Reference to the + :ref:`Azure Service Bus connection`. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['topic_name'] + + + + .. py:attribute:: ui_color + :annotation: = #e4f0e8 + + + + .. 
py:method:: execute(context) + + Delete topic in Service Bus namespace, by connecting to Service Bus Admin client + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/batch/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/batch/index.rst.txt new file mode 100644 index 00000000000..f17de4f3312 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/batch/index.rst.txt @@ -0,0 +1,124 @@ +:py:mod:`airflow.providers.microsoft.azure.operators.batch` +=========================================================== + +.. py:module:: airflow.providers.microsoft.azure.operators.batch + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.operators.batch.AzureBatchOperator + + + + +.. py:class:: AzureBatchOperator(*, batch_pool_id, batch_pool_vm_size, batch_job_id, batch_task_command_line, batch_task_id, vm_node_agent_sku_id, vm_publisher = None, vm_offer = None, sku_starts_with = None, vm_sku = None, vm_version = None, os_family = None, os_version = None, batch_pool_display_name = None, batch_job_display_name = None, batch_job_manager_task = None, batch_job_preparation_task = None, batch_job_release_task = None, batch_task_display_name = None, batch_task_container_settings = None, batch_start_task = None, batch_max_retries = 3, batch_task_resource_files = None, batch_task_output_files = None, batch_task_user_identity = None, target_low_priority_nodes = None, target_dedicated_nodes = None, enable_auto_scale = False, auto_scale_formula = None, azure_batch_conn_id='azure_batch_default', use_latest_verified_vm_image_and_sku = False, timeout = 25, should_delete_job = False, should_delete_pool = False, **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Executes a job on Azure Batch Service + + :param batch_pool_id: A string that uniquely identifies the Pool within the Account. + :param batch_pool_vm_size: The size of virtual machines in the Pool + :param batch_job_id: A string that uniquely identifies the Job within the Account. + :param batch_task_command_line: The command line of the Task + :param batch_task_id: A string that uniquely identifies the task within the Job. + :param batch_pool_display_name: The display name for the Pool. + The display name need not be unique + :param batch_job_display_name: The display name for the Job. + The display name need not be unique + :param batch_job_manager_task: Details of a Job Manager Task to be launched when the Job is started. + :param batch_job_preparation_task: The Job Preparation Task. If set, the Batch service will + run the Job Preparation Task on a Node before starting any Tasks of that + Job on that Compute Node. Required if batch_job_release_task is set. + :param batch_job_release_task: The Job Release Task. Use to undo changes to Compute Nodes + made by the Job Preparation Task + :param batch_task_display_name: The display name for the task. + The display name need not be unique + :param batch_task_container_settings: The settings for the container under which the Task runs + :param batch_start_task: A Task specified to run on each Compute Node as it joins the Pool. + The Task runs when the Compute Node is added to the Pool or + when the Compute Node is restarted. 
+ :param batch_max_retries: The number of times to retry this batch operation before it's + considered a failed operation. Default is 3. + :param batch_task_resource_files: A list of files that the Batch service will + download to the Compute Node before running the command line. + :param batch_task_output_files: A list of files that the Batch service will upload + from the Compute Node after running the command line. + :param batch_task_user_identity: The user identity under which the Task runs. + If omitted, the Task runs as a non-administrative user unique to the Task. + :param target_low_priority_nodes: The desired number of low-priority Compute Nodes in the Pool. + This property must not be specified if enable_auto_scale is set to true. + :param target_dedicated_nodes: The desired number of dedicated Compute Nodes in the Pool. + This property must not be specified if enable_auto_scale is set to true. + :param enable_auto_scale: Whether the Pool size should automatically adjust over time. Default is false. + :param auto_scale_formula: A formula for the desired number of Compute Nodes in the Pool. + This property must not be specified if enableAutoScale is set to false. + It is required if enableAutoScale is set to true. + :param azure_batch_conn_id: The :ref:`Azure Batch connection id` + :param use_latest_verified_vm_image_and_sku: Whether to use the latest verified virtual + machine image and sku in the batch account. Default is false. + :param vm_publisher: The publisher of the Azure Virtual Machines Marketplace Image. + For example, Canonical or MicrosoftWindowsServer. Required if + use_latest_verified_vm_image_and_sku is set to True. + :param vm_offer: The offer type of the Azure Virtual Machines Marketplace Image. + For example, UbuntuServer or WindowsServer. Required if + use_latest_verified_vm_image_and_sku is set to True. + :param sku_starts_with: The starting string of the Virtual Machine SKU. Required if + use_latest_verified_vm_image_and_sku is set to True. + :param vm_sku: The name of the virtual machine sku to use. + :param vm_version: The version of the virtual machine. + :param vm_node_agent_sku_id: The node agent sku id of the virtual machine. + :param os_family: The Azure Guest OS family to be installed on the virtual machines in the Pool. + :param os_version: The OS family version. + :param timeout: The amount of time to wait for the job to complete in minutes. Default is 25. + :param should_delete_job: Whether to delete the job after execution. Default is False. + :param should_delete_pool: Whether to delete the pool after execution of jobs. Default is False. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['batch_pool_id', 'batch_pool_vm_size', 'batch_job_id', 'batch_task_id', 'batch_task_command_line'] + + + + .. py:attribute:: ui_color + :annotation: = #f0f0e4 + + + + .. py:method:: execute(context) + + This is the main method to derive when creating an operator. + Context is the same dictionary used as when rendering jinja templates. + + Refer to get_template_context for more context. + + + .. py:method:: on_kill() + + Override this method to cleanup subprocesses when a task instance + gets killed. Any use of the threading, subprocess or multiprocessing + module within an operator needs to be cleaned up or it will leave + ghost processes behind. + + + .. py:method:: get_hook() + + Create and return an AzureBatchHook. + + + .. 
py:method:: clean_up(pool_id = None, job_id = None) + + Delete the given pool and job in the batch account + + :param pool_id: The id of the pool to delete + :param job_id: The id of the job to delete + + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/container_instances/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/container_instances/index.rst.txt new file mode 100644 index 00000000000..abe2d7f69a0 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/container_instances/index.rst.txt @@ -0,0 +1,151 @@ +:py:mod:`airflow.providers.microsoft.azure.operators.container_instances` +========================================================================= + +.. py:module:: airflow.providers.microsoft.azure.operators.container_instances + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.operators.container_instances.AzureContainerInstancesOperator + + + + +Attributes +~~~~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.operators.container_instances.Volume + airflow.providers.microsoft.azure.operators.container_instances.DEFAULT_ENVIRONMENT_VARIABLES + airflow.providers.microsoft.azure.operators.container_instances.DEFAULT_SECURED_VARIABLES + airflow.providers.microsoft.azure.operators.container_instances.DEFAULT_VOLUMES + airflow.providers.microsoft.azure.operators.container_instances.DEFAULT_MEMORY_IN_GB + airflow.providers.microsoft.azure.operators.container_instances.DEFAULT_CPU + + +.. py:data:: Volume + + + + +.. py:data:: DEFAULT_ENVIRONMENT_VARIABLES + :annotation: :dict[str, str] + + + +.. py:data:: DEFAULT_SECURED_VARIABLES + :annotation: :Sequence[str] = [] + + + +.. py:data:: DEFAULT_VOLUMES + :annotation: :Sequence[Volume] = [] + + + +.. py:data:: DEFAULT_MEMORY_IN_GB + :annotation: = 2.0 + + + +.. py:data:: DEFAULT_CPU + :annotation: = 1.0 + + + +.. py:class:: AzureContainerInstancesOperator(*, ci_conn_id, registry_conn_id, resource_group, name, image, region, environment_variables = None, secured_variables = None, volumes = None, memory_in_gb = None, cpu = None, gpu = None, command = None, remove_on_error = True, fail_if_exists = True, tags = None, os_type = 'Linux', restart_policy = 'Never', ip_address = None, ports = None, network_profile = None, **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Start a container on Azure Container Instances + + :param ci_conn_id: connection id of a service principal which will be used + to start the container instance + :param registry_conn_id: connection id of a user which can login to a + private docker registry. For Azure use :ref:`Azure connection id` + :param resource_group: name of the resource group wherein this container + instance should be started + :param name: name of this container instance. Please note this name has + to be unique in order to run containers in parallel. + :param image: the docker image to be used + :param region: the region wherein this container instance should be started + :param environment_variables: key,value pairs containing environment + variables which will be passed to the running container + :param secured_variables: names of environmental variables that should not + be exposed outside the container (typically passwords). 
+ :param volumes: list of ``Volume`` tuples to be mounted to the container. + Currently only Azure Fileshares are supported. + :param memory_in_gb: the amount of memory to allocate to this container + :param cpu: the number of cpus to allocate to this container + :param gpu: GPU Resource for the container. + :param command: the command to run inside the container + :param container_timeout: max time allowed for the execution of + the container instance. + :param tags: azure tags as dict of str:str + :param os_type: The operating system type required by the containers + in the container group. Possible values include: 'Windows', 'Linux' + :param restart_policy: Restart policy for all containers within the container group. + Possible values include: 'Always', 'OnFailure', 'Never' + :param ip_address: The IP address type of the container group. + :param network_profile: The network profile information for a container group. + + **Example**:: + + AzureContainerInstancesOperator( + ci_conn_id = "azure_service_principal", + registry_conn_id = "azure_registry_user", + resource_group = "my-resource-group", + name = "my-container-name-{{ ds }}", + image = "myprivateregistry.azurecr.io/my_container:latest", + region = "westeurope", + environment_variables = {"MODEL_PATH": "my_value", + "POSTGRES_LOGIN": "{{ macros.connection('postgres_default').login }}", + "POSTGRES_PASSWORD": "{{ macros.connection('postgres_default').password }}", + "JOB_GUID": "{{ ti.xcom_pull(task_ids='task1', key='guid') }}" }, + secured_variables = ['POSTGRES_PASSWORD'], + volumes = [("azure_container_instance_conn_id", + "my_storage_container", + "my_fileshare", + "/input-data", + True),], + memory_in_gb=14.0, + cpu=4.0, + gpu=GpuResource(count=1, sku='K80'), + command=["/bin/echo", "world"], + task_id="start_container" + ) + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['name', 'image', 'command', 'environment_variables'] + + + + .. py:attribute:: template_fields_renderers + + + + + .. py:method:: execute(context) + + This is the main method to derive when creating an operator. + Context is the same dictionary used as when rendering jinja templates. + + Refer to get_template_context for more context. + + + .. py:method:: on_kill() + + Override this method to cleanup subprocesses when a task instance + gets killed. Any use of the threading, subprocess or multiprocessing + module within an operator needs to be cleaned up or it will leave + ghost processes behind. + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/cosmos/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/cosmos/index.rst.txt new file mode 100644 index 00000000000..d612e37cd81 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/cosmos/index.rst.txt @@ -0,0 +1,51 @@ +:py:mod:`airflow.providers.microsoft.azure.operators.cosmos` +============================================================ + +.. py:module:: airflow.providers.microsoft.azure.operators.cosmos + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.operators.cosmos.AzureCosmosInsertDocumentOperator + + + + +.. 
py:class:: AzureCosmosInsertDocumentOperator(*, database_name, collection_name, document, azure_cosmos_conn_id = 'azure_cosmos_default', **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Inserts a new document into the specified Cosmos database and collection + It will create both the database and collection if they do not already exist + + :param database_name: The name of the database. (templated) + :param collection_name: The name of the collection. (templated) + :param document: The document to insert + :param azure_cosmos_conn_id: Reference to the + :ref:`Azure CosmosDB connection`. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['database_name', 'collection_name'] + + + + .. py:attribute:: ui_color + :annotation: = #e4f0e8 + + + + .. py:method:: execute(context) + + This is the main method to derive when creating an operator. + Context is the same dictionary used as when rendering jinja templates. + + Refer to get_template_context for more context. + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/data_factory/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/data_factory/index.rst.txt new file mode 100644 index 00000000000..574248beb6a --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/data_factory/index.rst.txt @@ -0,0 +1,118 @@ +:py:mod:`airflow.providers.microsoft.azure.operators.data_factory` +================================================================== + +.. py:module:: airflow.providers.microsoft.azure.operators.data_factory + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.operators.data_factory.AzureDataFactoryPipelineRunLink + airflow.providers.microsoft.azure.operators.data_factory.AzureDataFactoryRunPipelineOperator + + + + +.. py:class:: AzureDataFactoryPipelineRunLink(context=None) + + Bases: :py:obj:`airflow.utils.log.logging_mixin.LoggingMixin`, :py:obj:`airflow.models.BaseOperatorLink` + + Constructs a link to monitor a pipeline run in Azure Data Factory. + + .. py:attribute:: name + :annotation: = Monitor Pipeline Run + + + + .. py:method:: get_link(operator, *, ti_key) + + Link to external system. + + Note: The old signature of this function was ``(self, operator, dttm: datetime)``. That is still + supported at runtime but is deprecated. + + :param operator: The Airflow operator object this link is associated to. + :param ti_key: TaskInstance ID to return link for. + :return: link to external system + + + +.. py:class:: AzureDataFactoryRunPipelineOperator(*, pipeline_name, azure_data_factory_conn_id = AzureDataFactoryHook.default_conn_name, wait_for_termination = True, resource_group_name = None, factory_name = None, reference_pipeline_run_id = None, is_recovery = None, start_activity_name = None, start_from_failure = None, parameters = None, timeout = 60 * 60 * 24 * 7, check_interval = 60, **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Executes a data factory pipeline. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureDataFactoryRunPipelineOperator` + + :param azure_data_factory_conn_id: The connection identifier for connecting to Azure Data Factory. + :param pipeline_name: The name of the pipeline to execute. 
+ :param wait_for_termination: Flag to wait on a pipeline run's termination. By default, this feature is + enabled but could be disabled to perform an asynchronous wait for a long-running pipeline execution + using the ``AzureDataFactoryPipelineRunSensor``. + :param resource_group_name: The resource group name. If a value is not passed in to the operator, the + ``AzureDataFactoryHook`` will attempt to use the resource group name provided in the corresponding + connection. + :param factory_name: The data factory name. If a value is not passed in to the operator, the + ``AzureDataFactoryHook`` will attempt to use the factory name name provided in the corresponding + connection. + :param reference_pipeline_run_id: The pipeline run identifier. If this run ID is specified the parameters + of the specified run will be used to create a new run. + :param is_recovery: Recovery mode flag. If recovery mode is set to `True`, the specified referenced + pipeline run and the new run will be grouped under the same ``groupId``. + :param start_activity_name: In recovery mode, the rerun will start from this activity. If not specified, + all activities will run. + :param start_from_failure: In recovery mode, if set to true, the rerun will start from failed activities. + The property will be used only if ``start_activity_name`` is not specified. + :param parameters: Parameters of the pipeline run. These parameters are referenced in a pipeline via + ``@pipeline().parameters.parameterName`` and will be used only if the ``reference_pipeline_run_id`` is + not specified. + :param timeout: Time in seconds to wait for a pipeline to reach a terminal status for non-asynchronous + waits. Used only if ``wait_for_termination`` is True. + :param check_interval: Time in seconds to check on a pipeline run's status for non-asynchronous waits. + Used only if ``wait_for_termination`` is True. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['azure_data_factory_conn_id', 'resource_group_name', 'factory_name', 'pipeline_name',... + + + + .. py:attribute:: template_fields_renderers + + + + + .. py:attribute:: ui_color + :annotation: = #0678d4 + + + + .. py:attribute:: operator_extra_links + + + + + .. py:method:: execute(context) + + This is the main method to derive when creating an operator. + Context is the same dictionary used as when rendering jinja templates. + + Refer to get_template_context for more context. + + + .. py:method:: on_kill() + + Override this method to cleanup subprocesses when a task instance + gets killed. Any use of the threading, subprocess or multiprocessing + module within an operator needs to be cleaned up or it will leave + ghost processes behind. + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/index.rst.txt new file mode 100644 index 00000000000..404202f5fdc --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/index.rst.txt @@ -0,0 +1,23 @@ +:py:mod:`airflow.providers.microsoft.azure.operators` +===================================================== + +.. py:module:: airflow.providers.microsoft.azure.operators + + +Submodules +---------- +.. 
toctree:: + :titlesonly: + :maxdepth: 1 + + adls/index.rst + adx/index.rst + asb/index.rst + batch/index.rst + container_instances/index.rst + cosmos/index.rst + data_factory/index.rst + synapse/index.rst + wasb_delete_blob/index.rst + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/synapse/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/synapse/index.rst.txt new file mode 100644 index 00000000000..7e75060921a --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/synapse/index.rst.txt @@ -0,0 +1,70 @@ +:py:mod:`airflow.providers.microsoft.azure.operators.synapse` +============================================================= + +.. py:module:: airflow.providers.microsoft.azure.operators.synapse + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.operators.synapse.AzureSynapseRunSparkBatchOperator + + + + +.. py:class:: AzureSynapseRunSparkBatchOperator(*, azure_synapse_conn_id = AzureSynapseHook.default_conn_name, wait_for_termination = True, spark_pool = '', payload, timeout = 60 * 60 * 24 * 7, check_interval = 60, **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Executes a Spark job on Azure Synapse. + + .. see also:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureSynapseRunSparkBatchOperator` + + :param azure_synapse_conn_id: The connection identifier for connecting to Azure Synapse. + :param wait_for_termination: Flag to wait on a job run's termination. + :param spark_pool: The target synapse spark pool used to submit the job + :param payload: Livy compatible payload which represents the spark job that a user wants to submit + :param timeout: Time in seconds to wait for a job to reach a terminal status for non-asynchronous + waits. Used only if ``wait_for_termination`` is True. + :param check_interval: Time in seconds to check on a job run's status for non-asynchronous waits. + Used only if ``wait_for_termination`` is True. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['azure_synapse_conn_id', 'spark_pool'] + + + + .. py:attribute:: template_fields_renderers + + + + + .. py:attribute:: ui_color + :annotation: = #0678d4 + + + + .. py:method:: execute(context) + + This is the main method to derive when creating an operator. + Context is the same dictionary used as when rendering jinja templates. + + Refer to get_template_context for more context. + + + .. py:method:: on_kill() + + Override this method to cleanup subprocesses when a task instance + gets killed. Any use of the threading, subprocess or multiprocessing + module within an operator needs to be cleaned up or it will leave + ghost processes behind. 
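+   A minimal usage sketch (an editorial illustration, not generated from the source;
+   the connection id, Spark pool name and Livy payload below are placeholder assumptions):
+
+   .. code-block:: python
+
+       from datetime import datetime
+
+       from airflow import DAG
+       from airflow.providers.microsoft.azure.operators.synapse import (
+           AzureSynapseRunSparkBatchOperator,
+       )
+
+       # Hypothetical Livy-compatible batch payload; point "file" at your own job.
+       SPARK_JOB_PAYLOAD = {
+           "name": "example-wordcount",
+           "file": "abfss://spark@mystorageaccount.dfs.core.windows.net/wordcount.py",
+           "args": [],
+           "conf": {},
+           "numExecutors": 2,
+           "executorCores": 4,
+           "executorMemory": "8g",
+           "driverCores": 4,
+           "driverMemory": "8g",
+       }
+
+       with DAG(
+           dag_id="example_synapse_spark_sketch",
+           start_date=datetime(2022, 1, 1),
+           schedule=None,  # Airflow 2.4+ argument; older versions use schedule_interval
+           catchup=False,
+       ):
+           run_spark_job = AzureSynapseRunSparkBatchOperator(
+               task_id="run_spark_job",
+               azure_synapse_conn_id="azure_synapse_default",  # assumed connection id
+               spark_pool="examplesparkpool",  # placeholder Spark pool name
+               payload=SPARK_JOB_PAYLOAD,
+           )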
+ + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/wasb_delete_blob/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/wasb_delete_blob/index.rst.txt new file mode 100644 index 00000000000..8c629323c65 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/operators/wasb_delete_blob/index.rst.txt @@ -0,0 +1,48 @@ +:py:mod:`airflow.providers.microsoft.azure.operators.wasb_delete_blob` +====================================================================== + +.. py:module:: airflow.providers.microsoft.azure.operators.wasb_delete_blob + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.operators.wasb_delete_blob.WasbDeleteBlobOperator + + + + +.. py:class:: WasbDeleteBlobOperator(*, container_name, blob_name, wasb_conn_id = 'wasb_default', check_options = None, is_prefix = False, ignore_if_missing = False, **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Deletes blob(s) on Azure Blob Storage. + + :param container_name: Name of the container. (templated) + :param blob_name: Name of the blob. (templated) + :param wasb_conn_id: Reference to the :ref:`wasb connection `. + :param check_options: Optional keyword arguments that + `WasbHook.check_for_blob()` takes. + :param is_prefix: If blob_name is a prefix, delete all files matching prefix. + :param ignore_if_missing: if True, then return success even if the + blob does not exist. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['container_name', 'blob_name'] + + + + .. py:method:: execute(context) + + This is the main method to derive when creating an operator. + Context is the same dictionary used as when rendering jinja templates. + + Refer to get_template_context for more context. + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/secrets/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/secrets/index.rst.txt new file mode 100644 index 00000000000..459d30a56f9 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/secrets/index.rst.txt @@ -0,0 +1,15 @@ +:py:mod:`airflow.providers.microsoft.azure.secrets` +=================================================== + +.. py:module:: airflow.providers.microsoft.azure.secrets + + +Submodules +---------- +.. toctree:: + :titlesonly: + :maxdepth: 1 + + key_vault/index.rst + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/secrets/key_vault/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/secrets/key_vault/index.rst.txt new file mode 100644 index 00000000000..438bf0f6683 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/secrets/key_vault/index.rst.txt @@ -0,0 +1,108 @@ +:py:mod:`airflow.providers.microsoft.azure.secrets.key_vault` +============================================================= + +.. py:module:: airflow.providers.microsoft.azure.secrets.key_vault + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. 
autoapisummary:: + + airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend + + + + +.. py:class:: AzureKeyVaultBackend(connections_prefix = 'airflow-connections', variables_prefix = 'airflow-variables', config_prefix = 'airflow-config', vault_url = '', sep = '-', **kwargs) + + Bases: :py:obj:`airflow.secrets.BaseSecretsBackend`, :py:obj:`airflow.utils.log.logging_mixin.LoggingMixin` + + Retrieves Airflow Connections or Variables from Azure Key Vault secrets. + + The Azure Key Vault can be configured as a secrets backend in the ``airflow.cfg``: + + .. code-block:: ini + + [secrets] + backend = airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend + backend_kwargs = {"connections_prefix": "airflow-connections", "vault_url": ""} + + For example, if the secrets prefix is ``airflow-connections-smtp-default``, this would be accessible + if you provide ``{"connections_prefix": "airflow-connections"}`` and request conn_id ``smtp-default``. + And if variables prefix is ``airflow-variables-hello``, this would be accessible + if you provide ``{"variables_prefix": "airflow-variables"}`` and request variable key ``hello``. + + For client authentication, the ``DefaultAzureCredential`` from the Azure Python SDK is used as + credential provider, which supports service principal, managed identity and user credentials + + For example, to specify a service principal with secret you can set the environment variables + ``AZURE_TENANT_ID``, ``AZURE_CLIENT_ID`` and ``AZURE_CLIENT_SECRET``. + + .. seealso:: + For more details on client authentication refer to the ``DefaultAzureCredential`` Class reference: + https://docs.microsoft.com/en-us/python/api/azure-identity/azure.identity.defaultazurecredential?view=azure-python + + :param connections_prefix: Specifies the prefix of the secret to read to get Connections + If set to None (null), requests for connections will not be sent to Azure Key Vault + :param variables_prefix: Specifies the prefix of the secret to read to get Variables + If set to None (null), requests for variables will not be sent to Azure Key Vault + :param config_prefix: Specifies the prefix of the secret to read to get Variables. + If set to None (null), requests for configurations will not be sent to Azure Key Vault + :param vault_url: The URL of an Azure Key Vault to use + :param sep: separator used to concatenate secret_prefix and secret_id. Default: "-" + + .. py:method:: client() + + Create a Azure Key Vault client. + + + .. py:method:: get_conn_value(conn_id) + + Get a serialized representation of Airflow Connection from an Azure Key Vault secret + + :param conn_id: The Airflow connection id to retrieve + + + .. py:method:: get_conn_uri(conn_id) + + Return URI representation of Connection conn_id. + + As of Airflow version 2.3.0 this method is deprecated. + + :param conn_id: the connection id + :return: deserialized Connection + + + .. py:method:: get_variable(key) + + Get an Airflow Variable from an Azure Key Vault secret. + + :param key: Variable Key + :return: Variable Value + + + .. py:method:: get_config(key) + + Get Airflow Configuration + + :param key: Configuration Option Key + :return: Configuration Option Value + + + .. py:method:: build_path(path_prefix, secret_id, sep = '-') + :staticmethod: + + Given a path_prefix and secret_id, build a valid secret name for the Azure Key Vault Backend. 
+ Also replaces underscore in the path with dashes to support easy switching between + environment variables, so ``connection_default`` becomes ``connection-default``. + + :param path_prefix: The path prefix of the secret to retrieve + :param secret_id: Name of the secret + :param sep: Separator used to concatenate path_prefix and secret_id + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/sensors/cosmos/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/sensors/cosmos/index.rst.txt new file mode 100644 index 00000000000..8a15ed31d39 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/sensors/cosmos/index.rst.txt @@ -0,0 +1,53 @@ +:py:mod:`airflow.providers.microsoft.azure.sensors.cosmos` +========================================================== + +.. py:module:: airflow.providers.microsoft.azure.sensors.cosmos + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.sensors.cosmos.AzureCosmosDocumentSensor + + + + +.. py:class:: AzureCosmosDocumentSensor(*, database_name, collection_name, document_id, azure_cosmos_conn_id = 'azure_cosmos_default', **kwargs) + + Bases: :py:obj:`airflow.sensors.base.BaseSensorOperator` + + Checks for the existence of a document which + matches the given query in CosmosDB. Example: + + .. code-block:: + + azure_cosmos_sensor = AzureCosmosDocumentSensor( + database_name="somedatabase_name", + collection_name="somecollection_name", + document_id="unique-doc-id", + azure_cosmos_conn_id="azure_cosmos_default", + task_id="azure_cosmos_sensor", + ) + + :param database_name: Target CosmosDB database_name. + :param collection_name: Target CosmosDB collection_name. + :param document_id: The ID of the target document. + :param azure_cosmos_conn_id: Reference to the + :ref:`Azure CosmosDB connection`. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['database_name', 'collection_name', 'document_id'] + + + + .. py:method:: poke(context) + + Function defined by the sensors while deriving this class should override. + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/sensors/data_factory/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/sensors/data_factory/index.rst.txt new file mode 100644 index 00000000000..f7224172b69 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/sensors/data_factory/index.rst.txt @@ -0,0 +1,46 @@ +:py:mod:`airflow.providers.microsoft.azure.sensors.data_factory` +================================================================ + +.. py:module:: airflow.providers.microsoft.azure.sensors.data_factory + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.sensors.data_factory.AzureDataFactoryPipelineRunStatusSensor + + + + +.. py:class:: AzureDataFactoryPipelineRunStatusSensor(*, run_id, azure_data_factory_conn_id = AzureDataFactoryHook.default_conn_name, resource_group_name = None, factory_name = None, **kwargs) + + Bases: :py:obj:`airflow.sensors.base.BaseSensorOperator` + + Checks the status of a pipeline run. 
+ + :param azure_data_factory_conn_id: The connection identifier for connecting to Azure Data Factory. + :param run_id: The pipeline run identifier. + :param resource_group_name: The resource group name. + :param factory_name: The data factory name. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['azure_data_factory_conn_id', 'resource_group_name', 'factory_name', 'run_id'] + + + + .. py:attribute:: ui_color + :annotation: = #50e6ff + + + + .. py:method:: poke(context) + + Function defined by the sensors while deriving this class should override. + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/sensors/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/sensors/index.rst.txt new file mode 100644 index 00000000000..642886e74d1 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/sensors/index.rst.txt @@ -0,0 +1,17 @@ +:py:mod:`airflow.providers.microsoft.azure.sensors` +=================================================== + +.. py:module:: airflow.providers.microsoft.azure.sensors + + +Submodules +---------- +.. toctree:: + :titlesonly: + :maxdepth: 1 + + cosmos/index.rst + data_factory/index.rst + wasb/index.rst + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/sensors/wasb/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/sensors/wasb/index.rst.txt new file mode 100644 index 00000000000..f7e181bd0c2 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/sensors/wasb/index.rst.txt @@ -0,0 +1,66 @@ +:py:mod:`airflow.providers.microsoft.azure.sensors.wasb` +======================================================== + +.. py:module:: airflow.providers.microsoft.azure.sensors.wasb + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.sensors.wasb.WasbBlobSensor + airflow.providers.microsoft.azure.sensors.wasb.WasbPrefixSensor + + + + +.. py:class:: WasbBlobSensor(*, container_name, blob_name, wasb_conn_id = 'wasb_default', check_options = None, **kwargs) + + Bases: :py:obj:`airflow.sensors.base.BaseSensorOperator` + + Waits for a blob to arrive on Azure Blob Storage. + + :param container_name: Name of the container. + :param blob_name: Name of the blob. + :param wasb_conn_id: Reference to the :ref:`wasb connection `. + :param check_options: Optional keyword arguments that + `WasbHook.check_for_blob()` takes. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['container_name', 'blob_name'] + + + + .. py:method:: poke(context) + + Function defined by the sensors while deriving this class should override. + + + +.. py:class:: WasbPrefixSensor(*, container_name, prefix, wasb_conn_id = 'wasb_default', check_options = None, **kwargs) + + Bases: :py:obj:`airflow.sensors.base.BaseSensorOperator` + + Waits for blobs matching a prefix to arrive on Azure Blob Storage. + + :param container_name: Name of the container. + :param prefix: Prefix of the blob. + :param wasb_conn_id: Reference to the wasb connection. + :param check_options: Optional keyword arguments that + `WasbHook.check_for_prefix()` takes. + + .. 
py:attribute:: template_fields + :annotation: :Sequence[str] = ['container_name', 'prefix'] + + + + .. py:method:: poke(context) + + Function defined by the sensors while deriving this class should override. + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs/index.rst.txt new file mode 100644 index 00000000000..0c2a077f8ba --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs/index.rst.txt @@ -0,0 +1,64 @@ +:py:mod:`airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs` +======================================================================= + +.. py:module:: airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs.AzureBlobStorageToGCSOperator + + + + +.. py:class:: AzureBlobStorageToGCSOperator(*, wasb_conn_id='wasb_default', gcp_conn_id = 'google_cloud_default', blob_name, file_path, container_name, bucket_name, object_name, filename, gzip, delegate_to, impersonation_chain = None, **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Operator transfers data from Azure Blob Storage to specified bucket in Google Cloud Storage + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:AzureBlobStorageToGCSOperator` + + :param wasb_conn_id: Reference to the wasb connection. + :param gcp_conn_id: The connection ID to use when fetching connection info. + :param blob_name: Name of the blob + :param file_path: Path to the file to download + :param container_name: Name of the container + :param bucket_name: The bucket to upload to + :param object_name: The object name to set when uploading the file + :param filename: The local file path to the file to be uploaded + :param gzip: Option to compress local file or file data for upload + :param delegate_to: The account to impersonate using domain-wide delegation of authority, + if any. For this to work, the service account making the request must have + domain-wide delegation enabled. + :param impersonation_chain: Optional service account to impersonate using short-term + credentials, or chained list of accounts required to get the access_token + of the last account in the list, which will be impersonated in the request. + If set as a string, the account must grant the originating account + the Service Account Token Creator IAM role. + If set as a sequence, the identities from the list must grant + Service Account Token Creator IAM role to the directly preceding identity, with first + account from the list granting this role to the originating account. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['blob_name', 'file_path', 'container_name', 'bucket_name', 'object_name', 'filename'] + + + + .. py:method:: execute(context) + + This is the main method to derive when creating an operator. + Context is the same dictionary used as when rendering jinja templates. + + Refer to get_template_context for more context. 
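+   A minimal usage sketch (an editorial illustration, not generated from the source;
+   the blob, container, bucket and path values below are placeholders):
+
+   .. code-block:: python
+
+       from datetime import datetime
+
+       from airflow import DAG
+       from airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs import (
+           AzureBlobStorageToGCSOperator,
+       )
+
+       with DAG(
+           dag_id="example_wasb_to_gcs_sketch",
+           start_date=datetime(2022, 1, 1),
+           schedule=None,  # Airflow 2.4+ argument; older versions use schedule_interval
+           catchup=False,
+       ):
+           transfer_report = AzureBlobStorageToGCSOperator(
+               task_id="transfer_report",
+               wasb_conn_id="wasb_default",
+               gcp_conn_id="google_cloud_default",
+               blob_name="reports/report.csv",  # blob to download from Azure
+               file_path="/tmp/report.csv",  # local download target
+               container_name="example-container",
+               bucket_name="example-gcs-bucket",
+               object_name="reports/report.csv",  # destination object in GCS
+               filename="/tmp/report.csv",  # local file uploaded to GCS
+               gzip=False,
+               delegate_to=None,
+           )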
+ + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/index.rst.txt new file mode 100644 index 00000000000..d4cfc875068 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/index.rst.txt @@ -0,0 +1,19 @@ +:py:mod:`airflow.providers.microsoft.azure.transfers` +===================================================== + +.. py:module:: airflow.providers.microsoft.azure.transfers + + +Submodules +---------- +.. toctree:: + :titlesonly: + :maxdepth: 1 + + azure_blob_to_gcs/index.rst + local_to_adls/index.rst + local_to_wasb/index.rst + oracle_to_azure_data_lake/index.rst + sftp_to_wasb/index.rst + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/local_to_adls/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/local_to_adls/index.rst.txt new file mode 100644 index 00000000000..f3aaf9e5552 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/local_to_adls/index.rst.txt @@ -0,0 +1,76 @@ +:py:mod:`airflow.providers.microsoft.azure.transfers.local_to_adls` +=================================================================== + +.. py:module:: airflow.providers.microsoft.azure.transfers.local_to_adls + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.transfers.local_to_adls.LocalFilesystemToADLSOperator + airflow.providers.microsoft.azure.transfers.local_to_adls.LocalToAzureDataLakeStorageOperator + + + + +.. py:class:: LocalFilesystemToADLSOperator(*, local_path, remote_path, overwrite = True, nthreads = 64, buffersize = 4194304, blocksize = 4194304, extra_upload_options = None, azure_data_lake_conn_id = 'azure_data_lake_default', **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Upload file(s) to Azure Data Lake + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:LocalFilesystemToADLSOperator` + + :param local_path: local path. Can be single file, directory (in which case, + upload recursively) or glob pattern. Recursive glob patterns using `**` + are not supported + :param remote_path: Remote path to upload to; if multiple files, this is the + directory root to write within + :param nthreads: Number of threads to use. If None, uses the number of cores. + :param overwrite: Whether to forcibly overwrite existing files/directories. + If False and remote path is a directory, will quit regardless if any files + would be overwritten or not. If True, only matching filenames are actually + overwritten + :param buffersize: int [2**22] + Number of bytes for internal buffer. This block cannot be bigger than + a chunk and cannot be smaller than a block + :param blocksize: int [2**22] + Number of bytes for a block. Within each chunk, we write a smaller + block for each API call. This block cannot be bigger than a chunk + :param extra_upload_options: Extra upload options to add to the hook upload method + :param azure_data_lake_conn_id: Reference to the Azure Data Lake connection + + .. 
py:attribute:: template_fields + :annotation: :Sequence[str] = ['local_path', 'remote_path'] + + + + .. py:attribute:: ui_color + :annotation: = #e4f0e8 + + + + .. py:method:: execute(context) + + This is the main method to derive when creating an operator. + Context is the same dictionary used as when rendering jinja templates. + + Refer to get_template_context for more context. + + + +.. py:class:: LocalToAzureDataLakeStorageOperator(*args, **kwargs) + + Bases: :py:obj:`LocalFilesystemToADLSOperator` + + This class is deprecated. + Please use `airflow.providers.microsoft.azure.transfers.local_to_adls.LocalFilesystemToADLSOperator`. + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/local_to_wasb/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/local_to_wasb/index.rst.txt new file mode 100644 index 00000000000..62af183e80a --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/local_to_wasb/index.rst.txt @@ -0,0 +1,45 @@ +:py:mod:`airflow.providers.microsoft.azure.transfers.local_to_wasb` +=================================================================== + +.. py:module:: airflow.providers.microsoft.azure.transfers.local_to_wasb + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.transfers.local_to_wasb.LocalFilesystemToWasbOperator + + + + +.. py:class:: LocalFilesystemToWasbOperator(*, file_path, container_name, blob_name, wasb_conn_id = 'wasb_default', create_container = False, load_options = None, **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Uploads a file to Azure Blob Storage. + + :param file_path: Path to the file to load. (templated) + :param container_name: Name of the container. (templated) + :param blob_name: Name of the blob. (templated) + :param wasb_conn_id: Reference to the wasb connection. + :param create_container: Attempt to create the target container prior to uploading the blob. This is + useful if the target container may not exist yet. Defaults to False. + :param load_options: Optional keyword arguments that + `WasbHook.load_file()` takes. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['file_path', 'container_name', 'blob_name'] + + + + .. py:method:: execute(context) + + Upload a file to Azure Blob Storage. + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake/index.rst.txt new file mode 100644 index 00000000000..e50206353fc --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake/index.rst.txt @@ -0,0 +1,62 @@ +:py:mod:`airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake` +=============================================================================== + +.. py:module:: airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. 
autoapisummary:: + + airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake.OracleToAzureDataLakeOperator + + + + +.. py:class:: OracleToAzureDataLakeOperator(*, filename, azure_data_lake_conn_id, azure_data_lake_path, oracle_conn_id, sql, sql_params = None, delimiter = ',', encoding = 'utf-8', quotechar = '"', quoting = csv.QUOTE_MINIMAL, **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Moves data from Oracle to Azure Data Lake. The operator runs the query against + Oracle and stores the file locally before loading it into Azure Data Lake. + + + :param filename: file name to be used by the csv file. + :param azure_data_lake_conn_id: destination azure data lake connection. + :param azure_data_lake_path: destination path in azure data lake to put the file. + :param oracle_conn_id: :ref:`Source Oracle connection `. + :param sql: SQL query to execute against the Oracle database. (templated) + :param sql_params: Parameters to use in sql query. (templated) + :param delimiter: field delimiter in the file. + :param encoding: encoding type for the file. + :param quotechar: Character to use in quoting. + :param quoting: Quoting strategy. See unicodecsv quoting for more information. + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['filename', 'sql', 'sql_params'] + + + + .. py:attribute:: template_fields_renderers + + + + + .. py:attribute:: ui_color + :annotation: = #e08c8c + + + + .. py:method:: execute(context) + + This is the main method to derive when creating an operator. + Context is the same dictionary used as when rendering jinja templates. + + Refer to get_template_context for more context. + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/sftp_to_wasb/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/sftp_to_wasb/index.rst.txt new file mode 100644 index 00000000000..891c4789c03 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/transfers/sftp_to_wasb/index.rst.txt @@ -0,0 +1,132 @@ +:py:mod:`airflow.providers.microsoft.azure.transfers.sftp_to_wasb` +================================================================== + +.. py:module:: airflow.providers.microsoft.azure.transfers.sftp_to_wasb + +.. autoapi-nested-parse:: + + This module contains SFTP to Azure Blob Storage operator. + + + +Module Contents +--------------- + +Classes +~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SFTPToWasbOperator + + + + +Attributes +~~~~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.transfers.sftp_to_wasb.WILDCARD + airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SftpFile + + +.. py:data:: WILDCARD + :annotation: = * + + + +.. py:data:: SftpFile + + + + +.. py:class:: SFTPToWasbOperator(*, sftp_source_path, container_name, blob_prefix = '', sftp_conn_id = 'sftp_default', wasb_conn_id = 'wasb_default', load_options = None, move_object = False, wasb_overwrite_object = False, create_container = False, **kwargs) + + Bases: :py:obj:`airflow.models.BaseOperator` + + Transfer files to Azure Blob Storage from SFTP server. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:SFTPToWasbOperator` + + :param sftp_source_path: The sftp remote path. 
This is the specified file path + for downloading the single file or multiple files from the SFTP server. + You can use only one wildcard within your path. The wildcard can appear + inside the path or at the end of the path. + :param container_name: Name of the container. + :param blob_prefix: Prefix to name a blob. + :param sftp_conn_id: The sftp connection id. The name or identifier for + establishing a connection to the SFTP server. + :param wasb_conn_id: Reference to the wasb connection. + :param load_options: Optional keyword arguments that + ``WasbHook.load_file()`` takes. + :param move_object: When move object is True, the object is moved instead + of copied to the new location. This is the equivalent of a mv command + as opposed to a cp command. + :param wasb_overwrite_object: Whether the blob to be uploaded + should overwrite the current data. + When wasb_overwrite_object is True, it will overwrite the existing data. + If set to False, the operation might fail with + ResourceExistsError in case a blob object already exists. + :param create_container: Attempt to create the target container prior to uploading the blob. This is + useful if the target container may not exist yet. Defaults to False. + + .. py:property:: source_path_contains_wildcard + :type: bool + + Checks if the SFTP source path contains a wildcard. + + + .. py:attribute:: template_fields + :annotation: :Sequence[str] = ['sftp_source_path', 'container_name', 'blob_prefix'] + + + + .. py:method:: dry_run() + + Performs dry run for the operator - just render template fields. + + + .. py:method:: execute(context) + + Upload a file from SFTP to Azure Blob Storage. + + + .. py:method:: get_sftp_files_map() + + Get SFTP files from the source path, it may use a WILDCARD to this end. + + + .. py:method:: get_tree_behavior() + + Extracts from source path the tree behavior to interact with the remote folder + + + .. py:method:: check_wildcards_limit() + + Check if there are multiple wildcards used in the SFTP source path. + + + .. py:method:: sftp_hook() + + Property of sftp hook to be re-used. + + + .. py:method:: get_full_path_blob(file) + + Get a blob name based on the previous name and a blob_prefix variable + + + .. py:method:: copy_files_to_wasb(sftp_files) + + Upload a list of files from sftp_files to Azure Blob Storage with a new Blob Name. + + + .. py:method:: delete_files(uploaded_files) + + Delete files at SFTP which have been moved to Azure Blob Storage. + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/utils/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/utils/index.rst.txt new file mode 100644 index 00000000000..b391309d763 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/airflow/providers/microsoft/azure/utils/index.rst.txt @@ -0,0 +1,24 @@ +:py:mod:`airflow.providers.microsoft.azure.utils` +================================================= + +.. py:module:: airflow.providers.microsoft.azure.utils + + +Module Contents +--------------- + + +Functions +~~~~~~~~~ + +.. autoapisummary:: + + airflow.providers.microsoft.azure.utils.get_field + + + +.. py:function:: get_field(*, conn_id, conn_type, extras, field_name) + + Get field from extra, first checking short name, then for backcompat we check for prefixed name. 
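+   A short illustrative sketch (editorial, not generated from the source; the connection
+   id, connection type and field values below are assumptions used only as an example):
+
+   .. code-block:: python
+
+       from airflow.providers.microsoft.azure.utils import get_field
+
+       # "extras" mimics Connection.extra_dejson; the short key and the old
+       # "extra__<conn_type>__<field>" key should resolve to the same value.
+       new_style = {"resource_group_name": "example-rg"}
+       old_style = {"extra__azure_data_factory__resource_group_name": "example-rg"}
+
+       for extras in (new_style, old_style):
+           value = get_field(
+               conn_id="azure_data_factory_default",
+               conn_type="azure_data_factory",
+               extras=extras,
+               field_name="resource_group_name",
+           )
+           print(value)  # expected "example-rg" in both cases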
+ + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_adf_run_pipeline/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_adf_run_pipeline/index.rst.txt new file mode 100644 index 00000000000..5ba99564256 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_adf_run_pipeline/index.rst.txt @@ -0,0 +1,29 @@ +:py:mod:`tests.system.providers.microsoft.azure.example_adf_run_pipeline` +========================================================================= + +.. py:module:: tests.system.providers.microsoft.azure.example_adf_run_pipeline + + +Module Contents +--------------- + +.. py:data:: ENV_ID + + + + +.. py:data:: DAG_ID + :annotation: = example_adf_run_pipeline + + + +.. py:data:: begin + + + + +.. py:data:: test_run + + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_adls_delete/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_adls_delete/index.rst.txt new file mode 100644 index 00000000000..99e8c6d821e --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_adls_delete/index.rst.txt @@ -0,0 +1,39 @@ +:py:mod:`tests.system.providers.microsoft.azure.example_adls_delete` +==================================================================== + +.. py:module:: tests.system.providers.microsoft.azure.example_adls_delete + + +Module Contents +--------------- + +.. py:data:: LOCAL_FILE_PATH + + + + +.. py:data:: REMOTE_FILE_PATH + + + + +.. py:data:: ENV_ID + + + + +.. py:data:: DAG_ID + :annotation: = example_adls_delete + + + +.. py:data:: upload_file + + + + +.. py:data:: test_run + + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_azure_blob_to_gcs/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_azure_blob_to_gcs/index.rst.txt new file mode 100644 index 00000000000..deda382dac6 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_azure_blob_to_gcs/index.rst.txt @@ -0,0 +1,54 @@ +:py:mod:`tests.system.providers.microsoft.azure.example_azure_blob_to_gcs` +========================================================================== + +.. py:module:: tests.system.providers.microsoft.azure.example_azure_blob_to_gcs + + +Module Contents +--------------- + +.. py:data:: BLOB_NAME + + + + +.. py:data:: AZURE_CONTAINER_NAME + + + + +.. py:data:: GCP_BUCKET_FILE_PATH + + + + +.. py:data:: GCP_BUCKET_NAME + + + + +.. py:data:: GCP_OBJECT_NAME + + + + +.. py:data:: ENV_ID + + + + +.. py:data:: DAG_ID + :annotation: = example_azure_blob_to_gcs + + + +.. py:data:: wait_for_blob + + + + +.. 
py:data:: test_run + + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_azure_container_instances/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_azure_container_instances/index.rst.txt new file mode 100644 index 00000000000..9c81d347c63 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_azure_container_instances/index.rst.txt @@ -0,0 +1,34 @@ +:py:mod:`tests.system.providers.microsoft.azure.example_azure_container_instances` +================================================================================== + +.. py:module:: tests.system.providers.microsoft.azure.example_azure_container_instances + +.. autoapi-nested-parse:: + + This is an example dag for using the AzureContainerInstancesOperator. + + + +Module Contents +--------------- + +.. py:data:: ENV_ID + + + + +.. py:data:: DAG_ID + :annotation: = aci_example + + + +.. py:data:: t1 + + + + +.. py:data:: test_run + + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_azure_cosmosdb/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_azure_cosmosdb/index.rst.txt new file mode 100644 index 00000000000..b030cd8bc7c --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_azure_cosmosdb/index.rst.txt @@ -0,0 +1,40 @@ +:py:mod:`tests.system.providers.microsoft.azure.example_azure_cosmosdb` +======================================================================= + +.. py:module:: tests.system.providers.microsoft.azure.example_azure_cosmosdb + +.. autoapi-nested-parse:: + + This is only an example DAG to highlight usage of AzureCosmosDocumentSensor to detect + if a document now exists. + + You can trigger this manually with `airflow dags trigger example_cosmosdb_sensor`. + + *Note: Make sure that connection `azure_cosmos_default` is properly set before running + this example.* + + + +Module Contents +--------------- + +.. py:data:: ENV_ID + + + + +.. py:data:: DAG_ID + :annotation: = example_azure_cosmosdb_sensor + + + +.. py:data:: t1 + + + + +.. py:data:: test_run + + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_azure_service_bus/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_azure_service_bus/index.rst.txt new file mode 100644 index 00000000000..f0dd0117979 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_azure_service_bus/index.rst.txt @@ -0,0 +1,54 @@ +:py:mod:`tests.system.providers.microsoft.azure.example_azure_service_bus` +========================================================================== + +.. py:module:: tests.system.providers.microsoft.azure.example_azure_service_bus + + +Module Contents +--------------- + +.. py:data:: EXECUTION_TIMEOUT + + + + +.. py:data:: CLIENT_ID + + + + +.. py:data:: QUEUE_NAME + :annotation: = sb_mgmt_queue_test + + + +.. py:data:: MESSAGE + :annotation: = Test Message + + + +.. 
py:data:: MESSAGE_LIST + + + + +.. py:data:: TOPIC_NAME + :annotation: = sb_mgmt_topic_test + + + +.. py:data:: SUBSCRIPTION_NAME + :annotation: = sb_mgmt_subscription + + + +.. py:data:: create_service_bus_queue + + + + +.. py:data:: test_run + + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_azure_synapse/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_azure_synapse/index.rst.txt new file mode 100644 index 00000000000..466d47b6f9e --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_azure_synapse/index.rst.txt @@ -0,0 +1,39 @@ +:py:mod:`tests.system.providers.microsoft.azure.example_azure_synapse` +====================================================================== + +.. py:module:: tests.system.providers.microsoft.azure.example_azure_synapse + + +Module Contents +--------------- + +.. py:data:: AIRFLOW_HOME + + + + +.. py:data:: EXECUTION_TIMEOUT + + + + +.. py:data:: default_args + + + + +.. py:data:: SPARK_JOB_PAYLOAD + + + + +.. py:data:: run_spark_job + + + + +.. py:data:: test_run + + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_fileshare/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_fileshare/index.rst.txt new file mode 100644 index 00000000000..350feebdd07 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_fileshare/index.rst.txt @@ -0,0 +1,67 @@ +:py:mod:`tests.system.providers.microsoft.azure.example_fileshare` +================================================================== + +.. py:module:: tests.system.providers.microsoft.azure.example_fileshare + + +Module Contents +--------------- + + +Functions +~~~~~~~~~ + +.. autoapisummary:: + + tests.system.providers.microsoft.azure.example_fileshare.create_fileshare + tests.system.providers.microsoft.azure.example_fileshare.delete_fileshare + + + +Attributes +~~~~~~~~~~ + +.. autoapisummary:: + + tests.system.providers.microsoft.azure.example_fileshare.NAME + tests.system.providers.microsoft.azure.example_fileshare.DIRECTORY + tests.system.providers.microsoft.azure.example_fileshare.ENV_ID + tests.system.providers.microsoft.azure.example_fileshare.DAG_ID + tests.system.providers.microsoft.azure.example_fileshare.test_run + + +.. py:data:: NAME + :annotation: = myfileshare + + + +.. py:data:: DIRECTORY + :annotation: = mydirectory + + + +.. py:data:: ENV_ID + + + + +.. py:data:: DAG_ID + :annotation: = example_fileshare + + + +.. py:function:: create_fileshare() + + Create a fileshare with directory + + +.. py:function:: delete_fileshare() + + Delete a fileshare + + +.. 
py:data:: test_run + + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_local_to_adls/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_local_to_adls/index.rst.txt new file mode 100644 index 00000000000..2e61b1b5b1f --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_local_to_adls/index.rst.txt @@ -0,0 +1,39 @@ +:py:mod:`tests.system.providers.microsoft.azure.example_local_to_adls` +====================================================================== + +.. py:module:: tests.system.providers.microsoft.azure.example_local_to_adls + + +Module Contents +--------------- + +.. py:data:: LOCAL_FILE_PATH + + + + +.. py:data:: REMOTE_FILE_PATH + + + + +.. py:data:: ENV_ID + + + + +.. py:data:: DAG_ID + :annotation: = example_local_to_adls + + + +.. py:data:: upload_file + + + + +.. py:data:: test_run + + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_local_to_wasb/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_local_to_wasb/index.rst.txt new file mode 100644 index 00000000000..07f15ff1a95 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_local_to_wasb/index.rst.txt @@ -0,0 +1,34 @@ +:py:mod:`tests.system.providers.microsoft.azure.example_local_to_wasb` +====================================================================== + +.. py:module:: tests.system.providers.microsoft.azure.example_local_to_wasb + + +Module Contents +--------------- + +.. py:data:: PATH_TO_UPLOAD_FILE + + + + +.. py:data:: ENV_ID + + + + +.. py:data:: DAG_ID + :annotation: = example_local_to_wasb + + + +.. py:data:: upload + + + + +.. py:data:: test_run + + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_sftp_to_wasb/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_sftp_to_wasb/index.rst.txt new file mode 100644 index 00000000000..222b8e7c001 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/example_sftp_to_wasb/index.rst.txt @@ -0,0 +1,97 @@ +:py:mod:`tests.system.providers.microsoft.azure.example_sftp_to_wasb` +===================================================================== + +.. py:module:: tests.system.providers.microsoft.azure.example_sftp_to_wasb + + +Module Contents +--------------- + + +Functions +~~~~~~~~~ + +.. autoapisummary:: + + tests.system.providers.microsoft.azure.example_sftp_to_wasb.delete_sftp_file + + + +Attributes +~~~~~~~~~~ + +.. 
autoapisummary:: + + tests.system.providers.microsoft.azure.example_sftp_to_wasb.AZURE_CONTAINER_NAME + tests.system.providers.microsoft.azure.example_sftp_to_wasb.BLOB_PREFIX + tests.system.providers.microsoft.azure.example_sftp_to_wasb.SFTP_SRC_PATH + tests.system.providers.microsoft.azure.example_sftp_to_wasb.LOCAL_FILE_PATH + tests.system.providers.microsoft.azure.example_sftp_to_wasb.SAMPLE_FILENAME + tests.system.providers.microsoft.azure.example_sftp_to_wasb.FILE_COMPLETE_PATH + tests.system.providers.microsoft.azure.example_sftp_to_wasb.SFTP_FILE_COMPLETE_PATH + tests.system.providers.microsoft.azure.example_sftp_to_wasb.ENV_ID + tests.system.providers.microsoft.azure.example_sftp_to_wasb.DAG_ID + tests.system.providers.microsoft.azure.example_sftp_to_wasb.transfer_files_to_sftp_step + tests.system.providers.microsoft.azure.example_sftp_to_wasb.test_run + + +.. py:data:: AZURE_CONTAINER_NAME + + + + +.. py:data:: BLOB_PREFIX + + + + +.. py:data:: SFTP_SRC_PATH + + + + +.. py:data:: LOCAL_FILE_PATH + + + + +.. py:data:: SAMPLE_FILENAME + + + + +.. py:data:: FILE_COMPLETE_PATH + + + + +.. py:data:: SFTP_FILE_COMPLETE_PATH + + + + +.. py:data:: ENV_ID + + + + +.. py:data:: DAG_ID + :annotation: = example_sftp_to_wasb + + + +.. py:function:: delete_sftp_file() + + Delete a file at SFTP SERVER + + +.. py:data:: transfer_files_to_sftp_step + + + + +.. py:data:: test_run + + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/index.rst.txt new file mode 100644 index 00000000000..1efa2a0eb8f --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/_api/tests/system/providers/microsoft/azure/index.rst.txt @@ -0,0 +1,25 @@ +:py:mod:`tests.system.providers.microsoft.azure` +================================================ + +.. py:module:: tests.system.providers.microsoft.azure + + +Submodules +---------- +.. toctree:: + :titlesonly: + :maxdepth: 1 + + example_adf_run_pipeline/index.rst + example_adls_delete/index.rst + example_azure_blob_to_gcs/index.rst + example_azure_container_instances/index.rst + example_azure_cosmosdb/index.rst + example_azure_service_bus/index.rst + example_azure_synapse/index.rst + example_fileshare/index.rst + example_local_to_adls/index.rst + example_local_to_wasb/index.rst + example_sftp_to_wasb/index.rst + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/commits.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/commits.rst.txt new file mode 100644 index 00000000000..087b12a89e7 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/commits.rst.txt @@ -0,0 +1,518 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. 
Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Package apache-airflow-providers-microsoft-azure +------------------------------------------------------ + +`Microsoft Azure `__ + + +This is detailed commit list of changes for versions provider package: ``microsoft.azure``. +For high-level changelog, see :doc:`package information including changelog `. + + + +5.0.1 +..... + +Latest change: 2022-12-09 + +================================================================================================= =========== ============================================================== +Commit Committed Subject +================================================================================================= =========== ============================================================== +`7e776db254 `_ 2022-12-09 ``Make arguments 'offset' and 'length' not required (#28234)`` +================================================================================================= =========== ============================================================== + +5.0.0 +..... + +Latest change: 2022-11-15 + +================================================================================================= =========== ================================================================================= +Commit Committed Subject +================================================================================================= =========== ================================================================================= +`12c3c39d1a `_ 2022-11-15 ``pRepare docs for November 2022 wave of Providers (#27613)`` +`547e6e80f3 `_ 2022-11-10 ``Fix Azure Batch errors revealed by added typing to azure batch lib (#27601)`` +`a50195d617 `_ 2022-11-07 ``Add azure, google, authentication library limits to eaager upgrade (#27535)`` +`5cd78cf425 `_ 2022-11-06 ``Upgrade dependencies in order to avoid backtracking (#27531)`` +`a16f24b5d7 `_ 2022-11-06 ``Remove deprecated classes in Azure provider (#27417)`` +`59da943428 `_ 2022-11-04 ``Suppress any Exception in wasb task handler (#27495)`` +`680965b2ea `_ 2022-11-03 ``Look for 'extra__' instead of 'extra_' in 'get_field' (#27489)`` +`5df1d6ec20 `_ 2022-10-28 ``Allow and prefer non-prefixed extra fields for remaining azure (#27220)`` +`c49740eb25 `_ 2022-10-28 ``Allow and prefer non-prefixed extra fields for AzureFileShareHook (#27041)`` +`9ab1a6a3e7 `_ 2022-10-27 ``Update old style typing (#26872)`` +`78b8ea2f22 `_ 2022-10-24 ``Move min airflow version to 2.3.0 for all providers (#27196)`` +`3676d3a402 `_ 2022-10-24 ``Allow and prefer non-prefixed extra fields for AzureDataExplorerHook (#27219)`` +`6b9e76b7b3 `_ 2022-10-23 ``Allow and prefer non-prefixed extra fields for AzureDataFactoryHook (#27047)`` +`2a34dc9e84 `_ 2022-10-23 ``Enable string normalization in python formatting - providers (#27205)`` +`d51de50e5c `_ 2022-10-22 ``Update WasbHook to reflect preference for unprefixed extra (#27024)`` +`59cba36db0 `_ 2022-10-13 ``Update azure-storage-blob version (#25426)`` +`32434a128a `_ 2022-09-30 ``Fix separator getting added to variables_prefix when empty (#26749)`` +================================================================================================= =========== 
================================================================================= + +4.3.0 +..... + +Latest change: 2022-09-28 + +================================================================================================= =========== ==================================================================================== +Commit Committed Subject +================================================================================================= =========== ==================================================================================== +`f8db64c35c `_ 2022-09-28 ``Update docs for September Provider's release (#26731)`` +`24d88e8fee `_ 2022-09-19 ``Add DataFlow operations to Azure DataFactory hook (#26345)`` +`1f7b296227 `_ 2022-09-18 ``Auto tail file logs in Web UI (#26169)`` +`06acf40a43 `_ 2022-09-13 ``Apply PEP-563 (Postponed Evaluation of Annotations) to non-core airflow (#26289)`` +`5060785988 `_ 2022-09-09 ``Add network_profile param in AzureContainerInstancesOperator (#26117)`` +`4bd0734a35 `_ 2022-09-01 ``Add Azure synapse operator (#26038)`` +`afb282aee4 `_ 2022-08-27 ``Fix AzureBatchOperator false negative task status (#25844)`` +`5c7c518aa0 `_ 2022-08-16 ``Implement Azure Service Bus Topic Create, Delete Operators (#25436)`` +================================================================================================= =========== ==================================================================================== + +4.2.0 +..... + +Latest change: 2022-08-10 + +================================================================================================= =========== =================================================================================== +Commit Committed Subject +================================================================================================= =========== =================================================================================== +`e5ac6c7cfb `_ 2022-08-10 ``Prepare docs for new providers release (August 2022) (#25618)`` +`d5f40d739f `_ 2022-08-09 ``Set default wasb Azure http logging level to warning; fixes #16224 (#18896)`` +`8bb0c4fd32 `_ 2022-07-28 ``Add 'test_connection' method to AzureContainerInstanceHook (#25362)`` +`eab0167f1b `_ 2022-07-22 ``Add test_connection to Azure Batch hook (#25235)`` +`e32e9c5880 `_ 2022-07-18 ``Bump typing-extensions and mypy for ParamSpec (#25088)`` +`292440d54f `_ 2022-07-14 ``Implement Azure Service Bus (Update and Receive) Subscription Operator (#25029)`` +================================================================================================= =========== =================================================================================== + +4.1.0 +..... 
+ +Latest change: 2022-07-13 + +================================================================================================= =========== ============================================================================= +Commit Committed Subject +================================================================================================= =========== ============================================================================= +`d2459a241b `_ 2022-07-13 ``Add documentation for July 2022 Provider's release (#25030)`` +`bfd506cbfc `_ 2022-07-13 ``Add 'test_connection' method to AzureCosmosDBHook (#25018)`` +`aa8bf2cf85 `_ 2022-07-12 ``Implement Azure service bus subscription Operators (#24625)`` +`b27fc0367c `_ 2022-07-06 ``Add test_connection method to AzureFileShareHook (#24843)`` +`f18c609d12 `_ 2022-07-01 ``Add test_connection method to Azure WasbHook (#24771)`` +`0de31bd73a `_ 2022-06-29 ``Move provider dependencies to inside provider folders (#24672)`` +`510a6bab45 `_ 2022-06-28 ``Remove 'hook-class-names' from provider.yaml (#24702)`` +`09f38ad3f6 `_ 2022-06-23 ``Implement Azure Service Bus Queue Operators (#24038)`` +`9c59831ee7 `_ 2022-06-21 ``Update providers to use functools compat for ''cached_property'' (#24582)`` +================================================================================================= =========== ============================================================================= + +4.0.0 +..... + +Latest change: 2022-06-09 + +================================================================================================= =========== ================================================================================== +Commit Committed Subject +================================================================================================= =========== ================================================================================== +`dcdcf3a2b8 `_ 2022-06-09 ``Update release notes for RC2 release of Providers for May 2022 (#24307)`` +`717a7588bc `_ 2022-06-07 ``Update package description to remove double min-airflow specification (#24292)`` +`aeabe994b3 `_ 2022-06-07 ``Prepare docs for May 2022 provider's release (#24231)`` +`c23826915d `_ 2022-06-07 ``Apply per-run log templates to log handlers (#24153)`` +`027b707d21 `_ 2022-06-05 ``Add explanatory note for contributors about updating Changelog (#24229)`` +`389e858d93 `_ 2022-06-03 ``Pass connection extra parameters to wasb BlobServiceClient (#24154)`` +`6e83885c95 `_ 2022-06-03 ``Migrate Microsoft example DAGs to new design #22452 - azure (#24141)`` +`3393647aa6 `_ 2022-05-26 ``Add typing to Azure Cosmos Client Hook (#23941)`` +`ec6761a5c0 `_ 2022-05-23 ``Clean up f-strings in logging calls (#23597)`` +================================================================================================= =========== ================================================================================== + +3.9.0 +..... 
+ +Latest change: 2022-05-12 + +================================================================================================= =========== =============================================================================== +Commit Committed Subject +================================================================================================= =========== =============================================================================== +`75c60923e0 `_ 2022-05-12 ``Prepare provider documentation 2022.05.11 (#23631)`` +`8f181c1034 `_ 2022-05-08 ``wasb hook: user defaultAzureCredentials instead of managedIdentity (#23394)`` +`2d109401b3 `_ 2022-05-04 ``Bump pre-commit hook versions (#22887)`` +`8b6b0848a3 `_ 2022-04-23 ``Use new Breese for building, pulling and verifying the images. (#23104)`` +`49e336ae03 `_ 2022-04-13 ``Replace usage of 'DummyOperator' with 'EmptyOperator' (#22974)`` +`6933022e94 `_ 2022-04-10 ``Fix new MyPy errors in main (#22884)`` +================================================================================================= =========== =============================================================================== + +3.8.0 +..... + +Latest change: 2022-04-07 + +================================================================================================= =========== ================================================================================== +Commit Committed Subject +================================================================================================= =========== ================================================================================== +`56ab82ed7a `_ 2022-04-07 ``Prepare mid-April provider documentation. (#22819)`` +`d3976d9b20 `_ 2022-04-04 ``Docs: Fix example usage for 'AzureCosmosDocumentSensor' (#22735)`` +`7ab45d41d6 `_ 2022-03-24 ``Update secrets backends to use get_conn_value instead of get_conn_uri (#22348)`` +================================================================================================= =========== ================================================================================== + +3.7.2 +..... + +Latest change: 2022-03-22 + +================================================================================================= =========== ============================================================== +Commit Committed Subject +================================================================================================= =========== ============================================================== +`d7dbfb7e26 `_ 2022-03-22 ``Add documentation for bugfix release of Providers (#22383)`` +================================================================================================= =========== ============================================================== + +3.7.1 +..... 
+ +Latest change: 2022-03-14 + +================================================================================================= =========== ==================================================================== +Commit Committed Subject +================================================================================================= =========== ==================================================================== +`16adc035b1 `_ 2022-03-14 ``Add documentation for Classifier release for March 2022 (#22226)`` +`c1ab8e2d7b `_ 2022-03-14 ``Protect against accidental misuse of XCom.get_value() (#22244)`` +`d08284ed25 `_ 2022-03-11 `` Add map_index to XCom model and interface (#22112)`` +================================================================================================= =========== ==================================================================== + +3.7.0 +..... + +Latest change: 2022-03-07 + +================================================================================================= =========== =================================================================================== +Commit Committed Subject +================================================================================================= =========== =================================================================================== +`f5b96315fe `_ 2022-03-07 ``Add documentation for Feb Providers release (#22056)`` +`ba79adb631 `_ 2022-03-02 ``Make container creation configurable when uploading files via WasbHook (#20510)`` +`f42559a773 `_ 2022-03-02 ``Add 'test_connection' method to 'AzureDataFactoryHook' (#21924)`` +`08575ddd8a `_ 2022-03-01 ``Change BaseOperatorLink interface to take a ti_key, not a datetime (#21798)`` +`3c4524b4ec `_ 2022-02-23 ``(AzureCosmosDBHook) Update to latest Cosmos API (#21514)`` +`0a3ff43d41 `_ 2022-02-08 ``Add pre-commit check for docstring param types (#21398)`` +================================================================================================= =========== =================================================================================== + +3.6.0 +..... + +Latest change: 2022-02-08 + +================================================================================================= =========== ========================================================================== +Commit Committed Subject +================================================================================================= =========== ========================================================================== +`d94fa37830 `_ 2022-02-08 ``Fixed changelog for January 2022 (delayed) provider's release (#21439)`` +`6c3a67d4fc `_ 2022-02-05 ``Add documentation for January 2021 providers release (#21257)`` +`ddb5246bd1 `_ 2022-02-03 ``Refactor operator links to not create ad hoc TaskInstances (#21285)`` +`cb73053211 `_ 2022-01-27 ``Add optional features in providers. (#21074)`` +`602abe8394 `_ 2022-01-20 ``Remove ':type' lines now sphinx-autoapi supports typehints (#20951)`` +`730db3fb77 `_ 2022-01-18 ``Remove all "fake" stub files (#20936)`` +`f8fd0f7b4c `_ 2022-01-13 ``Explain stub files are introduced for Mypy errors in examples (#20827)`` +================================================================================================= =========== ========================================================================== + +3.5.0 +..... 
+ +Latest change: 2021-12-31 + +================================================================================================= =========== ========================================================================== +Commit Committed Subject +================================================================================================= =========== ========================================================================== +`f77417eb0d `_ 2021-12-31 ``Fix K8S changelog to be PyPI-compatible (#20614)`` +`97496ba2b4 `_ 2021-12-31 ``Update documentation for provider December 2021 release (#20523)`` +`a22d5bd076 `_ 2021-12-31 ``Fix mypy errors in Google Cloud provider (#20611)`` +`83f8e178ba `_ 2021-12-31 ``Even more typing in operators (template_fields/ext) (#20608)`` +`d56e7b56bb `_ 2021-12-30 ``Fix template_fields type to have MyPy friendly Sequence type (#20571)`` +`a0821235fb `_ 2021-12-30 ``Use typed Context EVERYWHERE (#20565)`` +`3299064958 `_ 2021-12-29 ``Use isort on pyi files (#20556)`` +`e63e23c582 `_ 2021-12-23 ``Fixing MyPy issues inside providers/microsoft (#20409)`` +`341bf5ab1f `_ 2021-12-22 ``Azure: New sftp to wasb operator (#18877)`` +`05e4cd1c6a `_ 2021-12-18 ``Add operator link to monitor Azure Data Factory pipeline runs (#20207)`` +`2fb5e1d0ec `_ 2021-12-15 ``Fix cached_property MyPy declaration and related MyPy errors (#20226)`` +`42f133c5f6 `_ 2021-12-06 ``Removes InputRequired validation with azure extra (#20084)`` +`374574b8d0 `_ 2021-12-06 ``Fix mypy errors in Microsoft Azure provider (#19923)`` +================================================================================================= =========== ========================================================================== + +3.4.0 +..... + +Latest change: 2021-11-30 + +================================================================================================= =========== ============================================================================== +Commit Committed Subject +================================================================================================= =========== ============================================================================== +`853576d901 `_ 2021-11-30 ``Update documentation for November 2021 provider's release (#19882)`` +`e25446a8b1 `_ 2021-11-18 ``Fix argument error in AzureContainerInstancesOperator (#19668)`` +`11e73d2db1 `_ 2021-11-16 ``Remove unnecessary connection form customizations in Azure (#19595)`` +`4212c49324 `_ 2021-11-14 ``Update Azure modules to comply with AIP-21 (#19431)`` +`0f516458be `_ 2021-11-08 ``Remove 'host' from hidden fields in 'WasbHook' (#19475)`` +`ca679c014c `_ 2021-11-07 ``use DefaultAzureCredential if login not provided for Data Factory (#19079)`` +`490a382ed6 `_ 2021-11-04 ``Ensure ''catchup=False'' is used in example dags (#19396)`` +================================================================================================= =========== ============================================================================== + +3.3.0 +..... 
+ +Latest change: 2021-10-29 + +================================================================================================= =========== =========================================================================================== +Commit Committed Subject +================================================================================================= =========== =========================================================================================== +`d9567eb106 `_ 2021-10-29 ``Prepare documentation for October Provider's release (#19321)`` +`61d0093054 `_ 2021-10-27 ``Added sas_token var to BlobServiceClient return. Updated tests (#19234)`` +`ceb2b53a10 `_ 2021-10-20 ``Static start_date and default arg cleanup for Microsoft providers example DAGs (#19062)`` +`86a2a19ad2 `_ 2021-10-17 ``More f-strings (#18855)`` +`1571f80546 `_ 2021-10-14 ``Add pre-commit hook for common misspelling check in files (#18964)`` +`1b75f9181f `_ 2021-10-05 ``Fix changelog for Azure Provider (#18736)`` +`181ac36db3 `_ 2021-10-05 ``update azure cosmos to latest version (#18695)`` +`6d504b43ea `_ 2021-10-04 ``Expanding docs on client auth for AzureKeyVaultBackend (#18659)`` +`c8485a83bc `_ 2021-10-03 ``Revert "update azure cosmos version (#18663)" (#18694)`` +`10421c6931 `_ 2021-10-01 ``update azure cosmos version (#18663)`` +================================================================================================= =========== =========================================================================================== + +3.2.0 +..... + +Latest change: 2021-09-30 + +================================================================================================= =========== ======================================================================================== +Commit Committed Subject +================================================================================================= =========== ======================================================================================== +`840ea3efb9 `_ 2021-09-30 ``Update documentation for September providers release (#18613)`` +`a458fcc573 `_ 2021-09-27 ``Updating miscellaneous provider DAGs to use TaskFlow API where applicable (#18278)`` +`46484466c4 `_ 2021-09-25 ``Removing redundant relabeling of password conn field (#18386)`` +`97d6892318 `_ 2021-09-25 ``Rename AzureDataLakeStorage to ADLS (#18493)`` +`1d2924c94e `_ 2021-09-24 ``Proper handling of Account URL custom conn field in AzureBatchHook (#18456)`` +`11e34535e8 `_ 2021-09-19 ``Creating ADF pipeline run operator, sensor + ADF custom conn fields (#17885)`` +`410e6d7967 `_ 2021-09-18 ``Initial commit (#18203)`` +`2dac083ae2 `_ 2021-09-16 ``Fixed wasb hook attempting to create container when getting a blob client (#18287)`` +`d119ae8f3f `_ 2021-09-12 ``Rename LocalToAzureDataLakeStorageOperator to LocalFilesystemToADLSOperator (#18168)`` +`28de326d61 `_ 2021-09-09 ``Rename FileToWasbOperator to LocalFilesystemToWasbOperator (#18109)`` +================================================================================================= =========== ======================================================================================== + +3.1.1 +..... 
+ +Latest change: 2021-08-30 + +================================================================================================= =========== ============================================================================ +Commit Committed Subject +================================================================================================= =========== ============================================================================ +`0a68588479 `_ 2021-08-30 ``Add August 2021 Provider's documentation (#17890)`` +`be75dcd39c `_ 2021-08-23 ``Update description about the new ''connection-types'' provider meta-data`` +`76ed2a49c6 `_ 2021-08-19 ``Import Hooks lazily individually in providers manager (#17682)`` +`29aab6434f `_ 2021-08-17 ``Adds secrets backend/logging/auth information to provider yaml (#17625)`` +================================================================================================= =========== ============================================================================ + +3.1.0 +..... + +Latest change: 2021-07-26 + +================================================================================================= =========== ============================================================================= +Commit Committed Subject +================================================================================================= =========== ============================================================================= +`87f408b1e7 `_ 2021-07-26 ``Prepares docs for Rc2 release of July providers (#17116)`` +`48ca9374bf `_ 2021-07-26 ``Remove/refactor default_args pattern for Microsoft example DAGs (#16873)`` +`d02ded65ea `_ 2021-07-15 ``Fixed wrongly escaped characters in amazon's changelog (#17020)`` +`b916b75079 `_ 2021-07-15 ``Prepare documentation for July release of providers. (#17015)`` +`866a601b76 `_ 2021-06-28 ``Removes pylint from our toolchain (#16682)`` +`caf0a8499f `_ 2021-06-25 ``Add support for managed identity in WASB hook (#16628)`` +`ffb1fcacff `_ 2021-06-24 ``Fix multiple issues in Microsoft AzureContainerInstancesOperator (#15634)`` +`a2a58d27ef `_ 2021-06-24 ``Reduce log messages for happy path (#16626)`` +================================================================================================= =========== ============================================================================= + +3.0.0 +..... 
+ +Latest change: 2021-06-18 + +================================================================================================= =========== ============================================================================== +Commit Committed Subject +================================================================================================= =========== ============================================================================== +`bbc627a3da `_ 2021-06-18 ``Prepares documentation for rc2 release of Providers (#16501)`` +`cbf8001d76 `_ 2021-06-16 ``Synchronizes updated changelog after buggfix release (#16464)`` +`1fba5402bb `_ 2021-06-15 ``More documentation update for June providers release (#16405)`` +`0c80a7d411 `_ 2021-06-11 ``Fixes AzureFileShare connection extras (#16388)`` +`29b7f795d6 `_ 2021-06-07 ``fix wasb remote logging when blob already exists (#16280)`` +`9c94b72d44 `_ 2021-06-07 ``Updated documentation for June 2021 provider release (#16294)`` +`476d0f6e3d `_ 2021-05-22 ``Bump pyupgrade v2.13.0 to v2.18.1 (#15991)`` +`c844ff742e `_ 2021-05-18 ``Fix colon spacing in ''AzureDataExplorerHook'' docstring (#15841)`` +`37681bca00 `_ 2021-05-07 ``Auto-apply apply_default decorator (#15667)`` +`3b4fdd0a7a `_ 2021-05-06 ``add oracle connection link (#15632)`` +`b1bd59440b `_ 2021-05-04 ``Add delimiter argument to WasbHook delete_file method (#15637)`` +`0f97a3970d `_ 2021-05-04 ``Rename example bucket names to use INVALID BUCKET NAME by default (#15651)`` +`db557a8c4a `_ 2021-05-01 ``Docs: Replace 'airflow' to 'apache-airflow' to install extra (#15628)`` +================================================================================================= =========== ============================================================================== + +2.0.0 +..... + +Latest change: 2021-05-01 + +================================================================================================= =========== ======================================================================= +Commit Committed Subject +================================================================================================= =========== ======================================================================= +`807ad32ce5 `_ 2021-05-01 ``Prepares provider release after PIP 21 compatibility (#15576)`` +`657384615f `_ 2021-04-27 ``Fix 'logging.exception' redundancy (#14823)`` +`d65e492a3e `_ 2021-04-25 ``Removes unnecessary AzureContainerInstance connection type (#15514)`` +`cb1344b63d `_ 2021-04-16 ``Update azure connection documentation (#15352)`` +`1a85ba9e93 `_ 2021-04-13 ``Add dynamic connection fields to Azure Connection (#15159)`` +================================================================================================= =========== ======================================================================= + +1.3.0 +..... 
+ +Latest change: 2021-04-06 + +================================================================================================= =========== ============================================================================= +Commit Committed Subject +================================================================================================= =========== ============================================================================= +`042be2e4e0 `_ 2021-04-06 ``Updated documentation for provider packages before April release (#15236)`` +`9b76b94c94 `_ 2021-04-02 ``A bunch of template_fields_renderers additions (#15130)`` +`a7ca1b3b0b `_ 2021-03-26 ``Fix Sphinx Issues with Docstrings (#14968)`` +`68e4c4dcb0 `_ 2021-03-20 ``Remove Backport Providers (#14886)`` +`4372d45615 `_ 2021-03-12 ``Fix attributes for AzureDataFactory hook (#14704)`` +================================================================================================= =========== ============================================================================= + +1.2.0 +..... + +Latest change: 2021-03-08 + +================================================================================================= =========== ============================================================================== +Commit Committed Subject +================================================================================================= =========== ============================================================================== +`b753c7fa60 `_ 2021-03-08 ``Prepare ad-hoc release of the four previously excluded providers (#14655)`` +`e7bb17aeb8 `_ 2021-03-06 ``Use built-in 'cached_property' on Python 3.8 where possible (#14606)`` +`630aeff72c `_ 2021-03-02 ``Fix AzureDataFactoryHook failing to instantiate its connection (#14565)`` +`589d6dec92 `_ 2021-02-27 ``Prepare to release the next wave of providers: (#14487)`` +`11d03d2f63 `_ 2021-02-26 ``Add Azure Data Factory hook (#11015)`` +`5bfa0f123b `_ 2021-02-25 ``BugFix: Fix remote log in azure storage blob displays in one line (#14313)`` +`ca35bd7f7f `_ 2021-02-21 ``By default PIP will install all packages in .local folder (#14125)`` +`10343ec29f `_ 2021-02-05 ``Corrections in docs and tools after releasing provider RCs (#14082)`` +================================================================================================= =========== ============================================================================== + +1.1.0 +..... + +Latest change: 2021-02-04 + +================================================================================================= =========== ============================================================= +Commit Committed Subject +================================================================================================= =========== ============================================================= +`88bdcfa0df `_ 2021-02-04 ``Prepare to release a new wave of providers. (#14013)`` +`ac2f72c98d `_ 2021-02-01 ``Implement provider versioning tools (#13767)`` +`94b1531230 `_ 2021-01-23 ``Upgrade azure blob to v12 (#12188)`` +`a9ac2b040b `_ 2021-01-23 ``Switch to f-strings using flynt. 
(#13732)`` +`3fd5ef3555 `_ 2021-01-21 ``Add missing logos for integrations (#13717)`` +`b2cb6ee5ba `_ 2021-01-07 ``Fix Azure Data Explorer Operator (#13520)`` +`295d66f914 `_ 2020-12-30 ``Fix Grammar in PIP warning (#13380)`` +`a1e9195076 `_ 2020-12-26 ``add system test for azure local to adls operator (#13190)`` +`5185d81ff9 `_ 2020-12-24 ``add AzureDatalakeStorageDeleteOperator (#13206)`` +`6cf76d7ac0 `_ 2020-12-18 ``Fix typo in pip upgrade command :( (#13148)`` +`5090fb0c89 `_ 2020-12-15 ``Add script to generate integrations.json (#13073)`` +================================================================================================= =========== ============================================================= + +1.0.0 +..... + +Latest change: 2020-12-09 + +================================================================================================= =========== ====================================================================================================================================================================== +Commit Committed Subject +================================================================================================= =========== ====================================================================================================================================================================== +`32971a1a2d `_ 2020-12-09 ``Updates providers versions to 1.0.0 (#12955)`` +`b40dffa085 `_ 2020-12-08 ``Rename remaing modules to match AIP-21 (#12917)`` +`9b39f24780 `_ 2020-12-08 ``Add support for dynamic connection form fields per provider (#12558)`` +`bd90136aaf `_ 2020-11-30 ``Move operator guides to provider documentation packages (#12681)`` +`2037303eef `_ 2020-11-29 ``Adds support for Connection/Hook discovery from providers (#12466)`` +`543d88b3a1 `_ 2020-11-28 ``Add example dag and system tests for azure wasb and fileshare (#12673)`` +`6b3c6add9e `_ 2020-11-27 ``Update setup.py to get non-conflicting set of dependencies (#12636)`` +`c34ef853c8 `_ 2020-11-20 ``Separate out documentation building per provider (#12444)`` +`0080354502 `_ 2020-11-18 ``Update provider READMEs for 1.0.0b2 batch release (#12449)`` +`7ca0b6f121 `_ 2020-11-18 ``Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438)`` +`ae7cb4a1e2 `_ 2020-11-17 ``Update wrong commit hash in backport provider changes (#12390)`` +`6889a333cf `_ 2020-11-15 ``Improvements for operators and hooks ref docs (#12366)`` +`7825e8f590 `_ 2020-11-13 ``Docs installation improvements (#12304)`` +`dd2095f4a8 `_ 2020-11-10 ``Simplify string expressions & Use f-string (#12216)`` +`85a18e13d9 `_ 2020-11-09 ``Point at pypi project pages for cross-dependency of provider packages (#12212)`` +`59eb5de78c `_ 2020-11-09 ``Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)`` +`b2a28d1590 `_ 2020-11-09 ``Moves provider packages scripts to dev (#12082)`` +`3ff7e0743a `_ 2020-11-08 ``azure key vault optional lookup (#12174)`` +`41bf172c1d `_ 2020-11-04 ``Simplify string expressions (#12093)`` +`4e8f9cc8d0 `_ 2020-11-03 ``Enable Black - Python Auto Formmatter (#9550)`` +`8c42cf1b00 `_ 2020-11-03 ``Use PyUpgrade to use Python 3.6 features (#11447)`` +`5a439e84eb `_ 2020-10-26 ``Prepare providers release 0.0.2a1 (#11855)`` +`872b1566a1 `_ 2020-10-25 ``Generated backport providers readmes/setup for 2020.10.29 (#11826)`` +`6ce855af11 `_ 2020-10-24 ``Fix spelling (#11821)`` +`349b0811c3 `_ 2020-10-20 ``Add D200 pydocstyle check (#11688)`` +`f8ff217e2f `_ 2020-10-18 ``Fix incorrect typing and 
move config args out of extra connection config to operator args (#11635)`` +`16e7129719 `_ 2020-10-13 ``Added support for provider packages for Airflow 2.0 (#11487)`` +`686e0ee7df `_ 2020-10-11 ``Fix incorrect typing, remove hardcoded argument values and improve code in AzureContainerInstancesOperator (#11408)`` +`d2754ef769 `_ 2020-10-09 ``Strict type check for Microsoft (#11359)`` +`832a7850f1 `_ 2020-10-08 ``Add Azure Blob Storage to GCS transfer operator (#11321)`` +`5d007fd2ff `_ 2020-10-08 ``Strict type check for azure hooks (#11342)`` +`b0fcf67559 `_ 2020-10-07 ``Add AzureFileShareToGCSOperator (#10991)`` +`c51016b0b8 `_ 2020-10-05 ``Add LocalToAzureDataLakeStorageOperator (#10814)`` +`fd682fd70a `_ 2020-10-05 ``fix job deletion (#11272)`` +`4210618789 `_ 2020-10-03 ``Ensure target_dedicated_nodes or enable_auto_scale is set in AzureBatchOperator (#11251)`` +`0a0e1af800 `_ 2020-10-03 ``Fix Broken Markdown links in Providers README TOC (#11249)`` +`ca4238eb4d `_ 2020-10-02 ``Fixed month in backport packages to October (#11242)`` +`5220e4c384 `_ 2020-10-02 ``Prepare Backport release 2020.09.07 (#11238)`` +`5093245d6f `_ 2020-09-30 ``Strict type coverage for Oracle and Yandex provider (#11198)`` +`f3e87c5030 `_ 2020-09-22 ``Add D202 pydocstyle check (#11032)`` +`f77a11d5b1 `_ 2020-09-13 ``Add Secrets backend for Microsoft Azure Key Vault (#10898)`` +`9549274d11 `_ 2020-09-09 ``Upgrade black to 20.8b1 (#10818)`` +`fdd9b6f65b `_ 2020-08-25 ``Enable Black on Providers Packages (#10543)`` +`3696c34c28 `_ 2020-08-24 ``Fix typo in the word "release" (#10528)`` +`ee7ca128a1 `_ 2020-08-22 ``Fix broken Markdown refernces in Providers README (#10483)`` +`2f552233f5 `_ 2020-08-21 ``Add AzureBaseHook (#9747)`` +`cdec301254 `_ 2020-08-07 ``Add correct signature to all operators and sensors (#10205)`` +`24c8e4c2d6 `_ 2020-08-06 ``Changes to all the constructors to remove the args argument (#10163)`` +`aeea71274d `_ 2020-08-02 ``Remove 'args' parameter from provider operator constructors (#10097)`` +`7d24b088cd `_ 2020-07-25 ``Stop using start_date in default_args in example_dags (2) (#9985)`` +`0bf330ba86 `_ 2020-07-24 ``Add get_blobs_list method to WasbHook (#9950)`` +`33f0cd2657 `_ 2020-07-22 ``apply_default keeps the function signature for mypy (#9784)`` +`d3c76da952 `_ 2020-07-12 ``Improve type hinting to provider microsoft (#9774)`` +`23f80f34ad `_ 2020-07-08 ``Move gcs & wasb task handlers to their respective provider packages (#9714)`` +`d0e7db4024 `_ 2020-06-19 ``Fixed release number for fresh release (#9408)`` +`12af6a0800 `_ 2020-06-19 ``Final cleanup for 2020.6.23rc1 release preparation (#9404)`` +`c7e5bce57f `_ 2020-06-19 ``Prepare backport release candidate for 2020.6.23rc1 (#9370)`` +`f6bd817a3a `_ 2020-06-16 ``Introduce 'transfers' packages (#9320)`` +`0b0e4f7a4c `_ 2020-05-26 ``Preparing for RC3 relase of backports (#9026)`` +`00642a46d0 `_ 2020-05-26 ``Fixed name of 20 remaining wrongly named operators. 
(#8994)`` +`375d1ca229 `_ 2020-05-19 ``Release candidate 2 for backport packages 2020.05.20 (#8898)`` +`12c5e5d8ae `_ 2020-05-17 ``Prepare release candidate for backport packages (#8891)`` +`f3521fb0e3 `_ 2020-05-16 ``Regenerate readme files for backport package release (#8886)`` +`92585ca4cb `_ 2020-05-15 ``Added automated release notes generation for backport operators (#8807)`` +`87969a350d `_ 2020-04-09 ``[AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170)`` +`d99833c9b5 `_ 2020-04-03 ``[AIRFLOW-4529] Add support for Azure Batch Service (#8024)`` +`4bde99f132 `_ 2020-03-23 ``Make airflow/providers pylint compatible (#7802)`` +`a83eb335e5 `_ 2020-03-23 ``Add call to Super call in microsoft providers (#7821)`` +`f0e2421807 `_ 2020-02-24 ``[AIRFLOW-6896] AzureCosmosDBHook: Move DB call out of __init__ (#7520)`` +`4bec1cc489 `_ 2020-02-24 ``[AIRFLOW-6895] AzureFileShareHook: Move DB call out of __init__ (#7519)`` +`3320e432a1 `_ 2020-02-24 ``[AIRFLOW-6817] Lazy-load 'airflow.DAG' to keep user-facing API untouched (#7517)`` +`086e307245 `_ 2020-02-23 ``[AIRFLOW-6890] AzureDataLakeHook: Move DB call out of __init__ (#7513)`` +`4d03e33c11 `_ 2020-02-22 ``[AIRFLOW-6817] remove imports from 'airflow/__init__.py', replaced implicit imports with explicit imports, added entry to 'UPDATING.MD' - squashed/rebased (#7456)`` +`175a160463 `_ 2020-02-19 ``[AIRFLOW-6828] Stop using the zope library (#7448)`` +`1e00243014 `_ 2020-02-10 ``[AIRFLOW-5176] Add Azure Data Explorer (Kusto) operator (#5785)`` +`97a429f9d0 `_ 2020-02-02 ``[AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)`` +`83c037873f `_ 2020-01-30 ``[AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)`` +`057f3ae3a4 `_ 2020-01-29 ``[AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286)`` +`290330ba60 `_ 2020-01-15 ``[AIRFLOW-6552] Move Azure classes to providers.microsoft package (#7158)`` +================================================================================================= =========== ====================================================================================================================================================================== diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/acr.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/acr.rst.txt new file mode 100644 index 00000000000..3944519998a --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/acr.rst.txt @@ -0,0 +1,62 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + + +.. 
_howto/connection:acr: + +Microsoft Azure Container Registry Connection +============================================== + +The Microsoft Azure Container Registry connection type enables the Azure Container Registry Integrations. + +Authenticating to Azure Container Registry +------------------------------------------ + +There is one way to connect to Azure Container Registry using Airflow. + +1. Use `Individual login with Azure AD + `_ + i.e. add specific credentials to the Airflow connection. + +Default Connection IDs +---------------------- + +All hooks and operators related to Microsoft Azure Container Registry use ``azure_container_registry_default`` by default. + +Configuring the Connection +-------------------------- + +Login + Specify the Image Registry Username used for the initial connection. + +Password + Specify the Image Registry Password used for the initial connection. + +Host + Specify the Image Registry Server used for the initial connection. + +When specifying the connection in environment variable you should specify +it using URI syntax. + +Note that all components of the URI should be URL-encoded. + +For example: + +.. code-block:: bash + + export AIRFLOW_CONN_AZURE_CONTAINER_REGISTRY_DEFAULT='azure-container-registry://username:password@myregistry.com?tenant=tenant+id&account_name=store+name' diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/adf.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/adf.rst.txt new file mode 100644 index 00000000000..25f231efdeb --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/adf.rst.txt @@ -0,0 +1,93 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + + +.. _howto/connection:adf: + +Microsoft Azure Data Factory +======================================= + +The Microsoft Azure Data Factory connection type enables the Azure Data Factory Integrations. + +Authenticating to Azure Data Factory +------------------------------------ + +There are multiple ways to connect to Azure Data Factory using Airflow. + +1. Use `token credentials + `_ + i.e. add specific credentials (client_id, secret, tenant) and subscription id to the Airflow connection. +2. Fallback on `DefaultAzureCredential + `_. + This includes a mechanism to try different options to authenticate: Managed System Identity, environment variables, authentication through Azure CLI... + +Default Connection IDs +---------------------- + +All hooks and operators related to Microsoft Azure Data Factory use ``azure_data_factory_default`` by default. + +Configuring the Connection +-------------------------- + +Client ID + Specify the ``client_id`` used for the initial connection. 
+ This is needed for *token credentials* authentication mechanism. + It can be left out to fall back on ``DefaultAzureCredential``. + +Secret + Specify the ``secret`` used for the initial connection. + This is needed for *token credentials* authentication mechanism. + It can be left out to fall back on ``DefaultAzureCredential``. + +Tenant ID + Specify the Azure tenant ID used for the initial connection. + This is needed for *token credentials* authentication mechanism. + It can be left out to fall back on ``DefaultAzureCredential``. + Use extra param ``tenantId`` to pass in the tenant ID. + +Subscription ID + Specify the ID of the subscription used for the initial connection. + This is needed for all authentication mechanisms. + Use extra param ``subscriptionId`` to pass in the Azure subscription ID. + +Factory Name (optional) + Specify the Azure Data Factory to interface with. + If not specified in the connection, this needs to be passed in directly to hooks, operators, and sensors. + Use extra param ``factory_name`` to pass in the factory name. + +Resource Group Name (optional) + Specify the Azure Resource Group Name under which the desired data factory resides. + If not specified in the connection, this needs to be passed in directly to hooks, operators, and sensors. + Use extra param ``resource_group_name`` to pass in the resource group name. + + +When specifying the connection in environment variable you should specify +it using URI syntax. + +Note that all components of the URI should be URL-encoded. + +Examples +-------- + +.. code-block:: bash + + export AIRFLOW_CONN_AZURE_DATA_FACTORY_DEFAULT='azure-data-factory://applicationid:serviceprincipalpassword@?tenantId=tenant+id&subscriptionId=subscription+id&resource_group_name=group+name&factory_name=factory+name' + +.. code-block:: bash + + export AIRFLOW_CONN_AZURE_DATA_FACTORY_DEFAULT='azure-data-factory://applicationid:serviceprincipalpassword@?tenantId=tenant+id&subscriptionId=subscription+id' diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/adl.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/adl.rst.txt new file mode 100644 index 00000000000..8cf14a188fa --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/adl.rst.txt @@ -0,0 +1,70 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + + +.. _howto/connection:adl: + +Microsoft Azure Data Lake Connection +==================================== + +The Microsoft Azure Data Lake connection type enables the Azure Data Lake Integrations. + +Authenticating to Azure Data Lake +--------------------------------- + +There is one way to connect to Azure Data Lake using Airflow. + +1. Use `token credentials + `_ + i.e. 
add specific credentials (client_id, secret, tenant) and account name to the Airflow connection. + +Default Connection IDs +---------------------- + +All hooks and operators related to Microsoft Azure Data Lake use ``azure_data_lake_default`` by default. + +Configuring the Connection +-------------------------- + +Login + Specify the ``client_id`` used for the initial connection. + This is needed for *token credentials* authentication mechanism. + +Password + Specify the ``secret`` used for the initial connection. + This is only needed for *token credentials* authentication mechanism. + +Extra (optional) + Specify the extra parameters (as json dictionary) that can be used in Azure Data Lake connection. + The following parameters are all optional: + + * ``tenant``: Specify the tenant to use. + This is needed for *token credentials* authentication mechanism. + * ``account_name``: Specify the azure data lake account name. + This is sometimes called the ``store_name`` + +When specifying the connection in environment variable you should specify +it using URI syntax. + +Note that all components of the URI should be URL-encoded. + +For example: + +.. code-block:: bash + + export AIRFLOW_CONN_AZURE_DATA_LAKE_DEFAULT='azure-data-lake://client%20id:secret@?tenant=tenant+id&account_name=store+name' diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/adx.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/adx.rst.txt new file mode 100644 index 00000000000..b6ed71d4825 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/adx.rst.txt @@ -0,0 +1,91 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + + +.. _howto/connection:adx: + +Microsoft Azure Data Explorer +============================= + +The ``Azure Data Explorer`` connection type enables Azure Data Explorer (ADX) integrations in Airflow. + +Authenticating to Azure Data Explorer +--------------------------------------- + +There are three ways to connect to Azure Data Explorer using Airflow. + +1. Use `AAD application certificate + `_ + (i.e. use "AAD_APP" or "AAD_APP_CERT" as the Authentication Method in the Airflow connection). +2. Use `AAD username and password + `_ + (i.e. use "AAD_CREDS" as the Authentication Method in the Airflow connection). +3. Use a `AAD device code + `_ + (i.e. use "AAD_DEVICE" as the Authentication Method in the Airflow connection). + +Only one authorization method can be used at a time. If you need to manage multiple credentials or keys then you should +configure multiple connections. + +Default Connection IDs +---------------------- + +All hooks and operators related to Microsoft Azure Data Explorer use ``azure_data_explorer_default`` by default. 
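A minimal sketch of how that default connection ID is resolved in practice: instantiating ``AzureDataExplorerHook`` without an explicit ``azure_data_explorer_conn_id`` falls back to ``azure_data_explorer_default``. The import path reflects the 5.x provider layout, and the database and query names are placeholders, so treat this as an illustrative example rather than a definitive recipe.

.. code-block:: python

    # Illustrative sketch: rely on the default connection ID described above.
    from airflow.providers.microsoft.azure.hooks.adx import AzureDataExplorerHook

    # No conn_id argument, so the hook looks up "azure_data_explorer_default".
    hook = AzureDataExplorerHook()

    # "SampleTable" and "sample_db" are placeholder names for this example.
    response = hook.run_query("SampleTable | take 10", database="sample_db", options={})
    print(response.primary_results[0])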
+ +Configuring the Connection +-------------------------- + +Data Explorer Cluster URL + Specify the Data Explorer cluster URL. Needed for all authentication methods. + +Authentication Method + Specify authentication method. Available authentication methods are: + + * AAD_APP: Authentication with AAD application certificate. A Tenant ID is required when using this method. Provide application ID and application key through Username and Password parameters. + + * AAD_APP_CERT: Authentication with AAD application certificate. Tenant ID, Application PEM Certificate, and Application Certificate Thumbprint are required when using this method. + + * AAD_CREDS: Authentication with AAD username and password. A Tenant ID is required when using this method. Username and Password parameters are used for authentication with AAD. + + * AAD_DEVICE: Authenticate with AAD device code. Please note that if you choose this option, you'll need to authenticate for every new instance that is initialized. It is highly recommended to create one instance and use it for all queries. + +Username (optional) + Specify the username used for data explorer. Needed for with AAD_APP, AAD_APP_CERT, and AAD_CREDS authentication methods. + +Password (optional) + Specify the password used for data explorer. Needed for with AAD_APP, and AAD_CREDS authentication methods. + +Tenant ID (optional) + Specify AAD tenant. Needed for AAD_APP, AAD_APP_CERT, and AAD_CREDS. + +Application PEM Certificate (optional) + Specify the certificate. Needed for AAD_APP_CERT authentication method. + +Application Certificate Thumbprint (optional) + Specify the thumbprint needed for use with AAD_APP_CERT authentication method. + +When specifying the connection in environment variable you should specify +it using URI syntax. + +Note that all components of the URI should be URL-encoded. + +For example: + +.. code-block:: bash + + export AIRFLOW_CONN_AZURE_DATA_EXPLORER_DEFAULT='azure-data-explorer://add%20username:add%20password@mycluster.com?auth_method=AAD_APP&tenant=tenant+id' diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/asb.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/asb.rst.txt new file mode 100644 index 00000000000..daf50d6017a --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/asb.rst.txt @@ -0,0 +1,50 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + + +.. _howto/connection:azure_service_bus: + +Microsoft Azure Service Bus +======================================= + +The Microsoft Azure Service Bus connection type enables the Azure Service Bus Integration. 
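As a hedged illustration of what that integration looks like from a DAG, the snippet below creates a Service Bus queue with one of the provider's queue operators, relying on the default connection described in the sections that follow. The operator class matches those added in the 4.x provider wave; the queue name is a placeholder.

.. code-block:: python

    # Illustrative sketch: create a Service Bus queue with the provider operator.
    # It authenticates through the "azure_service_bus_default" connection, whose
    # connection string is configured as described below.
    from airflow.providers.microsoft.azure.operators.asb import (
        AzureServiceBusCreateQueueOperator,
    )

    create_queue = AzureServiceBusCreateQueueOperator(
        task_id="create_service_bus_queue",
        queue_name="example-queue",  # placeholder queue name
    )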
+ +Authenticating to Azure Service Bus +------------------------------------ + +There are multiple ways to authenticate and authorize access to Azure Service Bus resources: +Currently Supports Shared Access Signatures (SAS). + +1. Use a `Connection String + `_ + i.e. Use connection string Field to add ``Connection String`` in the Airflow connection. + +Default Connection IDs +---------------------- + +All hooks and operators related to Microsoft Azure Service Bus use ``azure_service_bus_default`` by default. + +Configuring the Connection +-------------------------- + +Connection String + Specify the Azure Service bus connection string ID used for the initial connection. + Please find the documentation on how to generate connection string in azure service bus + `Get connection string + `_ + Use the key ``connection_string`` to pass in the Connection ID . diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure.rst.txt new file mode 100644 index 00000000000..bae0d415c5f --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure.rst.txt @@ -0,0 +1,83 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + + +.. _howto/connection:azure: + +Microsoft Azure Connection +========================== + +The Microsoft Azure connection type enables the Azure Integrations. + +Authenticating to Azure +----------------------- + +There are three ways to connect to Azure using Airflow. + +1. Use `token credentials + `_ + i.e. add specific credentials (client_id, secret, tenant) and subscription id to the Airflow connection. +2. Use a `JSON file + `_ + i.e. create a key file on disk and link to it in the Airflow connection. +3. Use a `JSON dictionary + `_ + i.e. add a key config directly into the Airflow connection. + +Only one authorization method can be used at a time. If you need to manage multiple credentials or keys then you should +configure multiple connections. + +Default Connection IDs +---------------------- + +All hooks and operators related to Microsoft Azure Container Instances use ``azure_default`` by default. + +Configuring the Connection +-------------------------- + +Login (optional) + Specify the ``client_id`` used for the initial connection. + This is only needed for *token credentials* authentication mechanism. + +Password (optional) + Specify the ``secret`` used for the initial connection. + This is only needed for *token credentials* authentication mechanism. + +Extra (optional) + Specify the extra parameters (as json dictionary) that can be used in Azure connection. + The following parameters are all optional: + + * ``tenantId``: Specify the tenant to use. 
+ This is only needed for *token credentials* authentication mechanism. + * ``subscriptionId``: Specify the subscription id to use. + This is only needed for *token credentials* authentication mechanism. + * ``key_path``: If set, it uses the *JSON file* authentication mechanism. + It specifies the path to the json file that contains the authentication information. + * ``key_json``: If set, it uses the *JSON dictionary* authentication mechanism. + It specifies the json that contains the authentication information. + +When specifying the connection in environment variable you should specify +it using URI syntax. + +Note that all components of the URI should be URL-encoded. + +For example: + +.. code-block:: bash + + export AIRFLOW_CONN_AZURE_DEFAULT='azure://?key_path=%2Fkeys%2Fkey.json' diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure_batch.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure_batch.rst.txt new file mode 100644 index 00000000000..b146bb09ac3 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure_batch.rst.txt @@ -0,0 +1,61 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + + +.. _howto/connection:azure_batch: + +Microsoft Azure Batch +===================== + +The Microsoft Azure Batch connection type enables the Azure Batch integrations. + +Authenticating to Azure Batch +------------------------------------------ + +There is one way to connect to Azure Batch using Airflow. + +1. Use `Azure Shared Key Credential + `_ + i.e. add shared key credentials to the Airflow connection. + +Default Connection IDs +---------------------- + +All hooks and operators related to Microsoft Azure Batch use ``azure_batch_default`` by default. + +Configuring the Connection +-------------------------- + +Batch Account Name + Specify the Azure Batch Account Name used for the initial connection. + +Batch Account Access Key + Specify the access key used for the initial connection. + +Batch Account URL + Specify the batch account URL you would like to use. + +When specifying the connection in environment variable you should specify it using URI syntax. + +Note that all components of the URI should be URL-encoded. + +For example: + +.. 
code-block:: bash + + export AIRFLOW_CONN_AZURE_BATCH_DEFAULT='azure-batch://batch%20account:batch%20key@?account_url=mybatchaccount.com' diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure_container_volume.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure_container_volume.rst.txt new file mode 100644 index 00000000000..c81be9a88d0 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure_container_volume.rst.txt @@ -0,0 +1,75 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + + +.. _howto/connection:azure_container_volume: + +Microsoft Azure Container Volume Connection +=========================================== + +The Microsoft Azure Container Volume connection type enables the Azure Container Volume Integrations. + +Authenticating to Azure Container Volume +---------------------------------------- + +There are two ways to connect to Azure Container Volume using Airflow. + +1. Use `token credentials + `_ + i.e. add specific credentials (client_id, secret) and subscription id to the Airflow connection. +2. Use a `Connection String + `_ + i.e. add connection string to ``extra__azure_container_volume__connection_string`` in the Airflow connection. + +Only one authorization method can be used at a time. If you need to manage multiple credentials or keys then you should +configure multiple connections. + +Default Connection IDs +---------------------- + +All hooks and operators related to Azure Container Volume use ``azure_container_volume_default`` by default. + +Configuring the Connection +-------------------------- + +Login (optional) + Specify the login used for azure blob storage. For use with Shared Key Credential and SAS Token authentication. + +Password (optional) + Specify the password used for azure blob storage. For use with + Active Directory (token credential) and shared key authentication. + +Host (optional) + Specify the account url for anonymous public read, Active Directory, shared access key authentication. + +Extra (optional) + Specify the extra parameters (as json dictionary) that can be used in Azure connection. + The following parameters are all optional: + + * ``extra__azure_container_volume__connection_string``: Connection string for use with connection string authentication. + +When specifying the connection in environment variable you should specify +it using URI syntax. + +Note that all components of the URI should be URL-encoded. + +For example connect with token credentials: + +..
code-block:: bash + + export AIRFLOW_CONN_WASP_DEFAULT='azure_container_volume://blob%20username:blob%20password@myblob.com' diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure_cosmos.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure_cosmos.rst.txt new file mode 100644 index 00000000000..1de96f2ea57 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure_cosmos.rst.txt @@ -0,0 +1,66 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + + +.. _howto/connection:azure_cosmos: + +Microsoft Azure Cosmos +==================================== + +The Microsoft Azure Cosmos connection type enables the Azure Cosmos Integrations. + +Authenticating to Azure +----------------------- + +There is one way to connect to Azure Cosmos using Airflow. + +1. Use `Primary Keys + `_ + i.e. add specific credentials (client_id, secret, tenant) and account name to the Airflow connection. + +Default Connection IDs +---------------------- + +All hooks and operators related to Microsoft Azure Cosmos use ``azure_cosmos_default`` by default. + +Configuring the Connection +-------------------------- + +Login + Specify the Cosmos Endpoint URI used for the initial connection. + +Password + Specify the Cosmos Master Key Token used for the initial connection. + +Extra (optional) + Specify the extra parameters (as json dictionary) that can be used in Azure Cosmos connection. + The following parameters are all optional: + + * ``database_name``: Specify the azure cosmos database to use. + * ``collection_name``: Specify the azure cosmos collection to use. + +When specifying the connection in environment variable you should specify +it using URI syntax. + +Note that all components of the URI should be URL-encoded. + +For example: + +.. code-block:: bash + + export AIRFLOW_CONN_AZURE_COSMOS_DEFAULT='azure-cosmos://https%3A%2F%2Fairflow.azure.com:master%20key@?database_name=mydatabase&collection_name=mycollection' diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure_fileshare.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure_fileshare.rst.txt new file mode 100644 index 00000000000..39b827d4d1e --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure_fileshare.rst.txt @@ -0,0 +1,80 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. 
The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + + +.. _howto/connection:azure_fileshare: + +Microsoft Azure File Share Connection +===================================== + +The Microsoft Azure File Share connection type enables the Azure File Share Integrations. + +Authenticating to Azure File Share +---------------------------------- + +There are three ways to connect to Azure File Share using Airflow. + +1. Use `token credentials + `_ + i.e. add specific credentials (client_id, secret) and subscription id to the Airflow connection. +2. Use a `SAS Token + `_ + i.e. add a key config to ``sas_token`` in the Airflow connection. +3. Use a `Connection String + `_ + i.e. add connection string to ``connection_string`` in the Airflow connection. + +Only one authorization method can be used at a time. If you need to manage multiple credentials or keys then you should +configure multiple connections. + +Default Connection IDs +---------------------- + +All hooks and operators related to Azure File Share use ``azure_fileshare_default`` by default. + +Configuring the Connection +-------------------------- + +Login (optional) + Specify the login used for Azure File Share. For use with Shared Key Credential and SAS Token authentication. + +Password (optional) + Specify the password used for Azure File Share. For use with + Active Directory (token credential) and shared key authentication. + +Host (optional) + Specify the account url for anonymous public read, Active Directory, shared access key authentication. + +Extra (optional) + Specify the extra parameters (as json dictionary) that can be used in Azure connection. + The following parameters are all optional: + + * ``connection_string``: Connection string for use with connection string authentication. + * ``sas_token``: SAS Token for use with SAS Token authentication. + * ``protocol``: Specify the protocol to use (default is ``https``). + +When specifying the connection in environment variable you should specify +it using URI syntax. + +Note that all components of the URI should be URL-encoded. + +For example connect with token credentials: + +.. code-block:: bash + + export AIRFLOW_CONN_AZURE_FILESHARE_DEFAULT='azure_fileshare://blob%20username@myblob.com?sas_token=token' diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure_synapse.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure_synapse.rst.txt new file mode 100644 index 00000000000..dd8dfd02934 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/azure_synapse.rst.txt @@ -0,0 +1,69 @@ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership.
The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + + +.. _howto/connection:synapse: + +Microsoft Azure Synapse +======================= + +The Microsoft Azure Synapse connection type enables the Azure Synapse Integrations. + +Authenticating to Azure Synapse +------------------------------- + +There are multiple ways to connect to Azure Synapse using Airflow. + +1. Use `token credentials + `_ + i.e. add specific credentials (client_id, secret, tenant) and subscription id to the Airflow connection. +2. Fallback on `DefaultAzureCredential + `_. + This includes a mechanism to try different options to authenticate: Managed System Identity, environment variables, authentication through Azure CLI... + +Default Connection IDs +---------------------- + +All hooks and operators related to Microsoft Azure Synapse use ``azure_synapse_default`` by default. + +Configuring the Connection +-------------------------- + +Client ID + Specify the ``client_id`` used for the initial connection. + This is needed for *token credentials* authentication mechanism. + It can be left out to fall back on ``DefaultAzureCredential``. + +Secret + Specify the ``secret`` used for the initial connection. + This is needed for *token credentials* authentication mechanism. + It can be left out to fall back on ``DefaultAzureCredential``. + +Tenant ID + Specify the Azure tenant ID used for the initial connection. + This is needed for *token credentials* authentication mechanism. + It can be left out to fall back on ``DefaultAzureCredential``. + Use the key ``extra__azure_synapse__tenantId`` to pass in the tenant ID. + +Subscription ID + A subscription ID is required for the connection. + This is needed for all authentication mechanisms. + Use the key ``extra__azure_synapse__subscriptionId`` to pass in the Azure subscription ID. + +Synapse Workspace URL + Specify the Azure Synapse endpoint to interface with. diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/index.rst.txt new file mode 100644 index 00000000000..2b8afb41df9 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/index.rst.txt @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. 
See the License for the + specific language governing permissions and limitations + under the License. + +Connection Types +---------------- + +.. toctree:: + :maxdepth: 1 + :glob: + + * diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/wasb.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/wasb.rst.txt new file mode 100644 index 00000000000..823cc85c22d --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/connections/wasb.rst.txt @@ -0,0 +1,84 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + + +.. _howto/connection:wasb: + +Microsoft Azure Blob Storage Connection +======================================= + +The Microsoft Azure Blob Storage connection type enables the Azure Blob Storage Integrations. + +Authenticating to Azure Blob Storage +------------------------------------ + +There are four ways to connect to Azure Blob Storage using Airflow. + +1. Use `token credentials + `_ + i.e. add specific credentials (client_id, secret, tenant) and subscription id to the Airflow connection. +2. Use `Azure Shared Key Credential + `_ + i.e. add shared key credentials to ``shared_access_key`` the Airflow connection. +3. Use a `SAS Token + `_ + i.e. add a key config to ``sas_token`` in the Airflow connection. +4. Use a `Connection String + `_ + i.e. add connection string to ``connection_string`` in the Airflow connection. + +Only one authorization method can be used at a time. If you need to manage multiple credentials or keys then you should +configure multiple connections. + +Default Connection IDs +---------------------- + +All hooks and operators related to Microsoft Azure Blob Storage use ``wasb_default`` by default. + +Configuring the Connection +-------------------------- + +Login (optional) + Specify the login used for azure blob storage. For use with Shared Key Credential and SAS Token authentication. + +Password (optional) + Specify the password used for azure blob storage. For use with + Active Directory (token credential) and shared key authentication. + +Host (optional) + Specify the account url for anonymous public read, Active Directory, shared access key authentication. + +Extra (optional) + Specify the extra parameters (as json dictionary) that can be used in Azure connection. + The following parameters are all optional: + + * ``tenant_id``: Specify the tenant to use. Needed for Active Directory (token) authentication. + * ``shared_access_key``: Specify the shared access key. Needed for shared access key authentication. + * ``connection_string``: Connection string for use with connection string authentication. + * ``sas_token``: SAS Token for use with SAS Token authentication. 
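+
+Once the connection is configured, hooks and operators resolve it by its connection id. Below is a
+minimal, illustrative sketch (not taken from the provider documentation) of ``WasbHook`` reading this
+connection; the container and blob names are placeholders:
+
+.. code-block:: python
+
+    from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
+
+    # Resolves the "wasb_default" connection configured as described above.
+    hook = WasbHook(wasb_conn_id="wasb_default")
+
+    # Placeholder container and blob names -- replace with your own.
+    if hook.check_for_blob(container_name="mycontainer", blob_name="myblob.txt"):
+        hook.get_file("/tmp/myblob.txt", container_name="mycontainer", blob_name="myblob.txt")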
+ +When specifying the connection in environment variable you should specify +it using URI syntax. + +Note that all components of the URI should be URL-encoded. + +For example connect with token credentials: + +.. code-block:: bash + + export AIRFLOW_CONN_WASB_DEFAULT='wasb://blob%20username:blob%20password@myblob.com?tenant_id=tenant+id' diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/index.rst.txt new file mode 100644 index 00000000000..b492562c72e --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/index.rst.txt @@ -0,0 +1,140 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-microsoft-azure`` +============================================ + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: Guides + + Connection types + Operators + Secrets backends + Logging for Tasks + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/microsoft/azure/index> + +.. toctree:: + :hidden: + :caption: System tests + + System Tests <_api/tests/system/providers/microsoft/azure/index> + +.. toctree:: + :maxdepth: 1 + :caption: Resources + + Example DAGs + PyPI Repository + Installing from sources + +.. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME! + + +.. toctree:: + :maxdepth: 1 + :caption: Commits + + Detailed list of commits + + +Package apache-airflow-providers-microsoft-azure +------------------------------------------------------ + +`Microsoft Azure `__ + + +Release: 5.0.1 + +Provider package +---------------- + +This is a provider package for ``microsoft.azure`` provider. All classes for this provider package +are in ``airflow.providers.microsoft.azure`` python package. 
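+
+For example, hooks and operators documented in this package are imported from submodules of that
+package (illustrative imports; see the Python API reference above for the full list):
+
+.. code-block:: python
+
+    # Hook for Azure Blob Storage (wasb) connections.
+    from airflow.providers.microsoft.azure.hooks.wasb import WasbHook
+
+    # Operator that triggers an Azure Data Factory pipeline run.
+    from airflow.providers.microsoft.azure.operators.data_factory import (
+        AzureDataFactoryRunPipelineOperator,
+    )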
+ +Installation +------------ + +You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below +for the minimum Airflow version supported) via +``pip install apache-airflow-providers-microsoft-azure`` + +Requirements +------------ + +================================ ========================================== +PIP package Version required +================================ ========================================== +``apache-airflow`` ``>=2.3.0`` +``azure-batch`` ``>=8.0.0`` +``azure-cosmos`` ``>=4.0.0`` +``azure-datalake-store`` ``>=0.0.45`` +``azure-identity`` ``>=1.3.1`` +``azure-keyvault-secrets`` ``>=4.1.0,<5.0`` +``azure-kusto-data`` ``>=0.0.43,<0.1`` +``azure-mgmt-containerinstance`` ``>=1.5.0,<2.0`` +``azure-mgmt-datafactory`` ``>=1.0.0,<2.0`` +``azure-mgmt-datalake-store`` ``>=0.5.0`` +``azure-mgmt-resource`` ``>=2.2.0`` +``azure-storage-blob`` ``>=12.14.0`` +``azure-storage-common`` ``>=2.1.0`` +``azure-storage-file`` ``>=2.1.0`` +``azure-servicebus`` ``>=7.6.1; platform_machine != "aarch64"`` +``azure-synapse-spark`` +``adal`` ``>=1.2.7`` +================================ ========================================== + +Cross provider package dependencies +----------------------------------- + +Those are dependencies that might be needed in order to use all the features of the package. +You need to install the specified provider packages in order to use them. + +You can install such cross-provider dependencies when installing from PyPI. For example: + +.. code-block:: bash + + pip install apache-airflow-providers-microsoft-azure[google] + + +==================================================================================================== ========== +Dependent package Extra +==================================================================================================== ========== +`apache-airflow-providers-google `_ ``google`` +`apache-airflow-providers-oracle `_ ``oracle`` +`apache-airflow-providers-sftp `_ ``sftp`` +==================================================================================================== ========== + +Downloading official packages +----------------------------- + +You can download officially released packages and verify their checksums and signatures from the +`Official Apache Download site `_ + +* `The apache-airflow-providers-microsoft-azure 5.0.1 sdist package `_ (`asc `__, `sha512 `__) +* `The apache-airflow-providers-microsoft-azure 5.0.1 wheel package `_ (`asc `__, `sha512 `__) + +.. include:: ../../airflow/providers/microsoft/azure/CHANGELOG.rst diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/installing-providers-from-sources.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/installing-providers-from-sources.rst.txt new file mode 100644 index 00000000000..1c90205d15b --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/installing-providers-from-sources.rst.txt @@ -0,0 +1,18 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + ..
Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +.. include:: ../installing-providers-from-sources.rst diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/logging/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/logging/index.rst.txt new file mode 100644 index 00000000000..d0f176d01f7 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/logging/index.rst.txt @@ -0,0 +1,55 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +.. _write-logs-azure: + +Writing logs to Azure Blob Storage +---------------------------------- + +Airflow can be configured to read and write task logs in Azure Blob Storage. It uses an existing +Airflow connection to read or write logs. If you don't have a connection properly set up, +this process will fail. + +Follow the steps below to enable Azure Blob Storage logging: + +#. Airflow's logging system requires a custom ``.py`` file to be located in the :envvar:`PYTHONPATH`, so that it's importable from Airflow. Start by creating a directory to store the config file, ``$AIRFLOW_HOME/config`` is recommended. +#. Create empty files called ``$AIRFLOW_HOME/config/log_config.py`` and ``$AIRFLOW_HOME/config/__init__.py``. +#. Copy the contents of ``airflow/config_templates/airflow_local_settings.py`` into the ``log_config.py`` file created in ``Step 2``. +#. Customize the following portions of the template: + + .. code-block:: ini + + # wasb buckets should start with "wasb" just to help Airflow select the correct handler + REMOTE_BASE_LOG_FOLDER = 'wasb://@.blob.core.windows.net' + + # Rename DEFAULT_LOGGING_CONFIG to LOGGING_CONFIG + LOGGING_CONFIG = ... + + +#. Make sure an Azure Blob Storage (Wasb) connection hook has been defined in Airflow. The hook should have read and write access to the Azure Blob Storage bucket defined above in ``REMOTE_BASE_LOG_FOLDER``. + +#. Update ``$AIRFLOW_HOME/airflow.cfg`` to contain: + + .. code-block:: ini + + [logging] + remote_logging = True + logging_config_class = log_config.LOGGING_CONFIG + remote_log_conn_id = + +#. Restart the Airflow webserver and scheduler, and trigger (or wait for) a new task execution. +#. Verify that logs are showing up for newly executed tasks in the bucket you have defined.
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/adf_run_pipeline.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/adf_run_pipeline.rst.txt new file mode 100644 index 00000000000..7d3b484ba7c --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/adf_run_pipeline.rst.txt @@ -0,0 +1,53 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Azure Data Factory Operators +============================ +Azure Data Factory is Azure's cloud ETL service for scale-out serverless data integration and data transformation. +It offers a code-free UI for intuitive authoring and single-pane-of-glass monitoring and management. + +.. _howto/operator:AzureDataFactoryRunPipelineOperator: + +AzureDataFactoryRunPipelineOperator +----------------------------------- +Use the :class:`~airflow.providers.microsoft.azure.operators.data_factory.AzureDataFactoryRunPipelineOperator` to execute a pipeline within a data factory. +By default, the operator will periodically check on the status of the executed pipeline to terminate with a "Succeeded" status. +This functionality can be disabled for an asynchronous wait -- typically with the :class:`~airflow.providers.microsoft.azure.sensors.data_factory.AzureDataFactoryPipelineRunSensor` -- by setting ``wait_for_termination`` to False. + +Below is an example of using this operator to execute an Azure Data Factory pipeline. + + .. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adf_run_pipeline.py + :language: python + :dedent: 0 + :start-after: [START howto_operator_adf_run_pipeline] + :end-before: [END howto_operator_adf_run_pipeline] + +Here is a different example of using this operator to execute a pipeline but coupled with the :class:`~airflow.providers.microsoft.azure.sensors.data_factory.AzureDataFactoryPipelineRunSensor` to perform an asynchronous wait. + + .. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adf_run_pipeline.py + :language: python + :dedent: 0 + :start-after: [START howto_operator_adf_run_pipeline_async] + :end-before: [END howto_operator_adf_run_pipeline_async] + +Reference +--------- + +For further information, please refer to the Microsoft documentation: + + * `Azure Data Factory Documentation `__ diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/adls.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/adls.rst.txt new file mode 100644 index 00000000000..b7e29446c82 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/adls.rst.txt @@ -0,0 +1,50 @@ + .. 
Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Azure DataLake Storage Operators +================================= + +Prerequisite Tasks +^^^^^^^^^^^^^^^^^^ + +.. include::/operators/_partials/prerequisite_tasks.rst + +.. _howto/operator:ADLSDeleteOperator: + +ADLSDeleteOperator +---------------------------------- +Use the +:class:`~airflow.providers.microsoft.azure.operators.adls_delete.ADLSDeleteOperator` to remove +file(s) from Azure DataLake Storage + + +Below is an example of using this operator to delete a file from ADL. + +.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_adls_delete.py + :language: python + :dedent: 0 + :start-after: [START howto_operator_adls_delete] + :end-before: [END howto_operator_adls_delete] + + +Reference +--------- + +For further information, look at: + +* `Azure Data lake Storage Documentation `__ diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/asb.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/asb.rst.txt new file mode 100644 index 00000000000..5ad69624185 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/asb.rst.txt @@ -0,0 +1,214 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Azure Service Bus Operators +============================ +Azure Service Bus is a fully managed enterprise message broker with message queues and +publish-subscribe topics (in a namespace). Service Bus is used to decouple applications +and services from each other. Service Bus that perform operations on +entities, such as namespaces, queues, and topics. + +The Service Bus REST API provides operations for working with the following resources: + - Azure Resource Manager + - Service Bus service + +Azure Service Bus Queue Operators +--------------------------------- +Azure Service Bus Operators helps to interact with Azure Bus Queue based operation like Create, Delete, +Send and Receive message in Queue. + +.. 
_howto/operator:AzureServiceBusCreateQueueOperator: + +Create Azure Service Bus Queue +=============================== + +To create Azure service bus queue with specific Parameter you can use +:class:`~airflow.providers.microsoft.azure.operators.asb.AzureServiceBusCreateQueueOperator`. + +Below is an example of using this operator to execute an Azure Service Bus Create Queue. + +.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_create_service_bus_queue] + :end-before: [END howto_operator_create_service_bus_queue] + + +.. _howto/operator:AzureServiceBusSendMessageOperator: + +Send Message to Azure Service Bus Queue +======================================= + +To Send message or list of message or batch Message to the Azure Service Bus Queue. You can use +:class:`~airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSendMessageOperator`. + +Below is an example of using this operator to execute an Azure Service Bus Send Message to Queue. + +.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_send_message_to_service_bus_queue] + :end-before: [END howto_operator_send_message_to_service_bus_queue] + + +.. _howto/operator:AzureServiceBusReceiveMessageOperator: + +Receive Message Azure Service Bus Queue +======================================== + +To Receive Message or list of message or Batch message message in a Queue you can use +:class:`~airflow.providers.microsoft.azure.operators.asb.AzureServiceBusReceiveMessageOperator`. + +Below is an example of using this operator to execute an Azure Service Bus Create Queue. + +.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_receive_message_service_bus_queue] + :end-before: [END howto_operator_receive_message_service_bus_queue] + + +.. _howto/operator:AzureServiceBusDeleteQueueOperator: + +Delete Azure Service Bus Queue +=============================== + +To Delete the Azure service bus queue you can use +:class:`~airflow.providers.microsoft.azure.operators.asb.AzureServiceBusDeleteQueueOperator`. + +Below is an example of using this operator to execute an Azure Service Bus Delete Queue. + +.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_delete_service_bus_queue] + :end-before: [END howto_operator_delete_service_bus_queue] + +Azure Service Bus Topic Operators +----------------------------------------- +Azure Service Bus Topic based Operators helps to interact with topic in service bus namespace +and it helps to Create, Delete operation for topic. + +.. _howto/operator:AzureServiceBusTopicCreateOperator: + +Create Azure Service Bus Topic +====================================== + +To create Azure service bus topic with specific Parameter you can use +:class:`~airflow.providers.microsoft.azure.operators.asb.AzureServiceBusTopicCreateOperator`. + +Below is an example of using this operator to execute an Azure Service Bus Create Topic. + +.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_create_service_bus_topic] + :end-before: [END howto_operator_create_service_bus_topic] + +.. 
_howto/operator:AzureServiceBusTopicDeleteOperator: + +Delete Azure Service Bus Topic +====================================== + +To Delete the Azure service bus topic you can use +:class:`~airflow.providers.microsoft.azure.operators.asb.AzureServiceBusTopicDeleteOperator`. + +Below is an example of using this operator to execute an Azure Service Bus Delete topic. + +.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_delete_service_bus_topic] + :end-before: [END howto_operator_delete_service_bus_topic] + +Azure Service Bus Subscription Operators +----------------------------------------- +Azure Service Bus Subscription based Operators helps to interact topic Subscription in service bus namespace +and it helps to Create, Delete operation for subscription under topic. + +.. _howto/operator:AzureServiceBusSubscriptionCreateOperator: + +Create Azure Service Bus Subscription +====================================== + +To create Azure service bus topic Subscription with specific Parameter you can use +:class:`~airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSubscriptionCreateOperator`. + +Below is an example of using this operator to execute an Azure Service Bus Create Subscription. + +.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_create_service_bus_subscription] + :end-before: [END howto_operator_create_service_bus_subscription] + +.. _howto/operator:AzureServiceBusUpdateSubscriptionOperator: + +Update Azure Service Bus Subscription +====================================== + +To Update the Azure service bus topic Subscription which is already created, with specific Parameter you can use +:class:`~airflow.providers.microsoft.azure.operators.asb.AzureServiceBusUpdateSubscriptionOperator`. + +Below is an example of using this operator to execute an Azure Service Bus Update Subscription. + +.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_update_service_bus_subscription] + :end-before: [END howto_operator_update_service_bus_subscription] + +.. _howto/operator:ASBReceiveSubscriptionMessageOperator: + +Receive Azure Service Bus Subscription Message +=============================================== + +To Receive a Batch messages from a Service Bus Subscription under specific Topic, you can use +:class:`~airflow.providers.microsoft.azure.operators.asb.ASBReceiveSubscriptionMessageOperator`. + +Below is an example of using this operator to execute an Azure Service Bus Receive Subscription Message. + +.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_receive_message_service_bus_subscription] + :end-before: [END howto_operator_receive_message_service_bus_subscription] + +.. _howto/operator:AzureServiceBusSubscriptionDeleteOperator: + +Delete Azure Service Bus Subscription +====================================== + +To Delete the Azure service bus topic Subscription you can use +:class:`~airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSubscriptionDeleteOperator`. + +Below is an example of using this operator to execute an Azure Service Bus Delete Subscription under topic. + +.. 
exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_service_bus.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_delete_service_bus_subscription] + :end-before: [END howto_operator_delete_service_bus_subscription] + + + +Reference +--------- + +For further information, please refer to the Microsoft documentation: + + * `Azure Service Bus Documentation `__ diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/azure_blob_to_gcs.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/azure_blob_to_gcs.rst.txt new file mode 100644 index 00000000000..1c1e084f518 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/azure_blob_to_gcs.rst.txt @@ -0,0 +1,60 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Azure Blob Storage Transfer Operator +==================================== +The Blob service stores text and binary data as objects in the cloud. +The Blob service offers the following three resources: the storage account, containers, and blobs. +Within your storage account, containers provide a way to organize sets of blobs. +For more information about the service visit `Azure Blob Storage API documentation `_. + +Before you begin +^^^^^^^^^^^^^^^^ +Before using Blob Storage within Airflow you need to authenticate your account with Token, Login and Password. +Please follow Azure +`instructions `_ +to do it. + +TOKEN should be added to the Connection in Airflow in JSON format, Login and Password as plain text. +You can check `how to do such connection `_. + +See following example. +Set values for these fields: + +.. code-block:: + + Connection Id: wasb_default + Login: Storage Account Name + Password: KEY1 + Extra: {"sas_token": "TOKEN"} + +.. _howto/operator:AzureBlobStorageToGCSOperator: + +Transfer Data from Blob Storage to Google Cloud Storage +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Operator transfers data from Azure Blob Storage to specified bucket in Google Cloud Storage + +To get information about jobs within a Azure Blob Storage use: +:class:`~airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs.AzureBlobStorageToGCSOperator` + +Example usage: + +.. 
exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_blob_to_gcs.py + :language: python + :start-after: [START how_to_azure_blob_to_gcs] + :end-before: [END how_to_azure_blob_to_gcs] diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/azure_synapse.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/azure_synapse.rst.txt new file mode 100644 index 00000000000..41740c8b751 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/azure_synapse.rst.txt @@ -0,0 +1,49 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Azure Synapse Operators +======================= +Azure Synapse Analytics is a limitless analytics service that brings together data integration, +enterprise data warehousing and big data analytics. It gives you the freedom to query data on your terms, +using either serverless or dedicated options—at scale. +Azure Synapse brings these worlds together with a unified experience to ingest, +explore, prepare, transform, manage and serve data for immediate BI and machine learning needs. + +.. _howto/operator:AzureSynapseRunSparkBatchOperator: + +AzureSynapseRunSparkBatchOperator +----------------------------------- +Use the :class:`~airflow.providers.microsoft.azure.operators.synapse.AzureSynapseRunSparkBatchOperator` to execute a +spark application within Synapse Analytics. +By default, the operator will periodically check on the status of the executed Spark job to +terminate with a "Succeeded" status. + +Below is an example of using this operator to execute a Spark application on Azure Synapse. + + .. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_azure_synapse.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_azure_synapse] + :end-before: [END howto_operator_azure_synapse] + + +Reference +--------- + +For further information, please refer to the Microsoft documentation: + + * `Azure Synapse Analytics Documentation `__ diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/index.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/index.rst.txt new file mode 100644 index 00000000000..df482e8bcd5 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/index.rst.txt @@ -0,0 +1,28 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. 
You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + + +Microsoft Operators +=================== + + +.. toctree:: + :maxdepth: 1 + :glob: + + * diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/local_to_adls.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/local_to_adls.rst.txt new file mode 100644 index 00000000000..0de321e63e1 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/local_to_adls.rst.txt @@ -0,0 +1,53 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Upload data from Local Filesystem to Azure Data Lake +==================================================== +The `Azure Data Lake `__ (ADL) make it easy to store data of +any size, shape, and speed. +This page shows how to upload data from local filesystem to ADL. + +Prerequisite Tasks +^^^^^^^^^^^^^^^^^^ + +.. include::/operators/_partials/prerequisite_tasks.rst + +.. _howto/operator:LocalFilesystemToADLSOperator: + +LocalFilesystemToADLSOperator +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +:class:`~airflow.providers.microsoft.azure.transfers.local_to_adls.LocalFilesystemToADLSOperator` allows you to +upload data from local filesystem to ADL. + + +Below is an example of using this operator to upload a file to ADL. + +.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_local_to_adls.py + :language: python + :dedent: 0 + :start-after: [START howto_operator_local_to_adls] + :end-before: [END howto_operator_local_to_adls] + + +Reference +--------- + +For further information, look at: + +* `Azure Data lake Storage Documentation `__ diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/sftp_to_wasb.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/sftp_to_wasb.rst.txt new file mode 100644 index 00000000000..a16e3fb12aa --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/operators/sftp_to_wasb.rst.txt @@ -0,0 +1,61 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. 
You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + + +Azure Blob Storage Transfer Operator +==================================== +The Blob service stores text and binary data as objects in the cloud. +The Blob service offers the following three resources: the storage account, containers, and blobs. +Within your storage account, containers provide a way to organize sets of blobs. +For more information about the service visit `Azure Blob Storage API documentation `_. + +Before you begin +^^^^^^^^^^^^^^^^ +Before using Blob Storage within Airflow you need to authenticate your account with Token, Login and Password. +Please follow Azure +`instructions `_ +to do it. + +See following example. +Set values for these fields: + +.. code-block:: + + SFTP Conn Id: sftp_default + WASB Conn Id: wasb_default + +.. contents:: + :depth: 1 + :local: + +.. _howto/operator:SFTPToWasbOperator: + +Transfer Data from SFTP Source Path to Blob Storage +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Operator transfers data from SFTP Source Path to specified container in Azure Blob Storage + +To get information about jobs within a Azure Blob Storage use: +:class:`~airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SFTPToWasbOperator` +Example usage: + +.. exampleinclude:: /../../tests/system/providers/microsoft/azure/example_sftp_to_wasb.py + :language: python + :dedent: 4 + :start-after: [START how_to_sftp_to_wasb] + :end-before: [END how_to_sftp_to_wasb] diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/secrets-backends/azure-key-vault.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/secrets-backends/azure-key-vault.rst.txt new file mode 100644 index 00000000000..b7430d32697 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/secrets-backends/azure-key-vault.rst.txt @@ -0,0 +1,73 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Azure Key Vault Backend +^^^^^^^^^^^^^^^^^^^^^^^ + +To enable the Azure Key Vault as secrets backend, specify +:py:class:`~airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend` +as the ``backend`` in ``[secrets]`` section of ``airflow.cfg``. + +Here is a sample configuration: + +.. 
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/secrets-backends/azure-key-vault.rst.txt b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/secrets-backends/azure-key-vault.rst.txt
new file mode 100644
index 00000000000..b7430d32697
--- /dev/null
+++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_sources/secrets-backends/azure-key-vault.rst.txt
@@ -0,0 +1,73 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+ ..   http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+
+Azure Key Vault Backend
+^^^^^^^^^^^^^^^^^^^^^^^
+
+To enable Azure Key Vault as a secrets backend, specify
+:py:class:`~airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend`
+as the ``backend`` in the ``[secrets]`` section of ``airflow.cfg``.
+
+Here is a sample configuration:
+
+.. code-block:: ini
+
+    [secrets]
+    backend = airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend
+    backend_kwargs = {"connections_prefix": "airflow-connections", "variables_prefix": "airflow-variables", "vault_url": "https://example-akv-resource-name.vault.azure.net/"}
+
+For client authentication, the ``DefaultAzureCredential`` from the Azure Python SDK is used as the credential provider,
+which supports service principal, managed identity, and user credentials.
+
+For example, to specify a service principal with a secret you can set the environment variables ``AZURE_TENANT_ID``, ``AZURE_CLIENT_ID`` and ``AZURE_CLIENT_SECRET``.
+
+Optional lookup
+"""""""""""""""
+
+Optionally, connections, variables, or config may be looked up exclusive of each other or in any combination.
+This will prevent requests being sent to Azure Key Vault for the excluded type.
+
+If you want to look up some and not others in Azure Key Vault, you may do so by setting the relevant ``*_prefix`` parameter of the ones to be excluded to ``null``.
+
+For example, if you want to set the parameter ``connections_prefix`` to ``"airflow-connections"`` and not look up variables, your configuration file should look like this:
+
+.. code-block:: ini
+
+    [secrets]
+    backend = airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend
+    backend_kwargs = {"connections_prefix": "airflow-connections", "variables_prefix": null, "vault_url": "https://example-akv-resource-name.vault.azure.net/"}
+
+Storing and Retrieving Connections
+""""""""""""""""""""""""""""""""""
+
+If you have set ``connections_prefix`` as ``airflow-connections``, then for a connection id of ``smtp_default``,
+you would want to store your connection at ``airflow-connections-smtp-default``.
+
+The value of the secret must be the :ref:`connection URI representation `
+of the connection object.
+
+Storing and Retrieving Variables
+""""""""""""""""""""""""""""""""
+
+If you have set ``variables_prefix`` as ``airflow-variables``, then for a Variable key of ``hello``,
+you would want to store your Variable at ``airflow-variables-hello``.
+
+Reference
+"""""""""
+
+For more details on client authentication, refer to the `DefaultAzureCredential Class reference `_.
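Once the backend is configured as above, DAG and task code retrieves these secrets through Airflow's usual APIs; the backend maps each lookup to the Key Vault secret names described on this page. A minimal sketch, reusing the ``smtp_default`` connection id and the ``hello`` Variable key from the examples above:

.. code-block:: python

    # Sketch only: assumes AzureKeyVaultBackend is configured in [secrets] as shown above
    # and that the secrets "airflow-connections-smtp-default" and "airflow-variables-hello"
    # already exist in the vault. The calls below are standard Airflow lookups.
    from airflow.hooks.base import BaseHook
    from airflow.models import Variable

    # Resolved via the secrets backend to the "airflow-connections-smtp-default" secret
    smtp_conn = BaseHook.get_connection("smtp_default")

    # Resolved via the secrets backend to the "airflow-variables-hello" secret
    hello_value = Variable.get("hello")

    print(smtp_conn.host, hello_value)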
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/_gen/css/main-custom.min.css b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/_gen/css/main-custom.min.css new file mode 100644 index 00000000000..f3ff099c333 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/_gen/css/main-custom.min.css @@ -0,0 +1 @@ +@charset "UTF-8";@import "https://fonts.googleapis.com/css?family=Rubik:500&display=swap";@import "https://fonts.googleapis.com/css?family=Roboto:400,400i,500,700&display=swap";@import "https://fonts.googleapis.com/css?family=Roboto+Mono:400,700&display=swap";.header__large--cerulean-blue{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#017cee}.header__large--shamrock{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#00ad46}.header__large--bright-sky-blue{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#0cb6ff}.header__large--melon{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#ff7557}.header__large--vermillion{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#e43921}.header__large--aqua{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#11e1ee}.header__large--shamrock-green{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#04d659}.header__large--aqua-blue{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#00c7d4}.header__large--white{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#fff}.header__large--brownish-grey{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#707070}.header__large--very-light-pink{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#cbcbcb}.header__large--slate-grey{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#636365}.header__large--greyish-brown,#header-canvas 
.text-area--header{font-family:rubik,sans-serif;font-weight:500;font-size:72px;line-height:1.17;color:#51504f}.header__medium--cerulean-blue{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#017cee}.header__medium--shamrock{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#00ad46}.header__medium--bright-sky-blue{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#0cb6ff}.header__medium--melon{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#ff7557}.header__medium--vermillion{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#e43921}.header__medium--aqua{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#11e1ee}.header__medium--shamrock-green{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#04d659}.header__medium--aqua-blue{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#00c7d4}.header__medium--white{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#fff}.header__medium--brownish-grey{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#707070}.header__medium--very-light-pink{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#cbcbcb}.header__medium--slate-grey{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#636365}.header__medium--greyish-brown,.page-header{font-family:rubik,sans-serif;font-weight:500;font-size:60px;line-height:1.23;color:#51504f}.header__small--cerulean-blue{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#017cee}.header__small--shamrock{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#00ad46}.header__small--bright-sky-blue{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#0cb6ff}.header__small--melon{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#ff7557}.header__small--vermillion{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#e43921}.header__small--aqua{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#11e1ee}.header__small--shamrock-green{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#04d659}.header__small--aqua-blue{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#00c7d4}.header__small--white{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#fff}.header__small--brownish-grey{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#707070}.header__small--very-light-pink{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#cbcbcb}.header__small--slate-grey{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#636365}.header__small--greyish-brown,.community--committers-header,.community--header-join,.blogpost-content__metadata--title{font-family:rubik,sans-serif;font-weight:500;font-size:48px;line-height:1.25;color:#51504f}.header__xsmall--cerulean-blue{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#017cee}.header__xsmall--shamrock{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#00ad46}.header__xsmall--brig
ht-sky-blue{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#0cb6ff}.header__xsmall--melon{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#ff7557}.header__xsmall--vermillion{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#e43921}.header__xsmall--aqua{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#11e1ee}.header__xsmall--shamrock-green{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#04d659}.header__xsmall--aqua-blue{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#00c7d4}.header__xsmall--white{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#fff}.header__xsmall--brownish-grey{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#707070}.header__xsmall--very-light-pink{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#cbcbcb}.header__xsmall--slate-grey{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#636365}.header__xsmall--greyish-brown,.text-with-icon-item--header,.feature-item--header{font-family:rubik,sans-serif;font-weight:500;font-size:36px;line-height:1.22;color:#51504f}.subtitle__large--cerulean-blue,.box-event__meetup--location{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#017cee}.subtitle__large--shamrock{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#00ad46}.subtitle__large--bright-sky-blue{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#0cb6ff}.subtitle__large--melon{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#ff7557}.subtitle__large--vermillion{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#e43921}.subtitle__large--aqua{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#11e1ee}.subtitle__large--shamrock-green{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#04d659}.subtitle__large--aqua-blue{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#00c7d4}.subtitle__large--white{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#fff}.subtitle__large--brownish-grey,#header-canvas .text-area--subheader,.blogpost-content__metadata--description,.page-subtitle,.quote--text{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#707070}.subtitle__large--very-light-pink{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#cbcbcb}.subtitle__large--slate-grey{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#636365}.subtitle__large--greyish-brown,.markdown-content h1,.markdown-content h2,.markdown-content h3,.markdown-content h4,.markdown-content h5,.box-event__blogpost--header{font-family:roboto,sans-serif;font-weight:500;font-size:24px;line-height:1.5;color:#51504f}.subtitle__medium--cerulean-blue,ol.counter-blue 
li::before{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#017cee}.subtitle__medium--shamrock{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#00ad46}.subtitle__medium--bright-sky-blue{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#0cb6ff}.subtitle__medium--melon{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#ff7557}.subtitle__medium--vermillion{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#e43921}.subtitle__medium--aqua{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#11e1ee}.subtitle__medium--shamrock-green{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#04d659}.subtitle__medium--aqua-blue{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#00c7d4}.subtitle__medium--white{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#fff}.subtitle__medium--brownish-grey,.box-event__integration--name{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#707070}.subtitle__medium--very-light-pink{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#cbcbcb}.subtitle__medium--slate-grey{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#636365}.subtitle__medium--greyish-brown,.roadmap .td-sidebar nav>ul>li>a{font-family:roboto,sans-serif;font-weight:500;font-size:18px;line-height:1.33;color:#51504f}.bodytext__medium--cerulean-blue,.blogpost-content__metadata--author,.new-entry--link,.tag,.box-event__meetup--next-meetup,.box-event__blogpost--author{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#017cee}.bodytext__medium--shamrock{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#00ad46}.bodytext__medium--bright-sky-blue{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#0cb6ff}.bodytext__medium--melon{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#ff7557}.bodytext__medium--vermillion{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#e43921}.bodytext__medium--aqua{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#11e1ee}.bodytext__medium--shamrock-green{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#04d659}.bodytext__medium--aqua-blue{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#00c7d4}.bodytext__medium--white,footer .footer-section span{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#fff}.bodytext__medium--brownish-grey,.sidebar__version-selector a,.roadmap .td-sidebar li>a,.roadmap .breadcrumb-item a,.install--description,.blogpost-content__metadata--date,.video-list__item .video-list__item--title,.text-with-icon-item--text,.feature-item--text,.markdown-content p,.markdown-content span,.box-event__meetup--members,.box-event__case-study--quote,.box-event__blogpost--date,.box-event__blogpost--description,ol.counter-blue li,ul.ticks-blue 
li{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#707070}.bodytext__medium--very-light-pink{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#cbcbcb}.bodytext__medium--slate-grey{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#636365}.bodytext__medium--greyish-brown,.navbar__text-link,.install__accordions-content--header,.list-link,.quote--author,.box-event__committer--nick{font-family:roboto,sans-serif;font-weight:400;font-size:16px;line-height:1.63;color:#51504f}.bodytext__mobile--cerulean-blue{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#017cee}.bodytext__mobile--shamrock{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#00ad46}.bodytext__mobile--bright-sky-blue{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#0cb6ff}.bodytext__mobile--melon{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#ff7557}.bodytext__mobile--vermillion{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#e43921}.bodytext__mobile--aqua{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#11e1ee}.bodytext__mobile--shamrock-green{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#04d659}.bodytext__mobile--aqua-blue{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#00c7d4}.bodytext__mobile--white{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#fff}.bodytext__mobile--brownish-grey{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#707070}.bodytext__mobile--very-light-pink{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#cbcbcb}.bodytext__mobile--slate-grey{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#636365}.bodytext__mobile--greyish-brown{font-family:roboto,sans-serif;font-weight:400;font-size:14px;line-height:1.57;color:#51504f}.bodytext__small--cerulean-blue{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#017cee}.bodytext__small--shamrock{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#00ad46}.bodytext__small--bright-sky-blue{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#0cb6ff}.bodytext__small--melon{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#ff7557}.bodytext__small--vermillion{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#e43921}.bodytext__small--aqua{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#11e1ee}.bodytext__small--shamrock-green{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#04d659}.bodytext__small--aqua-blue{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#00c7d4}.bodytext__small--white{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#fff}.bodytext__small--brownish-grey{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#707070}.bodytext__small--very-light-pink{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#cbcbcb}.bodytext__small--slate-grey{font-family:roboto,sans-serif;f
ont-weight:400;font-size:12px;line-height:1.33;color:#636365}.bodytext__small--greyish-brown{font-family:roboto,sans-serif;font-weight:400;font-size:12px;line-height:1.33;color:#51504f}.bodytext__xsmall--cerulean-blue{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#017cee}.bodytext__xsmall--shamrock{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#00ad46}.bodytext__xsmall--bright-sky-blue{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#0cb6ff}.bodytext__xsmall--melon{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#ff7557}.bodytext__xsmall--vermillion{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#e43921}.bodytext__xsmall--aqua{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#11e1ee}.bodytext__xsmall--shamrock-green{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#04d659}.bodytext__xsmall--aqua-blue{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#00c7d4}.bodytext__xsmall--white{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#fff}.bodytext__xsmall--brownish-grey{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#707070}.bodytext__xsmall--very-light-pink{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#cbcbcb}.bodytext__xsmall--slate-grey{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#636365}.bodytext__xsmall--greyish-brown{font-family:roboto,sans-serif;font-weight:400;font-size:10px;line-height:2.6;color:#51504f}.monotext--cerulean-blue{font-family:roboto mono,monospace;color:#017cee;font-size:14px;line-height:1.71}.monotext--shamrock{font-family:roboto mono,monospace;color:#00ad46;font-size:14px;line-height:1.71}.monotext--bright-sky-blue{font-family:roboto mono,monospace;color:#0cb6ff;font-size:14px;line-height:1.71}.monotext--melon{font-family:roboto mono,monospace;color:#ff7557;font-size:14px;line-height:1.71}.monotext--vermillion{font-family:roboto mono,monospace;color:#e43921;font-size:14px;line-height:1.71}.monotext--aqua{font-family:roboto mono,monospace;color:#11e1ee;font-size:14px;line-height:1.71}.monotext--shamrock-green{font-family:roboto mono,monospace;color:#04d659;font-size:14px;line-height:1.71}.monotext--aqua-blue{font-family:roboto mono,monospace;color:#00c7d4;font-size:14px;line-height:1.71}.monotext--white{font-family:roboto mono,monospace;color:#fff;font-size:14px;line-height:1.71}.monotext--brownish-grey,pre span,.markdown-content pre span{font-family:roboto mono,monospace;color:#707070;font-size:14px;line-height:1.71}.monotext--very-light-pink{font-family:roboto mono,monospace;color:#cbcbcb;font-size:14px;line-height:1.71}.monotext--slate-grey{font-family:roboto mono,monospace;color:#636365;font-size:14px;line-height:1.71}.monotext--greyish-brown{font-family:roboto mono,monospace;color:#51504f;font-size:14px;line-height:1.71}.font-weight-normal{font-weight:400!important}.font-weight-500{font-weight:500!important}.font-weight-bold{font-weight:700!important}details.accordion{padding:40px 30px;border-bottom:solid 1px #cbcbcb;-webkit-transition:ease 1s;-o-transition:ease 1s;transition:ease 1s}details.accordion:first-of-type{border-top:solid 1px #cbcbcb}details.accordion summary{position:relative;display:block;outline:none}details.accordion 
summary::-webkit-details-marker{display:none}details.accordion .accordion__summary-content{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;max-width:750px;margin-right:40px}details.accordion .accordion__summary-content--icon{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;width:60px;margin-right:42px}details.accordion .accordion__summary-content--header{margin-bottom:20px}details.accordion .accordion__arrow{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;position:absolute;width:36px;height:36px;top:0;right:0;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;border:solid 1px #017cee;border-radius:50%}details.accordion .accordion__arrow svg{-webkit-transition:ease-out .2s;-o-transition:ease-out .2s;transition:ease-out .2s}details.accordion[open] .accordion__arrow svg{-webkit-transform:rotate(180deg);-ms-transform:rotate(180deg);-o-transform:rotate(180deg);transform:rotate(180deg)}details.accordion .accordion__content{margin-top:30px;margin-right:36px}details.accordion .accordion__content.indented{margin-left:102px}@media(max-width:1280px){details.accordion{padding:30px 0}details.accordion .accordion__summary-content--icon{-webkit-box-align:unset;-webkit-align-items:unset;-ms-flex-align:unset;align-items:unset;margin-right:20px;margin-top:28px}details.accordion .accordion__arrow{width:28px;height:28px;top:5px}details.accordion .accordion__content.indented{margin-left:80px}details.accordion .accordion__content ol.counter-blue{margin-left:-38px!important}}button{cursor:pointer;border:1px solid;border-radius:5px;padding:9px 29px;-webkit-transition:all ease-out .2s;-o-transition:all ease-out .2s;transition:all ease-out .2s}button:disabled{cursor:not-allowed}button.btn-filled{border-color:#017cee;background-color:#017cee}button.btn-filled:hover{border-color:#0cb6ff;background-color:#0cb6ff}button.btn-with-icon{padding:14px 20px}button.btn-with-icon svg{height:30px;width:auto;padding-right:15px}button.btn-with-icon span{display:inline-block;line-height:30px;vertical-align:middle}button.btn-hollow{background-color:#fff}button.btn-hollow.btn-blue{color:#017cee;border-color:#017cee}button.btn-hollow.btn-blue:disabled{color:#cbcbcb;border-color:#cbcbcb}button.btn-hollow.btn-blue:hover:enabled{color:#fff;background-color:#017cee}button.btn-hollow.btn-brown{border-color:#cbcbcb}button.btn-hollow.btn-brown:hover{background-color:#51504f;border-color:#51504f}button.btn-hollow.btn-brown:hover span{color:#fff}button.btn-hollow.btn-brown:hover svg path{fill:#fff}button.with-box-shadow{-webkit-box-shadow:0 2px 6px 0 rgba(0,0,0,.12);box-shadow:0 2px 6px rgba(0,0,0,.12)}@media(max-width:1280px){button{padding:4px 17px}}ol.counter-blue,ul.ticks-blue{list-style:none;margin-bottom:0}ol.counter-blue li,ul.ticks-blue li{position:relative;padding-left:10px}ol.counter-blue li::before,ul.ticks-blue li::before{position:absolute;border:solid 1px #017cee;border-radius:50%}ol.counter-blue{counter-reset:custom-counter;padding-left:-webkit-calc(26px + 2px);padding-left:calc(26px + 2px)}ol.counter-blue li{counter-increment:custom-counter;margin-bottom:25px}ol.counter-blue 
li::before{content:counter(custom-counter);-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;top:-2px;left:-webkit-calc(-1 * 26px);left:calc(-1 * 26px);width:26px;height:26px;text-align:center;line-height:26px}ul.ticks-blue{padding-left:-webkit-calc(24px + 2px);padding-left:calc(24px + 2px)}ul.ticks-blue li{margin-bottom:22px}ul.ticks-blue li::before{content:"";left:-webkit-calc(-1 * 24px);left:calc(-1 * 24px);width:24px;height:24px;background-position:50%;background-repeat:no-repeat;background-image:url(/images/tick.svg)}.list-items{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;margin:-20px;-webkit-box-align:stretch;-webkit-align-items:stretch;-ms-flex-align:stretch;align-items:stretch}@media(max-width:1280px){.list-items{margin:auto;max-width:580px}}@media(max-width:640px){.list-items{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center}}.list-item{width:25%;padding:20px}@media(min-width:1920px){.list-item{width:20%}}@media(max-width:1280px){.list-item{padding:10px;width:50%}}@media(max-width:640px){.list-item{width:100%}}.list-item--wide{width:50%}@media(max-width:1280px){.list-item--wide{width:100%}}.card{border:solid 1px #cbcbcb;border-radius:5px;padding:30px 10px;height:100%}.box-event{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center}.box-event__blogpost{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-flex:1;-webkit-flex:1;-ms-flex:1;flex:1;padding:0 20px}.box-event__blogpost--metadata{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between;margin-bottom:20px}.box-event__blogpost--header{margin-bottom:4px}.box-event__blogpost--author{font-weight:500}.box-event__blogpost--description{margin-bottom:20px}.box-event__case-study{padding:18px 18px 0;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between}.box-event__case-study--logo{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;height:60px;width:100%;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center}.box-event__case-study--logo svg,.box-event__case-study--logo img{max-height:100%;max-width:100%}.box-event__case-study--quote{font-style:italic;margin:30px 0 
20px;text-align:center}.box-event__case-study--quote::before{content:"“"}.box-event__case-study--quote::after{content:"”"}.box-event__committer--nick{font-weight:500;margin-top:12px}.box-event__committer--social-media-container{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center}.box-event__committer--social-media-icon{margin:0 6px}.box-event__meetup--location{margin-bottom:10px;text-align:center}.box-event__meetup--members{margin-bottom:30px}.box-event__meetup--members span{vertical-align:middle}.box-event__meetup--next-meetup{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;min-height:52px;margin-bottom:20px;text-align:center}.box-event__integration{height:208px;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center}.box-event__integration--logo{margin:auto 0;-webkit-filter:grayscale(1);filter:grayscale(1);opacity:.6;max-width:100%;max-height:100%}.box-event__integration--name{font-weight:700;text-align:center}.box-event__integration:hover .box-event__integration--logo{-webkit-filter:none;filter:none;opacity:1}.box-event.hoverable-icon svg,.box-event.hoverable-icon img{-webkit-filter:grayscale(1);filter:grayscale(1);opacity:.6;-webkit-transition:all .2s;-o-transition:all .2s;transition:all .2s}.box-event.hoverable-icon:hover svg,.box-event.hoverable-icon:hover img{-webkit-filter:none;filter:none;opacity:1}@media(max-width:640px){.box-event__blogpost--metadata{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column}.box-event__blogpost--date{margin-top:17px}}.avatar{border-radius:50%;width:80px;height:80px}.quote{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;border-bottom:solid 1px #cbcbcb;padding:0 78px 60px}.quote--text{text-align:center;font-weight:400}.quote--text::before{content:"“"}.quote--text::after{content:"”"}.quote--author{text-align:center;font-weight:500;margin-bottom:32px}.quote--logo{max-height:140px;margin:0 auto}@media(max-width:640px){.quote{padding:0 0 40px}}.pager{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;width:100%;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between;margin-top:60px}.case-study-page{max-width:790px!important;margin:60px auto 0}@media(max-width:640px){.case-study-page{margin-top:40px}}.markdown-content h1,.markdown-content h2,.markdown-content h3,.markdown-content h4,.markdown-content h5{margin-top:40px;margin-bottom:20px}.markdown-content p,.markdown-content span{margin-bottom:30px;margin-top:20px}.markdown-content img{width:100%}.markdown-content table{border-collapse:collapse;width:100%}.markdown-content th{background:#ccc}.markdown-content th,.markdown-content td{border:1px solid #ccc;padding:8px}.markdown-content tr:nth-child(even){background:#efefef}.markdown-content tr:hover{background:#d1d1d1}.markdown-content 
li{color:#707070}.base-layout{padding:123px 0 40px}.base-layout--button{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;position:-webkit-sticky;position:sticky;width:-webkit-fit-content;width:-moz-fit-content;width:fit-content;margin-left:auto;margin-right:40px;bottom:40px;-webkit-box-pack:end;-webkit-justify-content:flex-end;-ms-flex-pack:end;justify-content:flex-end;z-index:1}@media(max-width:1280px){.base-layout{padding:76px 0 60px}.base-layout--button{display:none}}.page-header{text-align:center;margin-bottom:16px}@media(max-width:1280px){.page-header{font-size:36px!important;line-height:1.22!important}}.page-subtitle{text-align:center;font-weight:400!important;margin-bottom:80px}@media(max-width:1280px){.page-subtitle{font-family:roboto,sans-serif!important;font-size:16px!important;line-height:1.63!important;margin-bottom:30px}}.container{margin-top:44px;max-width:1200px}@media(min-width:1920px){.container{max-width:1510px}}@media(max-width:1280px){.container>*{max-width:630px;margin-left:auto;margin-right:auto}.container .no-width-restriction{max-width:none}}@media(max-width:640px){.container>*{max-width:306px}}.container-fluid{padding-left:20px;padding-right:20px}.show-more-button{width:-webkit-fit-content;width:-moz-fit-content;width:fit-content;margin:60px auto 0}@media(max-width:1280px){.show-more-button{margin-top:30px}}@media(max-width:1280px){.desktop-only{display:none}}@media(min-width:calc(1280px + 1px)){.no-desktop{display:none}}@media(min-width:calc(640px + 1px)){.mobile-only{display:none}}.features-list{margin:76px auto 100px;max-width:720px;display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between}@media(min-width:1920px){.features-list{max-width:unset;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;margin-top:0}}@media(max-width:640px){.features-list{margin-top:0}}.feature-item{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;margin:44px auto 0}.feature-item--icon-box{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;max-width:150px;width:100%;height:-webkit-fit-content;height:-moz-fit-content;height:fit-content;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;margin-top:18px}.feature-item--text-box{margin-left:60px}@media(min-width:1920px),(max-width:1280px){.feature-item{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column}.feature-item--icon-box{margin-top:0;margin-bottom:20px;max-width:unset}.feature-item--text-box{margin-left:0;text-align:center}}@media(min-width:1920px){.feature-item{margin-top:60px;max-width:392px;margin-left:40px;margin-right:40px}}@media(max-width:1280px){.feature-item{margin-top:40px;max-width:260px}.feature-item svg{height:60px;width:auto}.feature-item--header{font-family:roboto,sans-serif!important;font-size:24px!important;line-height:1.5!important}.feature-item--text{font-size:14px!important;line-height:1.57!important}}.text-with-icon-list{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;max-width:940px;margin:0 auto 
100px;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between}.text-with-icon-item{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;max-width:410px;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;margin-top:54px}.text-with-icon-item svg{height:100px}.text-with-icon-item--header{text-align:center;margin-top:23px}.text-with-icon-item--text{text-align:center}@media(min-width:1920px){.text-with-icon-list{max-width:unset}.text-with-icon-item{max-width:305px}}@media(max-width:1280px){.text-with-icon-item{margin-top:30px;max-width:276px}.text-with-icon-item svg{width:70px;height:70px}.text-with-icon-item--header{font-family:roboto,sans-serif;font-size:24px;line-height:1.5;margin-top:16px}.text-with-icon-item--text{font-size:14px;line-height:1.57}}@media(max-width:640px){.text-with-icon-list{-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center}}.video-section{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;border:solid 1px #cbcbcb;padding:40px}@media(max-width:1280px){.video-section{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;max-width:560px;margin:0 auto;padding:20px}}.video-wrapper{-webkit-box-flex:1;-webkit-flex:1;-ms-flex:1;flex:1}.video-wrapper .video-container{display:none}.video-wrapper .video-container:last-child{display:block}.video-wrapper .anchor{position:fixed}.video-wrapper .anchor:target+.video-container{display:block}.video-wrapper .anchor:target+.video-container~.video-container{display:none}.video-list-wrapper{overflow-y:auto;max-height:403px;max-width:370px;width:100%;margin-left:40px}@media(max-width:1280px){.video-list-wrapper{max-width:unset;margin-left:0}}.video-list-wrapper::-webkit-scrollbar{-webkit-appearance:none}.video-list-wrapper::-webkit-scrollbar-thumb{border-radius:8px;border:2px solid #fff;background-color:rgba(0,0,0,.5)}.video-list-wrapper::-webkit-scrollbar:vertical{width:9px}.video-list{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:reverse;-webkit-flex-direction:column-reverse;-ms-flex-direction:column-reverse;flex-direction:column-reverse;-webkit-box-pack:end;-webkit-justify-content:flex-end;-ms-flex-pack:end;justify-content:flex-end}.video-list__item{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;border-bottom:solid 1px #cbcbcb;padding:16px 0}.video-list__item .video-list__item--title{margin-left:9px;vertical-align:middle}.video-list__item--icon-container{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex}.video-list__item:hover .video-list__item--title,.video-list__item.active .video-list__item--title{font-weight:500}.video-list__item:hover svg path,.video-list__item.active svg path{fill:#707070;stroke:none}.tag{display:block;background-color:rgba(1,124,238,.25);padding:1px 
15px;border-radius:5px;-webkit-transition:.2s;-o-transition:.2s;transition:.2s;margin:7px}.tag.active,.tag:hover{background-color:#017cee;color:#fff}.tags-container{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;width:-webkit-fit-content;width:-moz-fit-content;width:fit-content;margin:-7px}@media(max-width:640px){.tags-container{-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;margin-bottom:7px}}.all-tags-container{margin:30px 0}.blog__list-items-wrapper{max-width:1200px;margin:30px auto 0}@media(max-width:1280px){.blog__list-items-wrapper{max-width:580px}}.new-entry{margin-bottom:20px}.new-entry--link{font-weight:500}@media(max-width:1280px){.new-entry{margin-bottom:10px;padding-left:10px}}.blogpost-content{max-width:790px;margin:0 auto}.blogpost-content--header-wrapper{border-bottom:solid 1px #cbcbcb}.blogpost-content__metadata--container{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between;margin-bottom:30px}.blogpost-content__metadata--title{margin-bottom:20px}.blogpost-content__metadata--author{font-weight:500;margin-bottom:30px;margin-right:17px}.blogpost-content__metadata--social-media-icon{margin-right:7px}.blogpost-content__metadata--description{font-weight:400;margin-bottom:30px}.blogpost-content__metadata--date{margin-top:17px}@media(max-width:640px){.blogpost-content__metadata--container{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;margin-bottom:19px}.blogpost-content__metadata--title{font-family:roboto,sans-serif!important;font-size:24px!important;line-height:1.5!important;max-width:272px;margin-bottom:13px}.blogpost-content__metadata--author{margin-bottom:20px}.blogpost-content__metadata--description{font-family:roboto,sans-serif!important;font-weight:400!important;font-size:16px!important;line-height:1.63!important;margin-bottom:20px}}.blog-pager{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;max-width:1200px;margin:60px auto 0}.blog-pager .pager{margin-top:40px;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center}.home-page-layout.base-layout{padding-top:70px}@media(max-width:640px){.home-page-layout.base-layout{padding-top:16px}}.principles-header{margin-top:20px;margin-bottom:4px}.integrations-header{margin-bottom:60px}@media(max-width:640px){.integrations-header{margin-bottom:30px}}#integrations .list-items{margin-top:40px}@media(max-width:640px){#integrations .list-items{margin-top:20px}}.video-section-container{margin:80px auto;max-width:1200px}@media(max-width:640px){.video-section-container{margin:60px 
0}}.host-header{margin-bottom:6px}.meetups{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center}.meetups>.list-items{margin-top:40px;margin-bottom:40px;width:100%}@media(max-width:1280px){.meetups>.list-items{margin-top:30px;margin-bottom:30px}}@media(max-width:640px){.meetups>.list-items{margin-bottom:10px}}.meetups-page.page-subtitle{margin-bottom:0}.list-link{text-decoration:underline}.community--header-container{max-width:936px;margin:0 auto}.community--header-join{text-align:center}.community--accordion-container{margin:60px 18px}.community--committers-header{text-align:center;margin-top:70px;margin-bottom:40px}.community--committers-header.large-margin{margin-top:100px;margin-bottom:40px}@media(min-width:1920px){.community .list-items{max-width:1220px;margin-right:auto;margin-left:auto}.community .list-item{width:25%}}@media(max-width:1280px){.community--header-container{max-width:580px}.community--header-join{font-family:roboto,sans-serif!important;font-size:24px!important;line-height:1.5!important}.community--accordion-container{margin:40px 0}.community--committers-header{font-size:36px!important;line-height:1.22!important;margin-top:60px}.community--committers-header.large-margin{margin-top:60px;margin-bottom:40px}.community--button-container{margin-top:20px}}.install .page-subtitle{margin-bottom:45px}.install--headers-wrapper{max-width:936px;margin:0 auto}.install--description{text-align:center;margin:45px 0 60px}.install--description a{color:#017cee;text-decoration:underline}.install__accordions--wrapper{max-width:900px;margin:60px auto}.install__accordions-content--header{font-weight:500}.install__accordions-content--list-wrapper{margin-bottom:40px}.install__accordions-content--methods-wrapper{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;margin-bottom:20px}.install__accordions-content--method-box{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;width:270px;height:160px;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;border:solid 1px #cbcbcb;border-radius:5px;margin-right:40px}@media(max-width:1280px){.install .page-subtitle{margin-bottom:30px}.install--headers-wrapper{max-width:580px}.install--description{margin-top:30px;margin-bottom:30px}.install__accordions--wrapper{max-width:580px;margin-top:40px;margin-bottom:0}}@media(max-width:640px){.install__accordions-content--methods-wrapper{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;margin-bottom:0;margin-right:-36px}.install__accordions-content--method-box{margin-right:0;margin-bottom:20px}}footer{min-height:unset}footer 
.footer-section{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between}footer .footer-section__media-section{padding:60px 60px 30px;background-color:#51504f}footer .footer-section__media-section--link{margin-right:30px}footer .footer-section__media-section--button-with-text{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center}footer .footer-section__media-section--text{margin-right:20px}footer .footer-section__policies-section{padding:30px 60px;background-color:#636365;font-size:12px;-webkit-box-orient:vertical!important;-webkit-box-direction:normal!important;-webkit-flex-direction:column!important;-ms-flex-direction:column!important;flex-direction:column!important}footer .footer-section__policies-section span{font-size:12px}footer .footer-section__policies-section--policies{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex}footer .footer-section__policies-section--policy-item::before{content:"\00a0\00a0"}footer .footer-section__policies-section--policy-item::after{content:"\00a0\00a0|";color:#fff}footer .footer-section__policies-section--policy-item:last-of-type::after{content:"\00a0\00a0|\00a0\00a0"}footer .footer-section__policies-section--disclaimer{display:block;max-width:600px;color:#cbcbcb!important;margin-top:16px}@media(min-width:1920px){footer .footer-section__policies-section--disclaimer{max-width:800px}}footer .dropdown-toggle::after{color:#fff;vertical-align:middle}@media(max-width:1280px){footer .footer-section{-webkit-box-orient:vertical;-webkit-box-direction:reverse;-webkit-flex-direction:column-reverse;-ms-flex-direction:column-reverse;flex-direction:column-reverse;-webkit-box-align:start;-webkit-align-items:flex-start;-ms-flex-align:start;align-items:flex-start}footer .footer-section span{font-size:14px!important;line-height:1.57!important}footer .footer-section__media-section{padding:30px 40px}footer .footer-section__media-section svg{height:31px;width:auto}footer .footer-section__media-section--link{margin-right:20px}footer .footer-section__media-section--button-with-text{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:start;-webkit-align-items:flex-start;-ms-flex-align:start;align-items:flex-start;margin-bottom:47px}footer .footer-section__media-section--text{margin-right:0;margin-bottom:16px}footer .footer-section__policies-section{padding:30px 40px}footer .footer-section__policies-section--policies{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column}footer .footer-section__policies-section--policy-item::before,footer .footer-section__policies-section--policy-item::after,footer .footer-section__policies-section--policy-item:last-of-type::before,footer .footer-section__policies-section--policy-item:last-of-type::after{content:""}footer .footer-section__policies-section--language-toggle{margin:17px 0 35px}}.navbar{position:fixed;top:0;width:100%;background-color:#fff;-webkit-box-pack:start;-webkit-justify-content:flex-start;-ms-flex-pack:start;justify-content:flex-start;border-bottom:solid 1px #cbcbcb;z-index:32;padding:30px 
60px}.navbar__menu-container{-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1}.navbar__menu-content{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;padding-top:16px;padding-left:88px}.navbar__links-container{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex}.navbar__text-link{margin-right:30px;position:relative;width:-webkit-fit-content;width:-moz-fit-content;width:fit-content}.navbar__text-link::before,.navbar__text-link::after{content:"";position:absolute;top:100%;width:0;right:0;height:2px;background-color:#017cee;-webkit-transition:.2s ease-out;-o-transition:.2s ease-out;transition:.2s ease-out}.navbar__text-link:hover,.navbar__text-link.active{color:#51504f}.navbar__text-link:hover::before,.navbar__text-link:hover::after,.navbar__text-link.active::before,.navbar__text-link.active::after{width:100%;left:0}.navbar--box-shadow{-webkit-box-shadow:0 2px 6px 0 rgba(0,0,0,.12);box-shadow:0 2px 6px rgba(0,0,0,.12)}@media(max-width:1280px){.navbar{padding:20px}.navbar__icon-container svg{width:93px;height:auto}.navbar__menu-content{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:start;-webkit-align-items:flex-start;-ms-flex-align:start;align-items:flex-start;padding-left:0;padding-top:0}.navbar__drawer-container{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;-webkit-box-pack:end;-webkit-justify-content:flex-end;-ms-flex-pack:end;justify-content:flex-end}.navbar__toggle-button{border:none;background:0 0;padding:0;position:relative;width:26px;height:20px}.navbar__toggle-button--icon{position:absolute;top:0;bottom:0;left:0;right:0;visibility:hidden;opacity:0;-webkit-transition:.2s ease-out;-o-transition:.2s ease-out;transition:.2s ease-out}.navbar__toggle-button--icon.visible{visibility:visible;opacity:1}.navbar__drawer{position:fixed;top:77px;left:0;width:100%;height:-webkit-calc(100% - 77px);height:calc(100% - 77px);background-color:#fff;-webkit-transform:translateX(100%);-ms-transform:translateX(100%);-o-transform:translateX(100%);transform:translateX(100%);-webkit-transition:.2s ease-out;-o-transition:.2s ease-out;transition:.2s ease-out;padding:40px 40px 30px}.navbar__drawer--open{-webkit-transform:translateX(0);-ms-transform:translateX(0);-o-transform:translateX(0);transform:translateX(0)}.navbar__links-container{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column}.navbar__text-link{margin-right:0;margin-bottom:10px}.navbar__text-link:last-child{margin-bottom:20px}}#header{position:relative;margin:123px -20px 0;min-height:-webkit-calc(100vh - 123px);min-height:calc(100vh - 123px)}#header-canvas{padding:0;margin:0;position:absolute;width:100%;height:100%;top:0;left:0;right:0;bottom:0}#header-canvas 
.text-area{max-width:706px;width:100%;display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;position:absolute;top:50%;left:50%;-webkit-transform:translate(-50%,-50%);-ms-transform:translate(-50%,-50%);-o-transform:translate(-50%,-50%);transform:translate(-50%,-50%)}#header-canvas .text-area--header{text-align:center;margin-bottom:20px}#header-canvas .text-area--subheader{font-weight:400!important;text-align:center;margin-bottom:20px}#header-canvas canvas{position:absolute;top:0;left:0;width:100%;height:100%;z-index:-1}@media(min-width:1920px){#header-canvas .text-area--header{font-size:90px}}@media(max-width:1280px){#header{margin:77px -20px 0;min-height:-webkit-calc(100vh - 77px);min-height:calc(100vh - 77px)}#header-canvas .text-area{max-width:450px}#header-canvas .text-area--header{margin-bottom:14px}#header-canvas .text-area--subheader{margin-bottom:26px}}@media(max-width:640px){#header-canvas .text-area{max-width:290px}#header-canvas .text-area--header{font-size:48px!important;line-height:1.25!important}#header-canvas .text-area--subheader{font-size:16px!important;line-height:1.63!important}}.roadmap{margin-top:40px}.roadmap main{padding-left:40px}.roadmap .breadcrumb{padding-bottom:0;margin-bottom:30px}.roadmap .breadcrumb-item+.breadcrumb-item{padding-left:4px}.roadmap .breadcrumb-item+.breadcrumb-item::before{color:#707070;padding-right:0}.roadmap .td-sidebar{position:-webkit-sticky;position:sticky;top:163px;height:-webkit-fit-content;height:-moz-fit-content;height:fit-content;max-height:-webkit-calc(100vh - 163px);max-height:calc(100vh - 163px);max-width:270px;min-width:270px;width:100%;overflow-y:auto;padding-top:12px;padding-left:15px;padding-bottom:30px;margin-left:40px;background-color:#f9f9f9;border:none}.roadmap .td-sidebar::-webkit-scrollbar{-webkit-appearance:none}.roadmap .td-sidebar::-webkit-scrollbar-thumb{border-radius:8px;border:2px solid #fff;background-color:rgba(0,0,0,.5)}.roadmap .td-sidebar::-webkit-scrollbar:vertical{width:9px}.roadmap .td-sidebar__inner{position:static;height:unset}.roadmap .td-sidebar li>a{width:-webkit-fit-content;width:-moz-fit-content;width:fit-content}.roadmap .td-sidebar li>a.active{color:#017cee}.roadmap .td-sidebar .searchb-box{margin-bottom:26px}.roadmap .td-sidebar .searchb-box .search-form{width:100%}.roadmap .td-sidebar .toctree li{list-style:none;font-family:roboto,sans-serif;font-size:16px;font-weight:400;font-stretch:normal;font-style:normal;line-height:1.63;letter-spacing:normal;color:#707070}.roadmap .td-sidebar .toctree ul{padding-left:15px;display:none}.roadmap .td-sidebar .toctree>ul,.roadmap .td-sidebar .toctree li.current>ul{display:block}.roadmap .td-sidebar .toctree .caption{font-family:roboto,sans-serif;font-size:18px;font-weight:700;font-stretch:normal;font-style:normal;line-height:1.33;letter-spacing:normal;color:#51504f;padding-bottom:13px;text-transform:uppercase;margin-bottom:0}.roadmap .td-sidebar .toctree .current{color:#017cee;font-weight:500}.roadmap .td-sidebar .toctree .current>a:not([href="#"]){color:#017cee}.roadmap .td-sidebar .toctree a .toctree-expand{display:inline-block;position:relative;height:1em}.roadmap .td-sidebar .toctree a 
.toctree-expand::before{position:absolute;top:6px;left:-12px;content:'►';font-size:7px}.roadmap .td-sidebar .toctree .current>a>.toctree-expand:before{content:'▼'}.roadmap .td-sidebar-nav__section{padding-right:0}.roadmap .td-sidebar-nav>.td-sidebar-nav__section .td-sidebar-nav__section{list-style:none;position:relative;margin-left:10px}.roadmap .td-sidebar-nav>.td-sidebar-nav__section .td-sidebar-nav__section::before{content:'►';position:absolute;top:6px;left:-12px;font-size:8px;color:#51504f}.roadmap .td-sidebar-nav>.td-sidebar-nav__section .td-sidebar-nav__section>ul{display:none}.roadmap .td-sidebar-nav>.td-sidebar-nav__section .td-sidebar-nav__section.current-section::before{content:'▼'}.roadmap .td-sidebar-nav>.td-sidebar-nav__section .td-sidebar-nav__section.current-section>ul{display:block}.roadmap .wy-nav-side-toc{position:-webkit-sticky;position:sticky;top:163px;overflow-x:hidden;overflow-y:auto;width:280px;height:-webkit-fit-content;height:-moz-fit-content;height:fit-content;max-height:-webkit-calc(100vh - 163px);max-height:calc(100vh - 163px);font-size:14px;line-height:1.43}.roadmap .wy-nav-side-toc::-webkit-scrollbar{-webkit-appearance:none}.roadmap .wy-nav-side-toc::-webkit-scrollbar-thumb{border-radius:8px;border:2px solid #fff;background-color:rgba(0,0,0,.5)}.roadmap .wy-nav-side-toc::-webkit-scrollbar:vertical{width:9px}.roadmap .wy-nav-side-toc ul,.roadmap .wy-nav-side-toc ol{padding:0;list-style:none none}.roadmap .wy-nav-side-toc li{list-style:none}.roadmap .wy-nav-side-toc .wy-menu-vertical>ul,.roadmap .wy-nav-side-toc .wy-menu-vertical li.current>ul{padding-left:1px}.roadmap .wy-nav-side-toc .wy-menu-vertical a{display:inline-block;padding:.4045em 0;color:#707070}.roadmap .wy-nav-side-toc .wy-menu-vertical li a{border-left:solid 2px #cbcbcb;padding-left:-webkit-calc(1.25em + 1px);padding-left:calc(1.25em + 1px)}.roadmap .wy-nav-side-toc .wy-menu-vertical li.current{margin-left:-1px}.roadmap .wy-nav-side-toc .wy-menu-vertical li.current>a{border-left:solid 4px #017cee;color:#017cee}.roadmap .wy-nav-side-toc .wy-menu-vertical li li>a{padding-left:2.427em}.roadmap .wy-nav-side-toc .wy-menu-vertical li li li>a{padding-left:4.045em}.roadmap .wy-nav-side-toc .wy-menu-vertical li li li li>a{padding-left:5.663em}@media(max-width:1280px){.roadmap main{padding-left:0}.roadmap .td-sidebar{position:static;background-color:transparent;padding:0;margin:0;max-width:unset;height:530px}}.search-form{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;width:344px;padding:8px 20px;border:solid 1px #cbcbcb;border-radius:5px;margin:60px auto 0}.search-form__input{font-family:roboto,sans-serif;font-size:16px;color:#707070;line-height:1.63;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;padding-right:10px;border:none;background:0 0;outline:none;float:left}.search-form__button{border:none;background-color:transparent;padding:0}@media(max-width:1280px){.search-form{width:270px;padding:3px 20px;margin-top:30px}}.rating-container{margin-top:40px}.rating{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:horizontal;-webkit-box-direction:reverse;-webkit-flex-direction:row-reverse;-ms-flex-direction:row-reverse;flex-direction:row-reverse;width:-webkit-fit-content;width:-moz-fit-content;width:fit-content}.rate-star{cursor:pointer;margin-right:13px}.rate-star svg path{fill:none;stroke:#51504f}.rate-star:hover svg path,.rate-star:hover~.rate-star svg 
path{fill:#017cee;stroke:none}.rst-content{color:#707070}.rst-content h1{margin-top:0;margin-bottom:30px;font-weight:500;font-family:rubik,sans-serif;color:#51504f;font-size:225%}.rst-content h2,.rst-content h3,.rst-content h4,.rst-content h5,.rst-content h6,.rst-content p{font-family:roboto,sans-serif;color:#707070}.rst-content h2,.rst-content h3,.rst-content h4,.rst-content h5,.rst-content h6{margin-top:40px;margin-bottom:20px;font-weight:500}.rst-content p{line-height:1.63;margin:0 0 30px;font-size:16px;overflow-wrap:break-word}.rst-content h2{font-size:150%}.rst-content h3{font-size:125%}.rst-content h4{font-size:115%}.rst-content h5{font-size:110%}.rst-content h6{font-size:100%}.rst-content code{max-width:100%;color:#51504f;padding:0 5px;font-family:roboto mono,monospace;overflow-x:auto}.rst-content .note,.rst-content .attention,.rst-content .caution,.rst-content .danger,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .tip,.rst-content .warning,.rst-content .admonition-todo,.rst-content .admonition{padding:9px 10px;line-height:24px;margin-bottom:24px;background:#e7f2fa}@media(max-width:768px){.rst-content .admonition p:not(.admonition-title){font-size:12px;line-height:1.83}}.rst-content .admonition-title:before{content:"!";background-color:#fff;border-radius:50%;padding:0 4px;margin-right:5px}.rst-content .admonition-title{color:#fff;font-weight:500;font-size:10px;line-height:2.1;display:block;background:#68d1ff;margin:-10px;padding:0 12px;margin-bottom:9px}.rst-content .danger,.rst-content .error{background:#fdece9}.rst-content .danger::before,.rst-content .error::before{color:#fdece9}.rst-content .danger .admonition-title,.rst-content .error .admonition-title{background:#ee8170}.rst-content .danger .admonition-title::before,.rst-content .error .admonition-title::before{color:#ee8170}.rst-content .attention,.rst-content .caution{background:#fff8f6}.rst-content .warning{background:#f8f8f8}.rst-content .attention .admonition-title,.rst-content .caution .admonition-title{background:#ffa996}.rst-content .attention .admonition-title::before,.rst-content .caution .admonition-title::before{color:#ffa996}.rst-content .warning .admonition-title{background:#a6a6a6}.rst-content .warning .admonition-title::before{color:#a6a6a6}.rst-content .note,.rst-content .seealso{background:#f3fbff}.rst-content .note .admonition-title,.rst-content .seealso .admonition-title{background:#68d2fe}.rst-content .note .admonition-title::before,.rst-content .seealso .admonition-title::before{color:#68d2fe}.rst-content .hint{background:#f2fef6}.rst-content .important{background:#e6f9fc}.rst-content .tip{background:#e5f7ec}.rst-content .hint .admonition-title{background:#63e598}.rst-content .hint .admonition-title::before{color:#63e598}.rst-content .important .admonition-title{background:#5bdae3}.rst-content .important .admonition-title::before{color:#5bdae3}.rst-content .tip .admonition-title{background:#5bcb88}.rst-content .tip .admonition-title::before{color:#5bcb88}.rst-content .note p:last-child,.rst-content .attention p:last-child,.rst-content .caution p:last-child,.rst-content .danger p:last-child,.rst-content .error p:last-child,.rst-content .hint p:last-child,.rst-content .important p:last-child,.rst-content .tip p:last-child,.rst-content .warning p:last-child,.rst-content .seealso p:last-child,.rst-content .admonition p:last-child{margin-bottom:0}.rst-content img{max-width:100%;height:auto}.rst-content div.figure{margin-bottom:24px}.rst-content div.figure 
p.caption{font-style:italic}.rst-content div.figure p:last-child.caption{margin-bottom:0}.rst-content div.figure.align-center{text-align:center}.rst-content .section>img,.rst-content .section>a>img{margin-bottom:24px}.rst-content abbr[title]{text-decoration:none}.rst-content blockquote{margin-left:24px;line-height:24px;margin-bottom:24px}.rst-content pre{background-color:#f2f8fe}.rst-content pre.literal-block,.rst-content .linenodiv pre{font-family:roboto mono,monospace;font-size:12px;line-height:1.4}@media print{.rst-content div[class^=highlight],.rst-content div[class^=highlight] pre{white-space:pre-wrap}}.rst-content .admonition table{border-color:rgba(0,0,0,.1)}.rst-content .admonition table td,.rst-content .admonition table th{background:0 0!important;border-color:rgba(0,0,0,.1)!important}.rst-content .section ol p,.rst-content .section ul p{margin-bottom:12px}.rst-content .section ol li p:last-child,.rst-content .section ul li p:last-child{margin-bottom:0}.rst-content .section ol p:last-child,.rst-content .section ul p:last-child{margin-bottom:24px}.rst-content .topic-title{font-weight:700;margin-bottom:12px}.rst-content .toc-backref{color:inherit}.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content .toctree-wrapper p.caption .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content dl dt .headerlink,.rst-content p.caption .headerlink,.rst-content table>caption .headerlink,.rst-content .code-block-caption .headerlink{visibility:hidden;font-size:0}.rst-content h1 .headerlink:after,.rst-content h2 .headerlink:after,.rst-content .toctree-wrapper p.caption .headerlink:after,.rst-content h3 .headerlink:after,.rst-content h4 .headerlink:after,.rst-content h5 .headerlink:after,.rst-content h6 .headerlink:after,.rst-content dl dt .headerlink:after,.rst-content p.caption .headerlink:after,.rst-content table>caption .headerlink:after,.rst-content .code-block-caption .headerlink:after{content:url(/images/anchor.svg);vertical-align:bottom;padding-left:8px}.rst-content h1:hover .headerlink:after,.rst-content h2:hover .headerlink:after,.rst-content .toctree-wrapper p.caption:hover .headerlink:after,.rst-content h3:hover .headerlink:after,.rst-content h4:hover .headerlink:after,.rst-content h5:hover .headerlink:after,.rst-content h6:hover .headerlink:after,.rst-content dl dt:hover .headerlink:after,.rst-content p.caption:hover .headerlink:after,.rst-content table>caption:hover .headerlink:after,.rst-content .code-block-caption:hover .headerlink:after{visibility:visible}.rst-content table>caption .headerlink:after{font-size:12px}.rst-content .footnote-reference,.rst-content .citation-reference{vertical-align:baseline;position:relative;top:-.4em;line-height:0;font-size:90%}.rst-content table.docutils,.rst-content table.field-list{border-collapse:collapse;border-spacing:0;empty-cells:show;margin-bottom:24px}.rst-content table.docutils caption,.rst-content table.field-list caption{padding:1em 0;text-align:center}.rst-content table.docutils td,.rst-content table.docutils th,.rst-content table.field-list th{margin:0;overflow:visible}.rst-content table.docutils td{padding:10px 31px}.rst-content table.docutils th,.rst-content table.field-list th{padding:11px 31px}.rst-content table.docutils td:first-child,.rst-content table.field-list td:first-child,.rst-content table.docutils th:first-child,.rst-content table.field-list th:first-child{border-left-width:0}.rst-content table.docutils thead,.rst-content 
table.field-list thead{text-align:left;vertical-align:bottom;white-space:nowrap}.rst-content table.docutils thead th,.rst-content table.field-list thead th{border-bottom:solid 1px rgba(81,80,79,.3);border-left:solid 1px rgba(81,80,79,.3)}.rst-content table.docutils thead th p,.rst-content table.field-list thead th p{font-weight:700;font-size:18px;color:#51504f;line-height:1.33;margin-bottom:0}.rst-content table.docutils td,.rst-content table.field-list td{background-color:transparent;vertical-align:middle}.rst-content td p:last-child,.rst-content table.docutils td p:last-child,.rst-content table.field-list td p:last-child{margin-bottom:0}.rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td{background-color:rgba(112,112,112,5%)}.rst-content table.docutils{border:1px solid rgba(81,80,79,.3)}.rst-content table.docutils td{border-bottom:1px solid rgba(81,80,79,.3);border-left:1px solid rgba(81,80,79,.3)}.rst-content table.docutils tbody>tr:last-child td{border-bottom-width:0}.rst-content .wy-table-responsive{margin-bottom:24px;max-width:100%;overflow:auto}.rst-content .wy-table-responsive table{margin-bottom:0!important}.rst-content .wy-table-responsive table th{white-space:nowrap}.rst-content code big,.rst-content tt em,.rst-content code em{font-size:100%!important;line-height:normal}.rst-content code.literal{color:#e74c3c}.rst-content code.xref,.rst-content a code{font-weight:700;color:#707070}.rst-content pre,.rst-content kbd{font-family:roboto mono,monospace}.rst-content kbd{background-color:inherit;color:inherit;-webkit-box-shadow:none;box-shadow:none;border:none;font-size:100%}.rst-content a code{color:#2980b9}.rst-content dl{margin-bottom:24px}.rst-content dl dt{font-weight:700;margin-bottom:12px;word-break:break-word}.rst-content dl p,.rst-content dl table,.rst-content dl ul,.rst-content dl ol{margin-bottom:12px!important}.rst-content dl dd{margin:0 0 12px 24px;line-height:24px}.rst-content dl:not(.docutils){margin-bottom:24px}.rst-content dl:not(.docutils) dt{font-family:roboto mono,monospace;display:table;margin:6px 0;font-size:100%;line-height:1.63;background:#f3fbff;color:#51504f;border-top:solid 4px #68d1ff;padding:8px 10px;position:relative}@media(max-width:768px){.rst-content dl:not(.docutils) dt{font-size:10px}}.rst-content dl:not(.docutils) dt:before{color:#68d1ff}.rst-content dl:not(.docutils) dt .headerlink{color:#707070;font-size:100%!important}.rst-content dl:not(.docutils) dt .fn-backref{color:#0cb6ff}.rst-content dl:not(.docutils) dl dt{margin-bottom:6px;border:none;border-left:solid 8px #a6a6a6;background:#f8f8f8;color:#707070}.rst-content dl:not(.docutils) dl dt .headerlink{color:#707070;font-size:100%!important}.rst-content dl:not(.docutils) dt:first-child{margin-top:0}.rst-content dl:not(.docutils) code{font-weight:700}.rst-content dl:not(.docutils) code.descname,.rst-content dl:not(.docutils) code.descclassname{background-color:transparent;border:none;padding:0;font-size:100%!important}.rst-content dl:not(.docutils) code.descname{font-weight:700}.rst-content dl:not(.docutils) .optional{display:inline-block;padding:0 4px;color:#51504f;font-weight:700}.rst-content dl:not(.docutils) .property{display:inline-block;padding-right:8px}.rst-content p.rubric{margin-bottom:12px;font-weight:700}.rst-content .versionmodified{font-style:italic}.rst-content .example-header{position:relative;background:#017cee;padding:8px 16px;margin-bottom:0}.rst-content .example-block-wrapper pre{margin:0;width:unset;border-top-left-radius:0;border-top-right-radius:0}.rst-content 
.example-header--with-button{padding-right:166px}@media(max-width:768px){.rst-content .example-header--with-button{padding-right:100px}}.rst-content .example-header:after{content:'';display:table;clear:both}.rst-content .example-title{display:block;padding:4px;margin-right:16px;color:#fff;overflow-x:auto}@media(max-width:768px){.rst-content .example-title{overflow-wrap:initial;font-size:12px}}.rst-content .example-header-button{top:8px;right:16px;position:absolute}.rst-content .example-header+.highlight-python{margin-top:0!important}.rst-content .viewcode-button{display:inline-block;padding:8px 16px;border:0;margin:0;color:#fff;font-weight:700;line-height:1;text-decoration:underline;text-overflow:ellipsis;overflow:hidden;text-transform:lowercase;vertical-align:middle;white-space:nowrap}@media(max-width:768px){.rst-content .viewcode-button{font-size:12px;padding:7px 0}}.rst-content .viewcode-button:visited{color:#404040}.rst-content .viewcode-button:hover,.rst-content .viewcode-button:focus{color:#404040}@media(min-width:1024px){.rst-content .section::before{display:block;content:" ";margin-top:-83px;height:83px;visibility:hidden}}.content-drawer-wrapper{display:none}@media(max-width:1280px){.content-drawer-wrapper{display:block;margin-bottom:30px}}.content-drawer-wrapper .navbar{-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between;left:0}.content-drawer-wrapper .navbar--hidden{display:none}.content-drawer-container{position:fixed;top:77px;left:0;width:100%;height:100%;background-color:#fff;-webkit-transform:translateX(-100%);-ms-transform:translateX(-100%);-o-transform:translateX(-100%);transform:translateX(-100%);-webkit-transition:.2s ease-out;-o-transition:.2s ease-out;transition:.2s ease-out;z-index:100}.content-drawer-container--open{-webkit-transform:translateX(0);-ms-transform:translateX(0);-o-transform:translateX(0);transform:translateX(0)}.content-drawer{padding:20px 40px}.content-drawer__toggle-button{border:none;background:0 0;padding:0;position:relative;width:26px;height:20px}.content-drawer__toggle-button--icon{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;width:-webkit-fit-content;width:-moz-fit-content;width:fit-content;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;visibility:hidden;opacity:0;-webkit-transition:.2s ease-out;-o-transition:.2s ease-out;transition:.2s ease-out}.content-drawer__toggle-button--icon svg{margin-right:10px}.content-drawer__toggle-button--icon.visible{visibility:visible;opacity:1}.dropdown-menu{font-family:roboto,sans-serif}.dropdown-toggle::after{color:#51504f}.sidebar__version-selector{margin-bottom:22px}.chroma,.highlight{background-color:#fff}.chroma .lntd,.highlight .lntd{vertical-align:top;padding:0;margin:0;border:0}.chroma .lntable,.highlight .lntable{border-spacing:0;padding:0;margin:0;border:0;width:auto;overflow:auto;display:block}.chroma .hl,.highlight .hl{display:block;width:100%;background-color:#ffc}.chroma .lnt,.chroma .ln,.highlight .lnt,.highlight .ln{margin-right:.4em;padding:0 .4em;color:#7f7f7f}.chroma .k,.chroma .kc,.chroma .kd,.chroma .kn,.chroma .kp,.chroma .kr,.highlight .k,.highlight .kc,.highlight .kd,.highlight .kn,.highlight .kp,.highlight .kr{color:#00f}.chroma .kt,.chroma .nc,.highlight .kt,.highlight .nc{color:#2b91af}.chroma .s,.chroma .sa,.chroma .sb,.chroma .sc,.chroma .dl,.chroma .sd,.chroma .s2,.chroma .se,.chroma .sh,.chroma .si,.chroma .sx,.chroma .sr,.chroma .s1,.chroma 
.ss,.highlight .s,.highlight .sa,.highlight .sb,.highlight .sc,.highlight .dl,.highlight .sd,.highlight .s2,.highlight .se,.highlight .sh,.highlight .si,.highlight .sx,.highlight .sr,.highlight .s1,.highlight .ss{color:#a31515}.chroma .ow,.highlight .ow{color:#00f}.chroma .c,.chroma .ch,.chroma .cm,.chroma .c1,.chroma .cs,.highlight .c,.highlight .ch,.highlight .cm,.highlight .c1,.highlight .cs{color:green}.chroma .cp,.chroma .cpf,.highlight .cp,.highlight .cpf{color:#00f}.chroma .ge,.highlight .ge{font-style:italic}.chroma .gh,.chroma .gp,.chroma .gs,.chroma .gu,.highlight .gh,.highlight .gp,.highlight .gs,.highlight .gu{font-weight:700}pre{margin:40px 0;padding:16px 20px;border:solid 1px #cbcbcb;border-radius:5px;width:100%}.share-section--icons-wrapper{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex}.share-section--icon{margin-right:12px;cursor:pointer}.four-oh-four{height:-webkit-calc(100vh - 123px);height:calc(100vh - 123px);position:relative}.four-oh-four__text-container{position:relative;top:50%;left:50%;-webkit-transform:translate(-50%,-50%);-ms-transform:translate(-50%,-50%);-o-transform:translate(-50%,-50%);transform:translate(-50%,-50%);text-align:center}@media(min-width:768px){.list-providers{-webkit-column-count:2;-moz-column-count:2;column-count:2}}@media(min-width:1000px){.list-providers{-webkit-column-count:3;-moz-column-count:3;column-count:3}} \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/_gen/css/main.min.css b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/_gen/css/main.min.css new file mode 100644 index 00000000000..0ccc485909e --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/_gen/css/main.min.css @@ -0,0 +1,7 @@ +@import "https://fonts.googleapis.com/css?family=Open+Sans:300,300i,400,400i,700,700i";@fa-font-path:"../webfonts";/*!* Bootstrap v4.1.3 (https://getbootstrap.com/) +* Copyright 2011-2018 The Bootstrap Authors +* Copyright 2011-2018 Twitter, Inc. 
+* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)*/:root{--blue:#72A1E5;--indigo:#6610f2;--purple:#6f42c1;--pink:#e83e8c;--red:#dc3545;--orange:#BA5A31;--yellow:#ffc107;--green:#28a745;--teal:#20c997;--cyan:#17a2b8;--white:#fff;--gray:#888;--gray-dark:#333;--primary:#30638E;--secondary:#FFA630;--success:#3772FF;--info:#C0E0DE;--warning:#ED6A5A;--danger:#ED6A5A;--light:#D3F3EE;--dark:#403F4C;--breakpoint-xs:0;--breakpoint-sm:576px;--breakpoint-md:768px;--breakpoint-lg:992px;--breakpoint-xl:1200px;--font-family-sans-serif:"Open Sans", -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";--font-family-monospace:SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace}*,*::before,*::after{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-family:sans-serif;line-height:1.15;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%;-ms-overflow-style:scrollbar;-webkit-tap-highlight-color:transparent}@-ms-viewport{width: device-width; }article,aside,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}body{margin:0;font-family:open sans,-apple-system,BlinkMacSystemFont,segoe ui,Roboto,helvetica neue,Arial,sans-serif,apple color emoji,segoe ui emoji,segoe ui symbol;font-size:1rem;font-weight:400;line-height:1.5;color:#222;text-align:left;background-color:#fff}[tabindex="-1"]:focus{outline:0!important}hr{-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;height:0;overflow:visible}h1,h2,h3,h4,h5,h6{margin-top:0;margin-bottom:.5rem}p{margin-top:0;margin-bottom:1rem}abbr[title],abbr[data-original-title]{text-decoration:underline;-webkit-text-decoration:underline dotted;-moz-text-decoration:underline dotted;text-decoration:underline dotted;cursor:help;border-bottom:0}address{margin-bottom:1rem;font-style:normal;line-height:inherit}ol,ul,dl{margin-top:0;margin-bottom:1rem}ol ol,ul ul,ol ul,ul ol{margin-bottom:0}dt{font-weight:700}dd{margin-bottom:.5rem;margin-left:0}blockquote{margin:0 0 1rem}dfn{font-style:italic}b,strong{font-weight:bolder}small{font-size:80%}sub,sup{position:relative;font-size:75%;line-height:0;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}a{color:#3176d9;text-decoration:none;background-color:transparent;-webkit-text-decoration-skip:objects}a:hover{color:#1e53a0;text-decoration:none}a:not([href]):not([tabindex]){color:inherit;text-decoration:none}a:not([href]):not([tabindex]):hover,a:not([href]):not([tabindex]):focus{color:inherit;text-decoration:none}a:not([href]):not([tabindex]):focus{outline:0}pre,code,kbd,samp{font-family:SFMono-Regular,Menlo,Monaco,Consolas,liberation mono,courier new,monospace;font-size:1em}pre{margin-top:0;margin-bottom:1rem;overflow:auto;-ms-overflow-style:scrollbar}figure{margin:0 0 1rem}img{vertical-align:middle;border-style:none}svg{overflow:hidden;vertical-align:middle}table{border-collapse:collapse}caption{padding-top:.75rem;padding-bottom:.75rem;color:#888;text-align:left;caption-side:bottom}th{text-align:inherit}label{display:inline-block;margin-bottom:.5rem}button{border-radius:0}button:focus{outline:1px dotted;outline:5px auto -webkit-focus-ring-color}input,button,select,optgroup,textarea{margin:0;font-family:inherit;font-size:inherit;line-height:inherit}button,input{overflow:visible}button,select{text-transform:none}button,html 
[type=button],[type=reset],[type=submit]{-webkit-appearance:button}button::-moz-focus-inner,[type=button]::-moz-focus-inner,[type=reset]::-moz-focus-inner,[type=submit]::-moz-focus-inner{padding:0;border-style:none}input[type=radio],input[type=checkbox]{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;padding:0}input[type=date],input[type=time],input[type=datetime-local],input[type=month]{-webkit-appearance:listbox}textarea{overflow:auto;resize:vertical}fieldset{min-width:0;padding:0;margin:0;border:0}legend{display:block;width:100%;max-width:100%;padding:0;margin-bottom:.5rem;font-size:1.5rem;line-height:inherit;color:inherit;white-space:normal}progress{vertical-align:baseline}[type=number]::-webkit-inner-spin-button,[type=number]::-webkit-outer-spin-button{height:auto}[type=search]{outline-offset:-2px;-webkit-appearance:none}[type=search]::-webkit-search-cancel-button,[type=search]::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{font:inherit;-webkit-appearance:button}output{display:inline-block}summary{display:list-item;cursor:pointer}template{display:none}[hidden]{display:none!important}h1,h2,h3,h4,h5,h6,.h1,.h2,.h3,.h4,.h5,.h6{margin-bottom:.5rem;font-family:inherit;font-weight:500;line-height:1.2;color:inherit}h1,.h1{font-size:2.25rem}h2,.h2{font-size:2rem}h3,.h3{font-size:1.5rem}h4,.h4{font-size:1.35rem}h5,.h5{font-size:1.15rem}h6,.h6{font-size:1rem}.lead{font-size:1.25rem;font-weight:300}.display-1{font-size:3rem;font-weight:700;line-height:1.2}.display-2{font-size:2.5rem;font-weight:700;line-height:1.2}.display-3{font-size:2rem;font-weight:700;line-height:1.2}.display-4{font-size:1.75rem;font-weight:700;line-height:1.2}hr{margin-top:1rem;margin-bottom:1rem;border:0;border-top:1px solid rgba(0,0,0,.1)}small,.small{font-size:80%;font-weight:400}mark,.mark{padding:.2em;background-color:#fcf8e3}.list-unstyled{padding-left:0;list-style:none}.list-inline{padding-left:0;list-style:none}.list-inline-item{display:inline-block}.list-inline-item:not(:last-child){margin-right:.5rem}.initialism{font-size:90%;text-transform:uppercase}.blockquote{margin-bottom:1rem;font-size:1.25rem}.blockquote-footer{display:block;font-size:80%;color:#888}.blockquote-footer::before{content:"\2014 \00A0"}.img-fluid,.td-content img{max-width:100%;height:auto}.img-thumbnail{padding:.25rem;background-color:#fff;border:1px solid #dee2e6;border-radius:.25rem;-webkit-box-shadow:0 1px 2px rgba(0,0,0,.075);box-shadow:0 1px 2px rgba(0,0,0,.075);max-width:100%;height:auto}.figure{display:inline-block}.figure-img{margin-bottom:.5rem;line-height:1}.figure-caption{font-size:90%;color:#888}code{font-size:87.5%;color:#c97300;word-break:break-word}a>code{color:inherit}kbd{padding:.2rem .4rem;font-size:87.5%;color:#fff;background-color:#222;border-radius:.2rem;-webkit-box-shadow:inset 0 -.1rem 0 rgba(0,0,0,.25);box-shadow:inset 0 -.1rem rgba(0,0,0,.25)}kbd kbd{padding:0;font-size:100%;font-weight:700;-webkit-box-shadow:none;box-shadow:none}pre{display:block;font-size:87.5%;color:#222}pre 
code{font-size:inherit;color:inherit;word-break:normal}.pre-scrollable{max-height:340px;overflow-y:scroll}.container{width:100%;padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}@media(min-width:576px){.container{max-width:540px}}@media(min-width:768px){.container{max-width:720px}}@media(min-width:992px){.container{max-width:960px}}@media(min-width:1200px){.container{max-width:1140px}}.container-fluid{width:100%;padding-right:15px;padding-left:15px;margin-right:auto;margin-left:auto}.row{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;margin-right:-15px;margin-left:-15px}.no-gutters{margin-right:0;margin-left:0}.no-gutters>.col,.no-gutters>[class*=col-]{padding-right:0;padding-left:0}.col-xl,.col-xl-auto,.col-xl-12,.col-xl-11,.col-xl-10,.col-xl-9,.col-xl-8,.col-xl-7,.col-xl-6,.col-xl-5,.col-xl-4,.col-xl-3,.col-xl-2,.col-xl-1,.col-lg,.col-lg-auto,.col-lg-12,.col-lg-11,.col-lg-10,.col-lg-9,.col-lg-8,.col-lg-7,.col-lg-6,.col-lg-5,.col-lg-4,.col-lg-3,.col-lg-2,.col-lg-1,.col-md,.col-md-auto,.col-md-12,.col-md-11,.col-md-10,.col-md-9,.col-md-8,.col-md-7,.col-md-6,.col-md-5,.col-md-4,.col-md-3,.col-md-2,.col-md-1,.col-sm,.col-sm-auto,.col-sm-12,.col-sm-11,.col-sm-10,.col-sm-9,.col-sm-8,.col-sm-7,.col-sm-6,.col-sm-5,.col-sm-4,.col-sm-3,.col-sm-2,.col-sm-1,.col,.col-auto,.col-12,.col-11,.col-10,.col-9,.col-8,.col-7,.col-6,.col-5,.col-4,.col-3,.col-2,.col-1{position:relative;width:100%;min-height:1px;padding-right:15px;padding-left:15px}.col{-webkit-flex-basis:0;-ms-flex-preferred-size:0;flex-basis:0;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;max-width:100%}.col-auto{-webkit-box-flex:0;-webkit-flex:0 0 auto;-ms-flex:0 0 auto;flex:none;width:auto;max-width:none}.col-1{-webkit-box-flex:0;-webkit-flex:0 0 8.33333333%;-ms-flex:0 0 8.33333333%;flex:0 0 8.33333333%;max-width:8.33333333%}.col-2{-webkit-box-flex:0;-webkit-flex:0 0 16.66666667%;-ms-flex:0 0 16.66666667%;flex:0 0 16.66666667%;max-width:16.66666667%}.col-3{-webkit-box-flex:0;-webkit-flex:0 0 25%;-ms-flex:0 0 25%;flex:0 0 25%;max-width:25%}.col-4{-webkit-box-flex:0;-webkit-flex:0 0 33.33333333%;-ms-flex:0 0 33.33333333%;flex:0 0 33.33333333%;max-width:33.33333333%}.col-5{-webkit-box-flex:0;-webkit-flex:0 0 41.66666667%;-ms-flex:0 0 41.66666667%;flex:0 0 41.66666667%;max-width:41.66666667%}.col-6{-webkit-box-flex:0;-webkit-flex:0 0 50%;-ms-flex:0 0 50%;flex:0 0 50%;max-width:50%}.col-7{-webkit-box-flex:0;-webkit-flex:0 0 58.33333333%;-ms-flex:0 0 58.33333333%;flex:0 0 58.33333333%;max-width:58.33333333%}.col-8{-webkit-box-flex:0;-webkit-flex:0 0 66.66666667%;-ms-flex:0 0 66.66666667%;flex:0 0 66.66666667%;max-width:66.66666667%}.col-9{-webkit-box-flex:0;-webkit-flex:0 0 75%;-ms-flex:0 0 75%;flex:0 0 75%;max-width:75%}.col-10{-webkit-box-flex:0;-webkit-flex:0 0 83.33333333%;-ms-flex:0 0 83.33333333%;flex:0 0 83.33333333%;max-width:83.33333333%}.col-11{-webkit-box-flex:0;-webkit-flex:0 0 91.66666667%;-ms-flex:0 0 91.66666667%;flex:0 0 91.66666667%;max-width:91.66666667%}.col-12{-webkit-box-flex:0;-webkit-flex:0 0 100%;-ms-flex:0 0 100%;flex:0 0 
100%;max-width:100%}.order-first{-webkit-box-ordinal-group:0;-webkit-order:-1;-ms-flex-order:-1;order:-1}.order-last{-webkit-box-ordinal-group:14;-webkit-order:13;-ms-flex-order:13;order:13}.order-0{-webkit-box-ordinal-group:1;-webkit-order:0;-ms-flex-order:0;order:0}.order-1{-webkit-box-ordinal-group:2;-webkit-order:1;-ms-flex-order:1;order:1}.order-2{-webkit-box-ordinal-group:3;-webkit-order:2;-ms-flex-order:2;order:2}.order-3{-webkit-box-ordinal-group:4;-webkit-order:3;-ms-flex-order:3;order:3}.order-4{-webkit-box-ordinal-group:5;-webkit-order:4;-ms-flex-order:4;order:4}.order-5{-webkit-box-ordinal-group:6;-webkit-order:5;-ms-flex-order:5;order:5}.order-6{-webkit-box-ordinal-group:7;-webkit-order:6;-ms-flex-order:6;order:6}.order-7{-webkit-box-ordinal-group:8;-webkit-order:7;-ms-flex-order:7;order:7}.order-8{-webkit-box-ordinal-group:9;-webkit-order:8;-ms-flex-order:8;order:8}.order-9{-webkit-box-ordinal-group:10;-webkit-order:9;-ms-flex-order:9;order:9}.order-10{-webkit-box-ordinal-group:11;-webkit-order:10;-ms-flex-order:10;order:10}.order-11{-webkit-box-ordinal-group:12;-webkit-order:11;-ms-flex-order:11;order:11}.order-12{-webkit-box-ordinal-group:13;-webkit-order:12;-ms-flex-order:12;order:12}.offset-1{margin-left:8.33333333%}.offset-2{margin-left:16.66666667%}.offset-3{margin-left:25%}.offset-4{margin-left:33.33333333%}.offset-5{margin-left:41.66666667%}.offset-6{margin-left:50%}.offset-7{margin-left:58.33333333%}.offset-8{margin-left:66.66666667%}.offset-9{margin-left:75%}.offset-10{margin-left:83.33333333%}.offset-11{margin-left:91.66666667%}@media(min-width:576px){.col-sm{-webkit-flex-basis:0;-ms-flex-preferred-size:0;flex-basis:0;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;max-width:100%}.col-sm-auto{-webkit-box-flex:0;-webkit-flex:0 0 auto;-ms-flex:0 0 auto;flex:none;width:auto;max-width:none}.col-sm-1{-webkit-box-flex:0;-webkit-flex:0 0 8.33333333%;-ms-flex:0 0 8.33333333%;flex:0 0 8.33333333%;max-width:8.33333333%}.col-sm-2{-webkit-box-flex:0;-webkit-flex:0 0 16.66666667%;-ms-flex:0 0 16.66666667%;flex:0 0 16.66666667%;max-width:16.66666667%}.col-sm-3{-webkit-box-flex:0;-webkit-flex:0 0 25%;-ms-flex:0 0 25%;flex:0 0 25%;max-width:25%}.col-sm-4{-webkit-box-flex:0;-webkit-flex:0 0 33.33333333%;-ms-flex:0 0 33.33333333%;flex:0 0 33.33333333%;max-width:33.33333333%}.col-sm-5{-webkit-box-flex:0;-webkit-flex:0 0 41.66666667%;-ms-flex:0 0 41.66666667%;flex:0 0 41.66666667%;max-width:41.66666667%}.col-sm-6{-webkit-box-flex:0;-webkit-flex:0 0 50%;-ms-flex:0 0 50%;flex:0 0 50%;max-width:50%}.col-sm-7{-webkit-box-flex:0;-webkit-flex:0 0 58.33333333%;-ms-flex:0 0 58.33333333%;flex:0 0 58.33333333%;max-width:58.33333333%}.col-sm-8{-webkit-box-flex:0;-webkit-flex:0 0 66.66666667%;-ms-flex:0 0 66.66666667%;flex:0 0 66.66666667%;max-width:66.66666667%}.col-sm-9{-webkit-box-flex:0;-webkit-flex:0 0 75%;-ms-flex:0 0 75%;flex:0 0 75%;max-width:75%}.col-sm-10{-webkit-box-flex:0;-webkit-flex:0 0 83.33333333%;-ms-flex:0 0 83.33333333%;flex:0 0 83.33333333%;max-width:83.33333333%}.col-sm-11{-webkit-box-flex:0;-webkit-flex:0 0 91.66666667%;-ms-flex:0 0 91.66666667%;flex:0 0 91.66666667%;max-width:91.66666667%}.col-sm-12{-webkit-box-flex:0;-webkit-flex:0 0 100%;-ms-flex:0 0 100%;flex:0 0 
100%;max-width:100%}.order-sm-first{-webkit-box-ordinal-group:0;-webkit-order:-1;-ms-flex-order:-1;order:-1}.order-sm-last{-webkit-box-ordinal-group:14;-webkit-order:13;-ms-flex-order:13;order:13}.order-sm-0{-webkit-box-ordinal-group:1;-webkit-order:0;-ms-flex-order:0;order:0}.order-sm-1{-webkit-box-ordinal-group:2;-webkit-order:1;-ms-flex-order:1;order:1}.order-sm-2{-webkit-box-ordinal-group:3;-webkit-order:2;-ms-flex-order:2;order:2}.order-sm-3{-webkit-box-ordinal-group:4;-webkit-order:3;-ms-flex-order:3;order:3}.order-sm-4{-webkit-box-ordinal-group:5;-webkit-order:4;-ms-flex-order:4;order:4}.order-sm-5{-webkit-box-ordinal-group:6;-webkit-order:5;-ms-flex-order:5;order:5}.order-sm-6{-webkit-box-ordinal-group:7;-webkit-order:6;-ms-flex-order:6;order:6}.order-sm-7{-webkit-box-ordinal-group:8;-webkit-order:7;-ms-flex-order:7;order:7}.order-sm-8{-webkit-box-ordinal-group:9;-webkit-order:8;-ms-flex-order:8;order:8}.order-sm-9{-webkit-box-ordinal-group:10;-webkit-order:9;-ms-flex-order:9;order:9}.order-sm-10{-webkit-box-ordinal-group:11;-webkit-order:10;-ms-flex-order:10;order:10}.order-sm-11{-webkit-box-ordinal-group:12;-webkit-order:11;-ms-flex-order:11;order:11}.order-sm-12{-webkit-box-ordinal-group:13;-webkit-order:12;-ms-flex-order:12;order:12}.offset-sm-0{margin-left:0}.offset-sm-1{margin-left:8.33333333%}.offset-sm-2{margin-left:16.66666667%}.offset-sm-3{margin-left:25%}.offset-sm-4{margin-left:33.33333333%}.offset-sm-5{margin-left:41.66666667%}.offset-sm-6{margin-left:50%}.offset-sm-7{margin-left:58.33333333%}.offset-sm-8{margin-left:66.66666667%}.offset-sm-9{margin-left:75%}.offset-sm-10{margin-left:83.33333333%}.offset-sm-11{margin-left:91.66666667%}}@media(min-width:768px){.col-md{-webkit-flex-basis:0;-ms-flex-preferred-size:0;flex-basis:0;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;max-width:100%}.col-md-auto{-webkit-box-flex:0;-webkit-flex:0 0 auto;-ms-flex:0 0 auto;flex:none;width:auto;max-width:none}.col-md-1{-webkit-box-flex:0;-webkit-flex:0 0 8.33333333%;-ms-flex:0 0 8.33333333%;flex:0 0 8.33333333%;max-width:8.33333333%}.col-md-2{-webkit-box-flex:0;-webkit-flex:0 0 16.66666667%;-ms-flex:0 0 16.66666667%;flex:0 0 16.66666667%;max-width:16.66666667%}.col-md-3{-webkit-box-flex:0;-webkit-flex:0 0 25%;-ms-flex:0 0 25%;flex:0 0 25%;max-width:25%}.col-md-4{-webkit-box-flex:0;-webkit-flex:0 0 33.33333333%;-ms-flex:0 0 33.33333333%;flex:0 0 33.33333333%;max-width:33.33333333%}.col-md-5{-webkit-box-flex:0;-webkit-flex:0 0 41.66666667%;-ms-flex:0 0 41.66666667%;flex:0 0 41.66666667%;max-width:41.66666667%}.col-md-6{-webkit-box-flex:0;-webkit-flex:0 0 50%;-ms-flex:0 0 50%;flex:0 0 50%;max-width:50%}.col-md-7{-webkit-box-flex:0;-webkit-flex:0 0 58.33333333%;-ms-flex:0 0 58.33333333%;flex:0 0 58.33333333%;max-width:58.33333333%}.col-md-8{-webkit-box-flex:0;-webkit-flex:0 0 66.66666667%;-ms-flex:0 0 66.66666667%;flex:0 0 66.66666667%;max-width:66.66666667%}.col-md-9{-webkit-box-flex:0;-webkit-flex:0 0 75%;-ms-flex:0 0 75%;flex:0 0 75%;max-width:75%}.col-md-10{-webkit-box-flex:0;-webkit-flex:0 0 83.33333333%;-ms-flex:0 0 83.33333333%;flex:0 0 83.33333333%;max-width:83.33333333%}.col-md-11{-webkit-box-flex:0;-webkit-flex:0 0 91.66666667%;-ms-flex:0 0 91.66666667%;flex:0 0 91.66666667%;max-width:91.66666667%}.col-md-12{-webkit-box-flex:0;-webkit-flex:0 0 100%;-ms-flex:0 0 100%;flex:0 0 
100%;max-width:100%}.order-md-first{-webkit-box-ordinal-group:0;-webkit-order:-1;-ms-flex-order:-1;order:-1}.order-md-last{-webkit-box-ordinal-group:14;-webkit-order:13;-ms-flex-order:13;order:13}.order-md-0{-webkit-box-ordinal-group:1;-webkit-order:0;-ms-flex-order:0;order:0}.order-md-1{-webkit-box-ordinal-group:2;-webkit-order:1;-ms-flex-order:1;order:1}.order-md-2{-webkit-box-ordinal-group:3;-webkit-order:2;-ms-flex-order:2;order:2}.order-md-3{-webkit-box-ordinal-group:4;-webkit-order:3;-ms-flex-order:3;order:3}.order-md-4{-webkit-box-ordinal-group:5;-webkit-order:4;-ms-flex-order:4;order:4}.order-md-5{-webkit-box-ordinal-group:6;-webkit-order:5;-ms-flex-order:5;order:5}.order-md-6{-webkit-box-ordinal-group:7;-webkit-order:6;-ms-flex-order:6;order:6}.order-md-7{-webkit-box-ordinal-group:8;-webkit-order:7;-ms-flex-order:7;order:7}.order-md-8{-webkit-box-ordinal-group:9;-webkit-order:8;-ms-flex-order:8;order:8}.order-md-9{-webkit-box-ordinal-group:10;-webkit-order:9;-ms-flex-order:9;order:9}.order-md-10{-webkit-box-ordinal-group:11;-webkit-order:10;-ms-flex-order:10;order:10}.order-md-11{-webkit-box-ordinal-group:12;-webkit-order:11;-ms-flex-order:11;order:11}.order-md-12{-webkit-box-ordinal-group:13;-webkit-order:12;-ms-flex-order:12;order:12}.offset-md-0{margin-left:0}.offset-md-1{margin-left:8.33333333%}.offset-md-2{margin-left:16.66666667%}.offset-md-3{margin-left:25%}.offset-md-4{margin-left:33.33333333%}.offset-md-5{margin-left:41.66666667%}.offset-md-6{margin-left:50%}.offset-md-7{margin-left:58.33333333%}.offset-md-8{margin-left:66.66666667%}.offset-md-9{margin-left:75%}.offset-md-10{margin-left:83.33333333%}.offset-md-11{margin-left:91.66666667%}}@media(min-width:992px){.col-lg{-webkit-flex-basis:0;-ms-flex-preferred-size:0;flex-basis:0;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;max-width:100%}.col-lg-auto{-webkit-box-flex:0;-webkit-flex:0 0 auto;-ms-flex:0 0 auto;flex:none;width:auto;max-width:none}.col-lg-1{-webkit-box-flex:0;-webkit-flex:0 0 8.33333333%;-ms-flex:0 0 8.33333333%;flex:0 0 8.33333333%;max-width:8.33333333%}.col-lg-2{-webkit-box-flex:0;-webkit-flex:0 0 16.66666667%;-ms-flex:0 0 16.66666667%;flex:0 0 16.66666667%;max-width:16.66666667%}.col-lg-3{-webkit-box-flex:0;-webkit-flex:0 0 25%;-ms-flex:0 0 25%;flex:0 0 25%;max-width:25%}.col-lg-4{-webkit-box-flex:0;-webkit-flex:0 0 33.33333333%;-ms-flex:0 0 33.33333333%;flex:0 0 33.33333333%;max-width:33.33333333%}.col-lg-5{-webkit-box-flex:0;-webkit-flex:0 0 41.66666667%;-ms-flex:0 0 41.66666667%;flex:0 0 41.66666667%;max-width:41.66666667%}.col-lg-6{-webkit-box-flex:0;-webkit-flex:0 0 50%;-ms-flex:0 0 50%;flex:0 0 50%;max-width:50%}.col-lg-7{-webkit-box-flex:0;-webkit-flex:0 0 58.33333333%;-ms-flex:0 0 58.33333333%;flex:0 0 58.33333333%;max-width:58.33333333%}.col-lg-8{-webkit-box-flex:0;-webkit-flex:0 0 66.66666667%;-ms-flex:0 0 66.66666667%;flex:0 0 66.66666667%;max-width:66.66666667%}.col-lg-9{-webkit-box-flex:0;-webkit-flex:0 0 75%;-ms-flex:0 0 75%;flex:0 0 75%;max-width:75%}.col-lg-10{-webkit-box-flex:0;-webkit-flex:0 0 83.33333333%;-ms-flex:0 0 83.33333333%;flex:0 0 83.33333333%;max-width:83.33333333%}.col-lg-11{-webkit-box-flex:0;-webkit-flex:0 0 91.66666667%;-ms-flex:0 0 91.66666667%;flex:0 0 91.66666667%;max-width:91.66666667%}.col-lg-12{-webkit-box-flex:0;-webkit-flex:0 0 100%;-ms-flex:0 0 100%;flex:0 0 
100%;max-width:100%}.order-lg-first{-webkit-box-ordinal-group:0;-webkit-order:-1;-ms-flex-order:-1;order:-1}.order-lg-last{-webkit-box-ordinal-group:14;-webkit-order:13;-ms-flex-order:13;order:13}.order-lg-0{-webkit-box-ordinal-group:1;-webkit-order:0;-ms-flex-order:0;order:0}.order-lg-1{-webkit-box-ordinal-group:2;-webkit-order:1;-ms-flex-order:1;order:1}.order-lg-2{-webkit-box-ordinal-group:3;-webkit-order:2;-ms-flex-order:2;order:2}.order-lg-3{-webkit-box-ordinal-group:4;-webkit-order:3;-ms-flex-order:3;order:3}.order-lg-4{-webkit-box-ordinal-group:5;-webkit-order:4;-ms-flex-order:4;order:4}.order-lg-5{-webkit-box-ordinal-group:6;-webkit-order:5;-ms-flex-order:5;order:5}.order-lg-6{-webkit-box-ordinal-group:7;-webkit-order:6;-ms-flex-order:6;order:6}.order-lg-7{-webkit-box-ordinal-group:8;-webkit-order:7;-ms-flex-order:7;order:7}.order-lg-8{-webkit-box-ordinal-group:9;-webkit-order:8;-ms-flex-order:8;order:8}.order-lg-9{-webkit-box-ordinal-group:10;-webkit-order:9;-ms-flex-order:9;order:9}.order-lg-10{-webkit-box-ordinal-group:11;-webkit-order:10;-ms-flex-order:10;order:10}.order-lg-11{-webkit-box-ordinal-group:12;-webkit-order:11;-ms-flex-order:11;order:11}.order-lg-12{-webkit-box-ordinal-group:13;-webkit-order:12;-ms-flex-order:12;order:12}.offset-lg-0{margin-left:0}.offset-lg-1{margin-left:8.33333333%}.offset-lg-2{margin-left:16.66666667%}.offset-lg-3{margin-left:25%}.offset-lg-4{margin-left:33.33333333%}.offset-lg-5{margin-left:41.66666667%}.offset-lg-6{margin-left:50%}.offset-lg-7{margin-left:58.33333333%}.offset-lg-8{margin-left:66.66666667%}.offset-lg-9{margin-left:75%}.offset-lg-10{margin-left:83.33333333%}.offset-lg-11{margin-left:91.66666667%}}@media(min-width:1200px){.col-xl{-webkit-flex-basis:0;-ms-flex-preferred-size:0;flex-basis:0;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;max-width:100%}.col-xl-auto{-webkit-box-flex:0;-webkit-flex:0 0 auto;-ms-flex:0 0 auto;flex:none;width:auto;max-width:none}.col-xl-1{-webkit-box-flex:0;-webkit-flex:0 0 8.33333333%;-ms-flex:0 0 8.33333333%;flex:0 0 8.33333333%;max-width:8.33333333%}.col-xl-2{-webkit-box-flex:0;-webkit-flex:0 0 16.66666667%;-ms-flex:0 0 16.66666667%;flex:0 0 16.66666667%;max-width:16.66666667%}.col-xl-3{-webkit-box-flex:0;-webkit-flex:0 0 25%;-ms-flex:0 0 25%;flex:0 0 25%;max-width:25%}.col-xl-4{-webkit-box-flex:0;-webkit-flex:0 0 33.33333333%;-ms-flex:0 0 33.33333333%;flex:0 0 33.33333333%;max-width:33.33333333%}.col-xl-5{-webkit-box-flex:0;-webkit-flex:0 0 41.66666667%;-ms-flex:0 0 41.66666667%;flex:0 0 41.66666667%;max-width:41.66666667%}.col-xl-6{-webkit-box-flex:0;-webkit-flex:0 0 50%;-ms-flex:0 0 50%;flex:0 0 50%;max-width:50%}.col-xl-7{-webkit-box-flex:0;-webkit-flex:0 0 58.33333333%;-ms-flex:0 0 58.33333333%;flex:0 0 58.33333333%;max-width:58.33333333%}.col-xl-8{-webkit-box-flex:0;-webkit-flex:0 0 66.66666667%;-ms-flex:0 0 66.66666667%;flex:0 0 66.66666667%;max-width:66.66666667%}.col-xl-9{-webkit-box-flex:0;-webkit-flex:0 0 75%;-ms-flex:0 0 75%;flex:0 0 75%;max-width:75%}.col-xl-10{-webkit-box-flex:0;-webkit-flex:0 0 83.33333333%;-ms-flex:0 0 83.33333333%;flex:0 0 83.33333333%;max-width:83.33333333%}.col-xl-11{-webkit-box-flex:0;-webkit-flex:0 0 91.66666667%;-ms-flex:0 0 91.66666667%;flex:0 0 91.66666667%;max-width:91.66666667%}.col-xl-12{-webkit-box-flex:0;-webkit-flex:0 0 100%;-ms-flex:0 0 100%;flex:0 0 
100%;max-width:100%}.order-xl-first{-webkit-box-ordinal-group:0;-webkit-order:-1;-ms-flex-order:-1;order:-1}.order-xl-last{-webkit-box-ordinal-group:14;-webkit-order:13;-ms-flex-order:13;order:13}.order-xl-0{-webkit-box-ordinal-group:1;-webkit-order:0;-ms-flex-order:0;order:0}.order-xl-1{-webkit-box-ordinal-group:2;-webkit-order:1;-ms-flex-order:1;order:1}.order-xl-2{-webkit-box-ordinal-group:3;-webkit-order:2;-ms-flex-order:2;order:2}.order-xl-3{-webkit-box-ordinal-group:4;-webkit-order:3;-ms-flex-order:3;order:3}.order-xl-4{-webkit-box-ordinal-group:5;-webkit-order:4;-ms-flex-order:4;order:4}.order-xl-5{-webkit-box-ordinal-group:6;-webkit-order:5;-ms-flex-order:5;order:5}.order-xl-6{-webkit-box-ordinal-group:7;-webkit-order:6;-ms-flex-order:6;order:6}.order-xl-7{-webkit-box-ordinal-group:8;-webkit-order:7;-ms-flex-order:7;order:7}.order-xl-8{-webkit-box-ordinal-group:9;-webkit-order:8;-ms-flex-order:8;order:8}.order-xl-9{-webkit-box-ordinal-group:10;-webkit-order:9;-ms-flex-order:9;order:9}.order-xl-10{-webkit-box-ordinal-group:11;-webkit-order:10;-ms-flex-order:10;order:10}.order-xl-11{-webkit-box-ordinal-group:12;-webkit-order:11;-ms-flex-order:11;order:11}.order-xl-12{-webkit-box-ordinal-group:13;-webkit-order:12;-ms-flex-order:12;order:12}.offset-xl-0{margin-left:0}.offset-xl-1{margin-left:8.33333333%}.offset-xl-2{margin-left:16.66666667%}.offset-xl-3{margin-left:25%}.offset-xl-4{margin-left:33.33333333%}.offset-xl-5{margin-left:41.66666667%}.offset-xl-6{margin-left:50%}.offset-xl-7{margin-left:58.33333333%}.offset-xl-8{margin-left:66.66666667%}.offset-xl-9{margin-left:75%}.offset-xl-10{margin-left:83.33333333%}.offset-xl-11{margin-left:91.66666667%}}.table,.td-content>table,.td-box .row.section>table{width:100%;margin-bottom:1rem;background-color:transparent}.table th,.td-content>table th,.td-box .row.section>table th,.table td,.td-content>table td,.td-box .row.section>table td{padding:.75rem;vertical-align:top;border-top:1px solid #dee2e6}.table thead th,.td-content>table thead th,.td-box .row.section>table thead th{vertical-align:bottom;border-bottom:2px solid #dee2e6}.table tbody+tbody,.td-content>table tbody+tbody,.td-box .row.section>table tbody+tbody{border-top:2px solid #dee2e6}.table .table,.td-content>table .table,.table .td-content>table,.td-content>table .td-content>table,.td-box .row.section>table .table,.td-box .row.section>table .td-content>table,.table .td-box .row.section>table,.td-content>table .td-box .row.section>table,.td-box .table .row.section>table,.td-box .td-content>table .row.section>table,.td-box .row.section>table .row.section>table{background-color:#fff}.table-sm th,.table-sm td{padding:.3rem}.table-bordered{border:1px solid #dee2e6}.table-bordered th,.table-bordered td{border:1px solid #dee2e6}.table-bordered thead th,.table-bordered thead td{border-bottom-width:2px}.table-borderless th,.table-borderless td,.table-borderless thead th,.table-borderless tbody+tbody{border:0}.table-striped tbody tr:nth-of-type(odd),.td-content>table tbody tr:nth-of-type(odd),.td-box .row.section>table tbody tr:nth-of-type(odd){background-color:rgba(0,0,0,5%)}.table-hover tbody tr:hover{background-color:rgba(0,0,0,.075)}.table-primary,.table-primary>th,.table-primary>td{background-color:#c5d3df}.table-hover .table-primary:hover{background-color:#b5c7d6}.table-hover .table-primary:hover>td,.table-hover .table-primary:hover>th{background-color:#b5c7d6}.table-secondary,.table-secondary>th,.table-secondary>td{background-color:#ffe6c5}.table-hover 
.table-secondary:hover{background-color:#ffdbac}.table-hover .table-secondary:hover>td,.table-hover .table-secondary:hover>th{background-color:#ffdbac}.table-success,.table-success>th,.table-success>td{background-color:#c7d8ff}.table-hover .table-success:hover{background-color:#aec6ff}.table-hover .table-success:hover>td,.table-hover .table-success:hover>th{background-color:#aec6ff}.table-info,.table-info>th,.table-info>td{background-color:#edf6f6}.table-hover .table-info:hover{background-color:#dceeee}.table-hover .table-info:hover>td,.table-hover .table-info:hover>th{background-color:#dceeee}.table-warning,.table-warning>th,.table-warning>td{background-color:#fad5d1}.table-hover .table-warning:hover{background-color:#f8c0ba}.table-hover .table-warning:hover>td,.table-hover .table-warning:hover>th{background-color:#f8c0ba}.table-danger,.table-danger>th,.table-danger>td{background-color:#fad5d1}.table-hover .table-danger:hover{background-color:#f8c0ba}.table-hover .table-danger:hover>td,.table-hover .table-danger:hover>th{background-color:#f8c0ba}.table-light,.table-light>th,.table-light>td{background-color:#f3fcfa}.table-hover .table-light:hover{background-color:#dff7f2}.table-hover .table-light:hover>td,.table-hover .table-light:hover>th{background-color:#dff7f2}.table-dark,.table-dark>th,.table-dark>td{background-color:#cac9cd}.table-hover .table-dark:hover{background-color:#bdbcc1}.table-hover .table-dark:hover>td,.table-hover .table-dark:hover>th{background-color:#bdbcc1}.table-active,.table-active>th,.table-active>td{background-color:rgba(0,0,0,.075)}.table-hover .table-active:hover{background-color:rgba(0,0,0,.075)}.table-hover .table-active:hover>td,.table-hover .table-active:hover>th{background-color:rgba(0,0,0,.075)}.table .thead-dark th,.td-content>table .thead-dark th,.td-box .row.section>table .thead-dark th{color:#fff;background-color:#222;border-color:#353535}.table .thead-light th,.td-content>table .thead-light th,.td-box .row.section>table .thead-light th{color:#495057;background-color:#eee;border-color:#dee2e6}.table-dark{color:#fff;background-color:#222}.table-dark th,.table-dark td,.table-dark thead th{border-color:#353535}.table-dark.table-bordered{border:0}.table-dark.table-striped tbody tr:nth-of-type(odd),.td-content>table.table-dark tbody tr:nth-of-type(odd),.td-box .row.section>table.table-dark tbody tr:nth-of-type(odd){background-color:rgba(255,255,255,5%)}.table-dark.table-hover tbody tr:hover{background-color:rgba(255,255,255,.075)}@media(max-width:575.98px){.table-responsive-sm{display:block;width:100%;overflow-x:auto;-webkit-overflow-scrolling:touch;-ms-overflow-style:-ms-autohiding-scrollbar}.table-responsive-sm>.table-bordered{border:0}}@media(max-width:767.98px){.table-responsive-md{display:block;width:100%;overflow-x:auto;-webkit-overflow-scrolling:touch;-ms-overflow-style:-ms-autohiding-scrollbar}.table-responsive-md>.table-bordered{border:0}}@media(max-width:991.98px){.table-responsive-lg{display:block;width:100%;overflow-x:auto;-webkit-overflow-scrolling:touch;-ms-overflow-style:-ms-autohiding-scrollbar}.table-responsive-lg>.table-bordered{border:0}}@media(max-width:1199.98px){.table-responsive-xl{display:block;width:100%;overflow-x:auto;-webkit-overflow-scrolling:touch;-ms-overflow-style:-ms-autohiding-scrollbar}.table-responsive-xl>.table-bordered{border:0}}.table-responsive,.td-content>table,.td-box 
.row.section>table{display:block;width:100%;overflow-x:auto;-webkit-overflow-scrolling:touch;-ms-overflow-style:-ms-autohiding-scrollbar}.table-responsive>.table-bordered,.td-content>table>.table-bordered,.td-box .row.section>table>.table-bordered{border:0}.form-control{display:block;width:100%;height:-webkit-calc(2.25rem + 2px);height:calc(2.25rem + 2px);padding:.375rem .75rem;font-size:1rem;line-height:1.5;color:#495057;background-color:#fff;background-clip:padding-box;border:1px solid #ccc;border-radius:.25rem;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075);-webkit-transition:border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;transition:border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;-o-transition:border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:border-color .15s ease-in-out,box-shadow .15s ease-in-out,-webkit-box-shadow .15s ease-in-out}@media screen and (prefers-reduced-motion:reduce){.form-control{-webkit-transition:none;-o-transition:none;transition:none}}.form-control::-ms-expand{background-color:transparent;border:0}.form-control:focus{color:#495057;background-color:#fff;border-color:#6fa3ce;outline:0;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(48,99,142,.25);box-shadow:inset 0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(48,99,142,.25)}.form-control::-webkit-input-placeholder{color:#888;opacity:1}.form-control::-moz-placeholder{color:#888;opacity:1}.form-control:-ms-input-placeholder{color:#888;opacity:1}.form-control::placeholder{color:#888;opacity:1}.form-control:disabled,.form-control[readonly]{background-color:#eee;opacity:1}select.form-control:focus::-ms-value{color:#495057;background-color:#fff}.form-control-file,.form-control-range{display:block;width:100%}.col-form-label{padding-top:-webkit-calc(.375rem + 1px);padding-top:calc(.375rem + 1px);padding-bottom:-webkit-calc(.375rem + 1px);padding-bottom:calc(.375rem + 1px);margin-bottom:0;font-size:inherit;line-height:1.5}.col-form-label-lg{padding-top:-webkit-calc(.5rem + 1px);padding-top:calc(.5rem + 1px);padding-bottom:-webkit-calc(.5rem + 1px);padding-bottom:calc(.5rem + 1px);font-size:1.25rem;line-height:1.5}.col-form-label-sm{padding-top:-webkit-calc(.25rem + 1px);padding-top:calc(.25rem + 1px);padding-bottom:-webkit-calc(.25rem + 1px);padding-bottom:calc(.25rem + 1px);font-size:.875rem;line-height:1.5}.form-control-plaintext{display:block;width:100%;padding-top:.375rem;padding-bottom:.375rem;margin-bottom:0;line-height:1.5;color:#222;background-color:transparent;border:solid transparent;border-width:1px 0}.form-control-plaintext.form-control-sm,.form-control-plaintext.form-control-lg{padding-right:0;padding-left:0}.form-control-sm{height:-webkit-calc(1.8125rem + 2px);height:calc(1.8125rem + 2px);padding:.25rem .5rem;font-size:.875rem;line-height:1.5;border-radius:.2rem}.form-control-lg{height:-webkit-calc(2.875rem + 2px);height:calc(2.875rem + 2px);padding:.5rem 
1rem;font-size:1.25rem;line-height:1.5;border-radius:.3rem}select.form-control[size],select.form-control[multiple]{height:auto}textarea.form-control{height:auto}.form-group{margin-bottom:1rem}.form-text{display:block;margin-top:.25rem}.form-row{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;margin-right:-5px;margin-left:-5px}.form-row>.col,.form-row>[class*=col-]{padding-right:5px;padding-left:5px}.form-check{position:relative;display:block;padding-left:1.25rem}.form-check-input{position:absolute;margin-top:.3rem;margin-left:-1.25rem}.form-check-input:disabled~.form-check-label{color:#888}.form-check-label{margin-bottom:0}.form-check-inline{display:-webkit-inline-box;display:-webkit-inline-flex;display:-ms-inline-flexbox;display:inline-flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;padding-left:0;margin-right:.75rem}.form-check-inline .form-check-input{position:static;margin-top:0;margin-right:.3125rem;margin-left:0}.valid-feedback{display:none;width:100%;margin-top:.25rem;font-size:80%;color:#3772ff}.valid-tooltip{position:absolute;top:100%;z-index:5;display:none;max-width:100%;padding:.25rem .5rem;margin-top:.1rem;font-size:.875rem;line-height:1.5;color:#fff;background-color:rgba(55,114,255,.9);border-radius:.25rem}.was-validated .form-control:valid,.form-control.is-valid,.was-validated .custom-select:valid,.custom-select.is-valid{border-color:#3772ff}.was-validated .form-control:valid:focus,.form-control.is-valid:focus,.was-validated .custom-select:valid:focus,.custom-select.is-valid:focus{border-color:#3772ff;-webkit-box-shadow:0 0 0 .2rem rgba(55,114,255,.25);box-shadow:0 0 0 .2rem rgba(55,114,255,.25)}.was-validated .form-control:valid~.valid-feedback,.was-validated .form-control:valid~.valid-tooltip,.form-control.is-valid~.valid-feedback,.form-control.is-valid~.valid-tooltip,.was-validated .custom-select:valid~.valid-feedback,.was-validated .custom-select:valid~.valid-tooltip,.custom-select.is-valid~.valid-feedback,.custom-select.is-valid~.valid-tooltip{display:block}.was-validated .form-control-file:valid~.valid-feedback,.was-validated .form-control-file:valid~.valid-tooltip,.form-control-file.is-valid~.valid-feedback,.form-control-file.is-valid~.valid-tooltip{display:block}.was-validated .form-check-input:valid~.form-check-label,.form-check-input.is-valid~.form-check-label{color:#3772ff}.was-validated .form-check-input:valid~.valid-feedback,.was-validated .form-check-input:valid~.valid-tooltip,.form-check-input.is-valid~.valid-feedback,.form-check-input.is-valid~.valid-tooltip{display:block}.was-validated .custom-control-input:valid~.custom-control-label,.custom-control-input.is-valid~.custom-control-label{color:#3772ff}.was-validated .custom-control-input:valid~.custom-control-label::before,.custom-control-input.is-valid~.custom-control-label::before{background-color:#b7ccff}.was-validated .custom-control-input:valid~.valid-feedback,.was-validated .custom-control-input:valid~.valid-tooltip,.custom-control-input.is-valid~.valid-feedback,.custom-control-input.is-valid~.valid-tooltip{display:block}.was-validated .custom-control-input:valid:checked~.custom-control-label::before,.custom-control-input.is-valid:checked~.custom-control-label::before{background:#6a96ff -webkit-gradient(linear,left top,left bottom,from(#80a6ff),to(#6a96ff))repeat-x;background:#6a96ff -webkit-linear-gradient(top,#80a6ff,#6a96ff)repeat-x;background:#6a96ff 
-o-linear-gradient(top,#80a6ff,#6a96ff)repeat-x;background:#6a96ff linear-gradient(180deg,#80a6ff,#6a96ff)repeat-x}.was-validated .custom-control-input:valid:focus~.custom-control-label::before,.custom-control-input.is-valid:focus~.custom-control-label::before{-webkit-box-shadow:0 0 0 1px #fff,0 0 0 .2rem rgba(55,114,255,.25);box-shadow:0 0 0 1px #fff,0 0 0 .2rem rgba(55,114,255,.25)}.was-validated .custom-file-input:valid~.custom-file-label,.custom-file-input.is-valid~.custom-file-label{border-color:#3772ff}.was-validated .custom-file-input:valid~.custom-file-label::after,.custom-file-input.is-valid~.custom-file-label::after{border-color:inherit}.was-validated .custom-file-input:valid~.valid-feedback,.was-validated .custom-file-input:valid~.valid-tooltip,.custom-file-input.is-valid~.valid-feedback,.custom-file-input.is-valid~.valid-tooltip{display:block}.was-validated .custom-file-input:valid:focus~.custom-file-label,.custom-file-input.is-valid:focus~.custom-file-label{-webkit-box-shadow:0 0 0 .2rem rgba(55,114,255,.25);box-shadow:0 0 0 .2rem rgba(55,114,255,.25)}.invalid-feedback{display:none;width:100%;margin-top:.25rem;font-size:80%;color:#ed6a5a}.invalid-tooltip{position:absolute;top:100%;z-index:5;display:none;max-width:100%;padding:.25rem .5rem;margin-top:.1rem;font-size:.875rem;line-height:1.5;color:#fff;background-color:rgba(237,106,90,.9);border-radius:.25rem}.was-validated .form-control:invalid,.form-control.is-invalid,.was-validated .custom-select:invalid,.custom-select.is-invalid{border-color:#ed6a5a}.was-validated .form-control:invalid:focus,.form-control.is-invalid:focus,.was-validated .custom-select:invalid:focus,.custom-select.is-invalid:focus{border-color:#ed6a5a;-webkit-box-shadow:0 0 0 .2rem rgba(237,106,90,.25);box-shadow:0 0 0 .2rem rgba(237,106,90,.25)}.was-validated .form-control:invalid~.invalid-feedback,.was-validated .form-control:invalid~.invalid-tooltip,.form-control.is-invalid~.invalid-feedback,.form-control.is-invalid~.invalid-tooltip,.was-validated .custom-select:invalid~.invalid-feedback,.was-validated .custom-select:invalid~.invalid-tooltip,.custom-select.is-invalid~.invalid-feedback,.custom-select.is-invalid~.invalid-tooltip{display:block}.was-validated .form-control-file:invalid~.invalid-feedback,.was-validated .form-control-file:invalid~.invalid-tooltip,.form-control-file.is-invalid~.invalid-feedback,.form-control-file.is-invalid~.invalid-tooltip{display:block}.was-validated .form-check-input:invalid~.form-check-label,.form-check-input.is-invalid~.form-check-label{color:#ed6a5a}.was-validated .form-check-input:invalid~.invalid-feedback,.was-validated .form-check-input:invalid~.invalid-tooltip,.form-check-input.is-invalid~.invalid-feedback,.form-check-input.is-invalid~.invalid-tooltip{display:block}.was-validated .custom-control-input:invalid~.custom-control-label,.custom-control-input.is-invalid~.custom-control-label{color:#ed6a5a}.was-validated .custom-control-input:invalid~.custom-control-label::before,.custom-control-input.is-invalid~.custom-control-label::before{background-color:#fad2cd}.was-validated .custom-control-input:invalid~.invalid-feedback,.was-validated .custom-control-input:invalid~.invalid-tooltip,.custom-control-input.is-invalid~.invalid-feedback,.custom-control-input.is-invalid~.invalid-tooltip{display:block}.was-validated .custom-control-input:invalid:checked~.custom-control-label::before,.custom-control-input.is-invalid:checked~.custom-control-label::before{background:#f29488 -webkit-gradient(linear,left top,left 
bottom,from(#f4a49a),to(#f29488))repeat-x;background:#f29488 -webkit-linear-gradient(top,#f4a49a,#f29488)repeat-x;background:#f29488 -o-linear-gradient(top,#f4a49a,#f29488)repeat-x;background:#f29488 linear-gradient(180deg,#f4a49a,#f29488)repeat-x}.was-validated .custom-control-input:invalid:focus~.custom-control-label::before,.custom-control-input.is-invalid:focus~.custom-control-label::before{-webkit-box-shadow:0 0 0 1px #fff,0 0 0 .2rem rgba(237,106,90,.25);box-shadow:0 0 0 1px #fff,0 0 0 .2rem rgba(237,106,90,.25)}.was-validated .custom-file-input:invalid~.custom-file-label,.custom-file-input.is-invalid~.custom-file-label{border-color:#ed6a5a}.was-validated .custom-file-input:invalid~.custom-file-label::after,.custom-file-input.is-invalid~.custom-file-label::after{border-color:inherit}.was-validated .custom-file-input:invalid~.invalid-feedback,.was-validated .custom-file-input:invalid~.invalid-tooltip,.custom-file-input.is-invalid~.invalid-feedback,.custom-file-input.is-invalid~.invalid-tooltip{display:block}.was-validated .custom-file-input:invalid:focus~.custom-file-label,.custom-file-input.is-invalid:focus~.custom-file-label{-webkit-box-shadow:0 0 0 .2rem rgba(237,106,90,.25);box-shadow:0 0 0 .2rem rgba(237,106,90,.25)}.form-inline{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-flow:row wrap;-ms-flex-flow:row wrap;flex-flow:row wrap;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center}.form-inline .form-check{width:100%}@media(min-width:576px){.form-inline label{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;margin-bottom:0}.form-inline .form-group{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-flex:0;-webkit-flex:0 0 auto;-ms-flex:0 0 auto;flex:none;-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-flow:row wrap;-ms-flex-flow:row wrap;flex-flow:row wrap;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;margin-bottom:0}.form-inline .form-control{display:inline-block;width:auto;vertical-align:middle}.form-inline .form-control-plaintext{display:inline-block}.form-inline .input-group,.form-inline .custom-select{width:auto}.form-inline .form-check{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;width:auto;padding-left:0}.form-inline .form-check-input{position:relative;margin-top:0;margin-right:.25rem;margin-left:0}.form-inline .custom-control{-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center}.form-inline .custom-control-label{margin-bottom:0}}.btn{display:inline-block;font-weight:400;text-align:center;white-space:nowrap;vertical-align:middle;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;border:1px solid transparent;padding:.375rem .75rem;font-size:1rem;line-height:1.5;border-radius:.25rem;-webkit-transition:color .15s 
ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;-o-transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:color .15s ease-in-out,background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out,-webkit-box-shadow .15s ease-in-out}@media screen and (prefers-reduced-motion:reduce){.btn{-webkit-transition:none;-o-transition:none;transition:none}}.btn:hover,.btn:focus{text-decoration:none}.btn:focus,.btn.focus{outline:0;-webkit-box-shadow:0 0 0 .2rem rgba(48,99,142,.25);box-shadow:0 0 0 .2rem rgba(48,99,142,.25)}.btn.disabled,.btn:disabled{opacity:.65;-webkit-box-shadow:none;box-shadow:none}.btn:not(:disabled):not(.disabled){cursor:pointer}.btn:not(:disabled):not(.disabled):active,.btn:not(:disabled):not(.disabled).active{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn:not(:disabled):not(.disabled):active:focus,.btn:not(:disabled):not(.disabled).active:focus{-webkit-box-shadow:0 0 0 .2rem rgba(48,99,142,.25),inset 0 3px 5px rgba(0,0,0,.125);box-shadow:0 0 0 .2rem rgba(48,99,142,.25),inset 0 3px 5px rgba(0,0,0,.125)}a.btn.disabled,fieldset:disabled a.btn{pointer-events:none}.btn-primary{color:#fff;background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x;background:#30638e -webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e -o-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x;border-color:#30638e;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075)}.btn-primary:hover{color:#fff;background:#264f71 -webkit-gradient(linear,left top,left bottom,from(#476987),to(#264f71))repeat-x;background:#264f71 -webkit-linear-gradient(top,#476987,#264f71)repeat-x;background:#264f71 -o-linear-gradient(top,#476987,#264f71)repeat-x;background:#264f71 linear-gradient(180deg,#476987,#264f71)repeat-x;border-color:#234868}.btn-primary:focus,.btn-primary.focus{-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(48,99,142,.5);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(48,99,142,.5)}.btn-primary.disabled,.btn-primary:disabled{color:#fff;background-color:#30638e;border-color:#30638e}.btn-primary:not(:disabled):not(.disabled):active,.btn-primary:not(:disabled):not(.disabled).active,.show>.btn-primary.dropdown-toggle{color:#fff;background-color:#234868;background-image:none;border-color:#20425e}.btn-primary:not(:disabled):not(.disabled):active:focus,.btn-primary:not(:disabled):not(.disabled).active:focus,.show>.btn-primary.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(48,99,142,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(48,99,142,.5)}.btn-secondary{color:#fff;background:#ffa630 -webkit-gradient(linear,left top,left bottom,from(#ffb34f),to(#FFA630))repeat-x;background:#ffa630 -webkit-linear-gradient(top,#ffb34f,#FFA630)repeat-x;background:#ffa630 
-o-linear-gradient(top,#ffb34f,#FFA630)repeat-x;background:#ffa630 linear-gradient(180deg,#ffb34f,#FFA630)repeat-x;border-color:#ffa630;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075)}.btn-secondary:hover{color:#fff;background:#ff960a -webkit-gradient(linear,left top,left bottom,from(#ffa52f),to(#ff960a))repeat-x;background:#ff960a -webkit-linear-gradient(top,#ffa52f,#ff960a)repeat-x;background:#ff960a -o-linear-gradient(top,#ffa52f,#ff960a)repeat-x;background:#ff960a linear-gradient(180deg,#ffa52f,#ff960a)repeat-x;border-color:#fc9000}.btn-secondary:focus,.btn-secondary.focus{-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(255,166,48,.5);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(255,166,48,.5)}.btn-secondary.disabled,.btn-secondary:disabled{color:#fff;background-color:#ffa630;border-color:#ffa630}.btn-secondary:not(:disabled):not(.disabled):active,.btn-secondary:not(:disabled):not(.disabled).active,.show>.btn-secondary.dropdown-toggle{color:#fff;background-color:#fc9000;background-image:none;border-color:#ef8800}.btn-secondary:not(:disabled):not(.disabled):active:focus,.btn-secondary:not(:disabled):not(.disabled).active:focus,.show>.btn-secondary.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(255,166,48,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(255,166,48,.5)}.btn-success{color:#fff;background:#3772ff -webkit-gradient(linear,left top,left bottom,from(#5587ff),to(#3772FF))repeat-x;background:#3772ff -webkit-linear-gradient(top,#5587ff,#3772FF)repeat-x;background:#3772ff -o-linear-gradient(top,#5587ff,#3772FF)repeat-x;background:#3772ff linear-gradient(180deg,#5587ff,#3772FF)repeat-x;border-color:#3772ff;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075)}.btn-success:hover{color:#fff;background:#1157ff -webkit-gradient(linear,left top,left bottom,from(#3470ff),to(#1157ff))repeat-x;background:#1157ff -webkit-linear-gradient(top,#3470ff,#1157ff)repeat-x;background:#1157ff -o-linear-gradient(top,#3470ff,#1157ff)repeat-x;background:#1157ff linear-gradient(180deg,#3470ff,#1157ff)repeat-x;border-color:#044eff}.btn-success:focus,.btn-success.focus{-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(55,114,255,.5);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(55,114,255,.5)}.btn-success.disabled,.btn-success:disabled{color:#fff;background-color:#3772ff;border-color:#3772ff}.btn-success:not(:disabled):not(.disabled):active,.btn-success:not(:disabled):not(.disabled).active,.show>.btn-success.dropdown-toggle{color:#fff;background-color:#044eff;background-image:none;border-color:#0049f6}.btn-success:not(:disabled):not(.disabled):active:focus,.btn-success:not(:disabled):not(.disabled).active:focus,.show>.btn-success.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(55,114,255,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(55,114,255,.5)}.btn-info{color:#222;background:#c0e0de -webkit-gradient(linear,left top,left bottom,from(#c9e5e3),to(#C0E0DE))repeat-x;background:#c0e0de -webkit-linear-gradient(top,#c9e5e3,#C0E0DE)repeat-x;background:#c0e0de 
-o-linear-gradient(top,#c9e5e3,#C0E0DE)repeat-x;background:#c0e0de linear-gradient(180deg,#c9e5e3,#C0E0DE)repeat-x;border-color:#c0e0de;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075)}.btn-info:hover{color:#fff;background:#a6d3d1 -webkit-gradient(linear,left top,left bottom,from(#b4dad8),to(#a6d3d1))repeat-x;background:#a6d3d1 -webkit-linear-gradient(top,#b4dad8,#a6d3d1)repeat-x;background:#a6d3d1 -o-linear-gradient(top,#b4dad8,#a6d3d1)repeat-x;background:#a6d3d1 linear-gradient(180deg,#b4dad8,#a6d3d1)repeat-x;border-color:#9ecfcc}.btn-info:focus,.btn-info.focus{-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(192,224,222,.5);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(192,224,222,.5)}.btn-info.disabled,.btn-info:disabled{color:#222;background-color:#c0e0de;border-color:#c0e0de}.btn-info:not(:disabled):not(.disabled):active,.btn-info:not(:disabled):not(.disabled).active,.show>.btn-info.dropdown-toggle{color:#fff;background-color:#9ecfcc;background-image:none;border-color:#95cbc8}.btn-info:not(:disabled):not(.disabled):active:focus,.btn-info:not(:disabled):not(.disabled).active:focus,.show>.btn-info.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(192,224,222,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(192,224,222,.5)}.btn-warning{color:#fff;background:#ed6a5a -webkit-gradient(linear,left top,left bottom,from(#f08073),to(#ED6A5A))repeat-x;background:#ed6a5a -webkit-linear-gradient(top,#f08073,#ED6A5A)repeat-x;background:#ed6a5a -o-linear-gradient(top,#f08073,#ED6A5A)repeat-x;background:#ed6a5a linear-gradient(180deg,#f08073,#ED6A5A)repeat-x;border-color:#ed6a5a;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075)}.btn-warning:hover{color:#fff;background:#e94b38 -webkit-gradient(linear,left top,left bottom,from(#ed6655),to(#e94b38))repeat-x;background:#e94b38 -webkit-linear-gradient(top,#ed6655,#e94b38)repeat-x;background:#e94b38 -o-linear-gradient(top,#ed6655,#e94b38)repeat-x;background:#e94b38 linear-gradient(180deg,#ed6655,#e94b38)repeat-x;border-color:#e8402c}.btn-warning:focus,.btn-warning.focus{-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(237,106,90,.5);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(237,106,90,.5)}.btn-warning.disabled,.btn-warning:disabled{color:#fff;background-color:#ed6a5a;border-color:#ed6a5a}.btn-warning:not(:disabled):not(.disabled):active,.btn-warning:not(:disabled):not(.disabled).active,.show>.btn-warning.dropdown-toggle{color:#fff;background-color:#e8402c;background-image:none;border-color:#e73621}.btn-warning:not(:disabled):not(.disabled):active:focus,.btn-warning:not(:disabled):not(.disabled).active:focus,.show>.btn-warning.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(237,106,90,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(237,106,90,.5)}.btn-danger{color:#fff;background:#ed6a5a -webkit-gradient(linear,left top,left bottom,from(#f08073),to(#ED6A5A))repeat-x;background:#ed6a5a -webkit-linear-gradient(top,#f08073,#ED6A5A)repeat-x;background:#ed6a5a -o-linear-gradient(top,#f08073,#ED6A5A)repeat-x;background:#ed6a5a 
linear-gradient(180deg,#f08073,#ED6A5A)repeat-x;border-color:#ed6a5a;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075)}.btn-danger:hover{color:#fff;background:#e94b38 -webkit-gradient(linear,left top,left bottom,from(#ed6655),to(#e94b38))repeat-x;background:#e94b38 -webkit-linear-gradient(top,#ed6655,#e94b38)repeat-x;background:#e94b38 -o-linear-gradient(top,#ed6655,#e94b38)repeat-x;background:#e94b38 linear-gradient(180deg,#ed6655,#e94b38)repeat-x;border-color:#e8402c}.btn-danger:focus,.btn-danger.focus{-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(237,106,90,.5);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(237,106,90,.5)}.btn-danger.disabled,.btn-danger:disabled{color:#fff;background-color:#ed6a5a;border-color:#ed6a5a}.btn-danger:not(:disabled):not(.disabled):active,.btn-danger:not(:disabled):not(.disabled).active,.show>.btn-danger.dropdown-toggle{color:#fff;background-color:#e8402c;background-image:none;border-color:#e73621}.btn-danger:not(:disabled):not(.disabled):active:focus,.btn-danger:not(:disabled):not(.disabled).active:focus,.show>.btn-danger.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(237,106,90,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(237,106,90,.5)}.btn-light{color:#222;background:#d3f3ee -webkit-gradient(linear,left top,left bottom,from(#daf5f1),to(#D3F3EE))repeat-x;background:#d3f3ee -webkit-linear-gradient(top,#daf5f1,#D3F3EE)repeat-x;background:#d3f3ee -o-linear-gradient(top,#daf5f1,#D3F3EE)repeat-x;background:#d3f3ee linear-gradient(180deg,#daf5f1,#D3F3EE)repeat-x;border-color:#d3f3ee;-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075)}.btn-light:hover{color:#222;background:#b5ebe2 -webkit-gradient(linear,left top,left bottom,from(#c0eee7),to(#b5ebe2))repeat-x;background:#b5ebe2 -webkit-linear-gradient(top,#c0eee7,#b5ebe2)repeat-x;background:#b5ebe2 -o-linear-gradient(top,#c0eee7,#b5ebe2)repeat-x;background:#b5ebe2 linear-gradient(180deg,#c0eee7,#b5ebe2)repeat-x;border-color:#abe8df}.btn-light:focus,.btn-light.focus{-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(211,243,238,.5);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(211,243,238,.5)}.btn-light.disabled,.btn-light:disabled{color:#222;background-color:#d3f3ee;border-color:#d3f3ee}.btn-light:not(:disabled):not(.disabled):active,.btn-light:not(:disabled):not(.disabled).active,.show>.btn-light.dropdown-toggle{color:#222;background-color:#abe8df;background-image:none;border-color:#a1e5db}.btn-light:not(:disabled):not(.disabled):active:focus,.btn-light:not(:disabled):not(.disabled).active:focus,.show>.btn-light.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(211,243,238,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(211,243,238,.5)}.btn-dark{color:#fff;background:#403f4c -webkit-gradient(linear,left top,left bottom,from(#5d5c67),to(#403F4C))repeat-x;background:#403f4c -webkit-linear-gradient(top,#5d5c67,#403F4C)repeat-x;background:#403f4c -o-linear-gradient(top,#5d5c67,#403F4C)repeat-x;background:#403f4c linear-gradient(180deg,#5d5c67,#403F4C)repeat-x;border-color:#403f4c;-webkit-box-shadow:inset 0 1px 
0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075)}.btn-dark:hover{color:#fff;background:#2e2e37 -webkit-gradient(linear,left top,left bottom,from(#4e4d55),to(#2e2e37))repeat-x;background:#2e2e37 -webkit-linear-gradient(top,#4e4d55,#2e2e37)repeat-x;background:#2e2e37 -o-linear-gradient(top,#4e4d55,#2e2e37)repeat-x;background:#2e2e37 linear-gradient(180deg,#4e4d55,#2e2e37)repeat-x;border-color:#292830}.btn-dark:focus,.btn-dark.focus{-webkit-box-shadow:inset 0 1px 0 rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(64,63,76,.5);box-shadow:inset 0 1px rgba(255,255,255,.15),0 1px 1px rgba(0,0,0,.075),0 0 0 .2rem rgba(64,63,76,.5)}.btn-dark.disabled,.btn-dark:disabled{color:#fff;background-color:#403f4c;border-color:#403f4c}.btn-dark:not(:disabled):not(.disabled):active,.btn-dark:not(:disabled):not(.disabled).active,.show>.btn-dark.dropdown-toggle{color:#fff;background-color:#292830;background-image:none;border-color:#232229}.btn-dark:not(:disabled):not(.disabled):active:focus,.btn-dark:not(:disabled):not(.disabled).active:focus,.show>.btn-dark.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(64,63,76,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(64,63,76,.5)}.btn-outline-primary{color:#30638e;background-color:transparent;background-image:none;border-color:#30638e}.btn-outline-primary:hover{color:#fff;background-color:#30638e;border-color:#30638e}.btn-outline-primary:focus,.btn-outline-primary.focus{-webkit-box-shadow:0 0 0 .2rem rgba(48,99,142,.5);box-shadow:0 0 0 .2rem rgba(48,99,142,.5)}.btn-outline-primary.disabled,.btn-outline-primary:disabled{color:#30638e;background-color:transparent}.btn-outline-primary:not(:disabled):not(.disabled):active,.btn-outline-primary:not(:disabled):not(.disabled).active,.show>.btn-outline-primary.dropdown-toggle{color:#fff;background-color:#30638e;border-color:#30638e}.btn-outline-primary:not(:disabled):not(.disabled):active:focus,.btn-outline-primary:not(:disabled):not(.disabled).active:focus,.show>.btn-outline-primary.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(48,99,142,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(48,99,142,.5)}.btn-outline-secondary{color:#ffa630;background-color:transparent;background-image:none;border-color:#ffa630}.btn-outline-secondary:hover{color:#fff;background-color:#ffa630;border-color:#ffa630}.btn-outline-secondary:focus,.btn-outline-secondary.focus{-webkit-box-shadow:0 0 0 .2rem rgba(255,166,48,.5);box-shadow:0 0 0 .2rem rgba(255,166,48,.5)}.btn-outline-secondary.disabled,.btn-outline-secondary:disabled{color:#ffa630;background-color:transparent}.btn-outline-secondary:not(:disabled):not(.disabled):active,.btn-outline-secondary:not(:disabled):not(.disabled).active,.show>.btn-outline-secondary.dropdown-toggle{color:#fff;background-color:#ffa630;border-color:#ffa630}.btn-outline-secondary:not(:disabled):not(.disabled):active:focus,.btn-outline-secondary:not(:disabled):not(.disabled).active:focus,.show>.btn-outline-secondary.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(255,166,48,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem 
rgba(255,166,48,.5)}.btn-outline-success{color:#3772ff;background-color:transparent;background-image:none;border-color:#3772ff}.btn-outline-success:hover{color:#fff;background-color:#3772ff;border-color:#3772ff}.btn-outline-success:focus,.btn-outline-success.focus{-webkit-box-shadow:0 0 0 .2rem rgba(55,114,255,.5);box-shadow:0 0 0 .2rem rgba(55,114,255,.5)}.btn-outline-success.disabled,.btn-outline-success:disabled{color:#3772ff;background-color:transparent}.btn-outline-success:not(:disabled):not(.disabled):active,.btn-outline-success:not(:disabled):not(.disabled).active,.show>.btn-outline-success.dropdown-toggle{color:#fff;background-color:#3772ff;border-color:#3772ff}.btn-outline-success:not(:disabled):not(.disabled):active:focus,.btn-outline-success:not(:disabled):not(.disabled).active:focus,.show>.btn-outline-success.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(55,114,255,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(55,114,255,.5)}.btn-outline-info{color:#c0e0de;background-color:transparent;background-image:none;border-color:#c0e0de}.btn-outline-info:hover{color:#222;background-color:#c0e0de;border-color:#c0e0de}.btn-outline-info:focus,.btn-outline-info.focus{-webkit-box-shadow:0 0 0 .2rem rgba(192,224,222,.5);box-shadow:0 0 0 .2rem rgba(192,224,222,.5)}.btn-outline-info.disabled,.btn-outline-info:disabled{color:#c0e0de;background-color:transparent}.btn-outline-info:not(:disabled):not(.disabled):active,.btn-outline-info:not(:disabled):not(.disabled).active,.show>.btn-outline-info.dropdown-toggle{color:#222;background-color:#c0e0de;border-color:#c0e0de}.btn-outline-info:not(:disabled):not(.disabled):active:focus,.btn-outline-info:not(:disabled):not(.disabled).active:focus,.show>.btn-outline-info.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(192,224,222,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(192,224,222,.5)}.btn-outline-warning{color:#ed6a5a;background-color:transparent;background-image:none;border-color:#ed6a5a}.btn-outline-warning:hover{color:#fff;background-color:#ed6a5a;border-color:#ed6a5a}.btn-outline-warning:focus,.btn-outline-warning.focus{-webkit-box-shadow:0 0 0 .2rem rgba(237,106,90,.5);box-shadow:0 0 0 .2rem rgba(237,106,90,.5)}.btn-outline-warning.disabled,.btn-outline-warning:disabled{color:#ed6a5a;background-color:transparent}.btn-outline-warning:not(:disabled):not(.disabled):active,.btn-outline-warning:not(:disabled):not(.disabled).active,.show>.btn-outline-warning.dropdown-toggle{color:#fff;background-color:#ed6a5a;border-color:#ed6a5a}.btn-outline-warning:not(:disabled):not(.disabled):active:focus,.btn-outline-warning:not(:disabled):not(.disabled).active:focus,.show>.btn-outline-warning.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(237,106,90,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(237,106,90,.5)}.btn-outline-danger{color:#ed6a5a;background-color:transparent;background-image:none;border-color:#ed6a5a}.btn-outline-danger:hover{color:#fff;background-color:#ed6a5a;border-color:#ed6a5a}.btn-outline-danger:focus,.btn-outline-danger.focus{-webkit-box-shadow:0 0 0 .2rem rgba(237,106,90,.5);box-shadow:0 0 0 .2rem 
rgba(237,106,90,.5)}.btn-outline-danger.disabled,.btn-outline-danger:disabled{color:#ed6a5a;background-color:transparent}.btn-outline-danger:not(:disabled):not(.disabled):active,.btn-outline-danger:not(:disabled):not(.disabled).active,.show>.btn-outline-danger.dropdown-toggle{color:#fff;background-color:#ed6a5a;border-color:#ed6a5a}.btn-outline-danger:not(:disabled):not(.disabled):active:focus,.btn-outline-danger:not(:disabled):not(.disabled).active:focus,.show>.btn-outline-danger.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(237,106,90,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(237,106,90,.5)}.btn-outline-light{color:#d3f3ee;background-color:transparent;background-image:none;border-color:#d3f3ee}.btn-outline-light:hover{color:#222;background-color:#d3f3ee;border-color:#d3f3ee}.btn-outline-light:focus,.btn-outline-light.focus{-webkit-box-shadow:0 0 0 .2rem rgba(211,243,238,.5);box-shadow:0 0 0 .2rem rgba(211,243,238,.5)}.btn-outline-light.disabled,.btn-outline-light:disabled{color:#d3f3ee;background-color:transparent}.btn-outline-light:not(:disabled):not(.disabled):active,.btn-outline-light:not(:disabled):not(.disabled).active,.show>.btn-outline-light.dropdown-toggle{color:#222;background-color:#d3f3ee;border-color:#d3f3ee}.btn-outline-light:not(:disabled):not(.disabled):active:focus,.btn-outline-light:not(:disabled):not(.disabled).active:focus,.show>.btn-outline-light.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(211,243,238,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(211,243,238,.5)}.btn-outline-dark{color:#403f4c;background-color:transparent;background-image:none;border-color:#403f4c}.btn-outline-dark:hover{color:#fff;background-color:#403f4c;border-color:#403f4c}.btn-outline-dark:focus,.btn-outline-dark.focus{-webkit-box-shadow:0 0 0 .2rem rgba(64,63,76,.5);box-shadow:0 0 0 .2rem rgba(64,63,76,.5)}.btn-outline-dark.disabled,.btn-outline-dark:disabled{color:#403f4c;background-color:transparent}.btn-outline-dark:not(:disabled):not(.disabled):active,.btn-outline-dark:not(:disabled):not(.disabled).active,.show>.btn-outline-dark.dropdown-toggle{color:#fff;background-color:#403f4c;border-color:#403f4c}.btn-outline-dark:not(:disabled):not(.disabled):active:focus,.btn-outline-dark:not(:disabled):not(.disabled).active:focus,.show>.btn-outline-dark.dropdown-toggle:focus{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(64,63,76,.5);box-shadow:inset 0 3px 5px rgba(0,0,0,.125),0 0 0 .2rem rgba(64,63,76,.5)}.btn-link{font-weight:400;color:#3176d9;background-color:transparent}.btn-link:hover{color:#1e53a0;text-decoration:none;background-color:transparent;border-color:transparent}.btn-link:focus,.btn-link.focus{text-decoration:none;border-color:transparent;-webkit-box-shadow:none;box-shadow:none}.btn-link:disabled,.btn-link.disabled{color:#888;pointer-events:none}.btn-lg,.btn-group-lg>.btn{padding:.5rem 1rem;font-size:1.25rem;line-height:1.5;border-radius:.3rem}.btn-sm,.btn-group-sm>.btn{padding:.25rem .5rem;font-size:.875rem;line-height:1.5;border-radius:.2rem}.btn-block{display:block;width:100%}.btn-block+.btn-block{margin-top:.5rem}input[type=submit].btn-block,input[type=reset].btn-block,input[type=button].btn-block{width:100%}.fade{-webkit-transition:opacity .15s linear;-o-transition:opacity .15s linear;transition:opacity .15s linear}@media screen and 
(prefers-reduced-motion:reduce){.fade{-webkit-transition:none;-o-transition:none;transition:none}}.fade:not(.show){opacity:0}.collapse:not(.show){display:none}.collapsing{position:relative;height:0;overflow:hidden;-webkit-transition:height .35s ease;-o-transition:height .35s ease;transition:height .35s ease}@media screen and (prefers-reduced-motion:reduce){.collapsing{-webkit-transition:none;-o-transition:none;transition:none}}.dropup,.dropright,.dropdown,.dropleft{position:relative}.dropdown-toggle::after{display:inline-block;width:0;height:0;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid;border-right:.3em solid transparent;border-bottom:0;border-left:.3em solid transparent}.dropdown-toggle:empty::after{margin-left:0}.dropdown-menu{position:absolute;top:100%;left:0;z-index:1000;display:none;float:left;min-width:10rem;padding:.5rem 0;margin:.125rem 0 0;font-size:1rem;color:#222;text-align:left;list-style:none;background-color:#fff;background-clip:padding-box;border:1px solid rgba(0,0,0,.15);border-radius:.25rem;-webkit-box-shadow:0 .5rem 1rem rgba(0,0,0,.175);box-shadow:0 .5rem 1rem rgba(0,0,0,.175)}.dropdown-menu-right{right:0;left:auto}.dropup .dropdown-menu{top:auto;bottom:100%;margin-top:0;margin-bottom:.125rem}.dropup .dropdown-toggle::after{display:inline-block;width:0;height:0;margin-left:.255em;vertical-align:.255em;content:"";border-top:0;border-right:.3em solid transparent;border-bottom:.3em solid;border-left:.3em solid transparent}.dropup .dropdown-toggle:empty::after{margin-left:0}.dropright .dropdown-menu{top:0;right:auto;left:100%;margin-top:0;margin-left:.125rem}.dropright .dropdown-toggle::after{display:inline-block;width:0;height:0;margin-left:.255em;vertical-align:.255em;content:"";border-top:.3em solid transparent;border-right:0;border-bottom:.3em solid transparent;border-left:.3em solid}.dropright .dropdown-toggle:empty::after{margin-left:0}.dropright .dropdown-toggle::after{vertical-align:0}.dropleft .dropdown-menu{top:0;right:100%;left:auto;margin-top:0;margin-right:.125rem}.dropleft .dropdown-toggle::after{display:inline-block;width:0;height:0;margin-left:.255em;vertical-align:.255em;content:""}.dropleft .dropdown-toggle::after{display:none}.dropleft .dropdown-toggle::before{display:inline-block;width:0;height:0;margin-right:.255em;vertical-align:.255em;content:"";border-top:.3em solid transparent;border-right:.3em solid;border-bottom:.3em solid transparent}.dropleft .dropdown-toggle:empty::after{margin-left:0}.dropleft .dropdown-toggle::before{vertical-align:0}.dropdown-menu[x-placement^=top],.dropdown-menu[x-placement^=right],.dropdown-menu[x-placement^=bottom],.dropdown-menu[x-placement^=left]{right:auto;bottom:auto}.dropdown-divider{height:0;margin:.5rem 0;overflow:hidden;border-top:1px solid #eee}.dropdown-item{display:block;width:100%;padding:.25rem 1.5rem;clear:both;font-weight:400;color:#222;text-align:inherit;white-space:nowrap;background-color:transparent;border:0}.dropdown-item:hover,.dropdown-item:focus{color:#151515;text-decoration:none;background:#f8f9fa -webkit-gradient(linear,left top,left bottom,from(#f9fafb),to(#f8f9fa))repeat-x;background:#f8f9fa -webkit-linear-gradient(top,#f9fafb,#f8f9fa)repeat-x;background:#f8f9fa -o-linear-gradient(top,#f9fafb,#f8f9fa)repeat-x;background:#f8f9fa linear-gradient(180deg,#f9fafb,#f8f9fa)repeat-x}.dropdown-item.active,.dropdown-item:active{color:#fff;text-decoration:none;background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x;background:#30638e 
-webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e -o-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x}.dropdown-item.disabled,.dropdown-item:disabled{color:#888;background-color:transparent;background-image:none}.dropdown-menu.show{display:block}.dropdown-header{display:block;padding:.5rem 1.5rem;margin-bottom:0;font-size:.875rem;color:#888;white-space:nowrap}.dropdown-item-text{display:block;padding:.25rem 1.5rem;color:#222}.btn-group,.btn-group-vertical{position:relative;display:-webkit-inline-box;display:-webkit-inline-flex;display:-ms-inline-flexbox;display:inline-flex;vertical-align:middle}.btn-group>.btn,.btn-group-vertical>.btn{position:relative;-webkit-box-flex:0;-webkit-flex:0 1 auto;-ms-flex:0 1 auto;flex:initial}.btn-group>.btn:hover,.btn-group-vertical>.btn:hover{z-index:1}.btn-group>.btn:focus,.btn-group>.btn:active,.btn-group>.btn.active,.btn-group-vertical>.btn:focus,.btn-group-vertical>.btn:active,.btn-group-vertical>.btn.active{z-index:1}.btn-group .btn+.btn,.btn-group .btn+.btn-group,.btn-group .btn-group+.btn,.btn-group .btn-group+.btn-group,.btn-group-vertical .btn+.btn,.btn-group-vertical .btn+.btn-group,.btn-group-vertical .btn-group+.btn,.btn-group-vertical .btn-group+.btn-group{margin-left:-1px}.btn-toolbar{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;-webkit-box-pack:start;-webkit-justify-content:flex-start;-ms-flex-pack:start;justify-content:flex-start}.btn-toolbar .input-group{width:auto}.btn-group>.btn:first-child{margin-left:0}.btn-group>.btn:not(:last-child):not(.dropdown-toggle),.btn-group>.btn-group:not(:last-child)>.btn{border-top-right-radius:0;border-bottom-right-radius:0}.btn-group>.btn:not(:first-child),.btn-group>.btn-group:not(:first-child)>.btn{border-top-left-radius:0;border-bottom-left-radius:0}.dropdown-toggle-split{padding-right:.5625rem;padding-left:.5625rem}.dropdown-toggle-split::after,.dropup .dropdown-toggle-split::after,.dropright .dropdown-toggle-split::after{margin-left:0}.dropleft .dropdown-toggle-split::before{margin-right:0}.btn-sm+.dropdown-toggle-split,.btn-group-sm>.btn+.dropdown-toggle-split{padding-right:.375rem;padding-left:.375rem}.btn-lg+.dropdown-toggle-split,.btn-group-lg>.btn+.dropdown-toggle-split{padding-right:.75rem;padding-left:.75rem}.btn-group.show .dropdown-toggle{-webkit-box-shadow:inset 0 3px 5px rgba(0,0,0,.125);box-shadow:inset 0 3px 5px rgba(0,0,0,.125)}.btn-group.show .dropdown-toggle.btn-link{-webkit-box-shadow:none;box-shadow:none}.btn-group-vertical{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-align:start;-webkit-align-items:flex-start;-ms-flex-align:start;align-items:flex-start;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center}.btn-group-vertical .btn,.btn-group-vertical 
.btn-group{width:100%}.btn-group-vertical>.btn+.btn,.btn-group-vertical>.btn+.btn-group,.btn-group-vertical>.btn-group+.btn,.btn-group-vertical>.btn-group+.btn-group{margin-top:-1px;margin-left:0}.btn-group-vertical>.btn:not(:last-child):not(.dropdown-toggle),.btn-group-vertical>.btn-group:not(:last-child)>.btn{border-bottom-right-radius:0;border-bottom-left-radius:0}.btn-group-vertical>.btn:not(:first-child),.btn-group-vertical>.btn-group:not(:first-child)>.btn{border-top-left-radius:0;border-top-right-radius:0}.btn-group-toggle>.btn,.btn-group-toggle>.btn-group>.btn{margin-bottom:0}.btn-group-toggle>.btn input[type=radio],.btn-group-toggle>.btn input[type=checkbox],.btn-group-toggle>.btn-group>.btn input[type=radio],.btn-group-toggle>.btn-group>.btn input[type=checkbox]{position:absolute;clip:rect(0,0,0,0);pointer-events:none}.input-group{position:relative;display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;-webkit-box-align:stretch;-webkit-align-items:stretch;-ms-flex-align:stretch;align-items:stretch;width:100%}.input-group>.form-control,.input-group>.custom-select,.input-group>.custom-file{position:relative;-webkit-box-flex:1;-webkit-flex:1 1 auto;-ms-flex:1 1 auto;flex:auto;width:1%;margin-bottom:0}.input-group>.form-control+.form-control,.input-group>.form-control+.custom-select,.input-group>.form-control+.custom-file,.input-group>.custom-select+.form-control,.input-group>.custom-select+.custom-select,.input-group>.custom-select+.custom-file,.input-group>.custom-file+.form-control,.input-group>.custom-file+.custom-select,.input-group>.custom-file+.custom-file{margin-left:-1px}.input-group>.form-control:focus,.input-group>.custom-select:focus,.input-group>.custom-file .custom-file-input:focus~.custom-file-label{z-index:3}.input-group>.custom-file .custom-file-input:focus{z-index:4}.input-group>.form-control:not(:last-child),.input-group>.custom-select:not(:last-child){border-top-right-radius:0;border-bottom-right-radius:0}.input-group>.form-control:not(:first-child),.input-group>.custom-select:not(:first-child){border-top-left-radius:0;border-bottom-left-radius:0}.input-group>.custom-file{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center}.input-group>.custom-file:not(:last-child) .custom-file-label,.input-group>.custom-file:not(:last-child) .custom-file-label::after{border-top-right-radius:0;border-bottom-right-radius:0}.input-group>.custom-file:not(:first-child) .custom-file-label{border-top-left-radius:0;border-bottom-left-radius:0}.input-group-prepend,.input-group-append{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex}.input-group-prepend .btn,.input-group-append .btn{position:relative;z-index:2}.input-group-prepend .btn+.btn,.input-group-prepend .btn+.input-group-text,.input-group-prepend .input-group-text+.input-group-text,.input-group-prepend .input-group-text+.btn,.input-group-append .btn+.btn,.input-group-append .btn+.input-group-text,.input-group-append .input-group-text+.input-group-text,.input-group-append .input-group-text+.btn{margin-left:-1px}.input-group-prepend{margin-right:-1px}.input-group-append{margin-left:-1px}.input-group-text{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;padding:.375rem 
.75rem;margin-bottom:0;font-size:1rem;font-weight:400;line-height:1.5;color:#495057;text-align:center;white-space:nowrap;background-color:#eee;border:1px solid #ccc;border-radius:.25rem}.input-group-text input[type=radio],.input-group-text input[type=checkbox]{margin-top:0}.input-group-lg>.form-control,.input-group-lg>.input-group-prepend>.input-group-text,.input-group-lg>.input-group-append>.input-group-text,.input-group-lg>.input-group-prepend>.btn,.input-group-lg>.input-group-append>.btn{height:-webkit-calc(2.875rem + 2px);height:calc(2.875rem + 2px);padding:.5rem 1rem;font-size:1.25rem;line-height:1.5;border-radius:.3rem}.input-group-sm>.form-control,.input-group-sm>.input-group-prepend>.input-group-text,.input-group-sm>.input-group-append>.input-group-text,.input-group-sm>.input-group-prepend>.btn,.input-group-sm>.input-group-append>.btn{height:-webkit-calc(1.8125rem + 2px);height:calc(1.8125rem + 2px);padding:.25rem .5rem;font-size:.875rem;line-height:1.5;border-radius:.2rem}.input-group>.input-group-prepend>.btn,.input-group>.input-group-prepend>.input-group-text,.input-group>.input-group-append:not(:last-child)>.btn,.input-group>.input-group-append:not(:last-child)>.input-group-text,.input-group>.input-group-append:last-child>.btn:not(:last-child):not(.dropdown-toggle),.input-group>.input-group-append:last-child>.input-group-text:not(:last-child){border-top-right-radius:0;border-bottom-right-radius:0}.input-group>.input-group-append>.btn,.input-group>.input-group-append>.input-group-text,.input-group>.input-group-prepend:not(:first-child)>.btn,.input-group>.input-group-prepend:not(:first-child)>.input-group-text,.input-group>.input-group-prepend:first-child>.btn:not(:first-child),.input-group>.input-group-prepend:first-child>.input-group-text:not(:first-child){border-top-left-radius:0;border-bottom-left-radius:0}.custom-control{position:relative;display:block;min-height:1.5rem;padding-left:1.5rem}.custom-control-inline{display:-webkit-inline-box;display:-webkit-inline-flex;display:-ms-inline-flexbox;display:inline-flex;margin-right:1rem}.custom-control-input{position:absolute;z-index:-1;opacity:0}.custom-control-input:checked~.custom-control-label::before{color:#fff;background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x;background:#30638e -webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e -o-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x;-webkit-box-shadow:none;box-shadow:none}.custom-control-input:focus~.custom-control-label::before{-webkit-box-shadow:0 0 0 1px #fff,0 0 0 .2rem rgba(48,99,142,.25);box-shadow:0 0 0 1px #fff,0 0 0 .2rem rgba(48,99,142,.25)}.custom-control-input:active~.custom-control-label::before{color:#fff;background-color:#95bbdb;-webkit-box-shadow:none;box-shadow:none}.custom-control-input:disabled~.custom-control-label{color:#888}.custom-control-input:disabled~.custom-control-label::before{background-color:#eee}.custom-control-label{position:relative;margin-bottom:0}.custom-control-label::before{position:absolute;top:.25rem;left:-1.5rem;display:block;width:1rem;height:1rem;pointer-events:none;content:"";-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;background-color:#dee2e6;-webkit-box-shadow:inset 0 .25rem .25rem rgba(0,0,0,.1);box-shadow:inset 0 .25rem .25rem 
rgba(0,0,0,.1)}.custom-control-label::after{position:absolute;top:.25rem;left:-1.5rem;display:block;width:1rem;height:1rem;content:"";background-repeat:no-repeat;background-position:50%;-webkit-background-size:50% 50%;background-size:50% 50%}.custom-checkbox .custom-control-label::before{border-radius:.25rem}.custom-checkbox .custom-control-input:checked~.custom-control-label::before{background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x;background:#30638e -webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e -o-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x}.custom-checkbox .custom-control-input:checked~.custom-control-label::after{background-image:url("data:image/svg+xml;charset=utf8,%3Csvg xmlns=%22http://www.w3.org/2000/svg%22 viewBox=%220 0 8 8%22%3E%3Cpath fill=%22%23fff%22 d=%22M6.564.75l-3.59 3.612-1.538-1.55L0 4.26 2.974 7.25 8 2.193z%22/%3E%3C/svg%3E")}.custom-checkbox .custom-control-input:indeterminate~.custom-control-label::before{background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x;background:#30638e -webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e -o-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x;-webkit-box-shadow:none;box-shadow:none}.custom-checkbox .custom-control-input:indeterminate~.custom-control-label::after{background-image:url("data:image/svg+xml;charset=utf8,%3Csvg xmlns=%22http://www.w3.org/2000/svg%22 viewBox=%220 0 4 4%22%3E%3Cpath stroke=%22%23fff%22 d=%22M0 2h4%22/%3E%3C/svg%3E")}.custom-checkbox .custom-control-input:disabled:checked~.custom-control-label::before{background-color:rgba(48,99,142,.5)}.custom-checkbox .custom-control-input:disabled:indeterminate~.custom-control-label::before{background-color:rgba(48,99,142,.5)}.custom-radio .custom-control-label::before{border-radius:50%}.custom-radio .custom-control-input:checked~.custom-control-label::before{background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x;background:#30638e -webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e -o-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x}.custom-radio .custom-control-input:checked~.custom-control-label::after{background-image:url("data:image/svg+xml;charset=utf8,%3Csvg xmlns=%22http://www.w3.org/2000/svg%22 viewBox=%22-4 -4 8 8%22%3E%3Ccircle r=%223%22 fill=%22%23fff%22/%3E%3C/svg%3E")}.custom-radio .custom-control-input:disabled:checked~.custom-control-label::before{background-color:rgba(48,99,142,.5)}.custom-select{display:inline-block;width:100%;height:-webkit-calc(2.25rem + 2px);height:calc(2.25rem + 2px);padding:.375rem 1.75rem .375rem .75rem;line-height:1.5;color:#495057;vertical-align:middle;background:#fff url("data:image/svg+xml;charset=utf8,%3Csvg xmlns=%22http://www.w3.org/2000/svg%22 viewBox=%220 0 4 5%22%3E%3Cpath fill=%22%23333%22 d=%22M2 0 0 2h4zm0 5L0 3h4z%22/%3E%3C/svg%3E")no-repeat right .75rem center;-webkit-background-size:8px 10px;background-size:8px 10px;border:1px solid #ccc;border-radius:.25rem;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,.075);box-shadow:inset 0 1px 2px rgba(0,0,0,.075);-webkit-appearance:none;-moz-appearance:none;appearance:none}.custom-select:focus{border-color:#6fa3ce;outline:0;-webkit-box-shadow:inset 0 1px 2px rgba(0,0,0,.075),0 0 0 
.2rem rgba(111,163,206,.5);box-shadow:inset 0 1px 2px rgba(0,0,0,.075),0 0 0 .2rem rgba(111,163,206,.5)}.custom-select:focus::-ms-value{color:#495057;background-color:#fff}.custom-select[multiple],.custom-select[size]:not([size="1"]){height:auto;padding-right:.75rem;background-image:none}.custom-select:disabled{color:#888;background-color:#eee}.custom-select::-ms-expand{opacity:0}.custom-select-sm{height:-webkit-calc(1.8125rem + 2px);height:calc(1.8125rem + 2px);padding-top:.375rem;padding-bottom:.375rem;font-size:75%}.custom-select-lg{height:-webkit-calc(2.875rem + 2px);height:calc(2.875rem + 2px);padding-top:.375rem;padding-bottom:.375rem;font-size:125%}.custom-file{position:relative;display:inline-block;width:100%;height:-webkit-calc(2.25rem + 2px);height:calc(2.25rem + 2px);margin-bottom:0}.custom-file-input{position:relative;z-index:2;width:100%;height:-webkit-calc(2.25rem + 2px);height:calc(2.25rem + 2px);margin:0;opacity:0}.custom-file-input:focus~.custom-file-label{border-color:#6fa3ce;-webkit-box-shadow:0 0 0 .2rem rgba(48,99,142,.25);box-shadow:0 0 0 .2rem rgba(48,99,142,.25)}.custom-file-input:focus~.custom-file-label::after{border-color:#6fa3ce}.custom-file-input:disabled~.custom-file-label{background-color:#eee}.custom-file-input:lang(en)~.custom-file-label::after{content:"Browse"}.custom-file-label{position:absolute;top:0;right:0;left:0;z-index:1;height:-webkit-calc(2.25rem + 2px);height:calc(2.25rem + 2px);padding:.375rem .75rem;line-height:1.5;color:#495057;background-color:#fff;border:1px solid #ccc;border-radius:.25rem;-webkit-box-shadow:inset 0 1px 1px rgba(0,0,0,.075);box-shadow:inset 0 1px 1px rgba(0,0,0,.075)}.custom-file-label::after{position:absolute;top:0;right:0;bottom:0;z-index:3;display:block;height:2.25rem;padding:.375rem .75rem;line-height:1.5;color:#495057;content:"Browse";background:#eee -webkit-gradient(linear,left top,left bottom,from(#f1f1f1),to(#eee))repeat-x;background:#eee -webkit-linear-gradient(top,#f1f1f1,#eee)repeat-x;background:#eee -o-linear-gradient(top,#f1f1f1,#eee)repeat-x;background:#eee linear-gradient(180deg,#f1f1f1,#eee)repeat-x;border-left:1px solid #ccc;border-radius:0 .25rem .25rem 0}.custom-range{width:100%;padding-left:0;background-color:transparent;-webkit-appearance:none;-moz-appearance:none;appearance:none}.custom-range:focus{outline:none}.custom-range:focus::-webkit-slider-thumb{-webkit-box-shadow:0 0 0 1px #fff,0 0 0 .2rem rgba(48,99,142,.25);box-shadow:0 0 0 1px #fff,0 0 0 .2rem rgba(48,99,142,.25)}.custom-range:focus::-moz-range-thumb{box-shadow:0 0 0 1px #fff,0 0 0 .2rem rgba(48,99,142,.25)}.custom-range:focus::-ms-thumb{box-shadow:0 0 0 1px #fff,0 0 0 .2rem rgba(48,99,142,.25)}.custom-range::-moz-focus-outer{border:0}.custom-range::-webkit-slider-thumb{width:1rem;height:1rem;margin-top:-.25rem;background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x;background:#30638e -webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x;border:0;border-radius:1rem;-webkit-box-shadow:0 .1rem .25rem rgba(0,0,0,.1);box-shadow:0 .1rem .25rem rgba(0,0,0,.1);-webkit-transition:background-color .15s ease-in-out,border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;-o-transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:background-color .15s 
ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;-webkit-appearance:none;appearance:none}@media screen and (prefers-reduced-motion:reduce){.custom-range::-webkit-slider-thumb{-webkit-transition:none;-o-transition:none;transition:none}}.custom-range::-webkit-slider-thumb:active{background:#95bbdb -webkit-gradient(linear,left top,left bottom,from(#a5c5e1),to(#95bbdb))repeat-x;background:#95bbdb -webkit-linear-gradient(top,#a5c5e1,#95bbdb)repeat-x;background:#95bbdb linear-gradient(180deg,#a5c5e1,#95bbdb)repeat-x}.custom-range::-webkit-slider-runnable-track{width:100%;height:.5rem;color:transparent;cursor:pointer;background-color:#dee2e6;border-color:transparent;border-radius:1rem;-webkit-box-shadow:inset 0 .25rem .25rem rgba(0,0,0,.1);box-shadow:inset 0 .25rem .25rem rgba(0,0,0,.1)}.custom-range::-moz-range-thumb{width:1rem;height:1rem;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x;border:0;border-radius:1rem;box-shadow:0 .1rem .25rem rgba(0,0,0,.1);-webkit-transition:background-color .15s ease-in-out,border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;-o-transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;-moz-appearance:none;appearance:none}@media screen and (prefers-reduced-motion:reduce){.custom-range::-moz-range-thumb{-webkit-transition:none;-o-transition:none;transition:none}}.custom-range::-moz-range-thumb:active{background:#95bbdb linear-gradient(180deg,#a5c5e1,#95bbdb)repeat-x}.custom-range::-moz-range-track{width:100%;height:.5rem;color:transparent;cursor:pointer;background-color:#dee2e6;border-color:transparent;border-radius:1rem;box-shadow:inset 0 .25rem .25rem rgba(0,0,0,.1)}.custom-range::-ms-thumb{width:1rem;height:1rem;margin-top:0;margin-right:.2rem;margin-left:.2rem;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x;border:0;border-radius:1rem;box-shadow:0 .1rem .25rem rgba(0,0,0,.1);-webkit-transition:background-color .15s ease-in-out,border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;-o-transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;appearance:none}@media screen and (prefers-reduced-motion:reduce){.custom-range::-ms-thumb{-webkit-transition:none;-o-transition:none;transition:none}}.custom-range::-ms-thumb:active{background:#95bbdb linear-gradient(180deg,#a5c5e1,#95bbdb)repeat-x}.custom-range::-ms-track{width:100%;height:.5rem;color:transparent;cursor:pointer;background-color:transparent;border-color:transparent;border-width:.5rem;box-shadow:inset 0 .25rem .25rem 
rgba(0,0,0,.1)}.custom-range::-ms-fill-lower{background-color:#dee2e6;border-radius:1rem}.custom-range::-ms-fill-upper{margin-right:15px;background-color:#dee2e6;border-radius:1rem}.custom-control-label::before,.custom-file-label,.custom-select{-webkit-transition:background-color .15s ease-in-out,border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,-webkit-box-shadow .15s ease-in-out;-o-transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out;transition:background-color .15s ease-in-out,border-color .15s ease-in-out,box-shadow .15s ease-in-out,-webkit-box-shadow .15s ease-in-out}@media screen and (prefers-reduced-motion:reduce){.custom-control-label::before,.custom-file-label,.custom-select{-webkit-transition:none;-o-transition:none;transition:none}}.nav{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;padding-left:0;margin-bottom:0;list-style:none}.nav-link{display:block;padding:.5rem 1rem}.nav-link:hover,.nav-link:focus{text-decoration:none}.nav-link.disabled{color:#888}.nav-tabs{border-bottom:1px solid #dee2e6}.nav-tabs .nav-item{margin-bottom:-1px}.nav-tabs .nav-link{border:1px solid transparent;border-top-left-radius:.25rem;border-top-right-radius:.25rem}.nav-tabs .nav-link:hover,.nav-tabs .nav-link:focus{border-color:#eee #eee #dee2e6}.nav-tabs .nav-link.disabled{color:#888;background-color:transparent;border-color:transparent}.nav-tabs .nav-link.active,.nav-tabs .nav-item.show .nav-link{color:#495057;background-color:#fff;border-color:#dee2e6 #dee2e6 #fff}.nav-tabs .dropdown-menu{margin-top:-1px;border-top-left-radius:0;border-top-right-radius:0}.nav-pills .nav-link{border-radius:.25rem}.nav-pills .nav-link.active,.nav-pills .show>.nav-link{color:#fff;background-color:#30638e}.nav-fill .nav-item{-webkit-box-flex:1;-webkit-flex:1 1 auto;-ms-flex:1 1 auto;flex:auto;text-align:center}.nav-justified .nav-item{-webkit-flex-basis:0;-ms-flex-preferred-size:0;flex-basis:0;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;text-align:center}.tab-content>.tab-pane{display:none}.tab-content>.active{display:block}.navbar{position:relative;display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between;padding:.5rem 
1rem}.navbar>.container,.navbar>.container-fluid{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between}.navbar-brand{display:inline-block;padding-top:.3125rem;padding-bottom:.3125rem;margin-right:1rem;font-size:1.25rem;line-height:inherit;white-space:nowrap}.navbar-brand:hover,.navbar-brand:focus{text-decoration:none}.navbar-nav{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;padding-left:0;margin-bottom:0;list-style:none}.navbar-nav .nav-link{padding-right:0;padding-left:0}.navbar-nav .dropdown-menu{position:static;float:none}.navbar-text{display:inline-block;padding-top:.5rem;padding-bottom:.5rem}.navbar-collapse{-webkit-flex-basis:100%;-ms-flex-preferred-size:100%;flex-basis:100%;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center}.navbar-toggler{padding:.25rem .75rem;font-size:1.25rem;line-height:1;background-color:transparent;border:1px solid transparent;border-radius:.25rem}.navbar-toggler:hover,.navbar-toggler:focus{text-decoration:none}.navbar-toggler:not(:disabled):not(.disabled){cursor:pointer}.navbar-toggler-icon{display:inline-block;width:1.5em;height:1.5em;vertical-align:middle;content:"";background:no-repeat 50%;-webkit-background-size:100% 100%;background-size:100% 100%}@media(max-width:575.98px){.navbar-expand-sm>.container,.navbar-expand-sm>.container-fluid{padding-right:0;padding-left:0}}@media(min-width:576px){.navbar-expand-sm{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-flow:row nowrap;-ms-flex-flow:row nowrap;flex-flow:row nowrap;-webkit-box-pack:start;-webkit-justify-content:flex-start;-ms-flex-pack:start;justify-content:flex-start}.navbar-expand-sm .navbar-nav{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row}.navbar-expand-sm .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-sm .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-sm>.container,.navbar-expand-sm>.container-fluid{-webkit-flex-wrap:nowrap;-ms-flex-wrap:nowrap;flex-wrap:nowrap}.navbar-expand-sm .navbar-collapse{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important;-webkit-flex-basis:auto;-ms-flex-preferred-size:auto;flex-basis:auto}.navbar-expand-sm .navbar-toggler{display:none}}@media(max-width:767.98px){.navbar-expand-md>.container,.navbar-expand-md>.container-fluid{padding-right:0;padding-left:0}}@media(min-width:768px){.navbar-expand-md{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-flow:row nowrap;-ms-flex-flow:row nowrap;flex-flow:row nowrap;-webkit-box-pack:start;-webkit-justify-content:flex-start;-ms-flex-pack:start;justify-content:flex-start}.navbar-expand-md .navbar-nav{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row}.navbar-expand-md .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-md .navbar-nav 
.nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-md>.container,.navbar-expand-md>.container-fluid{-webkit-flex-wrap:nowrap;-ms-flex-wrap:nowrap;flex-wrap:nowrap}.navbar-expand-md .navbar-collapse{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important;-webkit-flex-basis:auto;-ms-flex-preferred-size:auto;flex-basis:auto}.navbar-expand-md .navbar-toggler{display:none}}@media(max-width:991.98px){.navbar-expand-lg>.container,.navbar-expand-lg>.container-fluid{padding-right:0;padding-left:0}}@media(min-width:992px){.navbar-expand-lg{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-flow:row nowrap;-ms-flex-flow:row nowrap;flex-flow:row nowrap;-webkit-box-pack:start;-webkit-justify-content:flex-start;-ms-flex-pack:start;justify-content:flex-start}.navbar-expand-lg .navbar-nav{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row}.navbar-expand-lg .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-lg .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-lg>.container,.navbar-expand-lg>.container-fluid{-webkit-flex-wrap:nowrap;-ms-flex-wrap:nowrap;flex-wrap:nowrap}.navbar-expand-lg .navbar-collapse{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important;-webkit-flex-basis:auto;-ms-flex-preferred-size:auto;flex-basis:auto}.navbar-expand-lg .navbar-toggler{display:none}}@media(max-width:1199.98px){.navbar-expand-xl>.container,.navbar-expand-xl>.container-fluid{padding-right:0;padding-left:0}}@media(min-width:1200px){.navbar-expand-xl{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-flow:row nowrap;-ms-flex-flow:row nowrap;flex-flow:row nowrap;-webkit-box-pack:start;-webkit-justify-content:flex-start;-ms-flex-pack:start;justify-content:flex-start}.navbar-expand-xl .navbar-nav{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row}.navbar-expand-xl .navbar-nav .dropdown-menu{position:absolute}.navbar-expand-xl .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand-xl>.container,.navbar-expand-xl>.container-fluid{-webkit-flex-wrap:nowrap;-ms-flex-wrap:nowrap;flex-wrap:nowrap}.navbar-expand-xl .navbar-collapse{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important;-webkit-flex-basis:auto;-ms-flex-preferred-size:auto;flex-basis:auto}.navbar-expand-xl .navbar-toggler{display:none}}.navbar-expand{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-flow:row nowrap;-ms-flex-flow:row nowrap;flex-flow:row nowrap;-webkit-box-pack:start;-webkit-justify-content:flex-start;-ms-flex-pack:start;justify-content:flex-start}.navbar-expand>.container,.navbar-expand>.container-fluid{padding-right:0;padding-left:0}.navbar-expand .navbar-nav{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row}.navbar-expand .navbar-nav .dropdown-menu{position:absolute}.navbar-expand .navbar-nav .nav-link{padding-right:.5rem;padding-left:.5rem}.navbar-expand>.container,.navbar-expand>.container-fluid{-webkit-flex-wrap:nowrap;-ms-flex-wrap:nowrap;flex-wrap:nowrap}.navbar-expand 
.navbar-collapse{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important;-webkit-flex-basis:auto;-ms-flex-preferred-size:auto;flex-basis:auto}.navbar-expand .navbar-toggler{display:none}.navbar-light .navbar-brand{color:rgba(0,0,0,.9)}.navbar-light .navbar-brand:hover,.navbar-light .navbar-brand:focus{color:rgba(0,0,0,.9)}.navbar-light .navbar-nav .nav-link{color:rgba(0,0,0,.5)}.navbar-light .navbar-nav .nav-link:hover,.navbar-light .navbar-nav .nav-link:focus{color:rgba(0,0,0,.7)}.navbar-light .navbar-nav .nav-link.disabled{color:rgba(0,0,0,.3)}.navbar-light .navbar-nav .show>.nav-link,.navbar-light .navbar-nav .active>.nav-link,.navbar-light .navbar-nav .nav-link.show,.navbar-light .navbar-nav .nav-link.active{color:rgba(0,0,0,.9)}.navbar-light .navbar-toggler{color:rgba(0,0,0,.5);border-color:rgba(0,0,0,.1)}.navbar-light .navbar-toggler-icon{background-image:url("data:image/svg+xml;charset=utf8,%3Csvg viewBox=%220 0 30 30%22 xmlns=%22http://www.w3.org/2000/svg%22%3E%3Cpath stroke=%22rgba(0, 0, 0, 0.5)%22 stroke-width=%222%22 stroke-linecap=%22round%22 stroke-miterlimit=%2210%22 d=%22M4 7h22M4 15h22M4 23h22%22/%3E%3C/svg%3E")}.navbar-light .navbar-text{color:rgba(0,0,0,.5)}.navbar-light .navbar-text a{color:rgba(0,0,0,.9)}.navbar-light .navbar-text a:hover,.navbar-light .navbar-text a:focus{color:rgba(0,0,0,.9)}.navbar-dark .navbar-brand{color:#fff}.navbar-dark .navbar-brand:hover,.navbar-dark .navbar-brand:focus{color:#fff}.navbar-dark .navbar-nav .nav-link{color:rgba(255,255,255,.75)}.navbar-dark .navbar-nav .nav-link:hover,.navbar-dark .navbar-nav .nav-link:focus{color:rgba(255,255,255,.5)}.navbar-dark .navbar-nav .nav-link.disabled{color:rgba(255,255,255,.25)}.navbar-dark .navbar-nav .show>.nav-link,.navbar-dark .navbar-nav .active>.nav-link,.navbar-dark .navbar-nav .nav-link.show,.navbar-dark .navbar-nav .nav-link.active{color:#fff}.navbar-dark .navbar-toggler{color:rgba(255,255,255,.75);border-color:rgba(255,255,255,.1)}.navbar-dark .navbar-toggler-icon{background-image:url("data:image/svg+xml;charset=utf8,%3Csvg viewBox=%220 0 30 30%22 xmlns=%22http://www.w3.org/2000/svg%22%3E%3Cpath stroke=%22rgba(255, 255, 255, 0.75)%22 stroke-width=%222%22 stroke-linecap=%22round%22 stroke-miterlimit=%2210%22 d=%22M4 7h22M4 15h22M4 23h22%22/%3E%3C/svg%3E")}.navbar-dark .navbar-text{color:rgba(255,255,255,.75)}.navbar-dark .navbar-text a{color:#fff}.navbar-dark .navbar-text a:hover,.navbar-dark .navbar-text a:focus{color:#fff}.card,.td-content .highlight{position:relative;display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;min-width:0;word-wrap:break-word;background-color:#fff;background-clip:border-box;border:1px solid rgba(0,0,0,.125);border-radius:.25rem}.card>hr,.td-content .highlight>hr{margin-right:0;margin-left:0}.card>.list-group:first-child .list-group-item:first-child,.td-content .highlight>.list-group:first-child .list-group-item:first-child{border-top-left-radius:.25rem;border-top-right-radius:.25rem}.card>.list-group:last-child .list-group-item:last-child,.td-content .highlight>.list-group:last-child .list-group-item:last-child{border-bottom-right-radius:.25rem;border-bottom-left-radius:.25rem}.card-body{-webkit-box-flex:1;-webkit-flex:1 1 auto;-ms-flex:1 1 
auto;flex:auto;padding:1.25rem}.card-title{margin-bottom:.75rem}.card-subtitle{margin-top:-.375rem;margin-bottom:0}.card-text:last-child{margin-bottom:0}.card-link:hover{text-decoration:none}.card-link+.card-link{margin-left:1.25rem}.card-header{padding:.75rem 1.25rem;margin-bottom:0;background-color:rgba(0,0,0,3%);border-bottom:1px solid rgba(0,0,0,.125)}.card-header:first-child{border-radius:-webkit-calc(.25rem - 1px)-webkit-calc(.25rem - 1px)0 0;border-radius:calc(.25rem - 1px)calc(.25rem - 1px)0 0}.card-header+.list-group .list-group-item:first-child{border-top:0}.card-footer{padding:.75rem 1.25rem;background-color:rgba(0,0,0,3%);border-top:1px solid rgba(0,0,0,.125)}.card-footer:last-child{border-radius:0 0 -webkit-calc(.25rem - 1px)-webkit-calc(.25rem - 1px);border-radius:0 0 calc(.25rem - 1px)calc(.25rem - 1px)}.card-header-tabs{margin-right:-.625rem;margin-bottom:-.75rem;margin-left:-.625rem;border-bottom:0}.card-header-pills{margin-right:-.625rem;margin-left:-.625rem}.card-img-overlay{position:absolute;top:0;right:0;bottom:0;left:0;padding:1.25rem}.card-img{width:100%;border-radius:-webkit-calc(.25rem - 1px);border-radius:calc(.25rem - 1px)}.card-img-top{width:100%;border-top-left-radius:-webkit-calc(.25rem - 1px);border-top-left-radius:calc(.25rem - 1px);border-top-right-radius:-webkit-calc(.25rem - 1px);border-top-right-radius:calc(.25rem - 1px)}.card-img-bottom{width:100%;border-bottom-right-radius:-webkit-calc(.25rem - 1px);border-bottom-right-radius:calc(.25rem - 1px);border-bottom-left-radius:-webkit-calc(.25rem - 1px);border-bottom-left-radius:calc(.25rem - 1px)}.card-deck{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column}.card-deck .card,.card-deck .td-content .highlight,.td-content .card-deck .highlight{margin-bottom:15px}@media(min-width:576px){.card-deck{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-flow:row wrap;-ms-flex-flow:row wrap;flex-flow:row wrap;margin-right:-15px;margin-left:-15px}.card-deck .card,.card-deck .td-content .highlight,.td-content .card-deck .highlight{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-flex:1;-webkit-flex:1 0 0%;-ms-flex:1 0 0%;flex:1 0;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;margin-right:15px;margin-bottom:0;margin-left:15px}}.card-group{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column}.card-group>.card,.td-content .card-group>.highlight{margin-bottom:15px}@media(min-width:576px){.card-group{-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-flow:row wrap;-ms-flex-flow:row wrap;flex-flow:row wrap}.card-group>.card,.td-content .card-group>.highlight{-webkit-box-flex:1;-webkit-flex:1 0 0%;-ms-flex:1 0 0%;flex:1 0;margin-bottom:0}.card-group>.card+.card,.td-content .card-group>.highlight+.card,.td-content .card-group>.card+.highlight,.td-content .card-group>.highlight+.highlight{margin-left:0;border-left:0}.card-group>.card:first-child,.td-content .card-group>.highlight:first-child{border-top-right-radius:0;border-bottom-right-radius:0}.card-group>.card:first-child .card-img-top,.td-content .card-group>.highlight:first-child 
.card-img-top,.card-group>.card:first-child .card-header,.td-content .card-group>.highlight:first-child .card-header{border-top-right-radius:0}.card-group>.card:first-child .card-img-bottom,.td-content .card-group>.highlight:first-child .card-img-bottom,.card-group>.card:first-child .card-footer,.td-content .card-group>.highlight:first-child .card-footer{border-bottom-right-radius:0}.card-group>.card:last-child,.td-content .card-group>.highlight:last-child{border-top-left-radius:0;border-bottom-left-radius:0}.card-group>.card:last-child .card-img-top,.td-content .card-group>.highlight:last-child .card-img-top,.card-group>.card:last-child .card-header,.td-content .card-group>.highlight:last-child .card-header{border-top-left-radius:0}.card-group>.card:last-child .card-img-bottom,.td-content .card-group>.highlight:last-child .card-img-bottom,.card-group>.card:last-child .card-footer,.td-content .card-group>.highlight:last-child .card-footer{border-bottom-left-radius:0}.card-group>.card:only-child,.td-content .card-group>.highlight:only-child{border-radius:.25rem}.card-group>.card:only-child .card-img-top,.td-content .card-group>.highlight:only-child .card-img-top,.card-group>.card:only-child .card-header,.td-content .card-group>.highlight:only-child .card-header{border-top-left-radius:.25rem;border-top-right-radius:.25rem}.card-group>.card:only-child .card-img-bottom,.td-content .card-group>.highlight:only-child .card-img-bottom,.card-group>.card:only-child .card-footer,.td-content .card-group>.highlight:only-child .card-footer{border-bottom-right-radius:.25rem;border-bottom-left-radius:.25rem}.card-group>.card:not(:first-child):not(:last-child):not(:only-child),.td-content .card-group>.highlight:not(:first-child):not(:last-child):not(:only-child){border-radius:0}.card-group>.card:not(:first-child):not(:last-child):not(:only-child) .card-img-top,.td-content .card-group>.highlight:not(:first-child):not(:last-child):not(:only-child) .card-img-top,.card-group>.card:not(:first-child):not(:last-child):not(:only-child) .card-img-bottom,.td-content .card-group>.highlight:not(:first-child):not(:last-child):not(:only-child) .card-img-bottom,.card-group>.card:not(:first-child):not(:last-child):not(:only-child) .card-header,.td-content .card-group>.highlight:not(:first-child):not(:last-child):not(:only-child) .card-header,.card-group>.card:not(:first-child):not(:last-child):not(:only-child) .card-footer,.td-content .card-group>.highlight:not(:first-child):not(:last-child):not(:only-child) .card-footer{border-radius:0}}.card-columns .card,.card-columns .td-content .highlight,.td-content .card-columns .highlight{margin-bottom:.75rem}@media(min-width:576px){.card-columns{-webkit-column-count:3;-moz-column-count:3;column-count:3;-webkit-column-gap:1.25rem;-moz-column-gap:1.25rem;column-gap:1.25rem;orphans:1;widows:1}.card-columns .card,.card-columns .td-content .highlight,.td-content .card-columns .highlight{display:inline-block;width:100%}}.accordion .card:not(:first-of-type):not(:last-of-type),.accordion .td-content .highlight:not(:first-of-type):not(:last-of-type),.td-content .accordion .highlight:not(:first-of-type):not(:last-of-type){border-bottom:0;border-radius:0}.accordion .card:not(:first-of-type) .card-header:first-child,.accordion .td-content .highlight:not(:first-of-type) .card-header:first-child,.td-content .accordion .highlight:not(:first-of-type) .card-header:first-child{border-radius:0}.accordion .card:first-of-type,.accordion .td-content .highlight:first-of-type,.td-content .accordion 
.highlight:first-of-type{border-bottom:0;border-bottom-right-radius:0;border-bottom-left-radius:0}.accordion .card:last-of-type,.accordion .td-content .highlight:last-of-type,.td-content .accordion .highlight:last-of-type{border-top-left-radius:0;border-top-right-radius:0}.breadcrumb{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-flex-wrap:wrap;-ms-flex-wrap:wrap;flex-wrap:wrap;padding:.75rem 1rem;margin-bottom:1rem;list-style:none;background-color:#eee;border-radius:.25rem}.breadcrumb-item+.breadcrumb-item{padding-left:.5rem}.breadcrumb-item+.breadcrumb-item::before{display:inline-block;padding-right:.5rem;color:#888;content:"/"}.breadcrumb-item+.breadcrumb-item:hover::before{text-decoration:underline}.breadcrumb-item+.breadcrumb-item:hover::before{text-decoration:none}.breadcrumb-item.active{color:#888}.pagination{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;padding-left:0;list-style:none;border-radius:.25rem}.page-link{position:relative;display:block;padding:.5rem .75rem;margin-left:-1px;line-height:1.25;color:#888;background-color:#fff;border:1px solid rgba(0,0,0,.1)}.page-link:hover{z-index:2;color:#1e53a0;text-decoration:none;background-color:#eee;border-color:#dee2e6}.page-link:focus{z-index:2;outline:0;-webkit-box-shadow:0 0 0 .2rem rgba(48,99,142,.25);box-shadow:0 0 0 .2rem rgba(48,99,142,.25)}.page-link:not(:disabled):not(.disabled){cursor:pointer}.page-item:first-child .page-link{margin-left:0;border-top-left-radius:.25rem;border-bottom-left-radius:.25rem}.page-item:last-child .page-link{border-top-right-radius:.25rem;border-bottom-right-radius:.25rem}.page-item.active .page-link{z-index:1;color:#fff;background-color:#30638e;border-color:#2a567b}.page-item.disabled .page-link{color:#dee2e6;pointer-events:none;cursor:auto;background-color:#fff;border-color:#dee2e6}.pagination-lg .page-link{padding:.75rem 1.5rem;font-size:1.25rem;line-height:1.5}.pagination-lg .page-item:first-child .page-link{border-top-left-radius:.3rem;border-bottom-left-radius:.3rem}.pagination-lg .page-item:last-child .page-link{border-top-right-radius:.3rem;border-bottom-right-radius:.3rem}.pagination-sm .page-link{padding:.25rem .5rem;font-size:.875rem;line-height:1.5}.pagination-sm .page-item:first-child .page-link{border-top-left-radius:.2rem;border-bottom-left-radius:.2rem}.pagination-sm .page-item:last-child .page-link{border-top-right-radius:.2rem;border-bottom-right-radius:.2rem}.badge{display:inline-block;padding:.25em .4em;font-size:75%;font-weight:700;line-height:1;text-align:center;white-space:nowrap;vertical-align:baseline;border-radius:.25rem}.badge:empty{display:none}.btn 
.badge{position:relative;top:-1px}.badge-pill{padding-right:.6em;padding-left:.6em;border-radius:10rem}.badge-primary{color:#fff;background-color:#30638e}.badge-primary[href]:hover,.badge-primary[href]:focus{color:#fff;text-decoration:none;background-color:#234868}.badge-secondary{color:#fff;background-color:#ffa630}.badge-secondary[href]:hover,.badge-secondary[href]:focus{color:#fff;text-decoration:none;background-color:#fc9000}.badge-success{color:#fff;background-color:#3772ff}.badge-success[href]:hover,.badge-success[href]:focus{color:#fff;text-decoration:none;background-color:#044eff}.badge-info{color:#222;background-color:#c0e0de}.badge-info[href]:hover,.badge-info[href]:focus{color:#222;text-decoration:none;background-color:#9ecfcc}.badge-warning{color:#fff;background-color:#ed6a5a}.badge-warning[href]:hover,.badge-warning[href]:focus{color:#fff;text-decoration:none;background-color:#e8402c}.badge-danger{color:#fff;background-color:#ed6a5a}.badge-danger[href]:hover,.badge-danger[href]:focus{color:#fff;text-decoration:none;background-color:#e8402c}.badge-light{color:#222;background-color:#d3f3ee}.badge-light[href]:hover,.badge-light[href]:focus{color:#222;text-decoration:none;background-color:#abe8df}.badge-dark{color:#fff;background-color:#403f4c}.badge-dark[href]:hover,.badge-dark[href]:focus{color:#fff;text-decoration:none;background-color:#292830}.jumbotron{padding:2rem 1rem;margin-bottom:2rem;background-color:#eee;border-radius:.3rem}@media(min-width:576px){.jumbotron{padding:4rem 2rem}}.jumbotron-fluid{padding-right:0;padding-left:0;border-radius:0}.alert{position:relative;padding:.75rem 1.25rem;margin-bottom:1rem;border:1px solid transparent;border-radius:.25rem}.alert-heading{color:inherit}.alert-link{font-weight:700}.alert-dismissible{padding-right:4rem}.alert-dismissible .close{position:absolute;top:0;right:0;padding:.75rem 1.25rem;color:inherit}.alert-primary{color:#19334a;background:#d6e0e8 -webkit-gradient(linear,left top,left bottom,from(#dce5eb),to(#d6e0e8))repeat-x;background:#d6e0e8 -webkit-linear-gradient(top,#dce5eb,#d6e0e8)repeat-x;background:#d6e0e8 -o-linear-gradient(top,#dce5eb,#d6e0e8)repeat-x;background:#d6e0e8 linear-gradient(180deg,#dce5eb,#d6e0e8)repeat-x;border-color:#c5d3df}.alert-primary hr{border-top-color:#b5c7d6}.alert-primary .alert-link{color:#0c1924}.alert-secondary{color:#855619;background:#ffedd6 -webkit-gradient(linear,left top,left bottom,from(#fff0dc),to(#ffedd6))repeat-x;background:#ffedd6 -webkit-linear-gradient(top,#fff0dc,#ffedd6)repeat-x;background:#ffedd6 -o-linear-gradient(top,#fff0dc,#ffedd6)repeat-x;background:#ffedd6 linear-gradient(180deg,#fff0dc,#ffedd6)repeat-x;border-color:#ffe6c5}.alert-secondary hr{border-top-color:#ffdbac}.alert-secondary .alert-link{color:#5a3a11}.alert-success{color:#1d3b85;background:#d7e3ff -webkit-gradient(linear,left top,left bottom,from(#dde7ff),to(#d7e3ff))repeat-x;background:#d7e3ff -webkit-linear-gradient(top,#dde7ff,#d7e3ff)repeat-x;background:#d7e3ff -o-linear-gradient(top,#dde7ff,#d7e3ff)repeat-x;background:#d7e3ff linear-gradient(180deg,#dde7ff,#d7e3ff)repeat-x;border-color:#c7d8ff}.alert-success hr{border-top-color:#aec6ff}.alert-success .alert-link{color:#14285b}.alert-info{color:#647473;background:#f2f9f8 -webkit-gradient(linear,left top,left bottom,from(#f4faf9),to(#f2f9f8))repeat-x;background:#f2f9f8 -webkit-linear-gradient(top,#f4faf9,#f2f9f8)repeat-x;background:#f2f9f8 -o-linear-gradient(top,#f4faf9,#f2f9f8)repeat-x;background:#f2f9f8 
linear-gradient(180deg,#f4faf9,#f2f9f8)repeat-x;border-color:#edf6f6}.alert-info hr{border-top-color:#dceeee}.alert-info .alert-link{color:#4c5958}.alert-warning{color:#7b372f;background:#fbe1de -webkit-gradient(linear,left top,left bottom,from(#fce6e3),to(#fbe1de))repeat-x;background:#fbe1de -webkit-linear-gradient(top,#fce6e3,#fbe1de)repeat-x;background:#fbe1de -o-linear-gradient(top,#fce6e3,#fbe1de)repeat-x;background:#fbe1de linear-gradient(180deg,#fce6e3,#fbe1de)repeat-x;border-color:#fad5d1}.alert-warning hr{border-top-color:#f8c0ba}.alert-warning .alert-link{color:#562721}.alert-danger{color:#7b372f;background:#fbe1de -webkit-gradient(linear,left top,left bottom,from(#fce6e3),to(#fbe1de))repeat-x;background:#fbe1de -webkit-linear-gradient(top,#fce6e3,#fbe1de)repeat-x;background:#fbe1de -o-linear-gradient(top,#fce6e3,#fbe1de)repeat-x;background:#fbe1de linear-gradient(180deg,#fce6e3,#fbe1de)repeat-x;border-color:#fad5d1}.alert-danger hr{border-top-color:#f8c0ba}.alert-danger .alert-link{color:#562721}.alert-light{color:#6e7e7c;background:#f6fdfc -webkit-gradient(linear,left top,left bottom,from(#f7fdfc),to(#f6fdfc))repeat-x;background:#f6fdfc -webkit-linear-gradient(top,#f7fdfc,#f6fdfc)repeat-x;background:#f6fdfc -o-linear-gradient(top,#f7fdfc,#f6fdfc)repeat-x;background:#f6fdfc linear-gradient(180deg,#f7fdfc,#f6fdfc)repeat-x;border-color:#f3fcfa}.alert-light hr{border-top-color:#dff7f2}.alert-light .alert-link{color:#566361}.alert-dark{color:#212128;background:#d9d9db -webkit-gradient(linear,left top,left bottom,from(#dfdfe0),to(#d9d9db))repeat-x;background:#d9d9db -webkit-linear-gradient(top,#dfdfe0,#d9d9db)repeat-x;background:#d9d9db -o-linear-gradient(top,#dfdfe0,#d9d9db)repeat-x;background:#d9d9db linear-gradient(180deg,#dfdfe0,#d9d9db)repeat-x;border-color:#cac9cd}.alert-dark hr{border-top-color:#bdbcc1}.alert-dark .alert-link{color:#0a0a0c}@-webkit-keyframes progress-bar-stripes{from{background-position:1rem 0}to{background-position:0 0}}@-o-keyframes progress-bar-stripes{from{background-position:1rem 0}to{background-position:0 0}}@keyframes progress-bar-stripes{from{background-position:1rem 0}to{background-position:0 0}}.progress{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;height:1rem;overflow:hidden;font-size:.75rem;background-color:#eee;border-radius:.25rem;-webkit-box-shadow:inset 0 .1rem .1rem rgba(0,0,0,.1);box-shadow:inset 0 .1rem .1rem rgba(0,0,0,.1)}.progress-bar{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;color:#fff;text-align:center;white-space:nowrap;background-color:#30638e;-webkit-transition:width .6s ease;-o-transition:width .6s ease;transition:width .6s ease}@media screen and (prefers-reduced-motion:reduce){.progress-bar{-webkit-transition:none;-o-transition:none;transition:none}}.progress-bar-striped{background-image:-webkit-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:-o-linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);background-image:linear-gradient(45deg,rgba(255,255,255,.15) 25%,transparent 25%,transparent 
50%,rgba(255,255,255,.15) 50%,rgba(255,255,255,.15) 75%,transparent 75%,transparent);-webkit-background-size:1rem 1rem;background-size:1rem 1rem}.progress-bar-animated{-webkit-animation:progress-bar-stripes 1s linear infinite;-o-animation:progress-bar-stripes 1s linear infinite;animation:progress-bar-stripes 1s linear infinite}.media{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:start;-webkit-align-items:flex-start;-ms-flex-align:start;align-items:flex-start}.media-body{-webkit-box-flex:1;-webkit-flex:1;-ms-flex:1;flex:1}.list-group{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;padding-left:0;margin-bottom:0}.list-group-item-action{width:100%;color:#495057;text-align:inherit}.list-group-item-action:hover,.list-group-item-action:focus{color:#495057;text-decoration:none;background-color:#f8f9fa}.list-group-item-action:active{color:#222;background-color:#eee}.list-group-item{position:relative;display:block;padding:.75rem 1.25rem;margin-bottom:-1px;background-color:#fff;border:1px solid rgba(0,0,0,.125)}.list-group-item:first-child{border-top-left-radius:.25rem;border-top-right-radius:.25rem}.list-group-item:last-child{margin-bottom:0;border-bottom-right-radius:.25rem;border-bottom-left-radius:.25rem}.list-group-item:hover,.list-group-item:focus{z-index:1;text-decoration:none}.list-group-item.disabled,.list-group-item:disabled{color:#888;background-color:#fff}.list-group-item.active{z-index:2;color:#fff;background-color:#30638e;border-color:#30638e}.list-group-flush .list-group-item{border-right:0;border-left:0;border-radius:0}.list-group-flush:first-child .list-group-item:first-child{border-top:0}.list-group-flush:last-child 
.list-group-item:last-child{border-bottom:0}.list-group-item-primary{color:#19334a;background-color:#c5d3df}.list-group-item-primary.list-group-item-action:hover,.list-group-item-primary.list-group-item-action:focus{color:#19334a;background-color:#b5c7d6}.list-group-item-primary.list-group-item-action.active{color:#fff;background-color:#19334a;border-color:#19334a}.list-group-item-secondary{color:#855619;background-color:#ffe6c5}.list-group-item-secondary.list-group-item-action:hover,.list-group-item-secondary.list-group-item-action:focus{color:#855619;background-color:#ffdbac}.list-group-item-secondary.list-group-item-action.active{color:#fff;background-color:#855619;border-color:#855619}.list-group-item-success{color:#1d3b85;background-color:#c7d8ff}.list-group-item-success.list-group-item-action:hover,.list-group-item-success.list-group-item-action:focus{color:#1d3b85;background-color:#aec6ff}.list-group-item-success.list-group-item-action.active{color:#fff;background-color:#1d3b85;border-color:#1d3b85}.list-group-item-info{color:#647473;background-color:#edf6f6}.list-group-item-info.list-group-item-action:hover,.list-group-item-info.list-group-item-action:focus{color:#647473;background-color:#dceeee}.list-group-item-info.list-group-item-action.active{color:#fff;background-color:#647473;border-color:#647473}.list-group-item-warning{color:#7b372f;background-color:#fad5d1}.list-group-item-warning.list-group-item-action:hover,.list-group-item-warning.list-group-item-action:focus{color:#7b372f;background-color:#f8c0ba}.list-group-item-warning.list-group-item-action.active{color:#fff;background-color:#7b372f;border-color:#7b372f}.list-group-item-danger{color:#7b372f;background-color:#fad5d1}.list-group-item-danger.list-group-item-action:hover,.list-group-item-danger.list-group-item-action:focus{color:#7b372f;background-color:#f8c0ba}.list-group-item-danger.list-group-item-action.active{color:#fff;background-color:#7b372f;border-color:#7b372f}.list-group-item-light{color:#6e7e7c;background-color:#f3fcfa}.list-group-item-light.list-group-item-action:hover,.list-group-item-light.list-group-item-action:focus{color:#6e7e7c;background-color:#dff7f2}.list-group-item-light.list-group-item-action.active{color:#fff;background-color:#6e7e7c;border-color:#6e7e7c}.list-group-item-dark{color:#212128;background-color:#cac9cd}.list-group-item-dark.list-group-item-action:hover,.list-group-item-dark.list-group-item-action:focus{color:#212128;background-color:#bdbcc1}.list-group-item-dark.list-group-item-action.active{color:#fff;background-color:#212128;border-color:#212128}.close{float:right;font-size:1.5rem;font-weight:700;line-height:1;color:#000;text-shadow:0 1px 0 #fff;opacity:.5}.close:not(:disabled):not(.disabled){cursor:pointer}.close:not(:disabled):not(.disabled):hover,.close:not(:disabled):not(.disabled):focus{color:#000;text-decoration:none;opacity:.75}button.close{padding:0;background-color:transparent;border:0;-webkit-appearance:none}.modal-open{overflow:hidden}.modal-open .modal{overflow-x:hidden;overflow-y:auto}.modal{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1050;display:none;overflow:hidden;outline:0}.modal-dialog{position:relative;width:auto;margin:.5rem;pointer-events:none}.modal.fade .modal-dialog{-webkit-transition:-webkit-transform .3s ease-out;transition:-webkit-transform .3s ease-out;-o-transition:-o-transform .3s ease-out;transition:transform .3s ease-out;transition:transform .3s ease-out,-webkit-transform .3s ease-out,-o-transform .3s 
ease-out;-webkit-transform:translate(0,-25%);-ms-transform:translate(0,-25%);-o-transform:translate(0,-25%);transform:translate(0,-25%)}@media screen and (prefers-reduced-motion:reduce){.modal.fade .modal-dialog{-webkit-transition:none;-o-transition:none;transition:none}}.modal.show .modal-dialog{-webkit-transform:translate(0,0);-ms-transform:translate(0,0);-o-transform:translate(0,0);transform:translate(0,0)}.modal-dialog-centered{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;min-height:-webkit-calc(100% - (.5rem * 2));min-height:calc(100% - (.5rem * 2))}.modal-dialog-centered::before{display:block;height:-webkit-calc(100vh - (.5rem * 2));height:calc(100vh - (.5rem * 2));content:""}.modal-content{position:relative;display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;width:100%;pointer-events:auto;background-color:#fff;background-clip:padding-box;border:1px solid rgba(0,0,0,.2);border-radius:.3rem;-webkit-box-shadow:0 .25rem .5rem rgba(0,0,0,.5);box-shadow:0 .25rem .5rem rgba(0,0,0,.5);outline:0}.modal-backdrop{position:fixed;top:0;right:0;bottom:0;left:0;z-index:1040;background-color:#000}.modal-backdrop.fade{opacity:0}.modal-backdrop.show{opacity:.5}.modal-header{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:start;-webkit-align-items:flex-start;-ms-flex-align:start;align-items:flex-start;-webkit-box-pack:justify;-webkit-justify-content:space-between;-ms-flex-pack:justify;justify-content:space-between;padding:1rem;border-bottom:1px solid #eee;border-top-left-radius:.3rem;border-top-right-radius:.3rem}.modal-header .close{padding:1rem;margin:-1rem -1rem -1rem auto}.modal-title{margin-bottom:0;line-height:1.5}.modal-body{position:relative;-webkit-box-flex:1;-webkit-flex:1 1 auto;-ms-flex:1 1 auto;flex:auto;padding:1rem}.modal-footer{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;-webkit-box-pack:end;-webkit-justify-content:flex-end;-ms-flex-pack:end;justify-content:flex-end;padding:1rem;border-top:1px solid #eee}.modal-footer>:not(:first-child){margin-left:.25rem}.modal-footer>:not(:last-child){margin-right:.25rem}.modal-scrollbar-measure{position:absolute;top:-9999px;width:50px;height:50px;overflow:scroll}@media(min-width:576px){.modal-dialog{max-width:500px;margin:1.75rem auto}.modal-dialog-centered{min-height:-webkit-calc(100% - (1.75rem * 2));min-height:calc(100% - (1.75rem * 2))}.modal-dialog-centered::before{height:-webkit-calc(100vh - (1.75rem * 2));height:calc(100vh - (1.75rem * 2))}.modal-content{-webkit-box-shadow:0 .5rem 1rem rgba(0,0,0,.5);box-shadow:0 .5rem 1rem rgba(0,0,0,.5)}.modal-sm{max-width:300px}}@media(min-width:992px){.modal-lg{max-width:800px}}.tooltip{position:absolute;z-index:1070;display:block;margin:0;font-family:open sans,-apple-system,BlinkMacSystemFont,segoe ui,Roboto,helvetica neue,Arial,sans-serif,apple color emoji,segoe ui emoji,segoe ui 
symbol;font-style:normal;font-weight:400;line-height:1.5;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;white-space:normal;line-break:auto;font-size:.875rem;word-wrap:break-word;opacity:0}.tooltip.show{opacity:.9}.tooltip .arrow{position:absolute;display:block;width:.8rem;height:.4rem}.tooltip .arrow::before{position:absolute;content:"";border-color:transparent;border-style:solid}.bs-tooltip-top,.bs-tooltip-auto[x-placement^=top]{padding:.4rem 0}.bs-tooltip-top .arrow,.bs-tooltip-auto[x-placement^=top] .arrow{bottom:0}.bs-tooltip-top .arrow::before,.bs-tooltip-auto[x-placement^=top] .arrow::before{top:0;border-width:.4rem .4rem 0;border-top-color:#000}.bs-tooltip-right,.bs-tooltip-auto[x-placement^=right]{padding:0 .4rem}.bs-tooltip-right .arrow,.bs-tooltip-auto[x-placement^=right] .arrow{left:0;width:.4rem;height:.8rem}.bs-tooltip-right .arrow::before,.bs-tooltip-auto[x-placement^=right] .arrow::before{right:0;border-width:.4rem .4rem .4rem 0;border-right-color:#000}.bs-tooltip-bottom,.bs-tooltip-auto[x-placement^=bottom]{padding:.4rem 0}.bs-tooltip-bottom .arrow,.bs-tooltip-auto[x-placement^=bottom] .arrow{top:0}.bs-tooltip-bottom .arrow::before,.bs-tooltip-auto[x-placement^=bottom] .arrow::before{bottom:0;border-width:0 .4rem .4rem;border-bottom-color:#000}.bs-tooltip-left,.bs-tooltip-auto[x-placement^=left]{padding:0 .4rem}.bs-tooltip-left .arrow,.bs-tooltip-auto[x-placement^=left] .arrow{right:0;width:.4rem;height:.8rem}.bs-tooltip-left .arrow::before,.bs-tooltip-auto[x-placement^=left] .arrow::before{left:0;border-width:.4rem 0 .4rem .4rem;border-left-color:#000}.tooltip-inner{max-width:200px;padding:.25rem .5rem;color:#fff;text-align:center;background-color:#000;border-radius:.25rem}.popover{position:absolute;top:0;left:0;z-index:1060;display:block;max-width:276px;font-family:open sans,-apple-system,BlinkMacSystemFont,segoe ui,Roboto,helvetica neue,Arial,sans-serif,apple color emoji,segoe ui emoji,segoe ui symbol;font-style:normal;font-weight:400;line-height:1.5;text-align:left;text-align:start;text-decoration:none;text-shadow:none;text-transform:none;letter-spacing:normal;word-break:normal;word-spacing:normal;white-space:normal;line-break:auto;font-size:.875rem;word-wrap:break-word;background-color:#fff;background-clip:padding-box;border:1px solid rgba(0,0,0,.2);border-radius:.3rem;-webkit-box-shadow:0 .25rem .5rem rgba(0,0,0,.2);box-shadow:0 .25rem .5rem rgba(0,0,0,.2)}.popover .arrow{position:absolute;display:block;width:1rem;height:.5rem;margin:0 .3rem}.popover .arrow::before,.popover .arrow::after{position:absolute;display:block;content:"";border-color:transparent;border-style:solid}.bs-popover-top,.bs-popover-auto[x-placement^=top]{margin-bottom:.5rem}.bs-popover-top .arrow,.bs-popover-auto[x-placement^=top] .arrow{bottom:-webkit-calc((.5rem + 1px) * -1);bottom:calc((.5rem + 1px) * -1)}.bs-popover-top .arrow::before,.bs-popover-auto[x-placement^=top] .arrow::before,.bs-popover-top .arrow::after,.bs-popover-auto[x-placement^=top] .arrow::after{border-width:.5rem .5rem 0}.bs-popover-top .arrow::before,.bs-popover-auto[x-placement^=top] .arrow::before{bottom:0;border-top-color:rgba(0,0,0,.25)}.bs-popover-top .arrow::after,.bs-popover-auto[x-placement^=top] .arrow::after{bottom:1px;border-top-color:#fff}.bs-popover-right,.bs-popover-auto[x-placement^=right]{margin-left:.5rem}.bs-popover-right .arrow,.bs-popover-auto[x-placement^=right] .arrow{left:-webkit-calc((.5rem + 1px) * 
-1);left:calc((.5rem + 1px) * -1);width:.5rem;height:1rem;margin:.3rem 0}.bs-popover-right .arrow::before,.bs-popover-auto[x-placement^=right] .arrow::before,.bs-popover-right .arrow::after,.bs-popover-auto[x-placement^=right] .arrow::after{border-width:.5rem .5rem .5rem 0}.bs-popover-right .arrow::before,.bs-popover-auto[x-placement^=right] .arrow::before{left:0;border-right-color:rgba(0,0,0,.25)}.bs-popover-right .arrow::after,.bs-popover-auto[x-placement^=right] .arrow::after{left:1px;border-right-color:#fff}.bs-popover-bottom,.bs-popover-auto[x-placement^=bottom]{margin-top:.5rem}.bs-popover-bottom .arrow,.bs-popover-auto[x-placement^=bottom] .arrow{top:-webkit-calc((.5rem + 1px) * -1);top:calc((.5rem + 1px) * -1)}.bs-popover-bottom .arrow::before,.bs-popover-auto[x-placement^=bottom] .arrow::before,.bs-popover-bottom .arrow::after,.bs-popover-auto[x-placement^=bottom] .arrow::after{border-width:0 .5rem .5rem}.bs-popover-bottom .arrow::before,.bs-popover-auto[x-placement^=bottom] .arrow::before{top:0;border-bottom-color:rgba(0,0,0,.25)}.bs-popover-bottom .arrow::after,.bs-popover-auto[x-placement^=bottom] .arrow::after{top:1px;border-bottom-color:#fff}.bs-popover-bottom .popover-header::before,.bs-popover-auto[x-placement^=bottom] .popover-header::before{position:absolute;top:0;left:50%;display:block;width:1rem;margin-left:-.5rem;content:"";border-bottom:1px solid #f7f7f7}.bs-popover-left,.bs-popover-auto[x-placement^=left]{margin-right:.5rem}.bs-popover-left .arrow,.bs-popover-auto[x-placement^=left] .arrow{right:-webkit-calc((.5rem + 1px) * -1);right:calc((.5rem + 1px) * -1);width:.5rem;height:1rem;margin:.3rem 0}.bs-popover-left .arrow::before,.bs-popover-auto[x-placement^=left] .arrow::before,.bs-popover-left .arrow::after,.bs-popover-auto[x-placement^=left] .arrow::after{border-width:.5rem 0 .5rem .5rem}.bs-popover-left .arrow::before,.bs-popover-auto[x-placement^=left] .arrow::before{right:0;border-left-color:rgba(0,0,0,.25)}.bs-popover-left .arrow::after,.bs-popover-auto[x-placement^=left] .arrow::after{right:1px;border-left-color:#fff}.popover-header{padding:.5rem .75rem;margin-bottom:0;font-size:1rem;color:inherit;background-color:#f7f7f7;border-bottom:1px solid #ebebeb;border-top-left-radius:-webkit-calc(.3rem - 1px);border-top-left-radius:calc(.3rem - 1px);border-top-right-radius:-webkit-calc(.3rem - 1px);border-top-right-radius:calc(.3rem - 1px)}.popover-header:empty{display:none}.popover-body{padding:.5rem .75rem;color:#222}.carousel{position:relative}.carousel-inner{position:relative;width:100%;overflow:hidden}.carousel-item{position:relative;display:none;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;width:100%;-webkit-backface-visibility:hidden;backface-visibility:hidden;-webkit-perspective:1000px;perspective:1000px}.carousel-item.active,.carousel-item-next,.carousel-item-prev{display:block;-webkit-transition:-webkit-transform .6s ease;transition:-webkit-transform .6s ease;-o-transition:-o-transform .6s ease;transition:transform .6s ease;transition:transform .6s ease,-webkit-transform .6s ease,-o-transform .6s ease}@media screen and 
(prefers-reduced-motion:reduce){.carousel-item.active,.carousel-item-next,.carousel-item-prev{-webkit-transition:none;-o-transition:none;transition:none}}.carousel-item-next,.carousel-item-prev{position:absolute;top:0}.carousel-item-next.carousel-item-left,.carousel-item-prev.carousel-item-right{-webkit-transform:translateX(0);-ms-transform:translateX(0);-o-transform:translateX(0);transform:translateX(0)}@supports((-webkit-transform-style:preserve-3d) or (transform-style:preserve-3d)){.carousel-item-next.carousel-item-left,.carousel-item-prev.carousel-item-right{-webkit-transform:translate3d(0,0,0);transform:translate3d(0,0,0)}}.carousel-item-next,.active.carousel-item-right{-webkit-transform:translateX(100%);-ms-transform:translateX(100%);-o-transform:translateX(100%);transform:translateX(100%)}@supports((-webkit-transform-style:preserve-3d) or (transform-style:preserve-3d)){.carousel-item-next,.active.carousel-item-right{-webkit-transform:translate3d(100%,0,0);transform:translate3d(100%,0,0)}}.carousel-item-prev,.active.carousel-item-left{-webkit-transform:translateX(-100%);-ms-transform:translateX(-100%);-o-transform:translateX(-100%);transform:translateX(-100%)}@supports((-webkit-transform-style:preserve-3d) or (transform-style:preserve-3d)){.carousel-item-prev,.active.carousel-item-left{-webkit-transform:translate3d(-100%,0,0);transform:translate3d(-100%,0,0)}}.carousel-fade .carousel-item{opacity:0;-webkit-transition-duration:.6s;-o-transition-duration:.6s;transition-duration:.6s;-webkit-transition-property:opacity;-o-transition-property:opacity;transition-property:opacity}.carousel-fade .carousel-item.active,.carousel-fade .carousel-item-next.carousel-item-left,.carousel-fade .carousel-item-prev.carousel-item-right{opacity:1}.carousel-fade .active.carousel-item-left,.carousel-fade .active.carousel-item-right{opacity:0}.carousel-fade .carousel-item-next,.carousel-fade .carousel-item-prev,.carousel-fade .carousel-item.active,.carousel-fade .active.carousel-item-left,.carousel-fade .active.carousel-item-prev{-webkit-transform:translateX(0);-ms-transform:translateX(0);-o-transform:translateX(0);transform:translateX(0)}@supports((-webkit-transform-style:preserve-3d) or (transform-style:preserve-3d)){.carousel-fade .carousel-item-next,.carousel-fade .carousel-item-prev,.carousel-fade .carousel-item.active,.carousel-fade .active.carousel-item-left,.carousel-fade .active.carousel-item-prev{-webkit-transform:translate3d(0,0,0);transform:translate3d(0,0,0)}}.carousel-control-prev,.carousel-control-next{position:absolute;top:0;bottom:0;display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-align:center;-webkit-align-items:center;-ms-flex-align:center;align-items:center;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;width:15%;color:#fff;text-align:center;opacity:.5}.carousel-control-prev:hover,.carousel-control-prev:focus,.carousel-control-next:hover,.carousel-control-next:focus{color:#fff;text-decoration:none;outline:0;opacity:.9}.carousel-control-prev{left:0;background:-webkit-gradient(linear,left top,right top,from(rgba(0,0,0,.25)),to(rgba(0,0,0,.1%)));background:-webkit-linear-gradient(left,rgba(0,0,0,.25),rgba(0,0,0,.1%));background:-o-linear-gradient(left,rgba(0,0,0,.25),rgba(0,0,0,.1%));background:linear-gradient(90deg,rgba(0,0,0,.25),rgba(0,0,0,.1%))}.carousel-control-next{right:0;background:-webkit-gradient(linear,right top,left 
top,from(rgba(0,0,0,.25)),to(rgba(0,0,0,.1%)));background:-webkit-linear-gradient(right,rgba(0,0,0,.25),rgba(0,0,0,.1%));background:-o-linear-gradient(right,rgba(0,0,0,.25),rgba(0,0,0,.1%));background:linear-gradient(270deg,rgba(0,0,0,.25),rgba(0,0,0,.1%))}.carousel-control-prev-icon,.carousel-control-next-icon{display:inline-block;width:20px;height:20px;background:no-repeat 50%;-webkit-background-size:100% 100%;background-size:100% 100%}.carousel-control-prev-icon{background-image:url("data:image/svg+xml;charset=utf8,%3Csvg xmlns=%22http://www.w3.org/2000/svg%22 fill=%22%23fff%22 viewBox=%220 0 8 8%22%3E%3Cpath d=%22M5.25.0l-4 4 4 4 1.5-1.5L4.25 4l2.5-2.5L5.25.0z%22/%3E%3C/svg%3E")}.carousel-control-next-icon{background-image:url("data:image/svg+xml;charset=utf8,%3Csvg xmlns=%22http://www.w3.org/2000/svg%22 fill=%22%23fff%22 viewBox=%220 0 8 8%22%3E%3Cpath d=%22M2.75.0l-1.5 1.5L3.75 4l-2.5 2.5L2.75 8l4-4-4-4z%22/%3E%3C/svg%3E")}.carousel-indicators{position:absolute;right:0;bottom:10px;left:0;z-index:15;display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-pack:center;-webkit-justify-content:center;-ms-flex-pack:center;justify-content:center;padding-left:0;margin-right:15%;margin-left:15%;list-style:none}.carousel-indicators li{position:relative;-webkit-box-flex:0;-webkit-flex:0 1 auto;-ms-flex:0 1 auto;flex:initial;width:30px;height:3px;margin-right:3px;margin-left:3px;text-indent:-999px;cursor:pointer;background-color:rgba(255,255,255,.5)}.carousel-indicators li::before{position:absolute;top:-10px;left:0;display:inline-block;width:100%;height:10px;content:""}.carousel-indicators li::after{position:absolute;bottom:-10px;left:0;display:inline-block;width:100%;height:10px;content:""}.carousel-indicators .active{background-color:#fff}.carousel-caption{position:absolute;right:15%;bottom:20px;left:15%;z-index:10;padding-top:20px;padding-bottom:20px;color:#fff;text-align:center}.align-baseline{vertical-align:baseline!important}.align-top{vertical-align:top!important}.align-middle{vertical-align:middle!important}.align-bottom{vertical-align:bottom!important}.align-text-bottom{vertical-align:text-bottom!important}.align-text-top{vertical-align:text-top!important}.bg-primary{background-color:#30638e!important}a.bg-primary:hover,a.bg-primary:focus,button.bg-primary:hover,button.bg-primary:focus{background-color:#234868!important}.bg-secondary{background-color:#ffa630!important}a.bg-secondary:hover,a.bg-secondary:focus,button.bg-secondary:hover,button.bg-secondary:focus{background-color:#fc9000!important}.bg-success{background-color:#3772ff!important}a.bg-success:hover,a.bg-success:focus,button.bg-success:hover,button.bg-success:focus{background-color:#044eff!important}.bg-info{background-color:#c0e0de!important}a.bg-info:hover,a.bg-info:focus,button.bg-info:hover,button.bg-info:focus{background-color:#9ecfcc!important}.bg-warning{background-color:#ed6a5a!important}a.bg-warning:hover,a.bg-warning:focus,button.bg-warning:hover,button.bg-warning:focus{background-color:#e8402c!important}.bg-danger{background-color:#ed6a5a!important}a.bg-danger:hover,a.bg-danger:focus,button.bg-danger:hover,button.bg-danger:focus{background-color:#e8402c!important}.bg-light{background-color:#d3f3ee!important}a.bg-light:hover,a.bg-light:focus,button.bg-light:hover,button.bg-light:focus{background-color:#abe8df!important}.bg-dark{background-color:#403f4c!important}a.bg-dark:hover,a.bg-dark:focus,button.bg-dark:hover,button.bg-dark:focus{background-color:#292830!important}.bg-gradient-primar
y{background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x!important;background:#30638e -webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x!important;background:#30638e -o-linear-gradient(top,#4f7a9f,#30638E)repeat-x!important;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x!important}.bg-gradient-secondary{background:#ffa630 -webkit-gradient(linear,left top,left bottom,from(#ffb34f),to(#FFA630))repeat-x!important;background:#ffa630 -webkit-linear-gradient(top,#ffb34f,#FFA630)repeat-x!important;background:#ffa630 -o-linear-gradient(top,#ffb34f,#FFA630)repeat-x!important;background:#ffa630 linear-gradient(180deg,#ffb34f,#FFA630)repeat-x!important}.bg-gradient-success{background:#3772ff -webkit-gradient(linear,left top,left bottom,from(#5587ff),to(#3772FF))repeat-x!important;background:#3772ff -webkit-linear-gradient(top,#5587ff,#3772FF)repeat-x!important;background:#3772ff -o-linear-gradient(top,#5587ff,#3772FF)repeat-x!important;background:#3772ff linear-gradient(180deg,#5587ff,#3772FF)repeat-x!important}.bg-gradient-info{background:#c0e0de -webkit-gradient(linear,left top,left bottom,from(#c9e5e3),to(#C0E0DE))repeat-x!important;background:#c0e0de -webkit-linear-gradient(top,#c9e5e3,#C0E0DE)repeat-x!important;background:#c0e0de -o-linear-gradient(top,#c9e5e3,#C0E0DE)repeat-x!important;background:#c0e0de linear-gradient(180deg,#c9e5e3,#C0E0DE)repeat-x!important}.bg-gradient-warning{background:#ed6a5a -webkit-gradient(linear,left top,left bottom,from(#f08073),to(#ED6A5A))repeat-x!important;background:#ed6a5a -webkit-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a -o-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a linear-gradient(180deg,#f08073,#ED6A5A)repeat-x!important}.bg-gradient-danger{background:#ed6a5a -webkit-gradient(linear,left top,left bottom,from(#f08073),to(#ED6A5A))repeat-x!important;background:#ed6a5a -webkit-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a -o-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a linear-gradient(180deg,#f08073,#ED6A5A)repeat-x!important}.bg-gradient-light{background:#d3f3ee -webkit-gradient(linear,left top,left bottom,from(#daf5f1),to(#D3F3EE))repeat-x!important;background:#d3f3ee -webkit-linear-gradient(top,#daf5f1,#D3F3EE)repeat-x!important;background:#d3f3ee -o-linear-gradient(top,#daf5f1,#D3F3EE)repeat-x!important;background:#d3f3ee linear-gradient(180deg,#daf5f1,#D3F3EE)repeat-x!important}.bg-gradient-dark{background:#403f4c -webkit-gradient(linear,left top,left bottom,from(#5d5c67),to(#403F4C))repeat-x!important;background:#403f4c -webkit-linear-gradient(top,#5d5c67,#403F4C)repeat-x!important;background:#403f4c -o-linear-gradient(top,#5d5c67,#403F4C)repeat-x!important;background:#403f4c linear-gradient(180deg,#5d5c67,#403F4C)repeat-x!important}.bg-white{background-color:#fff!important}.bg-transparent{background-color:transparent!important}.border{border:1px solid #dee2e6!important}.border-top{border-top:1px solid #dee2e6!important}.border-right{border-right:1px solid #dee2e6!important}.border-bottom{border-bottom:1px solid #dee2e6!important}.border-left{border-left:1px solid 
#dee2e6!important}.border-0{border:0!important}.border-top-0{border-top:0!important}.border-right-0{border-right:0!important}.border-bottom-0{border-bottom:0!important}.border-left-0{border-left:0!important}.border-primary{border-color:#30638e!important}.border-secondary{border-color:#ffa630!important}.border-success{border-color:#3772ff!important}.border-info{border-color:#c0e0de!important}.border-warning{border-color:#ed6a5a!important}.border-danger{border-color:#ed6a5a!important}.border-light{border-color:#d3f3ee!important}.border-dark{border-color:#403f4c!important}.border-white{border-color:#fff!important}.rounded{border-radius:.25rem!important}.rounded-top{border-top-left-radius:.25rem!important;border-top-right-radius:.25rem!important}.rounded-right{border-top-right-radius:.25rem!important;border-bottom-right-radius:.25rem!important}.rounded-bottom{border-bottom-right-radius:.25rem!important;border-bottom-left-radius:.25rem!important}.rounded-left{border-top-left-radius:.25rem!important;border-bottom-left-radius:.25rem!important}.rounded-circle{border-radius:50%!important}.rounded-0{border-radius:0!important}.clearfix::after{display:block;clear:both;content:""}.d-none{display:none!important}.d-inline{display:inline!important}.d-inline-block{display:inline-block!important}.d-block{display:block!important}.d-table{display:table!important}.d-table-row{display:table-row!important}.d-table-cell{display:table-cell!important}.d-flex{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important}.d-inline-flex{display:-webkit-inline-box!important;display:-webkit-inline-flex!important;display:-ms-inline-flexbox!important;display:inline-flex!important}@media(min-width:576px){.d-sm-none{display:none!important}.d-sm-inline{display:inline!important}.d-sm-inline-block{display:inline-block!important}.d-sm-block{display:block!important}.d-sm-table{display:table!important}.d-sm-table-row{display:table-row!important}.d-sm-table-cell{display:table-cell!important}.d-sm-flex{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important}.d-sm-inline-flex{display:-webkit-inline-box!important;display:-webkit-inline-flex!important;display:-ms-inline-flexbox!important;display:inline-flex!important}}@media(min-width:768px){.d-md-none{display:none!important}.d-md-inline{display:inline!important}.d-md-inline-block{display:inline-block!important}.d-md-block{display:block!important}.d-md-table{display:table!important}.d-md-table-row{display:table-row!important}.d-md-table-cell{display:table-cell!important}.d-md-flex{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important}.d-md-inline-flex{display:-webkit-inline-box!important;display:-webkit-inline-flex!important;display:-ms-inline-flexbox!important;display:inline-flex!important}}@media(min-width:992px){.d-lg-none{display:none!important}.d-lg-inline{display:inline!important}.d-lg-inline-block{display:inline-block!important}.d-lg-block{display:block!important}.d-lg-table{display:table!important}.d-lg-table-row{display:table-row!important}.d-lg-table-cell{display:table-cell!important}.d-lg-flex{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important}.d-lg-inline-flex{display:-webkit-inline-box!important;display:-webkit-inline-flex!important;display:-ms-inline-flexbox!important;display:inline-flex!important}}@media(min-width:1200px){.d-xl-none{display:n
one!important}.d-xl-inline{display:inline!important}.d-xl-inline-block{display:inline-block!important}.d-xl-block{display:block!important}.d-xl-table{display:table!important}.d-xl-table-row{display:table-row!important}.d-xl-table-cell{display:table-cell!important}.d-xl-flex{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important}.d-xl-inline-flex{display:-webkit-inline-box!important;display:-webkit-inline-flex!important;display:-ms-inline-flexbox!important;display:inline-flex!important}}@media print{.d-print-none{display:none!important}.d-print-inline{display:inline!important}.d-print-inline-block{display:inline-block!important}.d-print-block{display:block!important}.d-print-table{display:table!important}.d-print-table-row{display:table-row!important}.d-print-table-cell{display:table-cell!important}.d-print-flex{display:-webkit-box!important;display:-webkit-flex!important;display:-ms-flexbox!important;display:flex!important}.d-print-inline-flex{display:-webkit-inline-box!important;display:-webkit-inline-flex!important;display:-ms-inline-flexbox!important;display:inline-flex!important}}.embed-responsive{position:relative;display:block;width:100%;padding:0;overflow:hidden}.embed-responsive::before{display:block;content:""}.embed-responsive .embed-responsive-item,.embed-responsive iframe,.embed-responsive embed,.embed-responsive object,.embed-responsive video{position:absolute;top:0;bottom:0;left:0;width:100%;height:100%;border:0}.embed-responsive-21by9::before{padding-top:42.85714286%}.embed-responsive-16by9::before{padding-top:56.25%}.embed-responsive-4by3::before{padding-top:75%}.embed-responsive-1by1::before{padding-top:100%}.flex-row{-webkit-box-orient:horizontal!important;-webkit-box-direction:normal!important;-webkit-flex-direction:row!important;-ms-flex-direction:row!important;flex-direction:row!important}.flex-column{-webkit-box-orient:vertical!important;-webkit-box-direction:normal!important;-webkit-flex-direction:column!important;-ms-flex-direction:column!important;flex-direction:column!important}.flex-row-reverse{-webkit-box-orient:horizontal!important;-webkit-box-direction:reverse!important;-webkit-flex-direction:row-reverse!important;-ms-flex-direction:row-reverse!important;flex-direction:row-reverse!important}.flex-column-reverse{-webkit-box-orient:vertical!important;-webkit-box-direction:reverse!important;-webkit-flex-direction:column-reverse!important;-ms-flex-direction:column-reverse!important;flex-direction:column-reverse!important}.flex-wrap{-webkit-flex-wrap:wrap!important;-ms-flex-wrap:wrap!important;flex-wrap:wrap!important}.flex-nowrap{-webkit-flex-wrap:nowrap!important;-ms-flex-wrap:nowrap!important;flex-wrap:nowrap!important}.flex-wrap-reverse{-webkit-flex-wrap:wrap-reverse!important;-ms-flex-wrap:wrap-reverse!important;flex-wrap:wrap-reverse!important}.flex-fill{-webkit-box-flex:1!important;-webkit-flex:1 1 auto!important;-ms-flex:1 1 
auto!important;flex:auto!important}.flex-grow-0{-webkit-box-flex:0!important;-webkit-flex-grow:0!important;-ms-flex-positive:0!important;flex-grow:0!important}.flex-grow-1{-webkit-box-flex:1!important;-webkit-flex-grow:1!important;-ms-flex-positive:1!important;flex-grow:1!important}.flex-shrink-0{-webkit-flex-shrink:0!important;-ms-flex-negative:0!important;flex-shrink:0!important}.flex-shrink-1{-webkit-flex-shrink:1!important;-ms-flex-negative:1!important;flex-shrink:1!important}.justify-content-start{-webkit-box-pack:start!important;-webkit-justify-content:flex-start!important;-ms-flex-pack:start!important;justify-content:flex-start!important}.justify-content-end{-webkit-box-pack:end!important;-webkit-justify-content:flex-end!important;-ms-flex-pack:end!important;justify-content:flex-end!important}.justify-content-center{-webkit-box-pack:center!important;-webkit-justify-content:center!important;-ms-flex-pack:center!important;justify-content:center!important}.justify-content-between{-webkit-box-pack:justify!important;-webkit-justify-content:space-between!important;-ms-flex-pack:justify!important;justify-content:space-between!important}.justify-content-around{-webkit-justify-content:space-around!important;-ms-flex-pack:distribute!important;justify-content:space-around!important}.align-items-start{-webkit-box-align:start!important;-webkit-align-items:flex-start!important;-ms-flex-align:start!important;align-items:flex-start!important}.align-items-end{-webkit-box-align:end!important;-webkit-align-items:flex-end!important;-ms-flex-align:end!important;align-items:flex-end!important}.align-items-center{-webkit-box-align:center!important;-webkit-align-items:center!important;-ms-flex-align:center!important;align-items:center!important}.align-items-baseline{-webkit-box-align:baseline!important;-webkit-align-items:baseline!important;-ms-flex-align:baseline!important;align-items:baseline!important}.align-items-stretch{-webkit-box-align:stretch!important;-webkit-align-items:stretch!important;-ms-flex-align:stretch!important;align-items:stretch!important}.align-content-start{-webkit-align-content:flex-start!important;-ms-flex-line-pack:start!important;align-content:flex-start!important}.align-content-end{-webkit-align-content:flex-end!important;-ms-flex-line-pack:end!important;align-content:flex-end!important}.align-content-center{-webkit-align-content:center!important;-ms-flex-line-pack:center!important;align-content:center!important}.align-content-between{-webkit-align-content:space-between!important;-ms-flex-line-pack:justify!important;align-content:space-between!important}.align-content-around{-webkit-align-content:space-around!important;-ms-flex-line-pack:distribute!important;align-content:space-around!important}.align-content-stretch{-webkit-align-content:stretch!important;-ms-flex-line-pack:stretch!important;align-content:stretch!important}.align-self-auto{-webkit-align-self:auto!important;-ms-flex-item-align:auto!important;align-self:auto!important}.align-self-start{-webkit-align-self:flex-start!important;-ms-flex-item-align:start!important;align-self:flex-start!important}.align-self-end{-webkit-align-self:flex-end!important;-ms-flex-item-align:end!important;align-self:flex-end!important}.align-self-center{-webkit-align-self:center!important;-ms-flex-item-align:center!important;align-self:center!important}.align-self-baseline{-webkit-align-self:baseline!important;-ms-flex-item-align:baseline!important;align-self:baseline!important}.align-self-stretch{-webkit-align-self:stretch!important;-ms-fle
x-item-align:stretch!important;align-self:stretch!important}@media(min-width:576px){.flex-sm-row{-webkit-box-orient:horizontal!important;-webkit-box-direction:normal!important;-webkit-flex-direction:row!important;-ms-flex-direction:row!important;flex-direction:row!important}.flex-sm-column{-webkit-box-orient:vertical!important;-webkit-box-direction:normal!important;-webkit-flex-direction:column!important;-ms-flex-direction:column!important;flex-direction:column!important}.flex-sm-row-reverse{-webkit-box-orient:horizontal!important;-webkit-box-direction:reverse!important;-webkit-flex-direction:row-reverse!important;-ms-flex-direction:row-reverse!important;flex-direction:row-reverse!important}.flex-sm-column-reverse{-webkit-box-orient:vertical!important;-webkit-box-direction:reverse!important;-webkit-flex-direction:column-reverse!important;-ms-flex-direction:column-reverse!important;flex-direction:column-reverse!important}.flex-sm-wrap{-webkit-flex-wrap:wrap!important;-ms-flex-wrap:wrap!important;flex-wrap:wrap!important}.flex-sm-nowrap{-webkit-flex-wrap:nowrap!important;-ms-flex-wrap:nowrap!important;flex-wrap:nowrap!important}.flex-sm-wrap-reverse{-webkit-flex-wrap:wrap-reverse!important;-ms-flex-wrap:wrap-reverse!important;flex-wrap:wrap-reverse!important}.flex-sm-fill{-webkit-box-flex:1!important;-webkit-flex:1 1 auto!important;-ms-flex:1 1 auto!important;flex:auto!important}.flex-sm-grow-0{-webkit-box-flex:0!important;-webkit-flex-grow:0!important;-ms-flex-positive:0!important;flex-grow:0!important}.flex-sm-grow-1{-webkit-box-flex:1!important;-webkit-flex-grow:1!important;-ms-flex-positive:1!important;flex-grow:1!important}.flex-sm-shrink-0{-webkit-flex-shrink:0!important;-ms-flex-negative:0!important;flex-shrink:0!important}.flex-sm-shrink-1{-webkit-flex-shrink:1!important;-ms-flex-negative:1!important;flex-shrink:1!important}.justify-content-sm-start{-webkit-box-pack:start!important;-webkit-justify-content:flex-start!important;-ms-flex-pack:start!important;justify-content:flex-start!important}.justify-content-sm-end{-webkit-box-pack:end!important;-webkit-justify-content:flex-end!important;-ms-flex-pack:end!important;justify-content:flex-end!important}.justify-content-sm-center{-webkit-box-pack:center!important;-webkit-justify-content:center!important;-ms-flex-pack:center!important;justify-content:center!important}.justify-content-sm-between{-webkit-box-pack:justify!important;-webkit-justify-content:space-between!important;-ms-flex-pack:justify!important;justify-content:space-between!important}.justify-content-sm-around{-webkit-justify-content:space-around!important;-ms-flex-pack:distribute!important;justify-content:space-around!important}.align-items-sm-start{-webkit-box-align:start!important;-webkit-align-items:flex-start!important;-ms-flex-align:start!important;align-items:flex-start!important}.align-items-sm-end{-webkit-box-align:end!important;-webkit-align-items:flex-end!important;-ms-flex-align:end!important;align-items:flex-end!important}.align-items-sm-center{-webkit-box-align:center!important;-webkit-align-items:center!important;-ms-flex-align:center!important;align-items:center!important}.align-items-sm-baseline{-webkit-box-align:baseline!important;-webkit-align-items:baseline!important;-ms-flex-align:baseline!important;align-items:baseline!important}.align-items-sm-stretch{-webkit-box-align:stretch!important;-webkit-align-items:stretch!important;-ms-flex-align:stretch!important;align-items:stretch!important}.align-content-sm-start{-webkit-align-content:flex-start!important;-m
s-flex-line-pack:start!important;align-content:flex-start!important}.align-content-sm-end{-webkit-align-content:flex-end!important;-ms-flex-line-pack:end!important;align-content:flex-end!important}.align-content-sm-center{-webkit-align-content:center!important;-ms-flex-line-pack:center!important;align-content:center!important}.align-content-sm-between{-webkit-align-content:space-between!important;-ms-flex-line-pack:justify!important;align-content:space-between!important}.align-content-sm-around{-webkit-align-content:space-around!important;-ms-flex-line-pack:distribute!important;align-content:space-around!important}.align-content-sm-stretch{-webkit-align-content:stretch!important;-ms-flex-line-pack:stretch!important;align-content:stretch!important}.align-self-sm-auto{-webkit-align-self:auto!important;-ms-flex-item-align:auto!important;align-self:auto!important}.align-self-sm-start{-webkit-align-self:flex-start!important;-ms-flex-item-align:start!important;align-self:flex-start!important}.align-self-sm-end{-webkit-align-self:flex-end!important;-ms-flex-item-align:end!important;align-self:flex-end!important}.align-self-sm-center{-webkit-align-self:center!important;-ms-flex-item-align:center!important;align-self:center!important}.align-self-sm-baseline{-webkit-align-self:baseline!important;-ms-flex-item-align:baseline!important;align-self:baseline!important}.align-self-sm-stretch{-webkit-align-self:stretch!important;-ms-flex-item-align:stretch!important;align-self:stretch!important}}@media(min-width:768px){.flex-md-row{-webkit-box-orient:horizontal!important;-webkit-box-direction:normal!important;-webkit-flex-direction:row!important;-ms-flex-direction:row!important;flex-direction:row!important}.flex-md-column{-webkit-box-orient:vertical!important;-webkit-box-direction:normal!important;-webkit-flex-direction:column!important;-ms-flex-direction:column!important;flex-direction:column!important}.flex-md-row-reverse{-webkit-box-orient:horizontal!important;-webkit-box-direction:reverse!important;-webkit-flex-direction:row-reverse!important;-ms-flex-direction:row-reverse!important;flex-direction:row-reverse!important}.flex-md-column-reverse{-webkit-box-orient:vertical!important;-webkit-box-direction:reverse!important;-webkit-flex-direction:column-reverse!important;-ms-flex-direction:column-reverse!important;flex-direction:column-reverse!important}.flex-md-wrap{-webkit-flex-wrap:wrap!important;-ms-flex-wrap:wrap!important;flex-wrap:wrap!important}.flex-md-nowrap{-webkit-flex-wrap:nowrap!important;-ms-flex-wrap:nowrap!important;flex-wrap:nowrap!important}.flex-md-wrap-reverse{-webkit-flex-wrap:wrap-reverse!important;-ms-flex-wrap:wrap-reverse!important;flex-wrap:wrap-reverse!important}.flex-md-fill{-webkit-box-flex:1!important;-webkit-flex:1 1 auto!important;-ms-flex:1 1 
auto!important;flex:auto!important}.flex-md-grow-0{-webkit-box-flex:0!important;-webkit-flex-grow:0!important;-ms-flex-positive:0!important;flex-grow:0!important}.flex-md-grow-1{-webkit-box-flex:1!important;-webkit-flex-grow:1!important;-ms-flex-positive:1!important;flex-grow:1!important}.flex-md-shrink-0{-webkit-flex-shrink:0!important;-ms-flex-negative:0!important;flex-shrink:0!important}.flex-md-shrink-1{-webkit-flex-shrink:1!important;-ms-flex-negative:1!important;flex-shrink:1!important}.justify-content-md-start{-webkit-box-pack:start!important;-webkit-justify-content:flex-start!important;-ms-flex-pack:start!important;justify-content:flex-start!important}.justify-content-md-end{-webkit-box-pack:end!important;-webkit-justify-content:flex-end!important;-ms-flex-pack:end!important;justify-content:flex-end!important}.justify-content-md-center{-webkit-box-pack:center!important;-webkit-justify-content:center!important;-ms-flex-pack:center!important;justify-content:center!important}.justify-content-md-between{-webkit-box-pack:justify!important;-webkit-justify-content:space-between!important;-ms-flex-pack:justify!important;justify-content:space-between!important}.justify-content-md-around{-webkit-justify-content:space-around!important;-ms-flex-pack:distribute!important;justify-content:space-around!important}.align-items-md-start{-webkit-box-align:start!important;-webkit-align-items:flex-start!important;-ms-flex-align:start!important;align-items:flex-start!important}.align-items-md-end{-webkit-box-align:end!important;-webkit-align-items:flex-end!important;-ms-flex-align:end!important;align-items:flex-end!important}.align-items-md-center{-webkit-box-align:center!important;-webkit-align-items:center!important;-ms-flex-align:center!important;align-items:center!important}.align-items-md-baseline{-webkit-box-align:baseline!important;-webkit-align-items:baseline!important;-ms-flex-align:baseline!important;align-items:baseline!important}.align-items-md-stretch{-webkit-box-align:stretch!important;-webkit-align-items:stretch!important;-ms-flex-align:stretch!important;align-items:stretch!important}.align-content-md-start{-webkit-align-content:flex-start!important;-ms-flex-line-pack:start!important;align-content:flex-start!important}.align-content-md-end{-webkit-align-content:flex-end!important;-ms-flex-line-pack:end!important;align-content:flex-end!important}.align-content-md-center{-webkit-align-content:center!important;-ms-flex-line-pack:center!important;align-content:center!important}.align-content-md-between{-webkit-align-content:space-between!important;-ms-flex-line-pack:justify!important;align-content:space-between!important}.align-content-md-around{-webkit-align-content:space-around!important;-ms-flex-line-pack:distribute!important;align-content:space-around!important}.align-content-md-stretch{-webkit-align-content:stretch!important;-ms-flex-line-pack:stretch!important;align-content:stretch!important}.align-self-md-auto{-webkit-align-self:auto!important;-ms-flex-item-align:auto!important;align-self:auto!important}.align-self-md-start{-webkit-align-self:flex-start!important;-ms-flex-item-align:start!important;align-self:flex-start!important}.align-self-md-end{-webkit-align-self:flex-end!important;-ms-flex-item-align:end!important;align-self:flex-end!important}.align-self-md-center{-webkit-align-self:center!important;-ms-flex-item-align:center!important;align-self:center!important}.align-self-md-baseline{-webkit-align-self:baseline!important;-ms-flex-item-align:baseline!important;align-self:baseline
!important}.align-self-md-stretch{-webkit-align-self:stretch!important;-ms-flex-item-align:stretch!important;align-self:stretch!important}}@media(min-width:992px){.flex-lg-row{-webkit-box-orient:horizontal!important;-webkit-box-direction:normal!important;-webkit-flex-direction:row!important;-ms-flex-direction:row!important;flex-direction:row!important}.flex-lg-column{-webkit-box-orient:vertical!important;-webkit-box-direction:normal!important;-webkit-flex-direction:column!important;-ms-flex-direction:column!important;flex-direction:column!important}.flex-lg-row-reverse{-webkit-box-orient:horizontal!important;-webkit-box-direction:reverse!important;-webkit-flex-direction:row-reverse!important;-ms-flex-direction:row-reverse!important;flex-direction:row-reverse!important}.flex-lg-column-reverse{-webkit-box-orient:vertical!important;-webkit-box-direction:reverse!important;-webkit-flex-direction:column-reverse!important;-ms-flex-direction:column-reverse!important;flex-direction:column-reverse!important}.flex-lg-wrap{-webkit-flex-wrap:wrap!important;-ms-flex-wrap:wrap!important;flex-wrap:wrap!important}.flex-lg-nowrap{-webkit-flex-wrap:nowrap!important;-ms-flex-wrap:nowrap!important;flex-wrap:nowrap!important}.flex-lg-wrap-reverse{-webkit-flex-wrap:wrap-reverse!important;-ms-flex-wrap:wrap-reverse!important;flex-wrap:wrap-reverse!important}.flex-lg-fill{-webkit-box-flex:1!important;-webkit-flex:1 1 auto!important;-ms-flex:1 1 auto!important;flex:auto!important}.flex-lg-grow-0{-webkit-box-flex:0!important;-webkit-flex-grow:0!important;-ms-flex-positive:0!important;flex-grow:0!important}.flex-lg-grow-1{-webkit-box-flex:1!important;-webkit-flex-grow:1!important;-ms-flex-positive:1!important;flex-grow:1!important}.flex-lg-shrink-0{-webkit-flex-shrink:0!important;-ms-flex-negative:0!important;flex-shrink:0!important}.flex-lg-shrink-1{-webkit-flex-shrink:1!important;-ms-flex-negative:1!important;flex-shrink:1!important}.justify-content-lg-start{-webkit-box-pack:start!important;-webkit-justify-content:flex-start!important;-ms-flex-pack:start!important;justify-content:flex-start!important}.justify-content-lg-end{-webkit-box-pack:end!important;-webkit-justify-content:flex-end!important;-ms-flex-pack:end!important;justify-content:flex-end!important}.justify-content-lg-center{-webkit-box-pack:center!important;-webkit-justify-content:center!important;-ms-flex-pack:center!important;justify-content:center!important}.justify-content-lg-between{-webkit-box-pack:justify!important;-webkit-justify-content:space-between!important;-ms-flex-pack:justify!important;justify-content:space-between!important}.justify-content-lg-around{-webkit-justify-content:space-around!important;-ms-flex-pack:distribute!important;justify-content:space-around!important}.align-items-lg-start{-webkit-box-align:start!important;-webkit-align-items:flex-start!important;-ms-flex-align:start!important;align-items:flex-start!important}.align-items-lg-end{-webkit-box-align:end!important;-webkit-align-items:flex-end!important;-ms-flex-align:end!important;align-items:flex-end!important}.align-items-lg-center{-webkit-box-align:center!important;-webkit-align-items:center!important;-ms-flex-align:center!important;align-items:center!important}.align-items-lg-baseline{-webkit-box-align:baseline!important;-webkit-align-items:baseline!important;-ms-flex-align:baseline!important;align-items:baseline!important}.align-items-lg-stretch{-webkit-box-align:stretch!important;-webkit-align-items:stretch!important;-ms-flex-align:stretch!important;align-items:stretch!
important}.align-content-lg-start{-webkit-align-content:flex-start!important;-ms-flex-line-pack:start!important;align-content:flex-start!important}.align-content-lg-end{-webkit-align-content:flex-end!important;-ms-flex-line-pack:end!important;align-content:flex-end!important}.align-content-lg-center{-webkit-align-content:center!important;-ms-flex-line-pack:center!important;align-content:center!important}.align-content-lg-between{-webkit-align-content:space-between!important;-ms-flex-line-pack:justify!important;align-content:space-between!important}.align-content-lg-around{-webkit-align-content:space-around!important;-ms-flex-line-pack:distribute!important;align-content:space-around!important}.align-content-lg-stretch{-webkit-align-content:stretch!important;-ms-flex-line-pack:stretch!important;align-content:stretch!important}.align-self-lg-auto{-webkit-align-self:auto!important;-ms-flex-item-align:auto!important;align-self:auto!important}.align-self-lg-start{-webkit-align-self:flex-start!important;-ms-flex-item-align:start!important;align-self:flex-start!important}.align-self-lg-end{-webkit-align-self:flex-end!important;-ms-flex-item-align:end!important;align-self:flex-end!important}.align-self-lg-center{-webkit-align-self:center!important;-ms-flex-item-align:center!important;align-self:center!important}.align-self-lg-baseline{-webkit-align-self:baseline!important;-ms-flex-item-align:baseline!important;align-self:baseline!important}.align-self-lg-stretch{-webkit-align-self:stretch!important;-ms-flex-item-align:stretch!important;align-self:stretch!important}}@media(min-width:1200px){.flex-xl-row{-webkit-box-orient:horizontal!important;-webkit-box-direction:normal!important;-webkit-flex-direction:row!important;-ms-flex-direction:row!important;flex-direction:row!important}.flex-xl-column{-webkit-box-orient:vertical!important;-webkit-box-direction:normal!important;-webkit-flex-direction:column!important;-ms-flex-direction:column!important;flex-direction:column!important}.flex-xl-row-reverse{-webkit-box-orient:horizontal!important;-webkit-box-direction:reverse!important;-webkit-flex-direction:row-reverse!important;-ms-flex-direction:row-reverse!important;flex-direction:row-reverse!important}.flex-xl-column-reverse{-webkit-box-orient:vertical!important;-webkit-box-direction:reverse!important;-webkit-flex-direction:column-reverse!important;-ms-flex-direction:column-reverse!important;flex-direction:column-reverse!important}.flex-xl-wrap{-webkit-flex-wrap:wrap!important;-ms-flex-wrap:wrap!important;flex-wrap:wrap!important}.flex-xl-nowrap{-webkit-flex-wrap:nowrap!important;-ms-flex-wrap:nowrap!important;flex-wrap:nowrap!important}.flex-xl-wrap-reverse{-webkit-flex-wrap:wrap-reverse!important;-ms-flex-wrap:wrap-reverse!important;flex-wrap:wrap-reverse!important}.flex-xl-fill{-webkit-box-flex:1!important;-webkit-flex:1 1 auto!important;-ms-flex:1 1 
auto!important;flex:auto!important}.flex-xl-grow-0{-webkit-box-flex:0!important;-webkit-flex-grow:0!important;-ms-flex-positive:0!important;flex-grow:0!important}.flex-xl-grow-1{-webkit-box-flex:1!important;-webkit-flex-grow:1!important;-ms-flex-positive:1!important;flex-grow:1!important}.flex-xl-shrink-0{-webkit-flex-shrink:0!important;-ms-flex-negative:0!important;flex-shrink:0!important}.flex-xl-shrink-1{-webkit-flex-shrink:1!important;-ms-flex-negative:1!important;flex-shrink:1!important}.justify-content-xl-start{-webkit-box-pack:start!important;-webkit-justify-content:flex-start!important;-ms-flex-pack:start!important;justify-content:flex-start!important}.justify-content-xl-end{-webkit-box-pack:end!important;-webkit-justify-content:flex-end!important;-ms-flex-pack:end!important;justify-content:flex-end!important}.justify-content-xl-center{-webkit-box-pack:center!important;-webkit-justify-content:center!important;-ms-flex-pack:center!important;justify-content:center!important}.justify-content-xl-between{-webkit-box-pack:justify!important;-webkit-justify-content:space-between!important;-ms-flex-pack:justify!important;justify-content:space-between!important}.justify-content-xl-around{-webkit-justify-content:space-around!important;-ms-flex-pack:distribute!important;justify-content:space-around!important}.align-items-xl-start{-webkit-box-align:start!important;-webkit-align-items:flex-start!important;-ms-flex-align:start!important;align-items:flex-start!important}.align-items-xl-end{-webkit-box-align:end!important;-webkit-align-items:flex-end!important;-ms-flex-align:end!important;align-items:flex-end!important}.align-items-xl-center{-webkit-box-align:center!important;-webkit-align-items:center!important;-ms-flex-align:center!important;align-items:center!important}.align-items-xl-baseline{-webkit-box-align:baseline!important;-webkit-align-items:baseline!important;-ms-flex-align:baseline!important;align-items:baseline!important}.align-items-xl-stretch{-webkit-box-align:stretch!important;-webkit-align-items:stretch!important;-ms-flex-align:stretch!important;align-items:stretch!important}.align-content-xl-start{-webkit-align-content:flex-start!important;-ms-flex-line-pack:start!important;align-content:flex-start!important}.align-content-xl-end{-webkit-align-content:flex-end!important;-ms-flex-line-pack:end!important;align-content:flex-end!important}.align-content-xl-center{-webkit-align-content:center!important;-ms-flex-line-pack:center!important;align-content:center!important}.align-content-xl-between{-webkit-align-content:space-between!important;-ms-flex-line-pack:justify!important;align-content:space-between!important}.align-content-xl-around{-webkit-align-content:space-around!important;-ms-flex-line-pack:distribute!important;align-content:space-around!important}.align-content-xl-stretch{-webkit-align-content:stretch!important;-ms-flex-line-pack:stretch!important;align-content:stretch!important}.align-self-xl-auto{-webkit-align-self:auto!important;-ms-flex-item-align:auto!important;align-self:auto!important}.align-self-xl-start{-webkit-align-self:flex-start!important;-ms-flex-item-align:start!important;align-self:flex-start!important}.align-self-xl-end{-webkit-align-self:flex-end!important;-ms-flex-item-align:end!important;align-self:flex-end!important}.align-self-xl-center{-webkit-align-self:center!important;-ms-flex-item-align:center!important;align-self:center!important}.align-self-xl-baseline{-webkit-align-self:baseline!important;-ms-flex-item-align:baseline!important;align-self:baseline
!important}.align-self-xl-stretch{-webkit-align-self:stretch!important;-ms-flex-item-align:stretch!important;align-self:stretch!important}}.float-left{float:left!important}.float-right{float:right!important}.float-none{float:none!important}@media(min-width:576px){.float-sm-left{float:left!important}.float-sm-right{float:right!important}.float-sm-none{float:none!important}}@media(min-width:768px){.float-md-left{float:left!important}.float-md-right{float:right!important}.float-md-none{float:none!important}}@media(min-width:992px){.float-lg-left{float:left!important}.float-lg-right{float:right!important}.float-lg-none{float:none!important}}@media(min-width:1200px){.float-xl-left{float:left!important}.float-xl-right{float:right!important}.float-xl-none{float:none!important}}.position-static{position:static!important}.position-relative{position:relative!important}.position-absolute{position:absolute!important}.position-fixed{position:fixed!important}.position-sticky{position:-webkit-sticky!important;position:sticky!important}.fixed-top{position:fixed;top:0;right:0;left:0;z-index:1030}.fixed-bottom{position:fixed;right:0;bottom:0;left:0;z-index:1030}@supports((position:-webkit-sticky) or (position:sticky)){.sticky-top{position:-webkit-sticky;position:sticky;top:0;z-index:1020}}.sr-only{position:absolute;width:1px;height:1px;padding:0;overflow:hidden;clip:rect(0,0,0,0);white-space:nowrap;border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;overflow:visible;clip:auto;white-space:normal}.shadow-sm{-webkit-box-shadow:0 .125rem .25rem rgba(0,0,0,.075)!important;box-shadow:0 .125rem .25rem rgba(0,0,0,.075)!important}.shadow{-webkit-box-shadow:0 .5rem 1rem rgba(0,0,0,.15)!important;box-shadow:0 .5rem 1rem rgba(0,0,0,.15)!important}.shadow-lg{-webkit-box-shadow:0 1rem 3rem rgba(0,0,0,.175)!important;box-shadow:0 1rem 3rem 
rgba(0,0,0,.175)!important}.shadow-none{-webkit-box-shadow:none!important;box-shadow:none!important}.w-25{width:25%!important}.w-50{width:50%!important}.w-75{width:75%!important}.w-100{width:100%!important}.w-auto{width:auto!important}.h-25{height:25%!important}.h-50{height:50%!important}.h-75{height:75%!important}.h-100{height:100%!important}.h-auto{height:auto!important}.mw-100{max-width:100%!important}.mh-100{max-height:100%!important}.m-0{margin:0!important}.mt-0,.my-0{margin-top:0!important}.mr-0,.mx-0{margin-right:0!important}.mb-0,.my-0{margin-bottom:0!important}.ml-0,.mx-0{margin-left:0!important}.m-1{margin:.25rem!important}.mt-1,.my-1{margin-top:.25rem!important}.mr-1,.mx-1{margin-right:.25rem!important}.mb-1,.my-1{margin-bottom:.25rem!important}.ml-1,.mx-1{margin-left:.25rem!important}.m-2{margin:.5rem!important}.mt-2,.my-2{margin-top:.5rem!important}.mr-2,.mx-2{margin-right:.5rem!important}.mb-2,.my-2{margin-bottom:.5rem!important}.ml-2,.mx-2{margin-left:.5rem!important}.m-3{margin:1rem!important}.mt-3,.my-3{margin-top:1rem!important}.mr-3,.mx-3{margin-right:1rem!important}.mb-3,.my-3{margin-bottom:1rem!important}.ml-3,.mx-3{margin-left:1rem!important}.m-4{margin:1.5rem!important}.mt-4,.my-4{margin-top:1.5rem!important}.mr-4,.mx-4{margin-right:1.5rem!important}.mb-4,.my-4{margin-bottom:1.5rem!important}.ml-4,.mx-4{margin-left:1.5rem!important}.m-5{margin:3rem!important}.mt-5,.my-5{margin-top:3rem!important}.mr-5,.mx-5{margin-right:3rem!important}.mb-5,.my-5{margin-bottom:3rem!important}.ml-5,.mx-5{margin-left:3rem!important}.p-0{padding:0!important}.pt-0,.py-0{padding-top:0!important}.pr-0,.px-0{padding-right:0!important}.pb-0,.py-0{padding-bottom:0!important}.pl-0,.px-0{padding-left:0!important}.p-1{padding:.25rem!important}.pt-1,.py-1{padding-top:.25rem!important}.pr-1,.px-1{padding-right:.25rem!important}.pb-1,.py-1{padding-bottom:.25rem!important}.pl-1,.px-1{padding-left:.25rem!important}.p-2{padding:.5rem!important}.pt-2,.py-2{padding-top:.5rem!important}.pr-2,.px-2{padding-right:.5rem!important}.pb-2,.py-2{padding-bottom:.5rem!important}.pl-2,.px-2{padding-left:.5rem!important}.p-3{padding:1rem!important}.pt-3,.py-3{padding-top:1rem!important}.pr-3,.px-3{padding-right:1rem!important}.pb-3,.py-3{padding-bottom:1rem!important}.pl-3,.px-3{padding-left:1rem!important}.p-4{padding:1.5rem!important}.pt-4,.py-4{padding-top:1.5rem!important}.pr-4,.px-4{padding-right:1.5rem!important}.pb-4,.py-4{padding-bottom:1.5rem!important}.pl-4,.px-4{padding-left:1.5rem!important}.p-5{padding:3rem!important}.pt-5,.py-5{padding-top:3rem!important}.pr-5,.px-5{padding-right:3rem!important}.pb-5,.py-5{padding-bottom:3rem!important}.pl-5,.px-5{padding-left:3rem!important}.m-auto{margin:auto!important}.mt-auto,.my-auto{margin-top:auto!important}.mr-auto,.mx-auto{margin-right:auto!important}.mb-auto,.my-auto{margin-bottom:auto!important}.ml-auto,.mx-auto{margin-left:auto!important}@media(min-width:576px){.m-sm-0{margin:0!important}.mt-sm-0,.my-sm-0{margin-top:0!important}.mr-sm-0,.mx-sm-0{margin-right:0!important}.mb-sm-0,.my-sm-0{margin-bottom:0!important}.ml-sm-0,.mx-sm-0{margin-left:0!important}.m-sm-1{margin:.25rem!important}.mt-sm-1,.my-sm-1{margin-top:.25rem!important}.mr-sm-1,.mx-sm-1{margin-right:.25rem!important}.mb-sm-1,.my-sm-1{margin-bottom:.25rem!important}.ml-sm-1,.mx-sm-1{margin-left:.25rem!important}.m-sm-2{margin:.5rem!important}.mt-sm-2,.my-sm-2{margin-top:.5rem!important}.mr-sm-2,.mx-sm-2{margin-right:.5rem!important}.mb-sm-2,.my-sm-2{margin-bottom:.5rem!important}.ml-sm-2,.mx-sm-2
{margin-left:.5rem!important}.m-sm-3{margin:1rem!important}.mt-sm-3,.my-sm-3{margin-top:1rem!important}.mr-sm-3,.mx-sm-3{margin-right:1rem!important}.mb-sm-3,.my-sm-3{margin-bottom:1rem!important}.ml-sm-3,.mx-sm-3{margin-left:1rem!important}.m-sm-4{margin:1.5rem!important}.mt-sm-4,.my-sm-4{margin-top:1.5rem!important}.mr-sm-4,.mx-sm-4{margin-right:1.5rem!important}.mb-sm-4,.my-sm-4{margin-bottom:1.5rem!important}.ml-sm-4,.mx-sm-4{margin-left:1.5rem!important}.m-sm-5{margin:3rem!important}.mt-sm-5,.my-sm-5{margin-top:3rem!important}.mr-sm-5,.mx-sm-5{margin-right:3rem!important}.mb-sm-5,.my-sm-5{margin-bottom:3rem!important}.ml-sm-5,.mx-sm-5{margin-left:3rem!important}.p-sm-0{padding:0!important}.pt-sm-0,.py-sm-0{padding-top:0!important}.pr-sm-0,.px-sm-0{padding-right:0!important}.pb-sm-0,.py-sm-0{padding-bottom:0!important}.pl-sm-0,.px-sm-0{padding-left:0!important}.p-sm-1{padding:.25rem!important}.pt-sm-1,.py-sm-1{padding-top:.25rem!important}.pr-sm-1,.px-sm-1{padding-right:.25rem!important}.pb-sm-1,.py-sm-1{padding-bottom:.25rem!important}.pl-sm-1,.px-sm-1{padding-left:.25rem!important}.p-sm-2{padding:.5rem!important}.pt-sm-2,.py-sm-2{padding-top:.5rem!important}.pr-sm-2,.px-sm-2{padding-right:.5rem!important}.pb-sm-2,.py-sm-2{padding-bottom:.5rem!important}.pl-sm-2,.px-sm-2{padding-left:.5rem!important}.p-sm-3{padding:1rem!important}.pt-sm-3,.py-sm-3{padding-top:1rem!important}.pr-sm-3,.px-sm-3{padding-right:1rem!important}.pb-sm-3,.py-sm-3{padding-bottom:1rem!important}.pl-sm-3,.px-sm-3{padding-left:1rem!important}.p-sm-4{padding:1.5rem!important}.pt-sm-4,.py-sm-4{padding-top:1.5rem!important}.pr-sm-4,.px-sm-4{padding-right:1.5rem!important}.pb-sm-4,.py-sm-4{padding-bottom:1.5rem!important}.pl-sm-4,.px-sm-4{padding-left:1.5rem!important}.p-sm-5{padding:3rem!important}.pt-sm-5,.py-sm-5{padding-top:3rem!important}.pr-sm-5,.px-sm-5{padding-right:3rem!important}.pb-sm-5,.py-sm-5{padding-bottom:3rem!important}.pl-sm-5,.px-sm-5{padding-left:3rem!important}.m-sm-auto{margin:auto!important}.mt-sm-auto,.my-sm-auto{margin-top:auto!important}.mr-sm-auto,.mx-sm-auto{margin-right:auto!important}.mb-sm-auto,.my-sm-auto{margin-bottom:auto!important}.ml-sm-auto,.mx-sm-auto{margin-left:auto!important}}@media(min-width:768px){.m-md-0{margin:0!important}.mt-md-0,.my-md-0{margin-top:0!important}.mr-md-0,.mx-md-0{margin-right:0!important}.mb-md-0,.my-md-0{margin-bottom:0!important}.ml-md-0,.mx-md-0{margin-left:0!important}.m-md-1{margin:.25rem!important}.mt-md-1,.my-md-1{margin-top:.25rem!important}.mr-md-1,.mx-md-1{margin-right:.25rem!important}.mb-md-1,.my-md-1{margin-bottom:.25rem!important}.ml-md-1,.mx-md-1{margin-left:.25rem!important}.m-md-2{margin:.5rem!important}.mt-md-2,.my-md-2{margin-top:.5rem!important}.mr-md-2,.mx-md-2{margin-right:.5rem!important}.mb-md-2,.my-md-2{margin-bottom:.5rem!important}.ml-md-2,.mx-md-2{margin-left:.5rem!important}.m-md-3{margin:1rem!important}.mt-md-3,.my-md-3{margin-top:1rem!important}.mr-md-3,.mx-md-3{margin-right:1rem!important}.mb-md-3,.my-md-3{margin-bottom:1rem!important}.ml-md-3,.mx-md-3{margin-left:1rem!important}.m-md-4{margin:1.5rem!important}.mt-md-4,.my-md-4{margin-top:1.5rem!important}.mr-md-4,.mx-md-4{margin-right:1.5rem!important}.mb-md-4,.my-md-4{margin-bottom:1.5rem!important}.ml-md-4,.mx-md-4{margin-left:1.5rem!important}.m-md-5{margin:3rem!important}.mt-md-5,.my-md-5{margin-top:3rem!important}.mr-md-5,.mx-md-5{margin-right:3rem!important}.mb-md-5,.my-md-5{margin-bottom:3rem!important}.ml-md-5,.mx-md-5{margin-left:3rem!important}.p-md-0{padding:0!impo
rtant}.pt-md-0,.py-md-0{padding-top:0!important}.pr-md-0,.px-md-0{padding-right:0!important}.pb-md-0,.py-md-0{padding-bottom:0!important}.pl-md-0,.px-md-0{padding-left:0!important}.p-md-1{padding:.25rem!important}.pt-md-1,.py-md-1{padding-top:.25rem!important}.pr-md-1,.px-md-1{padding-right:.25rem!important}.pb-md-1,.py-md-1{padding-bottom:.25rem!important}.pl-md-1,.px-md-1{padding-left:.25rem!important}.p-md-2{padding:.5rem!important}.pt-md-2,.py-md-2{padding-top:.5rem!important}.pr-md-2,.px-md-2{padding-right:.5rem!important}.pb-md-2,.py-md-2{padding-bottom:.5rem!important}.pl-md-2,.px-md-2{padding-left:.5rem!important}.p-md-3{padding:1rem!important}.pt-md-3,.py-md-3{padding-top:1rem!important}.pr-md-3,.px-md-3{padding-right:1rem!important}.pb-md-3,.py-md-3{padding-bottom:1rem!important}.pl-md-3,.px-md-3{padding-left:1rem!important}.p-md-4{padding:1.5rem!important}.pt-md-4,.py-md-4{padding-top:1.5rem!important}.pr-md-4,.px-md-4{padding-right:1.5rem!important}.pb-md-4,.py-md-4{padding-bottom:1.5rem!important}.pl-md-4,.px-md-4{padding-left:1.5rem!important}.p-md-5{padding:3rem!important}.pt-md-5,.py-md-5{padding-top:3rem!important}.pr-md-5,.px-md-5{padding-right:3rem!important}.pb-md-5,.py-md-5{padding-bottom:3rem!important}.pl-md-5,.px-md-5{padding-left:3rem!important}.m-md-auto{margin:auto!important}.mt-md-auto,.my-md-auto{margin-top:auto!important}.mr-md-auto,.mx-md-auto{margin-right:auto!important}.mb-md-auto,.my-md-auto{margin-bottom:auto!important}.ml-md-auto,.mx-md-auto{margin-left:auto!important}}@media(min-width:992px){.m-lg-0{margin:0!important}.mt-lg-0,.my-lg-0{margin-top:0!important}.mr-lg-0,.mx-lg-0{margin-right:0!important}.mb-lg-0,.my-lg-0{margin-bottom:0!important}.ml-lg-0,.mx-lg-0{margin-left:0!important}.m-lg-1{margin:.25rem!important}.mt-lg-1,.my-lg-1{margin-top:.25rem!important}.mr-lg-1,.mx-lg-1{margin-right:.25rem!important}.mb-lg-1,.my-lg-1{margin-bottom:.25rem!important}.ml-lg-1,.mx-lg-1{margin-left:.25rem!important}.m-lg-2{margin:.5rem!important}.mt-lg-2,.my-lg-2{margin-top:.5rem!important}.mr-lg-2,.mx-lg-2{margin-right:.5rem!important}.mb-lg-2,.my-lg-2{margin-bottom:.5rem!important}.ml-lg-2,.mx-lg-2{margin-left:.5rem!important}.m-lg-3{margin:1rem!important}.mt-lg-3,.my-lg-3{margin-top:1rem!important}.mr-lg-3,.mx-lg-3{margin-right:1rem!important}.mb-lg-3,.my-lg-3{margin-bottom:1rem!important}.ml-lg-3,.mx-lg-3{margin-left:1rem!important}.m-lg-4{margin:1.5rem!important}.mt-lg-4,.my-lg-4{margin-top:1.5rem!important}.mr-lg-4,.mx-lg-4{margin-right:1.5rem!important}.mb-lg-4,.my-lg-4{margin-bottom:1.5rem!important}.ml-lg-4,.mx-lg-4{margin-left:1.5rem!important}.m-lg-5{margin:3rem!important}.mt-lg-5,.my-lg-5{margin-top:3rem!important}.mr-lg-5,.mx-lg-5{margin-right:3rem!important}.mb-lg-5,.my-lg-5{margin-bottom:3rem!important}.ml-lg-5,.mx-lg-5{margin-left:3rem!important}.p-lg-0{padding:0!important}.pt-lg-0,.py-lg-0{padding-top:0!important}.pr-lg-0,.px-lg-0{padding-right:0!important}.pb-lg-0,.py-lg-0{padding-bottom:0!important}.pl-lg-0,.px-lg-0{padding-left:0!important}.p-lg-1{padding:.25rem!important}.pt-lg-1,.py-lg-1{padding-top:.25rem!important}.pr-lg-1,.px-lg-1{padding-right:.25rem!important}.pb-lg-1,.py-lg-1{padding-bottom:.25rem!important}.pl-lg-1,.px-lg-1{padding-left:.25rem!important}.p-lg-2{padding:.5rem!important}.pt-lg-2,.py-lg-2{padding-top:.5rem!important}.pr-lg-2,.px-lg-2{padding-right:.5rem!important}.pb-lg-2,.py-lg-2{padding-bottom:.5rem!important}.pl-lg-2,.px-lg-2{padding-left:.5rem!important}.p-lg-3{padding:1rem!important}.pt-lg-3,.py-lg-3{padding-top:1rem!im
portant}.pr-lg-3,.px-lg-3{padding-right:1rem!important}.pb-lg-3,.py-lg-3{padding-bottom:1rem!important}.pl-lg-3,.px-lg-3{padding-left:1rem!important}.p-lg-4{padding:1.5rem!important}.pt-lg-4,.py-lg-4{padding-top:1.5rem!important}.pr-lg-4,.px-lg-4{padding-right:1.5rem!important}.pb-lg-4,.py-lg-4{padding-bottom:1.5rem!important}.pl-lg-4,.px-lg-4{padding-left:1.5rem!important}.p-lg-5{padding:3rem!important}.pt-lg-5,.py-lg-5{padding-top:3rem!important}.pr-lg-5,.px-lg-5{padding-right:3rem!important}.pb-lg-5,.py-lg-5{padding-bottom:3rem!important}.pl-lg-5,.px-lg-5{padding-left:3rem!important}.m-lg-auto{margin:auto!important}.mt-lg-auto,.my-lg-auto{margin-top:auto!important}.mr-lg-auto,.mx-lg-auto{margin-right:auto!important}.mb-lg-auto,.my-lg-auto{margin-bottom:auto!important}.ml-lg-auto,.mx-lg-auto{margin-left:auto!important}}@media(min-width:1200px){.m-xl-0{margin:0!important}.mt-xl-0,.my-xl-0{margin-top:0!important}.mr-xl-0,.mx-xl-0{margin-right:0!important}.mb-xl-0,.my-xl-0{margin-bottom:0!important}.ml-xl-0,.mx-xl-0{margin-left:0!important}.m-xl-1{margin:.25rem!important}.mt-xl-1,.my-xl-1{margin-top:.25rem!important}.mr-xl-1,.mx-xl-1{margin-right:.25rem!important}.mb-xl-1,.my-xl-1{margin-bottom:.25rem!important}.ml-xl-1,.mx-xl-1{margin-left:.25rem!important}.m-xl-2{margin:.5rem!important}.mt-xl-2,.my-xl-2{margin-top:.5rem!important}.mr-xl-2,.mx-xl-2{margin-right:.5rem!important}.mb-xl-2,.my-xl-2{margin-bottom:.5rem!important}.ml-xl-2,.mx-xl-2{margin-left:.5rem!important}.m-xl-3{margin:1rem!important}.mt-xl-3,.my-xl-3{margin-top:1rem!important}.mr-xl-3,.mx-xl-3{margin-right:1rem!important}.mb-xl-3,.my-xl-3{margin-bottom:1rem!important}.ml-xl-3,.mx-xl-3{margin-left:1rem!important}.m-xl-4{margin:1.5rem!important}.mt-xl-4,.my-xl-4{margin-top:1.5rem!important}.mr-xl-4,.mx-xl-4{margin-right:1.5rem!important}.mb-xl-4,.my-xl-4{margin-bottom:1.5rem!important}.ml-xl-4,.mx-xl-4{margin-left:1.5rem!important}.m-xl-5{margin:3rem!important}.mt-xl-5,.my-xl-5{margin-top:3rem!important}.mr-xl-5,.mx-xl-5{margin-right:3rem!important}.mb-xl-5,.my-xl-5{margin-bottom:3rem!important}.ml-xl-5,.mx-xl-5{margin-left:3rem!important}.p-xl-0{padding:0!important}.pt-xl-0,.py-xl-0{padding-top:0!important}.pr-xl-0,.px-xl-0{padding-right:0!important}.pb-xl-0,.py-xl-0{padding-bottom:0!important}.pl-xl-0,.px-xl-0{padding-left:0!important}.p-xl-1{padding:.25rem!important}.pt-xl-1,.py-xl-1{padding-top:.25rem!important}.pr-xl-1,.px-xl-1{padding-right:.25rem!important}.pb-xl-1,.py-xl-1{padding-bottom:.25rem!important}.pl-xl-1,.px-xl-1{padding-left:.25rem!important}.p-xl-2{padding:.5rem!important}.pt-xl-2,.py-xl-2{padding-top:.5rem!important}.pr-xl-2,.px-xl-2{padding-right:.5rem!important}.pb-xl-2,.py-xl-2{padding-bottom:.5rem!important}.pl-xl-2,.px-xl-2{padding-left:.5rem!important}.p-xl-3{padding:1rem!important}.pt-xl-3,.py-xl-3{padding-top:1rem!important}.pr-xl-3,.px-xl-3{padding-right:1rem!important}.pb-xl-3,.py-xl-3{padding-bottom:1rem!important}.pl-xl-3,.px-xl-3{padding-left:1rem!important}.p-xl-4{padding:1.5rem!important}.pt-xl-4,.py-xl-4{padding-top:1.5rem!important}.pr-xl-4,.px-xl-4{padding-right:1.5rem!important}.pb-xl-4,.py-xl-4{padding-bottom:1.5rem!important}.pl-xl-4,.px-xl-4{padding-left:1.5rem!important}.p-xl-5{padding:3rem!important}.pt-xl-5,.py-xl-5{padding-top:3rem!important}.pr-xl-5,.px-xl-5{padding-right:3rem!important}.pb-xl-5,.py-xl-5{padding-bottom:3rem!important}.pl-xl-5,.px-xl-5{padding-left:3rem!important}.m-xl-auto{margin:auto!important}.mt-xl-auto,.my-xl-auto{margin-top:auto!important}.mr-xl-auto,.mx-xl-a
uto{margin-right:auto!important}.mb-xl-auto,.my-xl-auto{margin-bottom:auto!important}.ml-xl-auto,.mx-xl-auto{margin-left:auto!important}}.text-monospace{font-family:SFMono-Regular,Menlo,Monaco,Consolas,liberation mono,courier new,monospace}.text-justify{text-align:justify!important}.text-nowrap{white-space:nowrap!important}.text-truncate{overflow:hidden;text-overflow:ellipsis;white-space:nowrap}.text-left{text-align:left!important}.text-right{text-align:right!important}.text-center{text-align:center!important}@media(min-width:576px){.text-sm-left{text-align:left!important}.text-sm-right{text-align:right!important}.text-sm-center{text-align:center!important}}@media(min-width:768px){.text-md-left{text-align:left!important}.text-md-right{text-align:right!important}.text-md-center{text-align:center!important}}@media(min-width:992px){.text-lg-left{text-align:left!important}.text-lg-right{text-align:right!important}.text-lg-center{text-align:center!important}}@media(min-width:1200px){.text-xl-left{text-align:left!important}.text-xl-right{text-align:right!important}.text-xl-center{text-align:center!important}}.text-lowercase{text-transform:lowercase!important}.text-uppercase{text-transform:uppercase!important}.text-capitalize{text-transform:capitalize!important}.font-weight-light{font-weight:300!important}.font-weight-normal{font-weight:400!important}.font-weight-bold{font-weight:700!important}.font-italic{font-style:italic!important}.text-white{color:#fff!important}.text-primary{color:#30638e!important}a.text-primary:hover,a.text-primary:focus{color:#234868!important}.text-secondary{color:#ffa630!important}a.text-secondary:hover,a.text-secondary:focus{color:#fc9000!important}.text-success{color:#3772ff!important}a.text-success:hover,a.text-success:focus{color:#044eff!important}.text-info{color:#c0e0de!important}a.text-info:hover,a.text-info:focus{color:#9ecfcc!important}.text-warning{color:#ed6a5a!important}a.text-warning:hover,a.text-warning:focus{color:#e8402c!important}.text-danger{color:#ed6a5a!important}a.text-danger:hover,a.text-danger:focus{color:#e8402c!important}.text-light{color:#d3f3ee!important}a.text-light:hover,a.text-light:focus{color:#abe8df!important}.text-dark{color:#403f4c!important}a.text-dark:hover,a.text-dark:focus{color:#292830!important}.text-body{color:#222!important}.text-muted{color:#888!important}.text-black-50{color:rgba(0,0,0,.5)!important}.text-white-50{color:rgba(255,255,255,.5)!important}.text-hide{font:0/0 a;color:transparent;text-shadow:none;background-color:transparent;border:0}.visible{visibility:visible!important}.invisible{visibility:hidden!important}@media print{*,*::before,*::after{text-shadow:none!important;-webkit-box-shadow:none!important;box-shadow:none!important}a:not(.btn){text-decoration:underline}abbr[title]::after{content:" (" attr(title)")"}pre{white-space:pre-wrap!important}pre,blockquote{border:1px solid #adb5bd;page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}p,h2,h3{orphans:3;widows:3}h2,h3{page-break-after:avoid}@page{size:a3}body{min-width:992px!important}.container{min-width:992px!important}.navbar{display:none}.badge{border:1px solid #000}.table,.td-content>table,.td-box .row.section>table{border-collapse:collapse!important}.table td,.td-content>table td,.td-box .row.section>table td,.table th,.td-content>table th,.td-box .row.section>table th{background-color:#fff!important}.table-bordered th,.table-bordered td{border:1px solid #dee2e6!important}.table-dark{color:inherit}.table-dark th,.table-dark 
td,.table-dark thead th,.table-dark tbody+tbody{border-color:#dee2e6}.table .thead-dark th,.td-content>table .thead-dark th,.td-box .row.section>table .thead-dark th{color:inherit;border-color:#dee2e6}}/*!* Font Awesome Free 5.10.1 by @fontawesome - https://fontawesome.com * License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License)*/.fa,.fas,.far,.fal,.fad,.fab{-moz-osx-font-smoothing:grayscale;-webkit-font-smoothing:antialiased;display:inline-block;font-style:normal;font-variant:normal;text-rendering:auto;line-height:1}.fa-lg{font-size:1.33333333em;line-height:.75em;vertical-align:-.0667em}.fa-xs{font-size:.75em}.fa-sm{font-size:.875em}.fa-1x{font-size:1em}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-6x{font-size:6em}.fa-7x{font-size:7em}.fa-8x{font-size:8em}.fa-9x{font-size:9em}.fa-10x{font-size:10em}.fa-fw{text-align:center;width:1.25em}.fa-ul{list-style-type:none;margin-left:2.5em;padding-left:0}.fa-ul>li{position:relative}.fa-li{left:-2em;position:absolute;text-align:center;width:2em;line-height:inherit}.fa-border{border:solid .08em #eee;border-radius:.1em;padding:.2em .25em .15em}.fa-pull-left{float:left}.fa-pull-right{float:right}.fa.fa-pull-left,.fas.fa-pull-left,.far.fa-pull-left,.fal.fa-pull-left,.fab.fa-pull-left{margin-right:.3em}.fa.fa-pull-right,.fas.fa-pull-right,.far.fa-pull-right,.fal.fa-pull-right,.fab.fa-pull-right{margin-left:.3em}.fa-spin{-webkit-animation:fa-spin 2s infinite linear;-o-animation:fa-spin 2s infinite linear;animation:fa-spin 2s infinite linear}.fa-pulse{-webkit-animation:fa-spin 1s infinite steps(8);-o-animation:fa-spin 1s infinite steps(8);animation:fa-spin 1s infinite steps(8)}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0);transform:rotate(0)}100%{-webkit-transform:rotate(360deg);transform:rotate(360deg)}}@-o-keyframes fa-spin{0%{-o-transform:rotate(0);transform:rotate(0)}100%{-o-transform:rotate(360deg);transform:rotate(360deg)}}@keyframes fa-spin{0%{-webkit-transform:rotate(0);-o-transform:rotate(0);transform:rotate(0)}100%{-webkit-transform:rotate(360deg);-o-transform:rotate(360deg);transform:rotate(360deg)}}.fa-rotate-90{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";-webkit-transform:rotate(90deg);-ms-transform:rotate(90deg);-o-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";-webkit-transform:rotate(180deg);-ms-transform:rotate(180deg);-o-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";-webkit-transform:rotate(270deg);-ms-transform:rotate(270deg);-o-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";-webkit-transform:scale(-1,1);-ms-transform:scale(-1,1);-o-transform:scale(-1,1);transform:scale(-1,1)}.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)";-webkit-transform:scale(1,-1);-ms-transform:scale(1,-1);-o-transform:scale(1,-1);transform:scale(1,-1)}.fa-flip-both,.fa-flip-horizontal.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)";-webkit-transform:scale(-1,-1);-ms-transform:scale(-1,-1);-o-transform:scale(-1,-1);transform:scale(-1,-1)}:root .fa-rotate-90,:root .fa-rotate-180,:root .fa-rotate-270,:root .fa-flip-horizontal,:root .fa-flip-vertical,:root 
.fa-flip-both{-webkit-filter:none;filter:none}.fa-stack{display:inline-block;height:2em;line-height:2em;position:relative;vertical-align:middle;width:2.5em}.fa-stack-1x,.fa-stack-2x{left:0;position:absolute;text-align:center;width:100%}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-500px:before{content:"\f26e"}.fa-accessible-icon:before{content:"\f368"}.fa-accusoft:before{content:"\f369"}.fa-acquisitions-incorporated:before{content:"\f6af"}.fa-ad:before{content:"\f641"}.fa-address-book:before{content:"\f2b9"}.fa-address-card:before{content:"\f2bb"}.fa-adjust:before{content:"\f042"}.fa-adn:before{content:"\f170"}.fa-adobe:before{content:"\f778"}.fa-adversal:before{content:"\f36a"}.fa-affiliatetheme:before{content:"\f36b"}.fa-air-freshener:before{content:"\f5d0"}.fa-airbnb:before{content:"\f834"}.fa-algolia:before{content:"\f36c"}.fa-align-center:before{content:"\f037"}.fa-align-justify:before{content:"\f039"}.fa-align-left:before{content:"\f036"}.fa-align-right:before{content:"\f038"}.fa-alipay:before{content:"\f642"}.fa-allergies:before{content:"\f461"}.fa-amazon:before{content:"\f270"}.fa-amazon-pay:before{content:"\f42c"}.fa-ambulance:before{content:"\f0f9"}.fa-american-sign-language-interpreting:before{content:"\f2a3"}.fa-amilia:before{content:"\f36d"}.fa-anchor:before{content:"\f13d"}.fa-android:before{content:"\f17b"}.fa-angellist:before{content:"\f209"}.fa-angle-double-down:before{content:"\f103"}.fa-angle-double-left:before{content:"\f100"}.fa-angle-double-right:before{content:"\f101"}.fa-angle-double-up:before{content:"\f102"}.fa-angle-down:before{content:"\f107"}.fa-angle-left:before{content:"\f104"}.fa-angle-right:before{content:"\f105"}.fa-angle-up:before{content:"\f106"}.fa-angry:before{content:"\f556"}.fa-angrycreative:before{content:"\f36e"}.fa-angular:before{content:"\f420"}.fa-ankh:before{content:"\f644"}.fa-app-store:before{content:"\f36f"}.fa-app-store-ios:before{content:"\f370"}.fa-apper:before{content:"\f371"}.fa-apple:before{content:"\f179"}.fa-apple-alt:before{content:"\f5d1"}.fa-apple-pay:before{content:"\f415"}.fa-archive:before{content:"\f187"}.fa-archway:before{content:"\f557"}.fa-arrow-alt-circle-down:before{content:"\f358"}.fa-arrow-alt-circle-left:before{content:"\f359"}.fa-arrow-alt-circle-right:before{content:"\f35a"}.fa-arrow-alt-circle-up:before{content:"\f35b"}.fa-arrow-circle-down:before{content:"\f0ab"}.fa-arrow-circle-left:before{content:"\f0a8"}.fa-arrow-circle-right:before{content:"\f0a9"}.fa-arrow-circle-up:before{content:"\f0aa"}.fa-arrow-down:before{content:"\f063"}.fa-arrow-left:before{content:"\f060"}.fa-arrow-right:before{content:"\f061"}.fa-arrow-up:before{content:"\f062"}.fa-arrows-alt:before{content:"\f0b2"}.fa-arrows-alt-h:before{content:"\f337"}.fa-arrows-alt-v:before{content:"\f338"}.fa-artstation:before{content:"\f77a"}.fa-assistive-listening-systems:before{content:"\f2a2"}.fa-asterisk:before{content:"\f069"}.fa-asymmetrik:before{content:"\f372"}.fa-at:before{content:"\f1fa"}.fa-atlas:before{content:"\f558"}.fa-atlassian:before{content:"\f77b"}.fa-atom:before{content:"\f5d2"}.fa-audible:before{content:"\f373"}.fa-audio-description:before{content:"\f29e"}.fa-autoprefixer:before{content:"\f41c"}.fa-avianex:before{content:"\f374"}.fa-aviato:before{content:"\f421"}.fa-award:before{content:"\f559"}.fa-aws:before{content:"\f375"}.fa-baby:before{content:"\f77c"}.fa-baby-carriage:before{content:"\f77d"}.fa-backspace:before{content:"\f55a"}.fa-backward:before{content:"\f04a"}.fa-bacon:before{content:"\f7e5"}.
fa-balance-scale:before{content:"\f24e"}.fa-balance-scale-left:before{content:"\f515"}.fa-balance-scale-right:before{content:"\f516"}.fa-ban:before{content:"\f05e"}.fa-band-aid:before{content:"\f462"}.fa-bandcamp:before{content:"\f2d5"}.fa-barcode:before{content:"\f02a"}.fa-bars:before{content:"\f0c9"}.fa-baseball-ball:before{content:"\f433"}.fa-basketball-ball:before{content:"\f434"}.fa-bath:before{content:"\f2cd"}.fa-battery-empty:before{content:"\f244"}.fa-battery-full:before{content:"\f240"}.fa-battery-half:before{content:"\f242"}.fa-battery-quarter:before{content:"\f243"}.fa-battery-three-quarters:before{content:"\f241"}.fa-battle-net:before{content:"\f835"}.fa-bed:before{content:"\f236"}.fa-beer:before{content:"\f0fc"}.fa-behance:before{content:"\f1b4"}.fa-behance-square:before{content:"\f1b5"}.fa-bell:before{content:"\f0f3"}.fa-bell-slash:before{content:"\f1f6"}.fa-bezier-curve:before{content:"\f55b"}.fa-bible:before{content:"\f647"}.fa-bicycle:before{content:"\f206"}.fa-biking:before{content:"\f84a"}.fa-bimobject:before{content:"\f378"}.fa-binoculars:before{content:"\f1e5"}.fa-biohazard:before{content:"\f780"}.fa-birthday-cake:before{content:"\f1fd"}.fa-bitbucket:before{content:"\f171"}.fa-bitcoin:before{content:"\f379"}.fa-bity:before{content:"\f37a"}.fa-black-tie:before{content:"\f27e"}.fa-blackberry:before{content:"\f37b"}.fa-blender:before{content:"\f517"}.fa-blender-phone:before{content:"\f6b6"}.fa-blind:before{content:"\f29d"}.fa-blog:before{content:"\f781"}.fa-blogger:before{content:"\f37c"}.fa-blogger-b:before{content:"\f37d"}.fa-bluetooth:before{content:"\f293"}.fa-bluetooth-b:before{content:"\f294"}.fa-bold:before{content:"\f032"}.fa-bolt:before{content:"\f0e7"}.fa-bomb:before{content:"\f1e2"}.fa-bone:before{content:"\f5d7"}.fa-bong:before{content:"\f55c"}.fa-book:before{content:"\f02d"}.fa-book-dead:before{content:"\f6b7"}.fa-book-medical:before{content:"\f7e6"}.fa-book-open:before{content:"\f518"}.fa-book-reader:before{content:"\f5da"}.fa-bookmark:before{content:"\f02e"}.fa-bootstrap:before{content:"\f836"}.fa-border-all:before{content:"\f84c"}.fa-border-none:before{content:"\f850"}.fa-border-style:before{content:"\f853"}.fa-bowling-ball:before{content:"\f436"}.fa-box:before{content:"\f466"}.fa-box-open:before{content:"\f49e"}.fa-boxes:before{content:"\f468"}.fa-braille:before{content:"\f2a1"}.fa-brain:before{content:"\f5dc"}.fa-bread-slice:before{content:"\f7ec"}.fa-briefcase:before{content:"\f0b1"}.fa-briefcase-medical:before{content:"\f469"}.fa-broadcast-tower:before{content:"\f519"}.fa-broom:before{content:"\f51a"}.fa-brush:before{content:"\f55d"}.fa-btc:before{content:"\f15a"}.fa-buffer:before{content:"\f837"}.fa-bug:before{content:"\f188"}.fa-building:before{content:"\f1ad"}.fa-bullhorn:before{content:"\f0a1"}.fa-bullseye:before{content:"\f140"}.fa-burn:before{content:"\f46a"}.fa-buromobelexperte:before{content:"\f37f"}.fa-bus:before{content:"\f207"}.fa-bus-alt:before{content:"\f55e"}.fa-business-time:before{content:"\f64a"}.fa-buysellads:before{content:"\f20d"}.fa-calculator:before{content:"\f1ec"}.fa-calendar:before{content:"\f133"}.fa-calendar-alt:before{content:"\f073"}.fa-calendar-check:before{content:"\f274"}.fa-calendar-day:before{content:"\f783"}.fa-calendar-minus:before{content:"\f272"}.fa-calendar-plus:before{content:"\f271"}.fa-calendar-times:before{content:"\f273"}.fa-calendar-week:before{content:"\f784"}.fa-camera:before{content:"\f030"}.fa-camera-retro:before{content:"\f083"}.fa-campground:before{content:"\f6bb"}.fa-canadian-maple-leaf:before{content:
"\f785"}.fa-candy-cane:before{content:"\f786"}.fa-cannabis:before{content:"\f55f"}.fa-capsules:before{content:"\f46b"}.fa-car:before{content:"\f1b9"}.fa-car-alt:before{content:"\f5de"}.fa-car-battery:before{content:"\f5df"}.fa-car-crash:before{content:"\f5e1"}.fa-car-side:before{content:"\f5e4"}.fa-caret-down:before{content:"\f0d7"}.fa-caret-left:before{content:"\f0d9"}.fa-caret-right:before{content:"\f0da"}.fa-caret-square-down:before{content:"\f150"}.fa-caret-square-left:before{content:"\f191"}.fa-caret-square-right:before{content:"\f152"}.fa-caret-square-up:before{content:"\f151"}.fa-caret-up:before{content:"\f0d8"}.fa-carrot:before{content:"\f787"}.fa-cart-arrow-down:before{content:"\f218"}.fa-cart-plus:before{content:"\f217"}.fa-cash-register:before{content:"\f788"}.fa-cat:before{content:"\f6be"}.fa-cc-amazon-pay:before{content:"\f42d"}.fa-cc-amex:before{content:"\f1f3"}.fa-cc-apple-pay:before{content:"\f416"}.fa-cc-diners-club:before{content:"\f24c"}.fa-cc-discover:before{content:"\f1f2"}.fa-cc-jcb:before{content:"\f24b"}.fa-cc-mastercard:before{content:"\f1f1"}.fa-cc-paypal:before{content:"\f1f4"}.fa-cc-stripe:before{content:"\f1f5"}.fa-cc-visa:before{content:"\f1f0"}.fa-centercode:before{content:"\f380"}.fa-centos:before{content:"\f789"}.fa-certificate:before{content:"\f0a3"}.fa-chair:before{content:"\f6c0"}.fa-chalkboard:before{content:"\f51b"}.fa-chalkboard-teacher:before{content:"\f51c"}.fa-charging-station:before{content:"\f5e7"}.fa-chart-area:before{content:"\f1fe"}.fa-chart-bar:before{content:"\f080"}.fa-chart-line:before{content:"\f201"}.fa-chart-pie:before{content:"\f200"}.fa-check:before{content:"\f00c"}.fa-check-circle:before{content:"\f058"}.fa-check-double:before{content:"\f560"}.fa-check-square:before{content:"\f14a"}.fa-cheese:before{content:"\f7ef"}.fa-chess:before{content:"\f439"}.fa-chess-bishop:before{content:"\f43a"}.fa-chess-board:before{content:"\f43c"}.fa-chess-king:before{content:"\f43f"}.fa-chess-knight:before{content:"\f441"}.fa-chess-pawn:before{content:"\f443"}.fa-chess-queen:before{content:"\f445"}.fa-chess-rook:before{content:"\f447"}.fa-chevron-circle-down:before{content:"\f13a"}.fa-chevron-circle-left:before{content:"\f137"}.fa-chevron-circle-right:before{content:"\f138"}.fa-chevron-circle-up:before{content:"\f139"}.fa-chevron-down:before{content:"\f078"}.fa-chevron-left:before{content:"\f053"}.fa-chevron-right:before{content:"\f054"}.fa-chevron-up:before{content:"\f077"}.fa-child:before{content:"\f1ae"}.fa-chrome:before{content:"\f268"}.fa-chromecast:before{content:"\f838"}.fa-church:before{content:"\f51d"}.fa-circle:before{content:"\f111"}.fa-circle-notch:before{content:"\f1ce"}.fa-city:before{content:"\f64f"}.fa-clinic-medical:before{content:"\f7f2"}.fa-clipboard:before{content:"\f328"}.fa-clipboard-check:before{content:"\f46c"}.fa-clipboard-list:before{content:"\f46d"}.fa-clock:before{content:"\f017"}.fa-clone:before{content:"\f24d"}.fa-closed-captioning:before{content:"\f20a"}.fa-cloud:before{content:"\f0c2"}.fa-cloud-download-alt:before{content:"\f381"}.fa-cloud-meatball:before{content:"\f73b"}.fa-cloud-moon:before{content:"\f6c3"}.fa-cloud-moon-rain:before{content:"\f73c"}.fa-cloud-rain:before{content:"\f73d"}.fa-cloud-showers-heavy:before{content:"\f740"}.fa-cloud-sun:before{content:"\f6c4"}.fa-cloud-sun-rain:before{content:"\f743"}.fa-cloud-upload-alt:before{content:"\f382"}.fa-cloudscale:before{content:"\f383"}.fa-cloudsmith:before{content:"\f384"}.fa-cloudversify:before{content:"\f385"}.fa-cocktail:before{content:"\f561"}.fa-code:before{cont
ent:"\f121"}.fa-code-branch:before{content:"\f126"}.fa-codepen:before{content:"\f1cb"}.fa-codiepie:before{content:"\f284"}.fa-coffee:before{content:"\f0f4"}.fa-cog:before{content:"\f013"}.fa-cogs:before{content:"\f085"}.fa-coins:before{content:"\f51e"}.fa-columns:before{content:"\f0db"}.fa-comment:before{content:"\f075"}.fa-comment-alt:before{content:"\f27a"}.fa-comment-dollar:before{content:"\f651"}.fa-comment-dots:before{content:"\f4ad"}.fa-comment-medical:before{content:"\f7f5"}.fa-comment-slash:before{content:"\f4b3"}.fa-comments:before{content:"\f086"}.fa-comments-dollar:before{content:"\f653"}.fa-compact-disc:before{content:"\f51f"}.fa-compass:before{content:"\f14e"}.fa-compress:before{content:"\f066"}.fa-compress-arrows-alt:before{content:"\f78c"}.fa-concierge-bell:before{content:"\f562"}.fa-confluence:before{content:"\f78d"}.fa-connectdevelop:before{content:"\f20e"}.fa-contao:before{content:"\f26d"}.fa-cookie:before{content:"\f563"}.fa-cookie-bite:before{content:"\f564"}.fa-copy:before{content:"\f0c5"}.fa-copyright:before{content:"\f1f9"}.fa-cotton-bureau:before{content:"\f89e"}.fa-couch:before{content:"\f4b8"}.fa-cpanel:before{content:"\f388"}.fa-creative-commons:before{content:"\f25e"}.fa-creative-commons-by:before{content:"\f4e7"}.fa-creative-commons-nc:before{content:"\f4e8"}.fa-creative-commons-nc-eu:before{content:"\f4e9"}.fa-creative-commons-nc-jp:before{content:"\f4ea"}.fa-creative-commons-nd:before{content:"\f4eb"}.fa-creative-commons-pd:before{content:"\f4ec"}.fa-creative-commons-pd-alt:before{content:"\f4ed"}.fa-creative-commons-remix:before{content:"\f4ee"}.fa-creative-commons-sa:before{content:"\f4ef"}.fa-creative-commons-sampling:before{content:"\f4f0"}.fa-creative-commons-sampling-plus:before{content:"\f4f1"}.fa-creative-commons-share:before{content:"\f4f2"}.fa-creative-commons-zero:before{content:"\f4f3"}.fa-credit-card:before{content:"\f09d"}.fa-critical-role:before{content:"\f6c9"}.fa-crop:before{content:"\f125"}.fa-crop-alt:before{content:"\f565"}.fa-cross:before{content:"\f654"}.fa-crosshairs:before{content:"\f05b"}.fa-crow:before{content:"\f520"}.fa-crown:before{content:"\f521"}.fa-crutch:before{content:"\f7f7"}.fa-css3:before{content:"\f13c"}.fa-css3-alt:before{content:"\f38b"}.fa-cube:before{content:"\f1b2"}.fa-cubes:before{content:"\f1b3"}.fa-cut:before{content:"\f0c4"}.fa-cuttlefish:before{content:"\f38c"}.fa-d-and-d:before{content:"\f38d"}.fa-d-and-d-beyond:before{content:"\f6ca"}.fa-dashcube:before{content:"\f210"}.fa-database:before{content:"\f1c0"}.fa-deaf:before{content:"\f2a4"}.fa-delicious:before{content:"\f1a5"}.fa-democrat:before{content:"\f747"}.fa-deploydog:before{content:"\f38e"}.fa-deskpro:before{content:"\f38f"}.fa-desktop:before{content:"\f108"}.fa-dev:before{content:"\f6cc"}.fa-deviantart:before{content:"\f1bd"}.fa-dharmachakra:before{content:"\f655"}.fa-dhl:before{content:"\f790"}.fa-diagnoses:before{content:"\f470"}.fa-diaspora:before{content:"\f791"}.fa-dice:before{content:"\f522"}.fa-dice-d20:before{content:"\f6cf"}.fa-dice-d6:before{content:"\f6d1"}.fa-dice-five:before{content:"\f523"}.fa-dice-four:before{content:"\f524"}.fa-dice-one:before{content:"\f525"}.fa-dice-six:before{content:"\f526"}.fa-dice-three:before{content:"\f527"}.fa-dice-two:before{content:"\f528"}.fa-digg:before{content:"\f1a6"}.fa-digital-ocean:before{content:"\f391"}.fa-digital-tachograph:before{content:"\f566"}.fa-directions:before{content:"\f5eb"}.fa-discord:before{content:"\f392"}.fa-discourse:before{content:"\f393"}.fa-divide:before{content:"\f529"}.fa-dizzy:befor
e{content:"\f567"}.fa-dna:before{content:"\f471"}.fa-dochub:before{content:"\f394"}.fa-docker:before{content:"\f395"}.fa-dog:before{content:"\f6d3"}.fa-dollar-sign:before{content:"\f155"}.fa-dolly:before{content:"\f472"}.fa-dolly-flatbed:before{content:"\f474"}.fa-donate:before{content:"\f4b9"}.fa-door-closed:before{content:"\f52a"}.fa-door-open:before{content:"\f52b"}.fa-dot-circle:before{content:"\f192"}.fa-dove:before{content:"\f4ba"}.fa-download:before{content:"\f019"}.fa-draft2digital:before{content:"\f396"}.fa-drafting-compass:before{content:"\f568"}.fa-dragon:before{content:"\f6d5"}.fa-draw-polygon:before{content:"\f5ee"}.fa-dribbble:before{content:"\f17d"}.fa-dribbble-square:before{content:"\f397"}.fa-dropbox:before{content:"\f16b"}.fa-drum:before{content:"\f569"}.fa-drum-steelpan:before{content:"\f56a"}.fa-drumstick-bite:before{content:"\f6d7"}.fa-drupal:before{content:"\f1a9"}.fa-dumbbell:before{content:"\f44b"}.fa-dumpster:before{content:"\f793"}.fa-dumpster-fire:before{content:"\f794"}.fa-dungeon:before{content:"\f6d9"}.fa-dyalog:before{content:"\f399"}.fa-earlybirds:before{content:"\f39a"}.fa-ebay:before{content:"\f4f4"}.fa-edge:before{content:"\f282"}.fa-edit:before{content:"\f044"}.fa-egg:before{content:"\f7fb"}.fa-eject:before{content:"\f052"}.fa-elementor:before{content:"\f430"}.fa-ellipsis-h:before{content:"\f141"}.fa-ellipsis-v:before{content:"\f142"}.fa-ello:before{content:"\f5f1"}.fa-ember:before{content:"\f423"}.fa-empire:before{content:"\f1d1"}.fa-envelope:before{content:"\f0e0"}.fa-envelope-open:before{content:"\f2b6"}.fa-envelope-open-text:before{content:"\f658"}.fa-envelope-square:before{content:"\f199"}.fa-envira:before{content:"\f299"}.fa-equals:before{content:"\f52c"}.fa-eraser:before{content:"\f12d"}.fa-erlang:before{content:"\f39d"}.fa-ethereum:before{content:"\f42e"}.fa-ethernet:before{content:"\f796"}.fa-etsy:before{content:"\f2d7"}.fa-euro-sign:before{content:"\f153"}.fa-evernote:before{content:"\f839"}.fa-exchange-alt:before{content:"\f362"}.fa-exclamation:before{content:"\f12a"}.fa-exclamation-circle:before{content:"\f06a"}.fa-exclamation-triangle:before{content:"\f071"}.fa-expand:before{content:"\f065"}.fa-expand-arrows-alt:before{content:"\f31e"}.fa-expeditedssl:before{content:"\f23e"}.fa-external-link-alt:before{content:"\f35d"}.fa-external-link-square-alt:before{content:"\f360"}.fa-eye:before{content:"\f06e"}.fa-eye-dropper:before{content:"\f1fb"}.fa-eye-slash:before{content:"\f070"}.fa-facebook:before{content:"\f09a"}.fa-facebook-f:before{content:"\f39e"}.fa-facebook-messenger:before{content:"\f39f"}.fa-facebook-square:before{content:"\f082"}.fa-fan:before{content:"\f863"}.fa-fantasy-flight-games:before{content:"\f6dc"}.fa-fast-backward:before{content:"\f049"}.fa-fast-forward:before{content:"\f050"}.fa-fax:before{content:"\f1ac"}.fa-feather:before{content:"\f52d"}.fa-feather-alt:before{content:"\f56b"}.fa-fedex:before{content:"\f797"}.fa-fedora:before{content:"\f798"}.fa-female:before{content:"\f182"}.fa-fighter-jet:before{content:"\f0fb"}.fa-figma:before{content:"\f799"}.fa-file:before{content:"\f15b"}.fa-file-alt:before{content:"\f15c"}.fa-file-archive:before{content:"\f1c6"}.fa-file-audio:before{content:"\f1c7"}.fa-file-code:before{content:"\f1c9"}.fa-file-contract:before{content:"\f56c"}.fa-file-csv:before{content:"\f6dd"}.fa-file-download:before{content:"\f56d"}.fa-file-excel:before{content:"\f1c3"}.fa-file-export:before{content:"\f56e"}.fa-file-image:before{content:"\f1c5"}.fa-file-import:before{content:"\f56f"}.fa-file-invoice:before{content:"
\f570"}.fa-file-invoice-dollar:before{content:"\f571"}.fa-file-medical:before{content:"\f477"}.fa-file-medical-alt:before{content:"\f478"}.fa-file-pdf:before{content:"\f1c1"}.fa-file-powerpoint:before{content:"\f1c4"}.fa-file-prescription:before{content:"\f572"}.fa-file-signature:before{content:"\f573"}.fa-file-upload:before{content:"\f574"}.fa-file-video:before{content:"\f1c8"}.fa-file-word:before{content:"\f1c2"}.fa-fill:before{content:"\f575"}.fa-fill-drip:before{content:"\f576"}.fa-film:before{content:"\f008"}.fa-filter:before{content:"\f0b0"}.fa-fingerprint:before{content:"\f577"}.fa-fire:before{content:"\f06d"}.fa-fire-alt:before{content:"\f7e4"}.fa-fire-extinguisher:before{content:"\f134"}.fa-firefox:before{content:"\f269"}.fa-first-aid:before{content:"\f479"}.fa-first-order:before{content:"\f2b0"}.fa-first-order-alt:before{content:"\f50a"}.fa-firstdraft:before{content:"\f3a1"}.fa-fish:before{content:"\f578"}.fa-fist-raised:before{content:"\f6de"}.fa-flag:before{content:"\f024"}.fa-flag-checkered:before{content:"\f11e"}.fa-flag-usa:before{content:"\f74d"}.fa-flask:before{content:"\f0c3"}.fa-flickr:before{content:"\f16e"}.fa-flipboard:before{content:"\f44d"}.fa-flushed:before{content:"\f579"}.fa-fly:before{content:"\f417"}.fa-folder:before{content:"\f07b"}.fa-folder-minus:before{content:"\f65d"}.fa-folder-open:before{content:"\f07c"}.fa-folder-plus:before{content:"\f65e"}.fa-font:before{content:"\f031"}.fa-font-awesome:before{content:"\f2b4"}.fa-font-awesome-alt:before{content:"\f35c"}.fa-font-awesome-flag:before{content:"\f425"}.fa-font-awesome-logo-full:before{content:"\f4e6"}.fa-fonticons:before{content:"\f280"}.fa-fonticons-fi:before{content:"\f3a2"}.fa-football-ball:before{content:"\f44e"}.fa-fort-awesome:before{content:"\f286"}.fa-fort-awesome-alt:before{content:"\f3a3"}.fa-forumbee:before{content:"\f211"}.fa-forward:before{content:"\f04e"}.fa-foursquare:before{content:"\f180"}.fa-free-code-camp:before{content:"\f2c5"}.fa-freebsd:before{content:"\f3a4"}.fa-frog:before{content:"\f52e"}.fa-frown:before{content:"\f119"}.fa-frown-open:before{content:"\f57a"}.fa-fulcrum:before{content:"\f50b"}.fa-funnel-dollar:before{content:"\f662"}.fa-futbol:before{content:"\f1e3"}.fa-galactic-republic:before{content:"\f50c"}.fa-galactic-senate:before{content:"\f50d"}.fa-gamepad:before{content:"\f11b"}.fa-gas-pump:before{content:"\f52f"}.fa-gavel:before{content:"\f0e3"}.fa-gem:before{content:"\f3a5"}.fa-genderless:before{content:"\f22d"}.fa-get-pocket:before{content:"\f265"}.fa-gg:before{content:"\f260"}.fa-gg-circle:before{content:"\f261"}.fa-ghost:before{content:"\f6e2"}.fa-gift:before{content:"\f06b"}.fa-gifts:before{content:"\f79c"}.fa-git:before{content:"\f1d3"}.fa-git-alt:before{content:"\f841"}.fa-git-square:before{content:"\f1d2"}.fa-github:before{content:"\f09b"}.fa-github-alt:before{content:"\f113"}.fa-github-square:before{content:"\f092"}.fa-gitkraken:before{content:"\f3a6"}.fa-gitlab:before{content:"\f296"}.fa-gitter:before{content:"\f426"}.fa-glass-cheers:before{content:"\f79f"}.fa-glass-martini:before{content:"\f000"}.fa-glass-martini-alt:before{content:"\f57b"}.fa-glass-whiskey:before{content:"\f7a0"}.fa-glasses:before{content:"\f530"}.fa-glide:before{content:"\f2a5"}.fa-glide-g:before{content:"\f2a6"}.fa-globe:before{content:"\f0ac"}.fa-globe-africa:before{content:"\f57c"}.fa-globe-americas:before{content:"\f57d"}.fa-globe-asia:before{content:"\f57e"}.fa-globe-europe:before{content:"\f7a2"}.fa-gofore:before{content:"\f3a7"}.fa-golf-ball:before{content:"\f450"}.fa-goodreads:before{co
ntent:"\f3a8"}.fa-goodreads-g:before{content:"\f3a9"}.fa-google:before{content:"\f1a0"}.fa-google-drive:before{content:"\f3aa"}.fa-google-play:before{content:"\f3ab"}.fa-google-plus:before{content:"\f2b3"}.fa-google-plus-g:before{content:"\f0d5"}.fa-google-plus-square:before{content:"\f0d4"}.fa-google-wallet:before{content:"\f1ee"}.fa-gopuram:before{content:"\f664"}.fa-graduation-cap:before{content:"\f19d"}.fa-gratipay:before{content:"\f184"}.fa-grav:before{content:"\f2d6"}.fa-greater-than:before{content:"\f531"}.fa-greater-than-equal:before{content:"\f532"}.fa-grimace:before{content:"\f57f"}.fa-grin:before{content:"\f580"}.fa-grin-alt:before{content:"\f581"}.fa-grin-beam:before{content:"\f582"}.fa-grin-beam-sweat:before{content:"\f583"}.fa-grin-hearts:before{content:"\f584"}.fa-grin-squint:before{content:"\f585"}.fa-grin-squint-tears:before{content:"\f586"}.fa-grin-stars:before{content:"\f587"}.fa-grin-tears:before{content:"\f588"}.fa-grin-tongue:before{content:"\f589"}.fa-grin-tongue-squint:before{content:"\f58a"}.fa-grin-tongue-wink:before{content:"\f58b"}.fa-grin-wink:before{content:"\f58c"}.fa-grip-horizontal:before{content:"\f58d"}.fa-grip-lines:before{content:"\f7a4"}.fa-grip-lines-vertical:before{content:"\f7a5"}.fa-grip-vertical:before{content:"\f58e"}.fa-gripfire:before{content:"\f3ac"}.fa-grunt:before{content:"\f3ad"}.fa-guitar:before{content:"\f7a6"}.fa-gulp:before{content:"\f3ae"}.fa-h-square:before{content:"\f0fd"}.fa-hacker-news:before{content:"\f1d4"}.fa-hacker-news-square:before{content:"\f3af"}.fa-hackerrank:before{content:"\f5f7"}.fa-hamburger:before{content:"\f805"}.fa-hammer:before{content:"\f6e3"}.fa-hamsa:before{content:"\f665"}.fa-hand-holding:before{content:"\f4bd"}.fa-hand-holding-heart:before{content:"\f4be"}.fa-hand-holding-usd:before{content:"\f4c0"}.fa-hand-lizard:before{content:"\f258"}.fa-hand-middle-finger:before{content:"\f806"}.fa-hand-paper:before{content:"\f256"}.fa-hand-peace:before{content:"\f25b"}.fa-hand-point-down:before{content:"\f0a7"}.fa-hand-point-left:before{content:"\f0a5"}.fa-hand-point-right:before{content:"\f0a4"}.fa-hand-point-up:before{content:"\f0a6"}.fa-hand-pointer:before{content:"\f25a"}.fa-hand-rock:before{content:"\f255"}.fa-hand-scissors:before{content:"\f257"}.fa-hand-spock:before{content:"\f259"}.fa-hands:before{content:"\f4c2"}.fa-hands-helping:before{content:"\f4c4"}.fa-handshake:before{content:"\f2b5"}.fa-hanukiah:before{content:"\f6e6"}.fa-hard-hat:before{content:"\f807"}.fa-hashtag:before{content:"\f292"}.fa-hat-wizard:before{content:"\f6e8"}.fa-haykal:before{content:"\f666"}.fa-hdd:before{content:"\f0a0"}.fa-heading:before{content:"\f1dc"}.fa-headphones:before{content:"\f025"}.fa-headphones-alt:before{content:"\f58f"}.fa-headset:before{content:"\f590"}.fa-heart:before{content:"\f004"}.fa-heart-broken:before{content:"\f7a9"}.fa-heartbeat:before{content:"\f21e"}.fa-helicopter:before{content:"\f533"}.fa-highlighter:before{content:"\f591"}.fa-hiking:before{content:"\f6ec"}.fa-hippo:before{content:"\f6ed"}.fa-hips:before{content:"\f452"}.fa-hire-a-helper:before{content:"\f3b0"}.fa-history:before{content:"\f1da"}.fa-hockey-puck:before{content:"\f453"}.fa-holly-berry:before{content:"\f7aa"}.fa-home:before{content:"\f015"}.fa-hooli:before{content:"\f427"}.fa-hornbill:before{content:"\f592"}.fa-horse:before{content:"\f6f0"}.fa-horse-head:before{content:"\f7ab"}.fa-hospital:before{content:"\f0f8"}.fa-hospital-alt:before{content:"\f47d"}.fa-hospital-symbol:before{content:"\f47e"}.fa-hot-tub:before{content:"\f593"}.fa-hotdog:before{con
tent:"\f80f"}.fa-hotel:before{content:"\f594"}.fa-hotjar:before{content:"\f3b1"}.fa-hourglass:before{content:"\f254"}.fa-hourglass-end:before{content:"\f253"}.fa-hourglass-half:before{content:"\f252"}.fa-hourglass-start:before{content:"\f251"}.fa-house-damage:before{content:"\f6f1"}.fa-houzz:before{content:"\f27c"}.fa-hryvnia:before{content:"\f6f2"}.fa-html5:before{content:"\f13b"}.fa-hubspot:before{content:"\f3b2"}.fa-i-cursor:before{content:"\f246"}.fa-ice-cream:before{content:"\f810"}.fa-icicles:before{content:"\f7ad"}.fa-icons:before{content:"\f86d"}.fa-id-badge:before{content:"\f2c1"}.fa-id-card:before{content:"\f2c2"}.fa-id-card-alt:before{content:"\f47f"}.fa-igloo:before{content:"\f7ae"}.fa-image:before{content:"\f03e"}.fa-images:before{content:"\f302"}.fa-imdb:before{content:"\f2d8"}.fa-inbox:before{content:"\f01c"}.fa-indent:before{content:"\f03c"}.fa-industry:before{content:"\f275"}.fa-infinity:before{content:"\f534"}.fa-info:before{content:"\f129"}.fa-info-circle:before{content:"\f05a"}.fa-instagram:before{content:"\f16d"}.fa-intercom:before{content:"\f7af"}.fa-internet-explorer:before{content:"\f26b"}.fa-invision:before{content:"\f7b0"}.fa-ioxhost:before{content:"\f208"}.fa-italic:before{content:"\f033"}.fa-itch-io:before{content:"\f83a"}.fa-itunes:before{content:"\f3b4"}.fa-itunes-note:before{content:"\f3b5"}.fa-java:before{content:"\f4e4"}.fa-jedi:before{content:"\f669"}.fa-jedi-order:before{content:"\f50e"}.fa-jenkins:before{content:"\f3b6"}.fa-jira:before{content:"\f7b1"}.fa-joget:before{content:"\f3b7"}.fa-joint:before{content:"\f595"}.fa-joomla:before{content:"\f1aa"}.fa-journal-whills:before{content:"\f66a"}.fa-js:before{content:"\f3b8"}.fa-js-square:before{content:"\f3b9"}.fa-jsfiddle:before{content:"\f1cc"}.fa-kaaba:before{content:"\f66b"}.fa-kaggle:before{content:"\f5fa"}.fa-key:before{content:"\f084"}.fa-keybase:before{content:"\f4f5"}.fa-keyboard:before{content:"\f11c"}.fa-keycdn:before{content:"\f3ba"}.fa-khanda:before{content:"\f66d"}.fa-kickstarter:before{content:"\f3bb"}.fa-kickstarter-k:before{content:"\f3bc"}.fa-kiss:before{content:"\f596"}.fa-kiss-beam:before{content:"\f597"}.fa-kiss-wink-heart:before{content:"\f598"}.fa-kiwi-bird:before{content:"\f535"}.fa-korvue:before{content:"\f42f"}.fa-landmark:before{content:"\f66f"}.fa-language:before{content:"\f1ab"}.fa-laptop:before{content:"\f109"}.fa-laptop-code:before{content:"\f5fc"}.fa-laptop-medical:before{content:"\f812"}.fa-laravel:before{content:"\f3bd"}.fa-lastfm:before{content:"\f202"}.fa-lastfm-square:before{content:"\f203"}.fa-laugh:before{content:"\f599"}.fa-laugh-beam:before{content:"\f59a"}.fa-laugh-squint:before{content:"\f59b"}.fa-laugh-wink:before{content:"\f59c"}.fa-layer-group:before{content:"\f5fd"}.fa-leaf:before{content:"\f06c"}.fa-leanpub:before{content:"\f212"}.fa-lemon:before{content:"\f094"}.fa-less:before{content:"\f41d"}.fa-less-than:before{content:"\f536"}.fa-less-than-equal:before{content:"\f537"}.fa-level-down-alt:before{content:"\f3be"}.fa-level-up-alt:before{content:"\f3bf"}.fa-life-ring:before{content:"\f1cd"}.fa-lightbulb:before{content:"\f0eb"}.fa-line:before{content:"\f3c0"}.fa-link:before{content:"\f0c1"}.fa-linkedin:before{content:"\f08c"}.fa-linkedin-in:before{content:"\f0e1"}.fa-linode:before{content:"\f2b8"}.fa-linux:before{content:"\f17c"}.fa-lira-sign:before{content:"\f195"}.fa-list:before{content:"\f03a"}.fa-list-alt:before{content:"\f022"}.fa-list-ol:before{content:"\f0cb"}.fa-list-ul:before{content:"\f0ca"}.fa-location-arrow:before{content:"\f124"}.fa-lock:before{conten
t:"\f023"}.fa-lock-open:before{content:"\f3c1"}.fa-long-arrow-alt-down:before{content:"\f309"}.fa-long-arrow-alt-left:before{content:"\f30a"}.fa-long-arrow-alt-right:before{content:"\f30b"}.fa-long-arrow-alt-up:before{content:"\f30c"}.fa-low-vision:before{content:"\f2a8"}.fa-luggage-cart:before{content:"\f59d"}.fa-lyft:before{content:"\f3c3"}.fa-magento:before{content:"\f3c4"}.fa-magic:before{content:"\f0d0"}.fa-magnet:before{content:"\f076"}.fa-mail-bulk:before{content:"\f674"}.fa-mailchimp:before{content:"\f59e"}.fa-male:before{content:"\f183"}.fa-mandalorian:before{content:"\f50f"}.fa-map:before{content:"\f279"}.fa-map-marked:before{content:"\f59f"}.fa-map-marked-alt:before{content:"\f5a0"}.fa-map-marker:before{content:"\f041"}.fa-map-marker-alt:before{content:"\f3c5"}.fa-map-pin:before{content:"\f276"}.fa-map-signs:before{content:"\f277"}.fa-markdown:before{content:"\f60f"}.fa-marker:before{content:"\f5a1"}.fa-mars:before{content:"\f222"}.fa-mars-double:before{content:"\f227"}.fa-mars-stroke:before{content:"\f229"}.fa-mars-stroke-h:before{content:"\f22b"}.fa-mars-stroke-v:before{content:"\f22a"}.fa-mask:before{content:"\f6fa"}.fa-mastodon:before{content:"\f4f6"}.fa-maxcdn:before{content:"\f136"}.fa-medal:before{content:"\f5a2"}.fa-medapps:before{content:"\f3c6"}.fa-medium:before{content:"\f23a"}.fa-medium-m:before{content:"\f3c7"}.fa-medkit:before{content:"\f0fa"}.fa-medrt:before{content:"\f3c8"}.fa-meetup:before{content:"\f2e0"}.fa-megaport:before{content:"\f5a3"}.fa-meh:before{content:"\f11a"}.fa-meh-blank:before{content:"\f5a4"}.fa-meh-rolling-eyes:before{content:"\f5a5"}.fa-memory:before{content:"\f538"}.fa-mendeley:before{content:"\f7b3"}.fa-menorah:before{content:"\f676"}.fa-mercury:before{content:"\f223"}.fa-meteor:before{content:"\f753"}.fa-microchip:before{content:"\f2db"}.fa-microphone:before{content:"\f130"}.fa-microphone-alt:before{content:"\f3c9"}.fa-microphone-alt-slash:before{content:"\f539"}.fa-microphone-slash:before{content:"\f131"}.fa-microscope:before{content:"\f610"}.fa-microsoft:before{content:"\f3ca"}.fa-minus:before{content:"\f068"}.fa-minus-circle:before{content:"\f056"}.fa-minus-square:before{content:"\f146"}.fa-mitten:before{content:"\f7b5"}.fa-mix:before{content:"\f3cb"}.fa-mixcloud:before{content:"\f289"}.fa-mizuni:before{content:"\f3cc"}.fa-mobile:before{content:"\f10b"}.fa-mobile-alt:before{content:"\f3cd"}.fa-modx:before{content:"\f285"}.fa-monero:before{content:"\f3d0"}.fa-money-bill:before{content:"\f0d6"}.fa-money-bill-alt:before{content:"\f3d1"}.fa-money-bill-wave:before{content:"\f53a"}.fa-money-bill-wave-alt:before{content:"\f53b"}.fa-money-check:before{content:"\f53c"}.fa-money-check-alt:before{content:"\f53d"}.fa-monument:before{content:"\f5a6"}.fa-moon:before{content:"\f186"}.fa-mortar-pestle:before{content:"\f5a7"}.fa-mosque:before{content:"\f678"}.fa-motorcycle:before{content:"\f21c"}.fa-mountain:before{content:"\f6fc"}.fa-mouse-pointer:before{content:"\f245"}.fa-mug-hot:before{content:"\f7b6"}.fa-music:before{content:"\f001"}.fa-napster:before{content:"\f3d2"}.fa-neos:before{content:"\f612"}.fa-network-wired:before{content:"\f6ff"}.fa-neuter:before{content:"\f22c"}.fa-newspaper:before{content:"\f1ea"}.fa-nimblr:before{content:"\f5a8"}.fa-node:before{content:"\f419"}.fa-node-js:before{content:"\f3d3"}.fa-not-equal:before{content:"\f53e"}.fa-notes-medical:before{content:"\f481"}.fa-npm:before{content:"\f3d4"}.fa-ns8:before{content:"\f3d5"}.fa-nutritionix:before{content:"\f3d6"}.fa-object-group:before{content:"\f247"}.fa-object-ungroup:before{con
tent:"\f248"}.fa-odnoklassniki:before{content:"\f263"}.fa-odnoklassniki-square:before{content:"\f264"}.fa-oil-can:before{content:"\f613"}.fa-old-republic:before{content:"\f510"}.fa-om:before{content:"\f679"}.fa-opencart:before{content:"\f23d"}.fa-openid:before{content:"\f19b"}.fa-opera:before{content:"\f26a"}.fa-optin-monster:before{content:"\f23c"}.fa-osi:before{content:"\f41a"}.fa-otter:before{content:"\f700"}.fa-outdent:before{content:"\f03b"}.fa-page4:before{content:"\f3d7"}.fa-pagelines:before{content:"\f18c"}.fa-pager:before{content:"\f815"}.fa-paint-brush:before{content:"\f1fc"}.fa-paint-roller:before{content:"\f5aa"}.fa-palette:before{content:"\f53f"}.fa-palfed:before{content:"\f3d8"}.fa-pallet:before{content:"\f482"}.fa-paper-plane:before{content:"\f1d8"}.fa-paperclip:before{content:"\f0c6"}.fa-parachute-box:before{content:"\f4cd"}.fa-paragraph:before{content:"\f1dd"}.fa-parking:before{content:"\f540"}.fa-passport:before{content:"\f5ab"}.fa-pastafarianism:before{content:"\f67b"}.fa-paste:before{content:"\f0ea"}.fa-patreon:before{content:"\f3d9"}.fa-pause:before{content:"\f04c"}.fa-pause-circle:before{content:"\f28b"}.fa-paw:before{content:"\f1b0"}.fa-paypal:before{content:"\f1ed"}.fa-peace:before{content:"\f67c"}.fa-pen:before{content:"\f304"}.fa-pen-alt:before{content:"\f305"}.fa-pen-fancy:before{content:"\f5ac"}.fa-pen-nib:before{content:"\f5ad"}.fa-pen-square:before{content:"\f14b"}.fa-pencil-alt:before{content:"\f303"}.fa-pencil-ruler:before{content:"\f5ae"}.fa-penny-arcade:before{content:"\f704"}.fa-people-carry:before{content:"\f4ce"}.fa-pepper-hot:before{content:"\f816"}.fa-percent:before{content:"\f295"}.fa-percentage:before{content:"\f541"}.fa-periscope:before{content:"\f3da"}.fa-person-booth:before{content:"\f756"}.fa-phabricator:before{content:"\f3db"}.fa-phoenix-framework:before{content:"\f3dc"}.fa-phoenix-squadron:before{content:"\f511"}.fa-phone:before{content:"\f095"}.fa-phone-alt:before{content:"\f879"}.fa-phone-slash:before{content:"\f3dd"}.fa-phone-square:before{content:"\f098"}.fa-phone-square-alt:before{content:"\f87b"}.fa-phone-volume:before{content:"\f2a0"}.fa-photo-video:before{content:"\f87c"}.fa-php:before{content:"\f457"}.fa-pied-piper:before{content:"\f2ae"}.fa-pied-piper-alt:before{content:"\f1a8"}.fa-pied-piper-hat:before{content:"\f4e5"}.fa-pied-piper-pp:before{content:"\f1a7"}.fa-piggy-bank:before{content:"\f4d3"}.fa-pills:before{content:"\f484"}.fa-pinterest:before{content:"\f0d2"}.fa-pinterest-p:before{content:"\f231"}.fa-pinterest-square:before{content:"\f0d3"}.fa-pizza-slice:before{content:"\f818"}.fa-place-of-worship:before{content:"\f67f"}.fa-plane:before{content:"\f072"}.fa-plane-arrival:before{content:"\f5af"}.fa-plane-departure:before{content:"\f5b0"}.fa-play:before{content:"\f04b"}.fa-play-circle:before{content:"\f144"}.fa-playstation:before{content:"\f3df"}.fa-plug:before{content:"\f1e6"}.fa-plus:before{content:"\f067"}.fa-plus-circle:before{content:"\f055"}.fa-plus-square:before{content:"\f0fe"}.fa-podcast:before{content:"\f2ce"}.fa-poll:before{content:"\f681"}.fa-poll-h:before{content:"\f682"}.fa-poo:before{content:"\f2fe"}.fa-poo-storm:before{content:"\f75a"}.fa-poop:before{content:"\f619"}.fa-portrait:before{content:"\f3e0"}.fa-pound-sign:before{content:"\f154"}.fa-power-off:before{content:"\f011"}.fa-pray:before{content:"\f683"}.fa-praying-hands:before{content:"\f684"}.fa-prescription:before{content:"\f5b1"}.fa-prescription-bottle:before{content:"\f485"}.fa-prescription-bottle-alt:before{content:"\f486"}.fa-print:before{content:"\f02f"
}.fa-procedures:before{content:"\f487"}.fa-product-hunt:before{content:"\f288"}.fa-project-diagram:before{content:"\f542"}.fa-pushed:before{content:"\f3e1"}.fa-puzzle-piece:before{content:"\f12e"}.fa-python:before{content:"\f3e2"}.fa-qq:before{content:"\f1d6"}.fa-qrcode:before{content:"\f029"}.fa-question:before{content:"\f128"}.fa-question-circle:before{content:"\f059"}.fa-quidditch:before{content:"\f458"}.fa-quinscape:before{content:"\f459"}.fa-quora:before{content:"\f2c4"}.fa-quote-left:before{content:"\f10d"}.fa-quote-right:before{content:"\f10e"}.fa-quran:before{content:"\f687"}.fa-r-project:before{content:"\f4f7"}.fa-radiation:before{content:"\f7b9"}.fa-radiation-alt:before{content:"\f7ba"}.fa-rainbow:before{content:"\f75b"}.fa-random:before{content:"\f074"}.fa-raspberry-pi:before{content:"\f7bb"}.fa-ravelry:before{content:"\f2d9"}.fa-react:before{content:"\f41b"}.fa-reacteurope:before{content:"\f75d"}.fa-readme:before{content:"\f4d5"}.fa-rebel:before{content:"\f1d0"}.fa-receipt:before{content:"\f543"}.fa-recycle:before{content:"\f1b8"}.fa-red-river:before{content:"\f3e3"}.fa-reddit:before{content:"\f1a1"}.fa-reddit-alien:before{content:"\f281"}.fa-reddit-square:before{content:"\f1a2"}.fa-redhat:before{content:"\f7bc"}.fa-redo:before{content:"\f01e"}.fa-redo-alt:before{content:"\f2f9"}.fa-registered:before{content:"\f25d"}.fa-remove-format:before{content:"\f87d"}.fa-renren:before{content:"\f18b"}.fa-reply:before{content:"\f3e5"}.fa-reply-all:before{content:"\f122"}.fa-replyd:before{content:"\f3e6"}.fa-republican:before{content:"\f75e"}.fa-researchgate:before{content:"\f4f8"}.fa-resolving:before{content:"\f3e7"}.fa-restroom:before{content:"\f7bd"}.fa-retweet:before{content:"\f079"}.fa-rev:before{content:"\f5b2"}.fa-ribbon:before{content:"\f4d6"}.fa-ring:before{content:"\f70b"}.fa-road:before{content:"\f018"}.fa-robot:before{content:"\f544"}.fa-rocket:before{content:"\f135"}.fa-rocketchat:before{content:"\f3e8"}.fa-rockrms:before{content:"\f3e9"}.fa-route:before{content:"\f4d7"}.fa-rss:before{content:"\f09e"}.fa-rss-square:before{content:"\f143"}.fa-ruble-sign:before{content:"\f158"}.fa-ruler:before{content:"\f545"}.fa-ruler-combined:before{content:"\f546"}.fa-ruler-horizontal:before{content:"\f547"}.fa-ruler-vertical:before{content:"\f548"}.fa-running:before{content:"\f70c"}.fa-rupee-sign:before{content:"\f156"}.fa-sad-cry:before{content:"\f5b3"}.fa-sad-tear:before{content:"\f5b4"}.fa-safari:before{content:"\f267"}.fa-salesforce:before{content:"\f83b"}.fa-sass:before{content:"\f41e"}.fa-satellite:before{content:"\f7bf"}.fa-satellite-dish:before{content:"\f7c0"}.fa-save:before{content:"\f0c7"}.fa-schlix:before{content:"\f3ea"}.fa-school:before{content:"\f549"}.fa-screwdriver:before{content:"\f54a"}.fa-scribd:before{content:"\f28a"}.fa-scroll:before{content:"\f70e"}.fa-sd-card:before{content:"\f7c2"}.fa-search:before{content:"\f002"}.fa-search-dollar:before{content:"\f688"}.fa-search-location:before{content:"\f689"}.fa-search-minus:before{content:"\f010"}.fa-search-plus:before{content:"\f00e"}.fa-searchengin:before{content:"\f3eb"}.fa-seedling:before{content:"\f4d8"}.fa-sellcast:before{content:"\f2da"}.fa-sellsy:before{content:"\f213"}.fa-server:before{content:"\f233"}.fa-servicestack:before{content:"\f3ec"}.fa-shapes:before{content:"\f61f"}.fa-share:before{content:"\f064"}.fa-share-alt:before{content:"\f1e0"}.fa-share-alt-square:before{content:"\f1e1"}.fa-share-square:before{content:"\f14d"}.fa-shekel-sign:before{content:"\f20b"}.fa-shield-alt:before{content:"\f3ed"}.fa-ship:before{cont
ent:"\f21a"}.fa-shipping-fast:before{content:"\f48b"}.fa-shirtsinbulk:before{content:"\f214"}.fa-shoe-prints:before{content:"\f54b"}.fa-shopping-bag:before{content:"\f290"}.fa-shopping-basket:before{content:"\f291"}.fa-shopping-cart:before{content:"\f07a"}.fa-shopware:before{content:"\f5b5"}.fa-shower:before{content:"\f2cc"}.fa-shuttle-van:before{content:"\f5b6"}.fa-sign:before{content:"\f4d9"}.fa-sign-in-alt:before{content:"\f2f6"}.fa-sign-language:before{content:"\f2a7"}.fa-sign-out-alt:before{content:"\f2f5"}.fa-signal:before{content:"\f012"}.fa-signature:before{content:"\f5b7"}.fa-sim-card:before{content:"\f7c4"}.fa-simplybuilt:before{content:"\f215"}.fa-sistrix:before{content:"\f3ee"}.fa-sitemap:before{content:"\f0e8"}.fa-sith:before{content:"\f512"}.fa-skating:before{content:"\f7c5"}.fa-sketch:before{content:"\f7c6"}.fa-skiing:before{content:"\f7c9"}.fa-skiing-nordic:before{content:"\f7ca"}.fa-skull:before{content:"\f54c"}.fa-skull-crossbones:before{content:"\f714"}.fa-skyatlas:before{content:"\f216"}.fa-skype:before{content:"\f17e"}.fa-slack:before{content:"\f198"}.fa-slack-hash:before{content:"\f3ef"}.fa-slash:before{content:"\f715"}.fa-sleigh:before{content:"\f7cc"}.fa-sliders-h:before{content:"\f1de"}.fa-slideshare:before{content:"\f1e7"}.fa-smile:before{content:"\f118"}.fa-smile-beam:before{content:"\f5b8"}.fa-smile-wink:before{content:"\f4da"}.fa-smog:before{content:"\f75f"}.fa-smoking:before{content:"\f48d"}.fa-smoking-ban:before{content:"\f54d"}.fa-sms:before{content:"\f7cd"}.fa-snapchat:before{content:"\f2ab"}.fa-snapchat-ghost:before{content:"\f2ac"}.fa-snapchat-square:before{content:"\f2ad"}.fa-snowboarding:before{content:"\f7ce"}.fa-snowflake:before{content:"\f2dc"}.fa-snowman:before{content:"\f7d0"}.fa-snowplow:before{content:"\f7d2"}.fa-socks:before{content:"\f696"}.fa-solar-panel:before{content:"\f5ba"}.fa-sort:before{content:"\f0dc"}.fa-sort-alpha-down:before{content:"\f15d"}.fa-sort-alpha-down-alt:before{content:"\f881"}.fa-sort-alpha-up:before{content:"\f15e"}.fa-sort-alpha-up-alt:before{content:"\f882"}.fa-sort-amount-down:before{content:"\f160"}.fa-sort-amount-down-alt:before{content:"\f884"}.fa-sort-amount-up:before{content:"\f161"}.fa-sort-amount-up-alt:before{content:"\f885"}.fa-sort-down:before{content:"\f0dd"}.fa-sort-numeric-down:before{content:"\f162"}.fa-sort-numeric-down-alt:before{content:"\f886"}.fa-sort-numeric-up:before{content:"\f163"}.fa-sort-numeric-up-alt:before{content:"\f887"}.fa-sort-up:before{content:"\f0de"}.fa-soundcloud:before{content:"\f1be"}.fa-sourcetree:before{content:"\f7d3"}.fa-spa:before{content:"\f5bb"}.fa-space-shuttle:before{content:"\f197"}.fa-speakap:before{content:"\f3f3"}.fa-speaker-deck:before{content:"\f83c"}.fa-spell-check:before{content:"\f891"}.fa-spider:before{content:"\f717"}.fa-spinner:before{content:"\f110"}.fa-splotch:before{content:"\f5bc"}.fa-spotify:before{content:"\f1bc"}.fa-spray-can:before{content:"\f5bd"}.fa-square:before{content:"\f0c8"}.fa-square-full:before{content:"\f45c"}.fa-square-root-alt:before{content:"\f698"}.fa-squarespace:before{content:"\f5be"}.fa-stack-exchange:before{content:"\f18d"}.fa-stack-overflow:before{content:"\f16c"}.fa-stackpath:before{content:"\f842"}.fa-stamp:before{content:"\f5bf"}.fa-star:before{content:"\f005"}.fa-star-and-crescent:before{content:"\f699"}.fa-star-half:before{content:"\f089"}.fa-star-half-alt:before{content:"\f5c0"}.fa-star-of-david:before{content:"\f69a"}.fa-star-of-life:before{content:"\f621"}.fa-staylinked:before{content:"\f3f5"}.fa-steam:before{content:"\f1b6"}.f
a-steam-square:before{content:"\f1b7"}.fa-steam-symbol:before{content:"\f3f6"}.fa-step-backward:before{content:"\f048"}.fa-step-forward:before{content:"\f051"}.fa-stethoscope:before{content:"\f0f1"}.fa-sticker-mule:before{content:"\f3f7"}.fa-sticky-note:before{content:"\f249"}.fa-stop:before{content:"\f04d"}.fa-stop-circle:before{content:"\f28d"}.fa-stopwatch:before{content:"\f2f2"}.fa-store:before{content:"\f54e"}.fa-store-alt:before{content:"\f54f"}.fa-strava:before{content:"\f428"}.fa-stream:before{content:"\f550"}.fa-street-view:before{content:"\f21d"}.fa-strikethrough:before{content:"\f0cc"}.fa-stripe:before{content:"\f429"}.fa-stripe-s:before{content:"\f42a"}.fa-stroopwafel:before{content:"\f551"}.fa-studiovinari:before{content:"\f3f8"}.fa-stumbleupon:before{content:"\f1a4"}.fa-stumbleupon-circle:before{content:"\f1a3"}.fa-subscript:before{content:"\f12c"}.fa-subway:before{content:"\f239"}.fa-suitcase:before{content:"\f0f2"}.fa-suitcase-rolling:before{content:"\f5c1"}.fa-sun:before{content:"\f185"}.fa-superpowers:before{content:"\f2dd"}.fa-superscript:before{content:"\f12b"}.fa-supple:before{content:"\f3f9"}.fa-surprise:before{content:"\f5c2"}.fa-suse:before{content:"\f7d6"}.fa-swatchbook:before{content:"\f5c3"}.fa-swimmer:before{content:"\f5c4"}.fa-swimming-pool:before{content:"\f5c5"}.fa-symfony:before{content:"\f83d"}.fa-synagogue:before{content:"\f69b"}.fa-sync:before{content:"\f021"}.fa-sync-alt:before{content:"\f2f1"}.fa-syringe:before{content:"\f48e"}.fa-table:before{content:"\f0ce"}.fa-table-tennis:before{content:"\f45d"}.fa-tablet:before{content:"\f10a"}.fa-tablet-alt:before{content:"\f3fa"}.fa-tablets:before{content:"\f490"}.fa-tachometer-alt:before{content:"\f3fd"}.fa-tag:before{content:"\f02b"}.fa-tags:before{content:"\f02c"}.fa-tape:before{content:"\f4db"}.fa-tasks:before{content:"\f0ae"}.fa-taxi:before{content:"\f1ba"}.fa-teamspeak:before{content:"\f4f9"}.fa-teeth:before{content:"\f62e"}.fa-teeth-open:before{content:"\f62f"}.fa-telegram:before{content:"\f2c6"}.fa-telegram-plane:before{content:"\f3fe"}.fa-temperature-high:before{content:"\f769"}.fa-temperature-low:before{content:"\f76b"}.fa-tencent-weibo:before{content:"\f1d5"}.fa-tenge:before{content:"\f7d7"}.fa-terminal:before{content:"\f120"}.fa-text-height:before{content:"\f034"}.fa-text-width:before{content:"\f035"}.fa-th:before{content:"\f00a"}.fa-th-large:before{content:"\f009"}.fa-th-list:before{content:"\f00b"}.fa-the-red-yeti:before{content:"\f69d"}.fa-theater-masks:before{content:"\f630"}.fa-themeco:before{content:"\f5c6"}.fa-themeisle:before{content:"\f2b2"}.fa-thermometer:before{content:"\f491"}.fa-thermometer-empty:before{content:"\f2cb"}.fa-thermometer-full:before{content:"\f2c7"}.fa-thermometer-half:before{content:"\f2c9"}.fa-thermometer-quarter:before{content:"\f2ca"}.fa-thermometer-three-quarters:before{content:"\f2c8"}.fa-think-peaks:before{content:"\f731"}.fa-thumbs-down:before{content:"\f165"}.fa-thumbs-up:before{content:"\f164"}.fa-thumbtack:before{content:"\f08d"}.fa-ticket-alt:before{content:"\f3ff"}.fa-times:before{content:"\f00d"}.fa-times-circle:before{content:"\f057"}.fa-tint:before{content:"\f043"}.fa-tint-slash:before{content:"\f5c7"}.fa-tired:before{content:"\f5c8"}.fa-toggle-off:before{content:"\f204"}.fa-toggle-on:before{content:"\f205"}.fa-toilet:before{content:"\f7d8"}.fa-toilet-paper:before{content:"\f71e"}.fa-toolbox:before{content:"\f552"}.fa-tools:before{content:"\f7d9"}.fa-tooth:before{content:"\f5c9"}.fa-torah:before{content:"\f6a0"}.fa-torii-gate:before{content:"\f6a1"}.fa-tractor
:before{content:"\f722"}.fa-trade-federation:before{content:"\f513"}.fa-trademark:before{content:"\f25c"}.fa-traffic-light:before{content:"\f637"}.fa-train:before{content:"\f238"}.fa-tram:before{content:"\f7da"}.fa-transgender:before{content:"\f224"}.fa-transgender-alt:before{content:"\f225"}.fa-trash:before{content:"\f1f8"}.fa-trash-alt:before{content:"\f2ed"}.fa-trash-restore:before{content:"\f829"}.fa-trash-restore-alt:before{content:"\f82a"}.fa-tree:before{content:"\f1bb"}.fa-trello:before{content:"\f181"}.fa-tripadvisor:before{content:"\f262"}.fa-trophy:before{content:"\f091"}.fa-truck:before{content:"\f0d1"}.fa-truck-loading:before{content:"\f4de"}.fa-truck-monster:before{content:"\f63b"}.fa-truck-moving:before{content:"\f4df"}.fa-truck-pickup:before{content:"\f63c"}.fa-tshirt:before{content:"\f553"}.fa-tty:before{content:"\f1e4"}.fa-tumblr:before{content:"\f173"}.fa-tumblr-square:before{content:"\f174"}.fa-tv:before{content:"\f26c"}.fa-twitch:before{content:"\f1e8"}.fa-twitter:before{content:"\f099"}.fa-twitter-square:before{content:"\f081"}.fa-typo3:before{content:"\f42b"}.fa-uber:before{content:"\f402"}.fa-ubuntu:before{content:"\f7df"}.fa-uikit:before{content:"\f403"}.fa-umbrella:before{content:"\f0e9"}.fa-umbrella-beach:before{content:"\f5ca"}.fa-underline:before{content:"\f0cd"}.fa-undo:before{content:"\f0e2"}.fa-undo-alt:before{content:"\f2ea"}.fa-uniregistry:before{content:"\f404"}.fa-universal-access:before{content:"\f29a"}.fa-university:before{content:"\f19c"}.fa-unlink:before{content:"\f127"}.fa-unlock:before{content:"\f09c"}.fa-unlock-alt:before{content:"\f13e"}.fa-untappd:before{content:"\f405"}.fa-upload:before{content:"\f093"}.fa-ups:before{content:"\f7e0"}.fa-usb:before{content:"\f287"}.fa-user:before{content:"\f007"}.fa-user-alt:before{content:"\f406"}.fa-user-alt-slash:before{content:"\f4fa"}.fa-user-astronaut:before{content:"\f4fb"}.fa-user-check:before{content:"\f4fc"}.fa-user-circle:before{content:"\f2bd"}.fa-user-clock:before{content:"\f4fd"}.fa-user-cog:before{content:"\f4fe"}.fa-user-edit:before{content:"\f4ff"}.fa-user-friends:before{content:"\f500"}.fa-user-graduate:before{content:"\f501"}.fa-user-injured:before{content:"\f728"}.fa-user-lock:before{content:"\f502"}.fa-user-md:before{content:"\f0f0"}.fa-user-minus:before{content:"\f503"}.fa-user-ninja:before{content:"\f504"}.fa-user-nurse:before{content:"\f82f"}.fa-user-plus:before{content:"\f234"}.fa-user-secret:before{content:"\f21b"}.fa-user-shield:before{content:"\f505"}.fa-user-slash:before{content:"\f506"}.fa-user-tag:before{content:"\f507"}.fa-user-tie:before{content:"\f508"}.fa-user-times:before{content:"\f235"}.fa-users:before{content:"\f0c0"}.fa-users-cog:before{content:"\f509"}.fa-usps:before{content:"\f7e1"}.fa-ussunnah:before{content:"\f407"}.fa-utensil-spoon:before{content:"\f2e5"}.fa-utensils:before{content:"\f2e7"}.fa-vaadin:before{content:"\f408"}.fa-vector-square:before{content:"\f5cb"}.fa-venus:before{content:"\f221"}.fa-venus-double:before{content:"\f226"}.fa-venus-mars:before{content:"\f228"}.fa-viacoin:before{content:"\f237"}.fa-viadeo:before{content:"\f2a9"}.fa-viadeo-square:before{content:"\f2aa"}.fa-vial:before{content:"\f492"}.fa-vials:before{content:"\f493"}.fa-viber:before{content:"\f409"}.fa-video:before{content:"\f03d"}.fa-video-slash:before{content:"\f4e2"}.fa-vihara:before{content:"\f6a7"}.fa-vimeo:before{content:"\f40a"}.fa-vimeo-square:before{content:"\f194"}.fa-vimeo-v:before{content:"\f27d"}.fa-vine:before{content:"\f1ca"}.fa-vk:before{content:"\f189"}.fa-vnv:before{content:
"\f40b"}.fa-voicemail:before{content:"\f897"}.fa-volleyball-ball:before{content:"\f45f"}.fa-volume-down:before{content:"\f027"}.fa-volume-mute:before{content:"\f6a9"}.fa-volume-off:before{content:"\f026"}.fa-volume-up:before{content:"\f028"}.fa-vote-yea:before{content:"\f772"}.fa-vr-cardboard:before{content:"\f729"}.fa-vuejs:before{content:"\f41f"}.fa-walking:before{content:"\f554"}.fa-wallet:before{content:"\f555"}.fa-warehouse:before{content:"\f494"}.fa-water:before{content:"\f773"}.fa-wave-square:before{content:"\f83e"}.fa-waze:before{content:"\f83f"}.fa-weebly:before{content:"\f5cc"}.fa-weibo:before{content:"\f18a"}.fa-weight:before{content:"\f496"}.fa-weight-hanging:before{content:"\f5cd"}.fa-weixin:before{content:"\f1d7"}.fa-whatsapp:before{content:"\f232"}.fa-whatsapp-square:before{content:"\f40c"}.fa-wheelchair:before{content:"\f193"}.fa-whmcs:before{content:"\f40d"}.fa-wifi:before{content:"\f1eb"}.fa-wikipedia-w:before{content:"\f266"}.fa-wind:before{content:"\f72e"}.fa-window-close:before{content:"\f410"}.fa-window-maximize:before{content:"\f2d0"}.fa-window-minimize:before{content:"\f2d1"}.fa-window-restore:before{content:"\f2d2"}.fa-windows:before{content:"\f17a"}.fa-wine-bottle:before{content:"\f72f"}.fa-wine-glass:before{content:"\f4e3"}.fa-wine-glass-alt:before{content:"\f5ce"}.fa-wix:before{content:"\f5cf"}.fa-wizards-of-the-coast:before{content:"\f730"}.fa-wolf-pack-battalion:before{content:"\f514"}.fa-won-sign:before{content:"\f159"}.fa-wordpress:before{content:"\f19a"}.fa-wordpress-simple:before{content:"\f411"}.fa-wpbeginner:before{content:"\f297"}.fa-wpexplorer:before{content:"\f2de"}.fa-wpforms:before{content:"\f298"}.fa-wpressr:before{content:"\f3e4"}.fa-wrench:before{content:"\f0ad"}.fa-x-ray:before{content:"\f497"}.fa-xbox:before{content:"\f412"}.fa-xing:before{content:"\f168"}.fa-xing-square:before{content:"\f169"}.fa-y-combinator:before{content:"\f23b"}.fa-yahoo:before{content:"\f19e"}.fa-yammer:before{content:"\f840"}.fa-yandex:before{content:"\f413"}.fa-yandex-international:before{content:"\f414"}.fa-yarn:before{content:"\f7e3"}.fa-yelp:before{content:"\f1e9"}.fa-yen-sign:before{content:"\f157"}.fa-yin-yang:before{content:"\f6ad"}.fa-yoast:before{content:"\f2b1"}.fa-youtube:before{content:"\f167"}.fa-youtube-square:before{content:"\f431"}.fa-zhihu:before{content:"\f63f"}.sr-only{border:0;clip:rect(0,0,0,0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.sr-only-focusable:active,.sr-only-focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}/*!* Font Awesome Free 5.10.1 by @fontawesome - https://fontawesome.com +* License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License)*/@font-face{font-family:'font awesome 5 free';font-style:normal;font-weight:900;font-display:auto;src:url(../webfonts/fa-solid-900.eot);src:url(../webfonts/fa-solid-900.eot?#iefix)format("embedded-opentype"),url(../webfonts/fa-solid-900.woff2)format("woff2"),url(../webfonts/fa-solid-900.woff)format("woff"),url(../webfonts/fa-solid-900.ttf)format("truetype"),url(../webfonts/fa-solid-900.svg#fontawesome)format("svg")}.fa,.fas{font-family:'font awesome 5 free';font-weight:900}/*!* Font Awesome Free 5.10.1 by @fontawesome - https://fontawesome.com +* License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License)*/@font-face{font-family:'font awesome 5 
brands';font-style:normal;font-weight:400;font-display:auto;src:url(../webfonts/fa-brands-400.eot);src:url(../webfonts/fa-brands-400.eot?#iefix)format("embedded-opentype"),url(../webfonts/fa-brands-400.woff2)format("woff2"),url(../webfonts/fa-brands-400.woff)format("woff"),url(../webfonts/fa-brands-400.ttf)format("truetype"),url(../webfonts/fa-brands-400.svg#fontawesome)format("svg")}.fab{font-family:'font awesome 5 brands'}.td-border-top{border:none;border-top:1px solid #eee}.td-border-none{border:none}.td-block-padding,.td-default main section{padding-top:4rem;padding-bottom:4rem}@media(min-width:768px){.td-block-padding,.td-default main section{padding-top:5rem;padding-bottom:5rem}}.td-overlay{position:relative}.td-overlay::after{content:"";position:absolute;top:0;right:0;bottom:0;left:0}.td-overlay--dark::after{background-color:rgba(64,63,76,.3)}.td-overlay--light::after{background-color:rgba(211,243,238,.3)}.td-overlay__inner{position:relative;z-index:1}@media(min-width:992px){.td-max-width-on-larger-screens,.td-content>pre,.td-content>.highlight,.td-content>.lead,.td-content>h1,.td-content>h2,.td-content>ul,.td-content>ol,.td-content>p,.td-content>blockquote,.td-content>dl dd,.td-content .footnotes,.td-content>.alert{max-width:80%}}.td-box--height-min{min-height:300px}.td-box--height-med{min-height:400px}.td-box--height-max{min-height:500px}.td-box--height-full{min-height:100vh}@media(min-width:768px){.td-box--height-min{min-height:450px}.td-box--height-med{min-height:500px}.td-box--height-max{min-height:650px}}.td-box .row.section{padding-left:5rem;padding-right:5rem;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column}.td-box .row{padding-left:5rem;padding-right:5rem;-webkit-box-orient:horizontal;-webkit-box-direction:normal;-webkit-flex-direction:row;-ms-flex-direction:row;flex-direction:row}.td-box.linkbox{padding:5rem}.td-box--0{color:#fff;background-color:#403f4c}.td-box--0 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#403f4c transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--0 p>a{color:#d9e5f8}.td-box--10.td-box--gradient{background:#403f4c -webkit-gradient(linear,left top,left bottom,from(#5d5c67),to(#403F4C))repeat-x!important;background:#403f4c -webkit-linear-gradient(top,#5d5c67,#403F4C)repeat-x!important;background:#403f4c -o-linear-gradient(top,#5d5c67,#403F4C)repeat-x!important;background:#403f4c linear-gradient(180deg,#5d5c67,#403F4C)repeat-x!important}.td-box--1{color:#fff;background-color:#30638e}.td-box--1 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#30638e transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--1 p>a{color:#cadcf5}.td-box--11.td-box--gradient{background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x!important;background:#30638e -webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x!important;background:#30638e -o-linear-gradient(top,#4f7a9f,#30638E)repeat-x!important;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x!important}.td-box--2{color:#fff;background-color:#ffa630}.td-box--2 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#ffa630 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--2 
p>a{color:#abc7f0}.td-box--12.td-box--gradient{background:#ffa630 -webkit-gradient(linear,left top,left bottom,from(#ffb34f),to(#FFA630))repeat-x!important;background:#ffa630 -webkit-linear-gradient(top,#ffb34f,#FFA630)repeat-x!important;background:#ffa630 -o-linear-gradient(top,#ffb34f,#FFA630)repeat-x!important;background:#ffa630 linear-gradient(180deg,#ffb34f,#FFA630)repeat-x!important}.td-box--3{color:#222;background-color:#c0e0de}.td-box--3 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#c0e0de transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--3 p>a{color:#638ac1}.td-box--13.td-box--gradient{background:#c0e0de -webkit-gradient(linear,left top,left bottom,from(#c9e5e3),to(#C0E0DE))repeat-x!important;background:#c0e0de -webkit-linear-gradient(top,#c9e5e3,#C0E0DE)repeat-x!important;background:#c0e0de -o-linear-gradient(top,#c9e5e3,#C0E0DE)repeat-x!important;background:#c0e0de linear-gradient(180deg,#c9e5e3,#C0E0DE)repeat-x!important}.td-box--4{color:#222;background-color:#fff}.td-box--4 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#fff transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--4 p>a{color:#72a1e5}.td-box--14.td-box--gradient{background:#fff -webkit-gradient(linear,left top,left bottom,from(white),to(white))repeat-x!important;background:#fff -webkit-linear-gradient(top,white,white)repeat-x!important;background:#fff -o-linear-gradient(top,white,white)repeat-x!important;background:#fff linear-gradient(180deg,white,white)repeat-x!important}.td-box--5{color:#fff;background-color:#888}.td-box--5 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#888 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--5 p>a{color:#b4cdf1}.td-box--15.td-box--gradient{background:#888 -webkit-gradient(linear,left top,left bottom,from(#9a9a9a),to(#888))repeat-x!important;background:#888 -webkit-linear-gradient(top,#9a9a9a,#888)repeat-x!important;background:#888 -o-linear-gradient(top,#9a9a9a,#888)repeat-x!important;background:#888 linear-gradient(180deg,#9a9a9a,#888)repeat-x!important}.td-box--6{color:#fff;background-color:#3772ff}.td-box--6 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#3772ff transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--6 p>a{color:#a9c6ef}.td-box--16.td-box--gradient{background:#3772ff -webkit-gradient(linear,left top,left bottom,from(#5587ff),to(#3772FF))repeat-x!important;background:#3772ff -webkit-linear-gradient(top,#5587ff,#3772FF)repeat-x!important;background:#3772ff -o-linear-gradient(top,#5587ff,#3772FF)repeat-x!important;background:#3772ff linear-gradient(180deg,#5587ff,#3772FF)repeat-x!important}.td-box--7{color:#fff;background-color:#ed6a5a}.td-box--7 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#ed6a5a transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--7 p>a{color:#a5c3ee}.td-box--17.td-box--gradient{background:#ed6a5a -webkit-gradient(linear,left top,left bottom,from(#f08073),to(#ED6A5A))repeat-x!important;background:#ed6a5a -webkit-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a 
-o-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a linear-gradient(180deg,#f08073,#ED6A5A)repeat-x!important}.td-box--8{color:#fff;background-color:#403f4c}.td-box--8 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#403f4c transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--8 p>a{color:#d9e5f8}.td-box--18.td-box--gradient{background:#403f4c -webkit-gradient(linear,left top,left bottom,from(#5d5c67),to(#403F4C))repeat-x!important;background:#403f4c -webkit-linear-gradient(top,#5d5c67,#403F4C)repeat-x!important;background:#403f4c -o-linear-gradient(top,#5d5c67,#403F4C)repeat-x!important;background:#403f4c linear-gradient(180deg,#5d5c67,#403F4C)repeat-x!important}.td-box--9{color:#fff;background-color:#ed6a5a}.td-box--9 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#ed6a5a transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--9 p>a{color:#a5c3ee}.td-box--19.td-box--gradient{background:#ed6a5a -webkit-gradient(linear,left top,left bottom,from(#f08073),to(#ED6A5A))repeat-x!important;background:#ed6a5a -webkit-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a -o-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a linear-gradient(180deg,#f08073,#ED6A5A)repeat-x!important}.td-box--10{color:#fff;background-color:#30638e}.td-box--10 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#30638e transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--10 p>a{color:#cadcf5}.td-box--110.td-box--gradient{background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x!important;background:#30638e -webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x!important;background:#30638e -o-linear-gradient(top,#4f7a9f,#30638E)repeat-x!important;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x!important}.td-box--11{color:#fff;background-color:#ffa630}.td-box--11 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#ffa630 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--11 p>a{color:#abc7f0}.td-box--111.td-box--gradient{background:#ffa630 -webkit-gradient(linear,left top,left bottom,from(#ffb34f),to(#FFA630))repeat-x!important;background:#ffa630 -webkit-linear-gradient(top,#ffb34f,#FFA630)repeat-x!important;background:#ffa630 -o-linear-gradient(top,#ffb34f,#FFA630)repeat-x!important;background:#ffa630 linear-gradient(180deg,#ffb34f,#FFA630)repeat-x!important}.td-box--12{color:#222;background-color:#fff}.td-box--12 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#fff transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--12 p>a{color:#72a1e5}.td-box--112.td-box--gradient{background:#fff -webkit-gradient(linear,left top,left bottom,from(white),to(white))repeat-x!important;background:#fff -webkit-linear-gradient(top,white,white)repeat-x!important;background:#fff -o-linear-gradient(top,white,white)repeat-x!important;background:#fff linear-gradient(180deg,white,white)repeat-x!important}.td-box--13{color:#222;background-color:#c0e0de}.td-box--13 
.td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#c0e0de transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--13 p>a{color:#638ac1}.td-box--113.td-box--gradient{background:#c0e0de -webkit-gradient(linear,left top,left bottom,from(#c9e5e3),to(#C0E0DE))repeat-x!important;background:#c0e0de -webkit-linear-gradient(top,#c9e5e3,#C0E0DE)repeat-x!important;background:#c0e0de -o-linear-gradient(top,#c9e5e3,#C0E0DE)repeat-x!important;background:#c0e0de linear-gradient(180deg,#c9e5e3,#C0E0DE)repeat-x!important}.td-box--cerulean-blue{color:#fff;background-color:#017cee}.td-box--cerulean-blue .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#017cee transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--cerulean-blue p>a{color:#bdd3f3}.td-box--1cerulean-blue.td-box--gradient{background:#017cee -webkit-gradient(linear,left top,left bottom,from(#2790f1),to(#017cee))repeat-x!important;background:#017cee -webkit-linear-gradient(top,#2790f1,#017cee)repeat-x!important;background:#017cee -o-linear-gradient(top,#2790f1,#017cee)repeat-x!important;background:#017cee linear-gradient(180deg,#2790f1,#017cee)repeat-x!important}.td-box--shamrock{color:#fff;background-color:#00ad46}.td-box--shamrock .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#00ad46 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--shamrock p>a{color:#cfdff6}.td-box--1shamrock.td-box--gradient{background:#00ad46 -webkit-gradient(linear,left top,left bottom,from(#26b962),to(#00ad46))repeat-x!important;background:#00ad46 -webkit-linear-gradient(top,#26b962,#00ad46)repeat-x!important;background:#00ad46 -o-linear-gradient(top,#26b962,#00ad46)repeat-x!important;background:#00ad46 linear-gradient(180deg,#26b962,#00ad46)repeat-x!important}.td-box--bright-sky-blue{color:#fff;background-color:#0cb6ff}.td-box--bright-sky-blue .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#0cb6ff transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--bright-sky-blue p>a{color:#b5cef1}.td-box--1bright-sky-blue.td-box--gradient{background:#0cb6ff -webkit-gradient(linear,left top,left bottom,from(#30c1ff),to(#0cb6ff))repeat-x!important;background:#0cb6ff -webkit-linear-gradient(top,#30c1ff,#0cb6ff)repeat-x!important;background:#0cb6ff -o-linear-gradient(top,#30c1ff,#0cb6ff)repeat-x!important;background:#0cb6ff linear-gradient(180deg,#30c1ff,#0cb6ff)repeat-x!important}.td-box--melon{color:#fff;background-color:#ff7557}.td-box--melon .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#ff7557 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--melon p>a{color:#a0c0ee}.td-box--1melon.td-box--gradient{background:#ff7557 -webkit-gradient(linear,left top,left bottom,from(#ff8a70),to(#ff7557))repeat-x!important;background:#ff7557 -webkit-linear-gradient(top,#ff8a70,#ff7557)repeat-x!important;background:#ff7557 -o-linear-gradient(top,#ff8a70,#ff7557)repeat-x!important;background:#ff7557 linear-gradient(180deg,#ff8a70,#ff7557)repeat-x!important}.td-box--vermillion{color:#fff;background-color:#e43921}.td-box--vermillion 
.td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#e43921 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--vermillion p>a{color:#b7cff2}.td-box--1vermillion.td-box--gradient{background:#e43921 -webkit-gradient(linear,left top,left bottom,from(#e85742),to(#e43921))repeat-x!important;background:#e43921 -webkit-linear-gradient(top,#e85742,#e43921)repeat-x!important;background:#e43921 -o-linear-gradient(top,#e85742,#e43921)repeat-x!important;background:#e43921 linear-gradient(180deg,#e85742,#e43921)repeat-x!important}.td-box--aqua{color:#fff;background-color:#11e1ee}.td-box--aqua .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#11e1ee transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--aqua p>a{color:#b9d0f2}.td-box--1aqua.td-box--gradient{background:#11e1ee -webkit-gradient(linear,left top,left bottom,from(#35e6f1),to(#11e1ee))repeat-x!important;background:#11e1ee -webkit-linear-gradient(top,#35e6f1,#11e1ee)repeat-x!important;background:#11e1ee -o-linear-gradient(top,#35e6f1,#11e1ee)repeat-x!important;background:#11e1ee linear-gradient(180deg,#35e6f1,#11e1ee)repeat-x!important}.td-box--shamrock-green{color:#fff;background-color:#04d659}.td-box--shamrock-green .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#04d659 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--shamrock-green p>a{color:#c3d7f4}.td-box--1shamrock-green.td-box--gradient{background:#04d659 -webkit-gradient(linear,left top,left bottom,from(#2adc72),to(#04d659))repeat-x!important;background:#04d659 -webkit-linear-gradient(top,#2adc72,#04d659)repeat-x!important;background:#04d659 -o-linear-gradient(top,#2adc72,#04d659)repeat-x!important;background:#04d659 linear-gradient(180deg,#2adc72,#04d659)repeat-x!important}.td-box--aqua-blue{color:#fff;background-color:#00c7d4}.td-box--aqua-blue .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#00c7d4 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--aqua-blue p>a{color:#c4d8f4}.td-box--1aqua-blue.td-box--gradient{background:#00c7d4 -webkit-gradient(linear,left top,left bottom,from(#26cfda),to(#00c7d4))repeat-x!important;background:#00c7d4 -webkit-linear-gradient(top,#26cfda,#00c7d4)repeat-x!important;background:#00c7d4 -o-linear-gradient(top,#26cfda,#00c7d4)repeat-x!important;background:#00c7d4 linear-gradient(180deg,#26cfda,#00c7d4)repeat-x!important}.td-box--white{color:#222;background-color:#fff}.td-box--white .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#fff transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--white p>a{color:#72a1e5}.td-box--1white.td-box--gradient{background:#fff -webkit-gradient(linear,left top,left bottom,from(white),to(#ffffff))repeat-x!important;background:#fff -webkit-linear-gradient(top,white,#ffffff)repeat-x!important;background:#fff -o-linear-gradient(top,white,#ffffff)repeat-x!important;background:#fff linear-gradient(180deg,white,#ffffff)repeat-x!important}.td-box--brownish-grey{color:#fff;background-color:#707070}.td-box--brownish-grey .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 
0;border-color:#707070 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--brownish-grey p>a{color:#c1d6f4}.td-box--1brownish-grey.td-box--gradient{background:#707070 -webkit-gradient(linear,left top,left bottom,from(#858585),to(#707070))repeat-x!important;background:#707070 -webkit-linear-gradient(top,#858585,#707070)repeat-x!important;background:#707070 -o-linear-gradient(top,#858585,#707070)repeat-x!important;background:#707070 linear-gradient(180deg,#858585,#707070)repeat-x!important}.td-box--very-light-pink{color:#222;background-color:#cbcbcb}.td-box--very-light-pink .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#cbcbcb transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--very-light-pink p>a{color:#6287bd}.td-box--1very-light-pink.td-box--gradient{background:#cbcbcb -webkit-gradient(linear,left top,left bottom,from(lightgray),to(#cbcbcb))repeat-x!important;background:#cbcbcb -webkit-linear-gradient(top,lightgray,#cbcbcb)repeat-x!important;background:#cbcbcb -o-linear-gradient(top,lightgray,#cbcbcb)repeat-x!important;background:#cbcbcb linear-gradient(180deg,lightgray,#cbcbcb)repeat-x!important}.td-box--slate-grey{color:#fff;background-color:#636365}.td-box--slate-grey .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#636365 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--slate-grey p>a{color:#c8daf5}.td-box--1slate-grey.td-box--gradient{background:#636365 -webkit-gradient(linear,left top,left bottom,from(#7a7a7c),to(#636365))repeat-x!important;background:#636365 -webkit-linear-gradient(top,#7a7a7c,#636365)repeat-x!important;background:#636365 -o-linear-gradient(top,#7a7a7c,#636365)repeat-x!important;background:#636365 linear-gradient(180deg,#7a7a7c,#636365)repeat-x!important}.td-box--greyish-brown{color:#fff;background-color:#51504f}.td-box--greyish-brown .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#51504f transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--greyish-brown p>a{color:#d3e2f7}.td-box--1greyish-brown.td-box--gradient{background:#51504f -webkit-gradient(linear,left top,left bottom,from(#6b6a69),to(#51504f))repeat-x!important;background:#51504f -webkit-linear-gradient(top,#6b6a69,#51504f)repeat-x!important;background:#51504f -o-linear-gradient(top,#6b6a69,#51504f)repeat-x!important;background:#51504f linear-gradient(180deg,#6b6a69,#51504f)repeat-x!important}.td-box--primary{color:#fff;background-color:#30638e}.td-box--primary .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#30638e transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--primary p>a{color:#cadcf5}.td-box--1primary.td-box--gradient{background:#30638e -webkit-gradient(linear,left top,left bottom,from(#4f7a9f),to(#30638E))repeat-x!important;background:#30638e -webkit-linear-gradient(top,#4f7a9f,#30638E)repeat-x!important;background:#30638e -o-linear-gradient(top,#4f7a9f,#30638E)repeat-x!important;background:#30638e linear-gradient(180deg,#4f7a9f,#30638E)repeat-x!important}.td-box--secondary{color:#fff;background-color:#ffa630}.td-box--secondary .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#ffa630 
transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--secondary p>a{color:#abc7f0}.td-box--1secondary.td-box--gradient{background:#ffa630 -webkit-gradient(linear,left top,left bottom,from(#ffb34f),to(#FFA630))repeat-x!important;background:#ffa630 -webkit-linear-gradient(top,#ffb34f,#FFA630)repeat-x!important;background:#ffa630 -o-linear-gradient(top,#ffb34f,#FFA630)repeat-x!important;background:#ffa630 linear-gradient(180deg,#ffb34f,#FFA630)repeat-x!important}.td-box--success{color:#fff;background-color:#3772ff}.td-box--success .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#3772ff transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--success p>a{color:#a9c6ef}.td-box--1success.td-box--gradient{background:#3772ff -webkit-gradient(linear,left top,left bottom,from(#5587ff),to(#3772FF))repeat-x!important;background:#3772ff -webkit-linear-gradient(top,#5587ff,#3772FF)repeat-x!important;background:#3772ff -o-linear-gradient(top,#5587ff,#3772FF)repeat-x!important;background:#3772ff linear-gradient(180deg,#5587ff,#3772FF)repeat-x!important}.td-box--info{color:#222;background-color:#c0e0de}.td-box--info .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#c0e0de transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--info p>a{color:#638ac1}.td-box--1info.td-box--gradient{background:#c0e0de -webkit-gradient(linear,left top,left bottom,from(#c9e5e3),to(#C0E0DE))repeat-x!important;background:#c0e0de -webkit-linear-gradient(top,#c9e5e3,#C0E0DE)repeat-x!important;background:#c0e0de -o-linear-gradient(top,#c9e5e3,#C0E0DE)repeat-x!important;background:#c0e0de linear-gradient(180deg,#c9e5e3,#C0E0DE)repeat-x!important}.td-box--warning{color:#fff;background-color:#ed6a5a}.td-box--warning .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#ed6a5a transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--warning p>a{color:#a5c3ee}.td-box--1warning.td-box--gradient{background:#ed6a5a -webkit-gradient(linear,left top,left bottom,from(#f08073),to(#ED6A5A))repeat-x!important;background:#ed6a5a -webkit-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a -o-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a linear-gradient(180deg,#f08073,#ED6A5A)repeat-x!important}.td-box--danger{color:#fff;background-color:#ed6a5a}.td-box--danger .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#ed6a5a transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--danger p>a{color:#a5c3ee}.td-box--1danger.td-box--gradient{background:#ed6a5a -webkit-gradient(linear,left top,left bottom,from(#f08073),to(#ED6A5A))repeat-x!important;background:#ed6a5a -webkit-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a -o-linear-gradient(top,#f08073,#ED6A5A)repeat-x!important;background:#ed6a5a linear-gradient(180deg,#f08073,#ED6A5A)repeat-x!important}.td-box--light{color:#222;background-color:#d3f3ee}.td-box--light .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#d3f3ee transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--light 
p>a{color:#6993d0}.td-box--1light.td-box--gradient{background:#d3f3ee -webkit-gradient(linear,left top,left bottom,from(#daf5f1),to(#D3F3EE))repeat-x!important;background:#d3f3ee -webkit-linear-gradient(top,#daf5f1,#D3F3EE)repeat-x!important;background:#d3f3ee -o-linear-gradient(top,#daf5f1,#D3F3EE)repeat-x!important;background:#d3f3ee linear-gradient(180deg,#daf5f1,#D3F3EE)repeat-x!important}.td-box--dark{color:#fff;background-color:#403f4c}.td-box--dark .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#403f4c transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--dark p>a{color:#d9e5f8}.td-box--1dark.td-box--gradient{background:#403f4c -webkit-gradient(linear,left top,left bottom,from(#5d5c67),to(#403F4C))repeat-x!important;background:#403f4c -webkit-linear-gradient(top,#5d5c67,#403F4C)repeat-x!important;background:#403f4c -o-linear-gradient(top,#5d5c67,#403F4C)repeat-x!important;background:#403f4c linear-gradient(180deg,#5d5c67,#403F4C)repeat-x!important}.td-box--100{color:#222;background-color:#f8f9fa}.td-box--100 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#f8f9fa transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--100 p>a{color:#709ee0}.td-box--1100.td-box--gradient{background:#f8f9fa -webkit-gradient(linear,left top,left bottom,from(#f9fafb),to(#f8f9fa))repeat-x!important;background:#f8f9fa -webkit-linear-gradient(top,#f9fafb,#f8f9fa)repeat-x!important;background:#f8f9fa -o-linear-gradient(top,#f9fafb,#f8f9fa)repeat-x!important;background:#f8f9fa linear-gradient(180deg,#f9fafb,#f8f9fa)repeat-x!important}.td-box--200{color:#222;background-color:#eee}.td-box--200 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#eee transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--200 p>a{color:#6d99d8}.td-box--1200.td-box--gradient{background:#eee -webkit-gradient(linear,left top,left bottom,from(#f1f1f1),to(#eee))repeat-x!important;background:#eee -webkit-linear-gradient(top,#f1f1f1,#eee)repeat-x!important;background:#eee -o-linear-gradient(top,#f1f1f1,#eee)repeat-x!important;background:#eee linear-gradient(180deg,#f1f1f1,#eee)repeat-x!important}.td-box--300{color:#222;background-color:#dee2e6}.td-box--300 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#dee2e6 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--300 p>a{color:#6993cf}.td-box--1300.td-box--gradient{background:#dee2e6 -webkit-gradient(linear,left top,left bottom,from(#e3e6ea),to(#dee2e6))repeat-x!important;background:#dee2e6 -webkit-linear-gradient(top,#e3e6ea,#dee2e6)repeat-x!important;background:#dee2e6 -o-linear-gradient(top,#e3e6ea,#dee2e6)repeat-x!important;background:#dee2e6 linear-gradient(180deg,#e3e6ea,#dee2e6)repeat-x!important}.td-box--400{color:#222;background-color:#ccc}.td-box--400 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#ccc transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--400 p>a{color:#6288be}.td-box--1400.td-box--gradient{background:#ccc -webkit-gradient(linear,left top,left bottom,from(#d4d4d4),to(#ccc))repeat-x!important;background:#ccc 
-webkit-linear-gradient(top,#d4d4d4,#ccc)repeat-x!important;background:#ccc -o-linear-gradient(top,#d4d4d4,#ccc)repeat-x!important;background:#ccc linear-gradient(180deg,#d4d4d4,#ccc)repeat-x!important}.td-box--500{color:#fff;background-color:#adb5bd}.td-box--500 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#adb5bd transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--500 p>a{color:#9bbced}.td-box--1500.td-box--gradient{background:#adb5bd -webkit-gradient(linear,left top,left bottom,from(#b9c0c7),to(#adb5bd))repeat-x!important;background:#adb5bd -webkit-linear-gradient(top,#b9c0c7,#adb5bd)repeat-x!important;background:#adb5bd -o-linear-gradient(top,#b9c0c7,#adb5bd)repeat-x!important;background:#adb5bd linear-gradient(180deg,#b9c0c7,#adb5bd)repeat-x!important}.td-box--600{color:#fff;background-color:#888}.td-box--600 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#888 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--600 p>a{color:#b4cdf1}.td-box--1600.td-box--gradient{background:#888 -webkit-gradient(linear,left top,left bottom,from(#9a9a9a),to(#888))repeat-x!important;background:#888 -webkit-linear-gradient(top,#9a9a9a,#888)repeat-x!important;background:#888 -o-linear-gradient(top,#9a9a9a,#888)repeat-x!important;background:#888 linear-gradient(180deg,#9a9a9a,#888)repeat-x!important}.td-box--700{color:#fff;background-color:#495057}.td-box--700 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#495057 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--700 p>a{color:#d3e2f7}.td-box--1700.td-box--gradient{background:#495057 -webkit-gradient(linear,left top,left bottom,from(#646a70),to(#495057))repeat-x!important;background:#495057 -webkit-linear-gradient(top,#646a70,#495057)repeat-x!important;background:#495057 -o-linear-gradient(top,#646a70,#495057)repeat-x!important;background:#495057 linear-gradient(180deg,#646a70,#495057)repeat-x!important}.td-box--800{color:#fff;background-color:#333}.td-box--800 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#333 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--800 p>a{color:#e3ecfa}.td-box--1800.td-box--gradient{background:#333 -webkit-gradient(linear,left top,left bottom,from(#525252),to(#333))repeat-x!important;background:#333 -webkit-linear-gradient(top,#525252,#333)repeat-x!important;background:#333 -o-linear-gradient(top,#525252,#333)repeat-x!important;background:#333 linear-gradient(180deg,#525252,#333)repeat-x!important}.td-box--900{color:#fff;background-color:#222}.td-box--900 .td-arrow-down::before{left:50%;margin-left:-30px;bottom:-25px;border-style:solid;border-width:25px 30px 0;border-color:#222 transparent transparent transparent;z-index:3;position:absolute;content:""}.td-box--900 p>a{color:#ecf2fc}.td-box--1900.td-box--gradient{background:#222 -webkit-gradient(linear,left top,left bottom,from(#434343),to(#222))repeat-x!important;background:#222 -webkit-linear-gradient(top,#434343,#222)repeat-x!important;background:#222 -o-linear-gradient(top,#434343,#222)repeat-x!important;background:#222 linear-gradient(180deg,#434343,#222)repeat-x!important}.td-blog 
.td-rss-button{position:absolute;top:5.5rem;right:1rem;z-index:22}.td-content .highlight{margin:2rem 0;padding:1rem;background-color:#f8f9fa}.td-content .highlight pre,.td-content .highlight div{background-color:inherit!important}.td-content .highlight pre{margin:0;padding:0}.td-content p code,.td-content li>code,.td-content table code{color:inherit;padding:.2em .4em;margin:0;font-size:85%;word-break:normal;background-color:rgba(0,0,0,5%);border-radius:.25rem}.td-content p code br,.td-content li>code br,.td-content table code br{display:none}.td-content pre{word-wrap:normal;background-color:#f8f9fa;padding:1rem}.td-content pre>code{padding:0;margin:0;font-size:100%;word-break:normal;white-space:pre;border:0}.td-navbar-cover{background:#30638e}@media(min-width:768px){.td-navbar-cover{background:0 0!important}.td-navbar-cover .nav-link{text-shadow:1px 1px 2px #403f4c}}.td-navbar-cover.navbar-bg-onscroll .nav-link{text-shadow:none}.navbar-bg-onscroll{background:#30638e!important;opacity:inherit}.td-navbar{background:#30638e;min-height:4rem;margin:0;z-index:32}@media(min-width:768px){.td-navbar{position:fixed;top:0;width:100%}}.td-navbar .navbar-brand{text-transform:none;text-align:middle}.td-navbar .navbar-brand .nav-link{display:inline-block;margin-right:-30px}.td-navbar .navbar-brand svg{display:inline-block;margin:0 10px;height:30px}.td-navbar .nav-link{text-transform:none;font-weight:700}.td-navbar .td-search-input{border:none}.td-navbar .td-search-input::-webkit-input-placeholder{color:rgba(255,255,255,.75)}.td-navbar .td-search-input:-moz-placeholder{color:rgba(255,255,255,.75)}.td-navbar .td-search-input::-moz-placeholder{color:rgba(255,255,255,.75)}.td-navbar .td-search-input:-ms-input-placeholder{color:rgba(255,255,255,.75)}.td-navbar .dropdown{min-width:100px}@media(max-width:991.98px){.td-navbar{padding-right:.5rem;padding-left:.75rem}.td-navbar .td-navbar-nav-scroll{max-width:100%;height:2.5rem;margin-top:.25rem;overflow:hidden;font-size:.875rem}.td-navbar .td-navbar-nav-scroll .nav-link{padding-right:.25rem;padding-left:0}.td-navbar .td-navbar-nav-scroll .navbar-nav{padding-bottom:2rem;overflow-x:auto;white-space:nowrap;-webkit-overflow-scrolling:touch}}.td-sidebar-nav{padding-right:.5rem;margin-right:-15px;margin-left:-15px}@media(min-width:768px){@supports((position:-webkit-sticky) or (position:sticky)){.td-sidebar-nav{max-height:-webkit-calc(100vh - 10rem);max-height:calc(100vh - 10rem);overflow-y:auto}}}@media(min-width:768px){.td-sidebar-nav{display:block!important}}.td-sidebar-nav__section{padding-left:0}.td-sidebar-nav__section li{list-style:none}.td-sidebar-nav__section ul{padding:0;margin:0}@media(min-width:768px){.td-sidebar-nav__section>ul{padding-left:.5rem}}.td-sidebar-nav__section-title{display:block;font-weight:500}.td-sidebar-nav__section-title .active{font-weight:700}.td-sidebar-nav__section-title a{color:#222}.td-sidebar-nav .td-sidebar-link{display:block;padding-bottom:.375rem}.td-sidebar-nav .td-sidebar-link__page{color:#495057;font-weight:300}.td-sidebar-nav a:hover{color:#72a1e5;text-decoration:none}.td-sidebar-nav a.active{font-weight:700}.td-sidebar-nav .dropdown a{color:#495057}.td-sidebar-nav .dropdown .nav-link{padding:0 0 1rem}.td-sidebar{padding-bottom:1rem}@media(min-width:768px){.td-sidebar{padding-top:4rem;background-color:rgba(48,99,142,3%);padding-right:1rem;border-right:1px solid #dee2e6}}.td-sidebar__toggle{line-height:1;color:#222;margin:1rem}.td-sidebar__search{padding:1rem 
15px;margin-right:-15px;margin-left:-15px}.td-sidebar__inner{-webkit-box-ordinal-group:1;-webkit-order:0;-ms-flex-order:0;order:0}@media(min-width:768px){@supports((position:-webkit-sticky) or (position:sticky)){.td-sidebar__inner{position:-webkit-sticky;position:sticky;top:4rem;z-index:10;height:-webkit-calc(100vh - 6rem);height:calc(100vh - 6rem)}}}@media(min-width:1200px){.td-sidebar__inner{-webkit-box-flex:0;-webkit-flex:0 1 320px;-ms-flex:0 1 320px;flex:0 1 320px}}.td-sidebar__inner .td-search-box{width:100%}.td-toc{border-left:1px solid #dee2e6;-webkit-box-ordinal-group:3;-webkit-order:2;-ms-flex-order:2;order:2;padding-top:.75rem;padding-bottom:1.5rem;vertical-align:top}@supports((position:-webkit-sticky) or (position:sticky)){.td-toc{position:-webkit-sticky;position:sticky;top:4rem;height:-webkit-calc(100vh - 10rem);height:calc(100vh - 10rem);overflow-y:auto}}.td-toc a{display:block;font-weight:300;padding-bottom:.25rem}.td-toc li{list-style:none;display:block}.td-toc li li{margin-left:.5rem}.td-toc .td-page-meta a{font-weight:500}.td-toc #TableOfContents a{color:#888}.td-toc #TableOfContents a:hover{color:#72a1e5;text-decoration:none}.td-toc ul{padding-left:0}button{cursor:pointer;border:1px solid;border-radius:5px;padding:9px 29px;-webkit-transition:all ease-out .2s;-o-transition:all ease-out .2s;transition:all ease-out .2s}button:disabled{cursor:not-allowed}button.btn-filled{border-color:#017cee;background-color:#017cee}button.btn-filled:hover{border-color:#0cb6ff;background-color:#0cb6ff}button.btn-with-icon{padding:14px 20px}button.btn-with-icon svg{height:30px;width:auto;padding-right:15px}button.btn-with-icon span{display:inline-block;line-height:30px;vertical-align:middle}button.btn-hollow{background-color:#fff}button.btn-hollow.btn-blue{color:#017cee;border-color:#017cee}button.btn-hollow.btn-blue:disabled{color:#cbcbcb;border-color:#cbcbcb}button.btn-hollow.btn-blue:hover:enabled{color:#fff;background-color:#017cee}button.btn-hollow.btn-brown{border-color:#cbcbcb}button.btn-hollow.btn-brown:hover{background-color:#51504f;border-color:#51504f}button.btn-hollow.btn-brown:hover span{color:#fff}button.btn-hollow.btn-brown:hover svg path{fill:#fff}button.with-box-shadow{-webkit-box-shadow:0 2px 6px 0 rgba(0,0,0,.12);box-shadow:0 2px 6px rgba(0,0,0,.12)}@media(max-width:1280px){button{padding:4px 17px}}.breadcrumb{background:0 0;padding-left:0;padding-top:0}.alert{font-weight:500;background:#fff;color:inherit;border-radius:0}.alert-primary{border-style:solid;border-color:#30638e;border-width:0 0 0 4px}.alert-primary .alert-heading{color:#30638e}.alert-secondary{border-style:solid;border-color:#ffa630;border-width:0 0 0 4px}.alert-secondary .alert-heading{color:#ffa630}.alert-success{border-style:solid;border-color:#3772ff;border-width:0 0 0 4px}.alert-success .alert-heading{color:#3772ff}.alert-info{border-style:solid;border-color:#c0e0de;border-width:0 0 0 4px}.alert-info .alert-heading{color:#c0e0de}.alert-warning{border-style:solid;border-color:#ed6a5a;border-width:0 0 0 4px}.alert-warning .alert-heading{color:#ed6a5a}.alert-danger{border-style:solid;border-color:#ed6a5a;border-width:0 0 0 4px}.alert-danger .alert-heading{color:#ed6a5a}.alert-light{border-style:solid;border-color:#d3f3ee;border-width:0 0 0 4px}.alert-light .alert-heading{color:#d3f3ee}.alert-dark{border-style:solid;border-color:#403f4c;border-width:0 0 0 4px}.alert-dark .alert-heading{color:#403f4c}.td-content{-webkit-box-ordinal-group:2;-webkit-order:1;-ms-flex-order:1;order:1}.td-content p,.td-content 
li,.td-content td{font-weight:400}.td-content>h1{font-weight:700;margin-bottom:1rem}.td-content>h2{margin-bottom:1rem}.td-content>h2:not(:first-child){margin-top:3rem}.td-content>h2+h3{margin-top:1rem}.td-content>h3,.td-content>h4,.td-content>h5,.td-content>h6{margin-bottom:1rem;margin-top:2rem}.td-content>blockquote{padding:0 0 0 1rem;margin-bottom:1rem;color:#888;border-left:6px solid #ffa630}.td-content>ul li,.td-content>ol li{margin-bottom:.25rem}.td-content strong{font-weight:700}.td-content .alert:not(:first-child){margin-top:2rem;margin-bottom:2rem}.td-content .lead{margin-bottom:1.5rem}.td-title{margin-top:1rem;margin-bottom:.5rem}@media(min-width:576px){.td-title{font-size:3rem}}.search-form{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;width:344px;padding:8px 20px;border:solid 1px #cbcbcb;border-radius:5px;margin:60px auto 0}.search-form__input{font-family:roboto,sans-serif;font-size:16px;color:#707070;line-height:1.63;-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1;padding-right:10px;border:none;background:0 0;outline:none;float:left}.search-form__button{border:none;background-color:transparent;padding:0}@media(max-width:1280px){.search-form{width:270px;padding:3px 20px;margin-top:30px}}.td-outer{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-flex-direction:column;-ms-flex-direction:column;flex-direction:column;height:100vh}@media(min-width:768px){.td-default main section:first-of-type{padding-top:8rem}}.td-main{-webkit-box-flex:1;-webkit-flex-grow:1;-ms-flex-positive:1;flex-grow:1}.td-main main{padding-bottom:2rem}@media(min-width:768px){.td-main main{padding-top:5.5rem}}.td-cover-block--height-min{min-height:300px}.td-cover-block--height-med{min-height:400px}.td-cover-block--height-max{min-height:500px}.td-cover-block--height-full{min-height:100vh}@media(min-width:768px){.td-cover-block--height-min{min-height:450px}.td-cover-block--height-med{min-height:500px}.td-cover-block--height-max{min-height:650px}}.td-cover-logo{margin-right:.5em}.td-cover-block{position:relative;padding-top:5rem;padding-bottom:5rem;background-repeat:no-repeat;background-position:50% 0;-webkit-background-size:cover;background-size:cover}.td-bg-arrow-wrapper{position:relative}.section-index .entry{padding:.75rem}.section-index h5{margin-bottom:0}.section-index h5 a{font-weight:700}.section-index p{margin-top:0}.pageinfo{font-weight:500;background:#f8f9fa;color:inherit;border-radius:0;margin:2rem;padding:1.5rem;padding-bottom:.5rem}.pageinfo-primary{border-style:solid;border-color:#30638e}.pageinfo-secondary{border-style:solid;border-color:#ffa630}.pageinfo-success{border-style:solid;border-color:#3772ff}.pageinfo-info{border-style:solid;border-color:#c0e0de}.pageinfo-warning{border-style:solid;border-color:#ed6a5a}.pageinfo-danger{border-style:solid;border-color:#ed6a5a}.pageinfo-light{border-style:solid;border-color:#d3f3ee}.pageinfo-dark{border-style:solid;border-color:#403f4c}footer{min-height:150px}@media(max-width:991.98px){footer{min-height:200px}}@media(min-width:768px){.td-offset-anchor:target{display:block;position:relative;top:-4rem;visibility:hidden}h2[id]:before,h3[id]:before,h4[id]:before,h5[id]:before{display:block;content:" ";margin-top:-5rem;height:5rem;visibility:hidden}} \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/_gen/js/docs.js 
b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/_gen/js/docs.js new file mode 100644 index 00000000000..2029b9f8147 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/_gen/js/docs.js @@ -0,0 +1 @@ +!function(r){var n={};function o(t){if(n[t])return n[t].exports;var e=n[t]={i:t,l:!1,exports:{}};return r[t].call(e.exports,e,e.exports,o),e.l=!0,e.exports}o.m=r,o.c=n,o.d=function(t,e,r){o.o(t,e)||Object.defineProperty(t,e,{enumerable:!0,get:r})},o.r=function(t){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})},o.t=function(e,t){if(1&t&&(e=o(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var r=Object.create(null);if(o.r(r),Object.defineProperty(r,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var n in e)o.d(r,n,function(t){return e[t]}.bind(null,n));return r},o.n=function(t){var e=t&&t.__esModule?function(){return t.default}:function(){return t};return o.d(e,"a",e),e},o.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},o.p="/",o(o.s=56)}([function(t,e,r){var m=r(1),w=r(14),g=r(15),E=Math.max,x=Math.min;t.exports=function(n,r,t){var o,i,a,s,u,c,f=0,l=!1,d=!1,e=!0;if("function"!=typeof n)throw new TypeError("Expected a function");function h(t){var e=o,r=i;return o=i=void 0,f=t,s=n.apply(r,e)}function p(t){var e=t-c;return void 0===c||r<=e||e<0||d&&a<=t-f}function y(){var t=w();if(p(t))return b(t);u=setTimeout(y,function(t){var e=r-(t-c);return d?x(e,a-(t-f)):e}(t))}function b(t){return u=void 0,e&&o?h(t):(o=i=void 0,s)}function v(){var t=w(),e=p(t);if(o=arguments,i=this,c=t,e){if(void 0===u)return function(t){return f=t,u=setTimeout(y,r),l?h(t):s}(c);if(d)return clearTimeout(u),u=setTimeout(y,r),h(c)}return void 0===u&&(u=setTimeout(y,r)),s}return r=g(r)||0,m(t)&&(l=!!t.leading,a=(d="maxWait"in t)?E(g(t.maxWait)||0,r):a,e="trailing"in t?!!t.trailing:e),v.cancel=function(){void 0!==u&&clearTimeout(u),o=c=i=u=void(f=0)},v.flush=function(){return void 0===u?s:b(w())},v}},function(t,e){t.exports=function(t){var e=typeof t;return null!=t&&("object"==e||"function"==e)}},function(t,e,r){var n=r(8),o="object"==typeof self&&self&&self.Object===Object&&self,i=n||o||Function("return this")();t.exports=i},function(t,e,r){var n=r(6),o=r(19),i=r(20),a=n?n.toStringTag:void 0;t.exports=function(t){return null==t?void 0===t?"[object Undefined]":"[object Null]":a&&a in Object(t)?o(t):i(t)}},function(t,e){t.exports=function(t){return null!=t&&"object"==typeof t}},function(t,e,r){!function(s){"use strict";var u={searchParams:"URLSearchParams"in self,iterable:"Symbol"in self&&"iterator"in Symbol,blob:"FileReader"in self&&"Blob"in self&&function(){try{return new Blob,!0}catch(t){return!1}}(),formData:"FormData"in self,arrayBuffer:"ArrayBuffer"in self};if(u.arrayBuffer)var e=["[object Int8Array]","[object Uint8Array]","[object Uint8ClampedArray]","[object Int16Array]","[object Uint16Array]","[object Int32Array]","[object Uint32Array]","[object Float32Array]","[object Float64Array]"],r=ArrayBuffer.isView||function(t){return t&&-1 nav").offsetHeight,n=function(){var t;e.sort(function(t,e){return t.targetElement.offsetTop-e.targetElement.offsetTop}),t=e[0].targetElement.offsetTop+r>window.scrollY?0:e[e.length-1].targetElement.offsetTop+rwindow.scrollY})-1,e.forEach(function(t){return 
t.navElement.classList.remove("current")}),e[t].navElement.classList.add("current")};window.addEventListener("scroll",o()(n,10)),window.addEventListener("resize",o()(n,10)),n()}}()},function(t,e,r){var n=r(2);t.exports=function(){return n.Date.now()}},function(t,e,r){var n=r(16),o=r(1),i=r(18),a=/^[-+]0x[0-9a-f]+$/i,s=/^0b[01]+$/i,u=/^0o[0-7]+$/i,c=parseInt;t.exports=function(t){if("number"==typeof t)return t;if(i(t))return NaN;if(o(t)){var e="function"==typeof t.valueOf?t.valueOf():t;t=o(e)?e+"":e}if("string"!=typeof t)return 0===t?t:+t;t=n(t);var r=s.test(t);return r||u.test(t)?c(t.slice(2),r?2:8):a.test(t)?NaN:+t}},function(t,e,r){var n=r(17),o=/^\s+/;t.exports=function(t){return t?t.slice(0,n(t)+1).replace(o,""):t}},function(t,e){var r=/\s/;t.exports=function(t){for(var e=t.length;e--&&r.test(t.charAt(e)););return e}},function(t,e,r){var n=r(3),o=r(4);t.exports=function(t){return"symbol"==typeof t||o(t)&&"[object Symbol]"==n(t)}},function(t,e,r){var n=r(6),o=Object.prototype,i=o.hasOwnProperty,a=o.toString,s=n?n.toStringTag:void 0;t.exports=function(t){var e=i.call(t,s),r=t[s];try{var n=!(t[s]=void 0)}catch(t){}var o=a.call(t);return n&&(e?t[s]=r:delete t[s]),o}},function(t,e){var r=Object.prototype.toString;t.exports=function(t){return r.call(t)}},function(t,e){!function(){var e=window.document.querySelector(".rating");if(e){function t(t){e.querySelector("#rate-star-".concat(t)).addEventListener("click",function(){!function(t){window._paq.push(["trackEvent","Docs","Rating",window.location.pathname,t])}(t),e.innerHTML="
Thank you!
"})}for(var r=1;r<=5;r++)t(r)}}()},function(t,e){var r=window.document.querySelector(".rst-content");!function(){if(r){var t=r.querySelectorAll("table");t&&0!==t.length&&t.forEach(function(t){if(!t.parentNode.classList.contains("wy-table-responsive")){var e=document.createElement("div");e.classList.add("wy-table-responsive"),t.parentNode.insertBefore(e,t),e.appendChild(t)}})}}()},function(t,e,n){"use strict";(function(t){var i=n(7);function r(t){return function(t){if(Array.isArray(t))return t}(t)||function(t){if(Symbol.iterator in Object(t)||"[object Arguments]"===Object.prototype.toString.call(t))return Array.from(t)}(t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance")}()}function f(){var t=r(document.location.pathname.split("/")),e=t[2];return{currentVersion:t[3],currentPackageName:e,pagePath:t.slice(4).join("/")}}var a;(a=window.document.querySelectorAll(".docs-version-selector"))&&0!==a.length&&t("/_gen/packages-metadata.json").then(function(t){return t.json()}).then(function(t){var e=f().currentPackageName,r=t.find(function(t){return t["package-name"]===e});if(r){var n=r["all-versions"].sort(i.a).reverse(),o=r["stable-version"];a.forEach(function(t){return function(t,e,r){var n=t.querySelector("#version-item-template").innerText,o=document.createElement("div");function i(t,e){var r=o.cloneNode(!0),n="/docs/".concat(u,"/").concat(t,"/").concat(c);r.setAttribute("href",n),r.innerText=e,a.appendChild(r)}o.innerHTML=n,o=o.firstElementChild;var a=t.querySelector(".dropdown-menu"),s=f(),u=s.currentPackageName,c=s.pagePath;i("stable","Stable (".concat(r,")")),e.forEach(function(t){return i(t,t)})}(t,n,o)})}})}).call(this,n(5))},,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,function(t,e,r){"use strict";r.r(e);r(10),r(11),r(12),r(13),r(21),r(22),r(57),r(23)},function(t,e){Array.from(document.querySelectorAll(".toctree ul")).forEach(function(t){Array.from(t.parentNode.children).filter(function(t){return"A"===t.tagName}).forEach(function(t){var e=document.createElement("span");e.classList.add("toctree-expand"),t.insertBefore(e,t.firstChild)})})}]); \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/_sphinx_javascript_frameworks_compat.js b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/_sphinx_javascript_frameworks_compat.js new file mode 100644 index 00000000000..8549469dc29 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/_sphinx_javascript_frameworks_compat.js @@ -0,0 +1,134 @@ +/* + * _sphinx_javascript_frameworks_compat.js + * ~~~~~~~~~~ + * + * Compatability shim for jQuery and underscores.js. + * + * WILL BE REMOVED IN Sphinx 6.0 + * xref RemovedInSphinx60Warning + * + */ + +/** + * select a different prefix for underscore + */ +$u = _.noConflict(); + + +/** + * small helper function to urldecode strings + * + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent#Decoding_query_parameters_from_a_URL + */ +jQuery.urldecode = function(x) { + if (!x) { + return x + } + return decodeURIComponent(x.replace(/\+/g, ' ')); +}; + +/** + * small helper function to urlencode strings + */ +jQuery.urlencode = encodeURIComponent; + +/** + * This function returns the parsed url parameters of the + * current request. Multiple values per key are supported, + * it will always return arrays of strings for the value parts. 
+ */ +jQuery.getQueryParameters = function(s) { + if (typeof s === 'undefined') + s = document.location.search; + var parts = s.substr(s.indexOf('?') + 1).split('&'); + var result = {}; + for (var i = 0; i < parts.length; i++) { + var tmp = parts[i].split('=', 2); + var key = jQuery.urldecode(tmp[0]); + var value = jQuery.urldecode(tmp[1]); + if (key in result) + result[key].push(value); + else + result[key] = [value]; + } + return result; +}; + +/** + * highlight a given string on a jquery object by wrapping it in + * span elements with the given class name. + */ +jQuery.fn.highlightText = function(text, className) { + function highlight(node, addItems) { + if (node.nodeType === 3) { + var val = node.nodeValue; + var pos = val.toLowerCase().indexOf(text); + if (pos >= 0 && + !jQuery(node.parentNode).hasClass(className) && + !jQuery(node.parentNode).hasClass("nohighlight")) { + var span; + var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.className = className; + } + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + node.parentNode.insertBefore(span, node.parentNode.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling)); + node.nodeValue = val.substr(0, pos); + if (isInSVG) { + var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); + var bbox = node.parentElement.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute('class', className); + addItems.push({ + "parent": node.parentNode, + "target": rect}); + } + } + } + else if (!jQuery(node).is("button, select, textarea")) { + jQuery.each(node.childNodes, function() { + highlight(this, addItems); + }); + } + } + var addItems = []; + var result = this.each(function() { + highlight(this, addItems); + }); + for (var i = 0; i < addItems.length; ++i) { + jQuery(addItems[i].parent).before(addItems[i].target); + } + return result; +}; + +/* + * backward compatibility for jQuery.browser + * This will be supported until firefox bug is fixed. + */ +if (!jQuery.browser) { + jQuery.uaMatch = function(ua) { + ua = ua.toLowerCase(); + + var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || + /(webkit)[ \/]([\w.]+)/.exec(ua) || + /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || + /(msie) ([\w.]+)/.exec(ua) || + ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || + []; + + return { + browser: match[ 1 ] || "", + version: match[ 2 ] || "0" + }; + }; + jQuery.browser = {}; + jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; +} diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/basic.css b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/basic.css new file mode 100644 index 00000000000..eeb0519a69b --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/basic.css @@ -0,0 +1,899 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. 
+ * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +div.section::after { + display: block; + content: ''; + clear: left; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox form.search { + overflow: hidden; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li p.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + 
+div.body { + min-width: 360px; + max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} +a.brackets:before, +span.brackets > a:before{ + content: "["; +} + +a.brackets:after, +span.brackets > a:after { + content: "]"; +} + + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, figure.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, figure.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, figure.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +img.align-default, figure.align-default, .figure.align-default { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-default { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar, +aside.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px; + background-color: #ffe; + width: 40%; + float: right; + clear: right; + overflow-x: auto; +} + +p.sidebar-title { + font-weight: bold; +} +div.admonition, div.topic, blockquote { + clear: left; +} + +/* -- topics ---------------------------------------------------------------- */ +div.topic { + border: 1px solid #ccc; + padding: 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- content of sidebars/topics/admonitions -------------------------------- */ + +div.sidebar > :last-child, +aside.sidebar > :last-child, +div.topic > :last-child, +div.admonition > :last-child { + margin-bottom: 0; +} + +div.sidebar::after, +aside.sidebar::after, +div.topic::after, +div.admonition::after, +blockquote::after { + display: block; + content: ''; + clear: both; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + margin-top: 10px; + margin-bottom: 10px; + border: 0; + border-collapse: collapse; +} + +table.align-center { + margin-left: auto; + margin-right: auto; +} + +table.align-default { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 
0; + border-bottom: 1px solid #aaa; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +th > :first-child, +td > :first-child { + margin-top: 0px; +} + +th > :last-child, +td > :last-child { + margin-bottom: 0px; +} + +/* -- figures --------------------------------------------------------------- */ + +div.figure, figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption, figcaption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number, +figcaption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text, +figcaption span.caption-text { +} + +/* -- field list styles ----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- hlist styles ---------------------------------------------------------- */ + +table.hlist { + margin: 1em 0; +} + +table.hlist td { + vertical-align: top; +} + +/* -- object description styles --------------------------------------------- */ + +.sig { + font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; +} + +.sig-name, code.descname { + background-color: transparent; + font-weight: bold; +} + +.sig-name { + font-size: 1.1em; +} + +code.descname { + font-size: 1.2em; +} + +.sig-prename, code.descclassname { + background-color: transparent; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.sig-param.n { + font-style: italic; +} + +/* C++ specific styling */ + +.sig-inline.c-texpr, +.sig-inline.cpp-texpr { + font-family: unset; +} + +.sig.c .k, .sig.c .kt, +.sig.cpp .k, .sig.cpp .kt { + color: #0033B3; +} + +.sig.c .m, +.sig.cpp .m { + color: #1750EB; +} + +.sig.c .s, .sig.c .sc, +.sig.cpp .s, .sig.cpp .sc { + color: #067D17; +} + + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +:not(li) > ol > li:first-child > :first-child, +:not(li) > ul > li:first-child > :first-child { + margin-top: 0px; +} + +:not(li) > ol > li:last-child > :last-child, +:not(li) > ul > li:last-child > :last-child { + margin-bottom: 0px; +} + +ol.simple ol p, +ol.simple ul p, +ul.simple ol p, +ul.simple ul p { + margin-top: 0; +} + +ol.simple > li:not(:first-child) > p, +ul.simple > li:not(:first-child) > p { + margin-top: 0; +} + +ol.simple p, +ul.simple p { + margin-bottom: 0; +} +dl.footnote > dt, +dl.citation > dt { + float: left; + margin-right: 0.5em; +} + +dl.footnote > dd, +dl.citation > dd { + margin-bottom: 0em; +} + +dl.footnote > dd:after, +dl.citation > dd:after { + content: ""; + clear: both; +} + +dl.field-list { + display: grid; + grid-template-columns: fit-content(30%) auto; +} + +dl.field-list > dt { + font-weight: bold; + word-break: break-word; + padding-left: 0.5em; + padding-right: 5px; +} +dl.field-list > dt:after { + content: ":"; +} + + +dl.field-list > dd { + padding-left: 0.5em; + margin-top: 0em; + margin-left: 0em; + margin-bottom: 0em; +} + +dl { + margin-bottom: 
15px; +} + +dd > :first-child { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +dl > dd:last-child, +dl > dd:last-child > :last-child { + margin-bottom: 0; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +.classifier:before { + font-style: normal; + margin: 0 0.5em; + content: ":"; + display: inline-block; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +pre, div[class*="highlight-"] { + clear: both; +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; + white-space: nowrap; +} + +div[class*="highlight-"] { + margin: 1em 0; +} + +td.linenos pre { + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + display: block; +} + +table.highlighttable tbody { + display: block; +} + +table.highlighttable tr { + display: flex; +} + +table.highlighttable td { + margin: 0; + padding: 0; +} + +table.highlighttable td.linenos { + padding-right: 0.5em; +} + +table.highlighttable td.code { + flex: 1; + overflow: hidden; +} + +.highlight .hll { + display: block; +} + +div.highlight pre, +table.highlighttable pre { + margin: 0; +} + +div.code-block-caption + div { + margin-top: 0; +} + +div.code-block-caption { + margin-top: 1em; + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +table.highlighttable td.linenos, +span.linenos, +div.highlight span.gp { /* gp: Generic.Prompt */ + user-select: none; + -webkit-user-select: text; /* Safari fallback only */ + -webkit-user-select: none; /* Chrome/Safari */ + -moz-user-select: none; /* Firefox */ + -ms-user-select: none; /* IE10+ */ +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + margin: 1em 0; +} + +code.xref, a code { + background-color: transparent; + font-weight: bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: absolute; + z-index: 1; +} + +div.math:hover a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + 
+@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/check-solid.svg b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/check-solid.svg new file mode 100644 index 00000000000..92fad4b5c0b --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/check-solid.svg @@ -0,0 +1,4 @@ + + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/clipboard.min.js b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/clipboard.min.js new file mode 100644 index 00000000000..54b3c463811 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/clipboard.min.js @@ -0,0 +1,7 @@ +/*! + * clipboard.js v2.0.8 + * https://clipboardjs.com/ + * + * Licensed MIT © Zeno Rocha + */ +!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e():"function"==typeof define&&define.amd?define([],e):"object"==typeof exports?exports.ClipboardJS=e():t.ClipboardJS=e()}(this,function(){return n={686:function(t,e,n){"use strict";n.d(e,{default:function(){return o}});var e=n(279),i=n.n(e),e=n(370),u=n.n(e),e=n(817),c=n.n(e);function a(t){try{return document.execCommand(t)}catch(t){return}}var f=function(t){t=c()(t);return a("cut"),t};var l=function(t){var e,n,o,r=1 + + + + diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/copybutton.css b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/copybutton.css new file mode 100644 index 00000000000..f1916ec7d1b --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/copybutton.css @@ -0,0 +1,94 @@ +/* Copy buttons */ +button.copybtn { + position: absolute; + display: flex; + top: .3em; + right: .3em; + width: 1.7em; + height: 1.7em; + opacity: 0; + transition: opacity 0.3s, border .3s, background-color .3s; + user-select: none; + padding: 0; + border: none; + outline: none; + border-radius: 0.4em; + /* The colors that GitHub uses */ + border: #1b1f2426 1px solid; + background-color: #f6f8fa; + color: #57606a; +} + +button.copybtn.success { + border-color: #22863a; + color: #22863a; +} + +button.copybtn svg { + stroke: currentColor; + width: 1.5em; + height: 1.5em; + padding: 0.1em; +} + +div.highlight { + position: relative; +} + +/* Show the copybutton */ +.highlight:hover button.copybtn, button.copybtn.success { + opacity: 1; +} + +.highlight button.copybtn:hover { + background-color: rgb(235, 235, 235); +} + +.highlight button.copybtn:active { + background-color: rgb(187, 187, 187); +} + +/** + * A minimal CSS-only tooltip copied from: + * https://codepen.io/mildrenben/pen/rVBrpK + * + * To use, write HTML like the following: + * + *
Short
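+ *
+ * (Hedged reconstruction -- the original example markup was lost during
+ * extraction. Based on the rules below, any element carrying the class and
+ * a data-tooltip attribute should work, e.g.:)
+ *
+ * <p class="o-tooltip--left" data-tooltip="Short">Short</p>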
+ */ + .o-tooltip--left { + position: relative; + } + + .o-tooltip--left:after { + opacity: 0; + visibility: hidden; + position: absolute; + content: attr(data-tooltip); + padding: .2em; + font-size: .8em; + left: -.2em; + background: grey; + color: white; + white-space: nowrap; + z-index: 2; + border-radius: 2px; + transform: translateX(-102%) translateY(0); + transition: opacity 0.2s cubic-bezier(0.64, 0.09, 0.08, 1), transform 0.2s cubic-bezier(0.64, 0.09, 0.08, 1); +} + +.o-tooltip--left:hover:after { + display: block; + opacity: 1; + visibility: visible; + transform: translateX(-100%) translateY(0); + transition: opacity 0.2s cubic-bezier(0.64, 0.09, 0.08, 1), transform 0.2s cubic-bezier(0.64, 0.09, 0.08, 1); + transition-delay: .5s; +} + +/* By default the copy button shouldn't show up when printing a page */ +@media print { + button.copybtn { + display: none; + } +} diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/copybutton.js b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/copybutton.js new file mode 100644 index 00000000000..02c5c82d9d5 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/copybutton.js @@ -0,0 +1,248 @@ +// Localization support +const messages = { + 'en': { + 'copy': 'Copy', + 'copy_to_clipboard': 'Copy to clipboard', + 'copy_success': 'Copied!', + 'copy_failure': 'Failed to copy', + }, + 'es' : { + 'copy': 'Copiar', + 'copy_to_clipboard': 'Copiar al portapapeles', + 'copy_success': '¡Copiado!', + 'copy_failure': 'Error al copiar', + }, + 'de' : { + 'copy': 'Kopieren', + 'copy_to_clipboard': 'In die Zwischenablage kopieren', + 'copy_success': 'Kopiert!', + 'copy_failure': 'Fehler beim Kopieren', + }, + 'fr' : { + 'copy': 'Copier', + 'copy_to_clipboard': 'Copié dans le presse-papier', + 'copy_success': 'Copié !', + 'copy_failure': 'Échec de la copie', + }, + 'ru': { + 'copy': 'Скопировать', + 'copy_to_clipboard': 'Скопировать в буфер', + 'copy_success': 'Скопировано!', + 'copy_failure': 'Не удалось скопировать', + }, + 'zh-CN': { + 'copy': '复制', + 'copy_to_clipboard': '复制到剪贴板', + 'copy_success': '复制成功!', + 'copy_failure': '复制失败', + }, + 'it' : { + 'copy': 'Copiare', + 'copy_to_clipboard': 'Copiato negli appunti', + 'copy_success': 'Copiato!', + 'copy_failure': 'Errore durante la copia', + } +} + +let locale = 'en' +if( document.documentElement.lang !== undefined + && messages[document.documentElement.lang] !== undefined ) { + locale = document.documentElement.lang +} + +let doc_url_root = DOCUMENTATION_OPTIONS.URL_ROOT; +if (doc_url_root == '#') { + doc_url_root = ''; +} + +/** + * SVG files for our copy buttons + */ +let iconCheck = ` + ${messages[locale]['copy_success']} + + +` + +// If the user specified their own SVG use that, otherwise use the default +let iconCopy = ``; +if (!iconCopy) { + iconCopy = ` + ${messages[locale]['copy_to_clipboard']} + + + +` +} + +/** + * Set up copy/paste for code blocks + */ + +const runWhenDOMLoaded = cb => { + if (document.readyState != 'loading') { + cb() + } else if (document.addEventListener) { + document.addEventListener('DOMContentLoaded', cb) + } else { + document.attachEvent('onreadystatechange', function() { + if (document.readyState == 'complete') cb() + }) + } +} + +const codeCellId = index => `codecell${index}` + +// Clears selected text since ClipboardJS will select the text when copying +const clearSelection = () => { + if (window.getSelection) { + window.getSelection().removeAllRanges() + } else if (document.selection) { + 
document.selection.empty() + } +} + +// Changes tooltip text for a moment, then changes it back +// We want the timeout of our `success` class to be a bit shorter than the +// tooltip and icon change, so that we can hide the icon before changing back. +var timeoutIcon = 2000; +var timeoutSuccessClass = 1500; + +const temporarilyChangeTooltip = (el, oldText, newText) => { + el.setAttribute('data-tooltip', newText) + el.classList.add('success') + // Remove success a little bit sooner than we change the tooltip + // So that we can use CSS to hide the copybutton first + setTimeout(() => el.classList.remove('success'), timeoutSuccessClass) + setTimeout(() => el.setAttribute('data-tooltip', oldText), timeoutIcon) +} + +// Changes the copy button icon for two seconds, then changes it back +const temporarilyChangeIcon = (el) => { + el.innerHTML = iconCheck; + setTimeout(() => {el.innerHTML = iconCopy}, timeoutIcon) +} + +const addCopyButtonToCodeCells = () => { + // If ClipboardJS hasn't loaded, wait a bit and try again. This + // happens because we load ClipboardJS asynchronously. + if (window.ClipboardJS === undefined) { + setTimeout(addCopyButtonToCodeCells, 250) + return + } + + // Add copybuttons to all of our code cells + const COPYBUTTON_SELECTOR = 'div.highlight pre'; + const codeCells = document.querySelectorAll(COPYBUTTON_SELECTOR) + codeCells.forEach((codeCell, index) => { + const id = codeCellId(index) + codeCell.setAttribute('id', id) + + const clipboardButton = id => + `` + codeCell.insertAdjacentHTML('afterend', clipboardButton(id)) + }) + +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +} + +/** + * Removes excluded text from a Node. + * + * @param {Node} target Node to filter. + * @param {string} exclude CSS selector of nodes to exclude. + * @returns {DOMString} Text from `target` with text removed. + */ +function filterText(target, exclude) { + const clone = target.cloneNode(true); // clone as to not modify the live DOM + if (exclude) { + // remove excluded nodes + clone.querySelectorAll(exclude).forEach(node => node.remove()); + } + return clone.innerText; +} + +// Callback when a copy button is clicked. Will be passed the node that was clicked +// should then grab the text and replace pieces of text that shouldn't be used in output +function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") { + var regexp; + var match; + + // Do we check for line continuation characters and "HERE-documents"? 
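+ // Illustrative note, not part of the original source (assumes a prompt
+ // text of "$ "): with lineContinuationChar = "\" a copied block like
+ //   $ make build \
+ //       --jobs 4
+ // keeps the continued second line even though it carries no prompt, and
+ // with hereDocDelim = "EOF" every line between the two EOF markers is kept.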
+ var useLineCont = !!lineContinuationChar + var useHereDoc = !!hereDocDelim + + // create regexp to capture prompt and remaining line + if (isRegexp) { + regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)') + } else { + regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)') + } + + const outputLines = []; + var promptFound = false; + var gotLineCont = false; + var gotHereDoc = false; + const lineGotPrompt = []; + for (const line of textContent.split('\n')) { + match = line.match(regexp) + if (match || gotLineCont || gotHereDoc) { + promptFound = regexp.test(line) + lineGotPrompt.push(promptFound) + if (removePrompts && promptFound) { + outputLines.push(match[2]) + } else { + outputLines.push(line) + } + gotLineCont = line.endsWith(lineContinuationChar) & useLineCont + if (line.includes(hereDocDelim) & useHereDoc) + gotHereDoc = !gotHereDoc + } else if (!onlyCopyPromptLines) { + outputLines.push(line) + } else if (copyEmptyLines && line.trim() === '') { + outputLines.push(line) + } + } + + // If no lines with the prompt were found then just use original lines + if (lineGotPrompt.some(v => v === true)) { + textContent = outputLines.join('\n'); + } + + // Remove a trailing newline to avoid auto-running when pasting + if (textContent.endsWith("\n")) { + textContent = textContent.slice(0, -1) + } + return textContent +} + + +var copyTargetText = (trigger) => { + var target = document.querySelector(trigger.attributes['data-clipboard-target'].value); + + // get filtered text + let exclude = '.linenos, .gp'; + + let text = filterText(target, exclude); + return formatCopyText(text, '', false, true, true, true, '', '') +} + + // Initialize with a callback so we can modify the text before copy + const clipboard = new ClipboardJS('.copybtn', {text: copyTargetText}) + + // Update UI with error/success messages + clipboard.on('success', event => { + clearSelection() + temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_success']) + temporarilyChangeIcon(event.trigger) + }) + + clipboard.on('error', event => { + temporarilyChangeTooltip(event.trigger, messages[locale]['copy'], messages[locale]['copy_failure']) + }) +} + +runWhenDOMLoaded(addCopyButtonToCodeCells) \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/copybutton_funcs.js b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/copybutton_funcs.js new file mode 100644 index 00000000000..dbe1aaad79c --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/copybutton_funcs.js @@ -0,0 +1,73 @@ +function escapeRegExp(string) { + return string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string +} + +/** + * Removes excluded text from a Node. + * + * @param {Node} target Node to filter. + * @param {string} exclude CSS selector of nodes to exclude. + * @returns {DOMString} Text from `target` with text removed. + */ +export function filterText(target, exclude) { + const clone = target.cloneNode(true); // clone as to not modify the live DOM + if (exclude) { + // remove excluded nodes + clone.querySelectorAll(exclude).forEach(node => node.remove()); + } + return clone.innerText; +} + +// Callback when a copy button is clicked. 
Will be passed the node that was clicked +// should then grab the text and replace pieces of text that shouldn't be used in output +export function formatCopyText(textContent, copybuttonPromptText, isRegexp = false, onlyCopyPromptLines = true, removePrompts = true, copyEmptyLines = true, lineContinuationChar = "", hereDocDelim = "") { + var regexp; + var match; + + // Do we check for line continuation characters and "HERE-documents"? + var useLineCont = !!lineContinuationChar + var useHereDoc = !!hereDocDelim + + // create regexp to capture prompt and remaining line + if (isRegexp) { + regexp = new RegExp('^(' + copybuttonPromptText + ')(.*)') + } else { + regexp = new RegExp('^(' + escapeRegExp(copybuttonPromptText) + ')(.*)') + } + + const outputLines = []; + var promptFound = false; + var gotLineCont = false; + var gotHereDoc = false; + const lineGotPrompt = []; + for (const line of textContent.split('\n')) { + match = line.match(regexp) + if (match || gotLineCont || gotHereDoc) { + promptFound = regexp.test(line) + lineGotPrompt.push(promptFound) + if (removePrompts && promptFound) { + outputLines.push(match[2]) + } else { + outputLines.push(line) + } + gotLineCont = line.endsWith(lineContinuationChar) & useLineCont + if (line.includes(hereDocDelim) & useHereDoc) + gotHereDoc = !gotHereDoc + } else if (!onlyCopyPromptLines) { + outputLines.push(line) + } else if (copyEmptyLines && line.trim() === '') { + outputLines.push(line) + } + } + + // If no lines with the prompt were found then just use original lines + if (lineGotPrompt.some(v => v === true)) { + textContent = outputLines.join('\n'); + } + + // Remove a trailing newline to avoid auto-running when pasting + if (textContent.endsWith("\n")) { + textContent = textContent.slice(0, -1) + } + return textContent +} diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/doctools.js b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/doctools.js new file mode 100644 index 00000000000..527b876ca63 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/doctools.js @@ -0,0 +1,156 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Base JavaScript utilities for all Sphinx HTML documentation. + * + * :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ +"use strict"; + +const BLACKLISTED_KEY_CONTROL_ELEMENTS = new Set([ + "TEXTAREA", + "INPUT", + "SELECT", + "BUTTON", +]); + +const _ready = (callback) => { + if (document.readyState !== "loading") { + callback(); + } else { + document.addEventListener("DOMContentLoaded", callback); + } +}; + +/** + * Small JavaScript module for the documentation. + */ +const Documentation = { + init: () => { + Documentation.initDomainIndexTable(); + Documentation.initOnKeyListeners(); + }, + + /** + * i18n support + */ + TRANSLATIONS: {}, + PLURAL_EXPR: (n) => (n === 1 ? 
0 : 1), + LOCALE: "unknown", + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext: (string) => { + const translated = Documentation.TRANSLATIONS[string]; + switch (typeof translated) { + case "undefined": + return string; // no translation + case "string": + return translated; // translation exists + default: + return translated[0]; // (singular, plural) translation tuple exists + } + }, + + ngettext: (singular, plural, n) => { + const translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated !== "undefined") + return translated[Documentation.PLURAL_EXPR(n)]; + return n === 1 ? singular : plural; + }, + + addTranslations: (catalog) => { + Object.assign(Documentation.TRANSLATIONS, catalog.messages); + Documentation.PLURAL_EXPR = new Function( + "n", + `return (${catalog.plural_expr})` + ); + Documentation.LOCALE = catalog.locale; + }, + + /** + * helper function to focus on search bar + */ + focusSearchBar: () => { + document.querySelectorAll("input[name=q]")[0]?.focus(); + }, + + /** + * Initialise the domain index toggle buttons + */ + initDomainIndexTable: () => { + const toggler = (el) => { + const idNumber = el.id.substr(7); + const toggledRows = document.querySelectorAll(`tr.cg-${idNumber}`); + if (el.src.substr(-9) === "minus.png") { + el.src = `${el.src.substr(0, el.src.length - 9)}plus.png`; + toggledRows.forEach((el) => (el.style.display = "none")); + } else { + el.src = `${el.src.substr(0, el.src.length - 8)}minus.png`; + toggledRows.forEach((el) => (el.style.display = "")); + } + }; + + const togglerElements = document.querySelectorAll("img.toggler"); + togglerElements.forEach((el) => + el.addEventListener("click", (event) => toggler(event.currentTarget)) + ); + togglerElements.forEach((el) => (el.style.display = "")); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler); + }, + + initOnKeyListeners: () => { + // only install a listener if it is really needed + if ( + !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS && + !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS + ) + return; + + document.addEventListener("keydown", (event) => { + // bail for input elements + if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return; + // bail with special keys + if (event.altKey || event.ctrlKey || event.metaKey) return; + + if (!event.shiftKey) { + switch (event.key) { + case "ArrowLeft": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const prevLink = document.querySelector('link[rel="prev"]'); + if (prevLink && prevLink.href) { + window.location.href = prevLink.href; + event.preventDefault(); + } + break; + case "ArrowRight": + if (!DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) break; + + const nextLink = document.querySelector('link[rel="next"]'); + if (nextLink && nextLink.href) { + window.location.href = nextLink.href; + event.preventDefault(); + } + break; + } + } + + // some keyboard layouts may need Shift to get / + switch (event.key) { + case "/": + if (!DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS) break; + Documentation.focusSearchBar(); + event.preventDefault(); + } + }); + }, +}; + +// quick alias for translations +const _ = Documentation.gettext; + +_ready(Documentation.init); diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/documentation_options.js b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/documentation_options.js new file mode 100644 index 
00000000000..22ce8b918b8 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/documentation_options.js @@ -0,0 +1,14 @@ +var DOCUMENTATION_OPTIONS = { + URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'), + VERSION: '5.0.1', + LANGUAGE: 'en', + COLLAPSE_INDEX: false, + BUILDER: 'html', + FILE_SUFFIX: '.html', + LINK_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt', + NAVIGATION_WITH_KEYS: false, + SHOW_SEARCH_SUMMARY: true, + ENABLE_SEARCH_SHORTCUTS: true, +}; \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/file.png b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/file.png new file mode 100644 index 0000000000000000000000000000000000000000..a858a410e4faa62ce324d814e4b816fff83a6fb3 GIT binary patch literal 286 zcmV+(0pb3MP)s`hMrGg#P~ix$^RISR_I47Y|r1 z_CyJOe}D1){SET-^Amu_i71Lt6eYfZjRyw@I6OQAIXXHDfiX^GbOlHe=Ae4>0m)d(f|Me07*qoM6N<$f}vM^LjV8( literal 0 HcmV?d00001 diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/graphviz.css b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/graphviz.css new file mode 100644 index 00000000000..19e7afd385b --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/graphviz.css @@ -0,0 +1,19 @@ +/* + * graphviz.css + * ~~~~~~~~~~~~ + * + * Sphinx stylesheet -- graphviz extension. + * + * :copyright: Copyright 2007-2022 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +img.graphviz { + border: 0; + max-width: 100%; +} + +object.graphviz { + max-width: 100%; +} diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/jquery-3.6.0.js b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/jquery-3.6.0.js new file mode 100644 index 00000000000..fc6c299b73e --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/_static/jquery-3.6.0.js @@ -0,0 +1,10881 @@ +/*! + * jQuery JavaScript Library v3.6.0 + * https://jquery.com/ + * + * Includes Sizzle.js + * https://sizzlejs.com/ + * + * Copyright OpenJS Foundation and other contributors + * Released under the MIT license + * https://jquery.org/license + * + * Date: 2021-03-02T17:08Z + */ +( function( global, factory ) { + + "use strict"; + + if ( typeof module === "object" && typeof module.exports === "object" ) { + + // For CommonJS and CommonJS-like environments where a proper `window` + // is present, execute the factory and get jQuery. + // For environments that do not have a `window` with a `document` + // (such as Node.js), expose a factory as module.exports. + // This accentuates the need for the creation of a real `window`. + // e.g. var jQuery = require("jquery")(window); + // See ticket #14549 for more info. + module.exports = global.document ? + factory( global, true ) : + function( w ) { + if ( !w.document ) { + throw new Error( "jQuery requires a window with a document" ); + } + return factory( w ); + }; + } else { + factory( global ); + } + +// Pass this if window is not defined yet +} )( typeof window !== "undefined" ? window : this, function( window, noGlobal ) { + +// Edge <= 12 - 13+, Firefox <=18 - 45+, IE 10 - 11, Safari 5.1 - 9+, iOS 6 - 9.1 +// throw exceptions when non-strict code (e.g., ASP.NET 4.5) accesses strict mode +// arguments.callee.caller (trac-13335). 
But as of jQuery 3.0 (2016), strict mode should be common +// enough that all such attempts are guarded in a try block. +"use strict"; + +var arr = []; + +var getProto = Object.getPrototypeOf; + +var slice = arr.slice; + +var flat = arr.flat ? function( array ) { + return arr.flat.call( array ); +} : function( array ) { + return arr.concat.apply( [], array ); +}; + + +var push = arr.push; + +var indexOf = arr.indexOf; + +var class2type = {}; + +var toString = class2type.toString; + +var hasOwn = class2type.hasOwnProperty; + +var fnToString = hasOwn.toString; + +var ObjectFunctionString = fnToString.call( Object ); + +var support = {}; + +var isFunction = function isFunction( obj ) { + + // Support: Chrome <=57, Firefox <=52 + // In some browsers, typeof returns "function" for HTML elements + // (i.e., `typeof document.createElement( "object" ) === "function"`). + // We don't want to classify *any* DOM node as a function. + // Support: QtWeb <=3.8.5, WebKit <=534.34, wkhtmltopdf tool <=0.12.5 + // Plus for old WebKit, typeof returns "function" for HTML collections + // (e.g., `typeof document.getElementsByTagName("div") === "function"`). (gh-4756) + return typeof obj === "function" && typeof obj.nodeType !== "number" && + typeof obj.item !== "function"; + }; + + +var isWindow = function isWindow( obj ) { + return obj != null && obj === obj.window; + }; + + +var document = window.document; + + + + var preservedScriptAttributes = { + type: true, + src: true, + nonce: true, + noModule: true + }; + + function DOMEval( code, node, doc ) { + doc = doc || document; + + var i, val, + script = doc.createElement( "script" ); + + script.text = code; + if ( node ) { + for ( i in preservedScriptAttributes ) { + + // Support: Firefox 64+, Edge 18+ + // Some browsers don't support the "nonce" property on scripts. + // On the other hand, just using `getAttribute` is not enough as + // the `nonce` attribute is reset to an empty string whenever it + // becomes browsing-context connected. + // See https://github.com/whatwg/html/issues/2369 + // See https://html.spec.whatwg.org/#nonce-attributes + // The `node.getAttribute` check was added for the sake of + // `jQuery.globalEval` so that it can fake a nonce-containing node + // via an object. + val = node[ i ] || node.getAttribute && node.getAttribute( i ); + if ( val ) { + script.setAttribute( i, val ); + } + } + } + doc.head.appendChild( script ).parentNode.removeChild( script ); + } + + +function toType( obj ) { + if ( obj == null ) { + return obj + ""; + } + + // Support: Android <=2.3 only (functionish RegExp) + return typeof obj === "object" || typeof obj === "function" ? 
+ class2type[ toString.call( obj ) ] || "object" : + typeof obj; +} +/* global Symbol */ +// Defining this global in .eslintrc.json would create a danger of using the global +// unguarded in another place, it seems safer to define global only for this module + + + +var + version = "3.6.0", + + // Define a local copy of jQuery + jQuery = function( selector, context ) { + + // The jQuery object is actually just the init constructor 'enhanced' + // Need init if jQuery is called (just allow error to be thrown if not included) + return new jQuery.fn.init( selector, context ); + }; + +jQuery.fn = jQuery.prototype = { + + // The current version of jQuery being used + jquery: version, + + constructor: jQuery, + + // The default length of a jQuery object is 0 + length: 0, + + toArray: function() { + return slice.call( this ); + }, + + // Get the Nth element in the matched element set OR + // Get the whole matched element set as a clean array + get: function( num ) { + + // Return all the elements in a clean array + if ( num == null ) { + return slice.call( this ); + } + + // Return just the one element from the set + return num < 0 ? this[ num + this.length ] : this[ num ]; + }, + + // Take an array of elements and push it onto the stack + // (returning the new matched element set) + pushStack: function( elems ) { + + // Build a new jQuery matched element set + var ret = jQuery.merge( this.constructor(), elems ); + + // Add the old object onto the stack (as a reference) + ret.prevObject = this; + + // Return the newly-formed element set + return ret; + }, + + // Execute a callback for every element in the matched set. + each: function( callback ) { + return jQuery.each( this, callback ); + }, + + map: function( callback ) { + return this.pushStack( jQuery.map( this, function( elem, i ) { + return callback.call( elem, i, elem ); + } ) ); + }, + + slice: function() { + return this.pushStack( slice.apply( this, arguments ) ); + }, + + first: function() { + return this.eq( 0 ); + }, + + last: function() { + return this.eq( -1 ); + }, + + even: function() { + return this.pushStack( jQuery.grep( this, function( _elem, i ) { + return ( i + 1 ) % 2; + } ) ); + }, + + odd: function() { + return this.pushStack( jQuery.grep( this, function( _elem, i ) { + return i % 2; + } ) ); + }, + + eq: function( i ) { + var len = this.length, + j = +i + ( i < 0 ? len : 0 ); + return this.pushStack( j >= 0 && j < len ? [ this[ j ] ] : [] ); + }, + + end: function() { + return this.prevObject || this.constructor(); + }, + + // For internal use only. + // Behaves like an Array's method, not like a jQuery method. 
+ push: push, + sort: arr.sort, + splice: arr.splice +}; + +jQuery.extend = jQuery.fn.extend = function() { + var options, name, src, copy, copyIsArray, clone, + target = arguments[ 0 ] || {}, + i = 1, + length = arguments.length, + deep = false; + + // Handle a deep copy situation + if ( typeof target === "boolean" ) { + deep = target; + + // Skip the boolean and the target + target = arguments[ i ] || {}; + i++; + } + + // Handle case when target is a string or something (possible in deep copy) + if ( typeof target !== "object" && !isFunction( target ) ) { + target = {}; + } + + // Extend jQuery itself if only one argument is passed + if ( i === length ) { + target = this; + i--; + } + + for ( ; i < length; i++ ) { + + // Only deal with non-null/undefined values + if ( ( options = arguments[ i ] ) != null ) { + + // Extend the base object + for ( name in options ) { + copy = options[ name ]; + + // Prevent Object.prototype pollution + // Prevent never-ending loop + if ( name === "__proto__" || target === copy ) { + continue; + } + + // Recurse if we're merging plain objects or arrays + if ( deep && copy && ( jQuery.isPlainObject( copy ) || + ( copyIsArray = Array.isArray( copy ) ) ) ) { + src = target[ name ]; + + // Ensure proper type for the source value + if ( copyIsArray && !Array.isArray( src ) ) { + clone = []; + } else if ( !copyIsArray && !jQuery.isPlainObject( src ) ) { + clone = {}; + } else { + clone = src; + } + copyIsArray = false; + + // Never move original objects, clone them + target[ name ] = jQuery.extend( deep, clone, copy ); + + // Don't bring in undefined values + } else if ( copy !== undefined ) { + target[ name ] = copy; + } + } + } + } + + // Return the modified object + return target; +}; + +jQuery.extend( { + + // Unique for each copy of jQuery on the page + expando: "jQuery" + ( version + Math.random() ).replace( /\D/g, "" ), + + // Assume jQuery is ready without the ready module + isReady: true, + + error: function( msg ) { + throw new Error( msg ); + }, + + noop: function() {}, + + isPlainObject: function( obj ) { + var proto, Ctor; + + // Detect obvious negatives + // Use toString instead of jQuery.type to catch host objects + if ( !obj || toString.call( obj ) !== "[object Object]" ) { + return false; + } + + proto = getProto( obj ); + + // Objects with no prototype (e.g., `Object.create( null )`) are plain + if ( !proto ) { + return true; + } + + // Objects with prototype are plain iff they were constructed by a global Object function + Ctor = hasOwn.call( proto, "constructor" ) && proto.constructor; + return typeof Ctor === "function" && fnToString.call( Ctor ) === ObjectFunctionString; + }, + + isEmptyObject: function( obj ) { + var name; + + for ( name in obj ) { + return false; + } + return true; + }, + + // Evaluates a script in a provided context; falls back to the global one + // if not specified. 
+ globalEval: function( code, options, doc ) { + DOMEval( code, { nonce: options && options.nonce }, doc ); + }, + + each: function( obj, callback ) { + var length, i = 0; + + if ( isArrayLike( obj ) ) { + length = obj.length; + for ( ; i < length; i++ ) { + if ( callback.call( obj[ i ], i, obj[ i ] ) === false ) { + break; + } + } + } else { + for ( i in obj ) { + if ( callback.call( obj[ i ], i, obj[ i ] ) === false ) { + break; + } + } + } + + return obj; + }, + + // results is for internal usage only + makeArray: function( arr, results ) { + var ret = results || []; + + if ( arr != null ) { + if ( isArrayLike( Object( arr ) ) ) { + jQuery.merge( ret, + typeof arr === "string" ? + [ arr ] : arr + ); + } else { + push.call( ret, arr ); + } + } + + return ret; + }, + + inArray: function( elem, arr, i ) { + return arr == null ? -1 : indexOf.call( arr, elem, i ); + }, + + // Support: Android <=4.0 only, PhantomJS 1 only + // push.apply(_, arraylike) throws on ancient WebKit + merge: function( first, second ) { + var len = +second.length, + j = 0, + i = first.length; + + for ( ; j < len; j++ ) { + first[ i++ ] = second[ j ]; + } + + first.length = i; + + return first; + }, + + grep: function( elems, callback, invert ) { + var callbackInverse, + matches = [], + i = 0, + length = elems.length, + callbackExpect = !invert; + + // Go through the array, only saving the items + // that pass the validator function + for ( ; i < length; i++ ) { + callbackInverse = !callback( elems[ i ], i ); + if ( callbackInverse !== callbackExpect ) { + matches.push( elems[ i ] ); + } + } + + return matches; + }, + + // arg is for internal usage only + map: function( elems, callback, arg ) { + var length, value, + i = 0, + ret = []; + + // Go through the array, translating each of the items to their new values + if ( isArrayLike( elems ) ) { + length = elems.length; + for ( ; i < length; i++ ) { + value = callback( elems[ i ], i, arg ); + + if ( value != null ) { + ret.push( value ); + } + } + + // Go through every key on the object, + } else { + for ( i in elems ) { + value = callback( elems[ i ], i, arg ); + + if ( value != null ) { + ret.push( value ); + } + } + } + + // Flatten any nested arrays + return flat( ret ); + }, + + // A global GUID counter for objects + guid: 1, + + // jQuery.support is not used in Core but other projects attach their + // properties to it so it needs to exist. + support: support +} ); + +if ( typeof Symbol === "function" ) { + jQuery.fn[ Symbol.iterator ] = arr[ Symbol.iterator ]; +} + +// Populate the class2type map +jQuery.each( "Boolean Number String Function Array Date RegExp Object Error Symbol".split( " " ), + function( _i, name ) { + class2type[ "[object " + name + "]" ] = name.toLowerCase(); + } ); + +function isArrayLike( obj ) { + + // Support: real iOS 8.2 only (not reproducible in simulator) + // `in` check used to prevent JIT error (gh-2145) + // hasOwn isn't used here due to false negatives + // regarding Nodelist length in IE + var length = !!obj && "length" in obj && obj.length, + type = toType( obj ); + + if ( isFunction( obj ) || isWindow( obj ) ) { + return false; + } + + return type === "array" || length === 0 || + typeof length === "number" && length > 0 && ( length - 1 ) in obj; +} +var Sizzle = +/*! 
+ * Sizzle CSS Selector Engine v2.3.6 + * https://sizzlejs.com/ + * + * Copyright JS Foundation and other contributors + * Released under the MIT license + * https://js.foundation/ + * + * Date: 2021-02-16 + */ +( function( window ) { +var i, + support, + Expr, + getText, + isXML, + tokenize, + compile, + select, + outermostContext, + sortInput, + hasDuplicate, + + // Local document vars + setDocument, + document, + docElem, + documentIsHTML, + rbuggyQSA, + rbuggyMatches, + matches, + contains, + + // Instance-specific data + expando = "sizzle" + 1 * new Date(), + preferredDoc = window.document, + dirruns = 0, + done = 0, + classCache = createCache(), + tokenCache = createCache(), + compilerCache = createCache(), + nonnativeSelectorCache = createCache(), + sortOrder = function( a, b ) { + if ( a === b ) { + hasDuplicate = true; + } + return 0; + }, + + // Instance methods + hasOwn = ( {} ).hasOwnProperty, + arr = [], + pop = arr.pop, + pushNative = arr.push, + push = arr.push, + slice = arr.slice, + + // Use a stripped-down indexOf as it's faster than native + // https://jsperf.com/thor-indexof-vs-for/5 + indexOf = function( list, elem ) { + var i = 0, + len = list.length; + for ( ; i < len; i++ ) { + if ( list[ i ] === elem ) { + return i; + } + } + return -1; + }, + + booleans = "checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|" + + "ismap|loop|multiple|open|readonly|required|scoped", + + // Regular expressions + + // http://www.w3.org/TR/css3-selectors/#whitespace + whitespace = "[\\x20\\t\\r\\n\\f]", + + // https://www.w3.org/TR/css-syntax-3/#ident-token-diagram + identifier = "(?:\\\\[\\da-fA-F]{1,6}" + whitespace + + "?|\\\\[^\\r\\n\\f]|[\\w-]|[^\0-\\x7f])+", + + // Attribute selectors: http://www.w3.org/TR/selectors/#attribute-selectors + attributes = "\\[" + whitespace + "*(" + identifier + ")(?:" + whitespace + + + // Operator (capture 2) + "*([*^$|!~]?=)" + whitespace + + + // "Attribute values must be CSS identifiers [capture 5] + // or strings [capture 3 or capture 4]" + "*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|(" + identifier + "))|)" + + whitespace + "*\\]", + + pseudos = ":(" + identifier + ")(?:\\((" + + + // To reduce the number of selectors needing tokenize in the preFilter, prefer arguments: + // 1. quoted (capture 3; capture 4 or capture 5) + "('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|" + + + // 2. simple (capture 6) + "((?:\\\\.|[^\\\\()[\\]]|" + attributes + ")*)|" + + + // 3. 
anything else (capture 2) + ".*" + + ")\\)|)", + + // Leading and non-escaped trailing whitespace, capturing some non-whitespace characters preceding the latter + rwhitespace = new RegExp( whitespace + "+", "g" ), + rtrim = new RegExp( "^" + whitespace + "+|((?:^|[^\\\\])(?:\\\\.)*)" + + whitespace + "+$", "g" ), + + rcomma = new RegExp( "^" + whitespace + "*," + whitespace + "*" ), + rcombinators = new RegExp( "^" + whitespace + "*([>+~]|" + whitespace + ")" + whitespace + + "*" ), + rdescend = new RegExp( whitespace + "|>" ), + + rpseudo = new RegExp( pseudos ), + ridentifier = new RegExp( "^" + identifier + "$" ), + + matchExpr = { + "ID": new RegExp( "^#(" + identifier + ")" ), + "CLASS": new RegExp( "^\\.(" + identifier + ")" ), + "TAG": new RegExp( "^(" + identifier + "|[*])" ), + "ATTR": new RegExp( "^" + attributes ), + "PSEUDO": new RegExp( "^" + pseudos ), + "CHILD": new RegExp( "^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\(" + + whitespace + "*(even|odd|(([+-]|)(\\d*)n|)" + whitespace + "*(?:([+-]|)" + + whitespace + "*(\\d+)|))" + whitespace + "*\\)|)", "i" ), + "bool": new RegExp( "^(?:" + booleans + ")$", "i" ), + + // For use in libraries implementing .is() + // We use this for POS matching in `select` + "needsContext": new RegExp( "^" + whitespace + + "*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\(" + whitespace + + "*((?:-\\d)?\\d*)" + whitespace + "*\\)|)(?=[^-]|$)", "i" ) + }, + + rhtml = /HTML$/i, + rinputs = /^(?:input|select|textarea|button)$/i, + rheader = /^h\d$/i, + + rnative = /^[^{]+\{\s*\[native \w/, + + // Easily-parseable/retrievable ID or TAG or CLASS selectors + rquickExpr = /^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/, + + rsibling = /[+~]/, + + // CSS escapes + // http://www.w3.org/TR/CSS21/syndata.html#escaped-characters + runescape = new RegExp( "\\\\[\\da-fA-F]{1,6}" + whitespace + "?|\\\\([^\\r\\n\\f])", "g" ), + funescape = function( escape, nonHex ) { + var high = "0x" + escape.slice( 1 ) - 0x10000; + + return nonHex ? + + // Strip the backslash prefix from a non-hex escape sequence + nonHex : + + // Replace a hexadecimal escape sequence with the encoded Unicode code point + // Support: IE <=11+ + // For values outside the Basic Multilingual Plane (BMP), manually construct a + // surrogate pair + high < 0 ? 
+ String.fromCharCode( high + 0x10000 ) : + String.fromCharCode( high >> 10 | 0xD800, high & 0x3FF | 0xDC00 ); + }, + + // CSS string/identifier serialization + // https://drafts.csswg.org/cssom/#common-serializing-idioms + rcssescape = /([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g, + fcssescape = function( ch, asCodePoint ) { + if ( asCodePoint ) { + + // U+0000 NULL becomes U+FFFD REPLACEMENT CHARACTER + if ( ch === "\0" ) { + return "\uFFFD"; + } + + // Control characters and (dependent upon position) numbers get escaped as code points + return ch.slice( 0, -1 ) + "\\" + + ch.charCodeAt( ch.length - 1 ).toString( 16 ) + " "; + } + + // Other potentially-special ASCII characters get backslash-escaped + return "\\" + ch; + }, + + // Used for iframes + // See setDocument() + // Removing the function wrapper causes a "Permission Denied" + // error in IE + unloadHandler = function() { + setDocument(); + }, + + inDisabledFieldset = addCombinator( + function( elem ) { + return elem.disabled === true && elem.nodeName.toLowerCase() === "fieldset"; + }, + { dir: "parentNode", next: "legend" } + ); + +// Optimize for push.apply( _, NodeList ) +try { + push.apply( + ( arr = slice.call( preferredDoc.childNodes ) ), + preferredDoc.childNodes + ); + + // Support: Android<4.0 + // Detect silently failing push.apply + // eslint-disable-next-line no-unused-expressions + arr[ preferredDoc.childNodes.length ].nodeType; +} catch ( e ) { + push = { apply: arr.length ? + + // Leverage slice if possible + function( target, els ) { + pushNative.apply( target, slice.call( els ) ); + } : + + // Support: IE<9 + // Otherwise append directly + function( target, els ) { + var j = target.length, + i = 0; + + // Can't trust NodeList.length + while ( ( target[ j++ ] = els[ i++ ] ) ) {} + target.length = j - 1; + } + }; +} + +function Sizzle( selector, context, results, seed ) { + var m, i, elem, nid, match, groups, newSelector, + newContext = context && context.ownerDocument, + + // nodeType defaults to 9, since context defaults to document + nodeType = context ? 
context.nodeType : 9; + + results = results || []; + + // Return early from calls with invalid selector or context + if ( typeof selector !== "string" || !selector || + nodeType !== 1 && nodeType !== 9 && nodeType !== 11 ) { + + return results; + } + + // Try to shortcut find operations (as opposed to filters) in HTML documents + if ( !seed ) { + setDocument( context ); + context = context || document; + + if ( documentIsHTML ) { + + // If the selector is sufficiently simple, try using a "get*By*" DOM method + // (excepting DocumentFragment context, where the methods don't exist) + if ( nodeType !== 11 && ( match = rquickExpr.exec( selector ) ) ) { + + // ID selector + if ( ( m = match[ 1 ] ) ) { + + // Document context + if ( nodeType === 9 ) { + if ( ( elem = context.getElementById( m ) ) ) { + + // Support: IE, Opera, Webkit + // TODO: identify versions + // getElementById can match elements by name instead of ID + if ( elem.id === m ) { + results.push( elem ); + return results; + } + } else { + return results; + } + + // Element context + } else { + + // Support: IE, Opera, Webkit + // TODO: identify versions + // getElementById can match elements by name instead of ID + if ( newContext && ( elem = newContext.getElementById( m ) ) && + contains( context, elem ) && + elem.id === m ) { + + results.push( elem ); + return results; + } + } + + // Type selector + } else if ( match[ 2 ] ) { + push.apply( results, context.getElementsByTagName( selector ) ); + return results; + + // Class selector + } else if ( ( m = match[ 3 ] ) && support.getElementsByClassName && + context.getElementsByClassName ) { + + push.apply( results, context.getElementsByClassName( m ) ); + return results; + } + } + + // Take advantage of querySelectorAll + if ( support.qsa && + !nonnativeSelectorCache[ selector + " " ] && + ( !rbuggyQSA || !rbuggyQSA.test( selector ) ) && + + // Support: IE 8 only + // Exclude object elements + ( nodeType !== 1 || context.nodeName.toLowerCase() !== "object" ) ) { + + newSelector = selector; + newContext = context; + + // qSA considers elements outside a scoping root when evaluating child or + // descendant combinators, which is not what we want. + // In such cases, we work around the behavior by prefixing every selector in the + // list with an ID selector referencing the scope context. + // The technique has to be used as well when a leading combinator is used + // as such selectors are not recognized by querySelectorAll. + // Thanks to Andrew Dupont for this technique. + if ( nodeType === 1 && + ( rdescend.test( selector ) || rcombinators.test( selector ) ) ) { + + // Expand context for sibling selectors + newContext = rsibling.test( selector ) && testContext( context.parentNode ) || + context; + + // We can use :scope instead of the ID hack if the browser + // supports it & if we're not changing the context. + if ( newContext !== context || !support.scope ) { + + // Capture the context ID, setting it first if necessary + if ( ( nid = context.getAttribute( "id" ) ) ) { + nid = nid.replace( rcssescape, fcssescape ); + } else { + context.setAttribute( "id", ( nid = expando ) ); + } + } + + // Prefix every selector in the list + groups = tokenize( selector ); + i = groups.length; + while ( i-- ) { + groups[ i ] = ( nid ? 
"#" + nid : ":scope" ) + " " + + toSelector( groups[ i ] ); + } + newSelector = groups.join( "," ); + } + + try { + push.apply( results, + newContext.querySelectorAll( newSelector ) + ); + return results; + } catch ( qsaError ) { + nonnativeSelectorCache( selector, true ); + } finally { + if ( nid === expando ) { + context.removeAttribute( "id" ); + } + } + } + } + } + + // All others + return select( selector.replace( rtrim, "$1" ), context, results, seed ); +} + +/** + * Create key-value caches of limited size + * @returns {function(string, object)} Returns the Object data after storing it on itself with + * property name the (space-suffixed) string and (if the cache is larger than Expr.cacheLength) + * deleting the oldest entry + */ +function createCache() { + var keys = []; + + function cache( key, value ) { + + // Use (key + " ") to avoid collision with native prototype properties (see Issue #157) + if ( keys.push( key + " " ) > Expr.cacheLength ) { + + // Only keep the most recent entries + delete cache[ keys.shift() ]; + } + return ( cache[ key + " " ] = value ); + } + return cache; +} + +/** + * Mark a function for special use by Sizzle + * @param {Function} fn The function to mark + */ +function markFunction( fn ) { + fn[ expando ] = true; + return fn; +} + +/** + * Support testing using an element + * @param {Function} fn Passed the created element and returns a boolean result + */ +function assert( fn ) { + var el = document.createElement( "fieldset" ); + + try { + return !!fn( el ); + } catch ( e ) { + return false; + } finally { + + // Remove from its parent by default + if ( el.parentNode ) { + el.parentNode.removeChild( el ); + } + + // release memory in IE + el = null; + } +} + +/** + * Adds the same handler for all of the specified attrs + * @param {String} attrs Pipe-separated list of attributes + * @param {Function} handler The method that will be applied + */ +function addHandle( attrs, handler ) { + var arr = attrs.split( "|" ), + i = arr.length; + + while ( i-- ) { + Expr.attrHandle[ arr[ i ] ] = handler; + } +} + +/** + * Checks document order of two siblings + * @param {Element} a + * @param {Element} b + * @returns {Number} Returns less than 0 if a precedes b, greater than 0 if a follows b + */ +function siblingCheck( a, b ) { + var cur = b && a, + diff = cur && a.nodeType === 1 && b.nodeType === 1 && + a.sourceIndex - b.sourceIndex; + + // Use IE sourceIndex if available on both nodes + if ( diff ) { + return diff; + } + + // Check if b follows a + if ( cur ) { + while ( ( cur = cur.nextSibling ) ) { + if ( cur === b ) { + return -1; + } + } + } + + return a ? 
1 : -1; +} + +/** + * Returns a function to use in pseudos for input types + * @param {String} type + */ +function createInputPseudo( type ) { + return function( elem ) { + var name = elem.nodeName.toLowerCase(); + return name === "input" && elem.type === type; + }; +} + +/** + * Returns a function to use in pseudos for buttons + * @param {String} type + */ +function createButtonPseudo( type ) { + return function( elem ) { + var name = elem.nodeName.toLowerCase(); + return ( name === "input" || name === "button" ) && elem.type === type; + }; +} + +/** + * Returns a function to use in pseudos for :enabled/:disabled + * @param {Boolean} disabled true for :disabled; false for :enabled + */ +function createDisabledPseudo( disabled ) { + + // Known :disabled false positives: fieldset[disabled] > legend:nth-of-type(n+2) :can-disable + return function( elem ) { + + // Only certain elements can match :enabled or :disabled + // https://html.spec.whatwg.org/multipage/scripting.html#selector-enabled + // https://html.spec.whatwg.org/multipage/scripting.html#selector-disabled + if ( "form" in elem ) { + + // Check for inherited disabledness on relevant non-disabled elements: + // * listed form-associated elements in a disabled fieldset + // https://html.spec.whatwg.org/multipage/forms.html#category-listed + // https://html.spec.whatwg.org/multipage/forms.html#concept-fe-disabled + // * option elements in a disabled optgroup + // https://html.spec.whatwg.org/multipage/forms.html#concept-option-disabled + // All such elements have a "form" property. + if ( elem.parentNode && elem.disabled === false ) { + + // Option elements defer to a parent optgroup if present + if ( "label" in elem ) { + if ( "label" in elem.parentNode ) { + return elem.parentNode.disabled === disabled; + } else { + return elem.disabled === disabled; + } + } + + // Support: IE 6 - 11 + // Use the isDisabled shortcut property to check for disabled fieldset ancestors + return elem.isDisabled === disabled || + + // Where there is no isDisabled, check manually + /* jshint -W018 */ + elem.isDisabled !== !disabled && + inDisabledFieldset( elem ) === disabled; + } + + return elem.disabled === disabled; + + // Try to winnow out elements that can't be disabled before trusting the disabled property. + // Some victims get caught in our net (label, legend, menu, track), but it shouldn't + // even exist on them, let alone have a boolean value. 
+ } else if ( "label" in elem ) { + return elem.disabled === disabled; + } + + // Remaining elements are neither :enabled nor :disabled + return false; + }; +} + +/** + * Returns a function to use in pseudos for positionals + * @param {Function} fn + */ +function createPositionalPseudo( fn ) { + return markFunction( function( argument ) { + argument = +argument; + return markFunction( function( seed, matches ) { + var j, + matchIndexes = fn( [], seed.length, argument ), + i = matchIndexes.length; + + // Match elements found at the specified indexes + while ( i-- ) { + if ( seed[ ( j = matchIndexes[ i ] ) ] ) { + seed[ j ] = !( matches[ j ] = seed[ j ] ); + } + } + } ); + } ); +} + +/** + * Checks a node for validity as a Sizzle context + * @param {Element|Object=} context + * @returns {Element|Object|Boolean} The input node if acceptable, otherwise a falsy value + */ +function testContext( context ) { + return context && typeof context.getElementsByTagName !== "undefined" && context; +} + +// Expose support vars for convenience +support = Sizzle.support = {}; + +/** + * Detects XML nodes + * @param {Element|Object} elem An element or a document + * @returns {Boolean} True iff elem is a non-HTML XML node + */ +isXML = Sizzle.isXML = function( elem ) { + var namespace = elem && elem.namespaceURI, + docElem = elem && ( elem.ownerDocument || elem ).documentElement; + + // Support: IE <=8 + // Assume HTML when documentElement doesn't yet exist, such as inside loading iframes + // https://bugs.jquery.com/ticket/4833 + return !rhtml.test( namespace || docElem && docElem.nodeName || "HTML" ); +}; + +/** + * Sets document-related variables once based on the current document + * @param {Element|Object} [doc] An element or document object to use to set the document + * @returns {Object} Returns the current document + */ +setDocument = Sizzle.setDocument = function( node ) { + var hasCompare, subWindow, + doc = node ? node.ownerDocument || node : preferredDoc; + + // Return early if doc is invalid or already selected + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( doc == document || doc.nodeType !== 9 || !doc.documentElement ) { + return document; + } + + // Update global variables + document = doc; + docElem = document.documentElement; + documentIsHTML = !isXML( document ); + + // Support: IE 9 - 11+, Edge 12 - 18+ + // Accessing iframe documents after unload throws "permission denied" errors (jQuery #13936) + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( preferredDoc != document && + ( subWindow = document.defaultView ) && subWindow.top !== subWindow ) { + + // Support: IE 11, Edge + if ( subWindow.addEventListener ) { + subWindow.addEventListener( "unload", unloadHandler, false ); + + // Support: IE 9 - 10 only + } else if ( subWindow.attachEvent ) { + subWindow.attachEvent( "onunload", unloadHandler ); + } + } + + // Support: IE 8 - 11+, Edge 12 - 18+, Chrome <=16 - 25 only, Firefox <=3.6 - 31 only, + // Safari 4 - 5 only, Opera <=11.6 - 12.x only + // IE/Edge & older browsers don't support the :scope pseudo-class. + // Support: Safari 6.0 only + // Safari 6.0 supports :scope but it's an alias of :root there. 
+ support.scope = assert( function( el ) { + docElem.appendChild( el ).appendChild( document.createElement( "div" ) ); + return typeof el.querySelectorAll !== "undefined" && + !el.querySelectorAll( ":scope fieldset div" ).length; + } ); + + /* Attributes + ---------------------------------------------------------------------- */ + + // Support: IE<8 + // Verify that getAttribute really returns attributes and not properties + // (excepting IE8 booleans) + support.attributes = assert( function( el ) { + el.className = "i"; + return !el.getAttribute( "className" ); + } ); + + /* getElement(s)By* + ---------------------------------------------------------------------- */ + + // Check if getElementsByTagName("*") returns only elements + support.getElementsByTagName = assert( function( el ) { + el.appendChild( document.createComment( "" ) ); + return !el.getElementsByTagName( "*" ).length; + } ); + + // Support: IE<9 + support.getElementsByClassName = rnative.test( document.getElementsByClassName ); + + // Support: IE<10 + // Check if getElementById returns elements by name + // The broken getElementById methods don't pick up programmatically-set names, + // so use a roundabout getElementsByName test + support.getById = assert( function( el ) { + docElem.appendChild( el ).id = expando; + return !document.getElementsByName || !document.getElementsByName( expando ).length; + } ); + + // ID filter and find + if ( support.getById ) { + Expr.filter[ "ID" ] = function( id ) { + var attrId = id.replace( runescape, funescape ); + return function( elem ) { + return elem.getAttribute( "id" ) === attrId; + }; + }; + Expr.find[ "ID" ] = function( id, context ) { + if ( typeof context.getElementById !== "undefined" && documentIsHTML ) { + var elem = context.getElementById( id ); + return elem ? [ elem ] : []; + } + }; + } else { + Expr.filter[ "ID" ] = function( id ) { + var attrId = id.replace( runescape, funescape ); + return function( elem ) { + var node = typeof elem.getAttributeNode !== "undefined" && + elem.getAttributeNode( "id" ); + return node && node.value === attrId; + }; + }; + + // Support: IE 6 - 7 only + // getElementById is not reliable as a find shortcut + Expr.find[ "ID" ] = function( id, context ) { + if ( typeof context.getElementById !== "undefined" && documentIsHTML ) { + var node, i, elems, + elem = context.getElementById( id ); + + if ( elem ) { + + // Verify the id attribute + node = elem.getAttributeNode( "id" ); + if ( node && node.value === id ) { + return [ elem ]; + } + + // Fall back on getElementsByName + elems = context.getElementsByName( id ); + i = 0; + while ( ( elem = elems[ i++ ] ) ) { + node = elem.getAttributeNode( "id" ); + if ( node && node.value === id ) { + return [ elem ]; + } + } + } + + return []; + } + }; + } + + // Tag + Expr.find[ "TAG" ] = support.getElementsByTagName ? 
+ function( tag, context ) { + if ( typeof context.getElementsByTagName !== "undefined" ) { + return context.getElementsByTagName( tag ); + + // DocumentFragment nodes don't have gEBTN + } else if ( support.qsa ) { + return context.querySelectorAll( tag ); + } + } : + + function( tag, context ) { + var elem, + tmp = [], + i = 0, + + // By happy coincidence, a (broken) gEBTN appears on DocumentFragment nodes too + results = context.getElementsByTagName( tag ); + + // Filter out possible comments + if ( tag === "*" ) { + while ( ( elem = results[ i++ ] ) ) { + if ( elem.nodeType === 1 ) { + tmp.push( elem ); + } + } + + return tmp; + } + return results; + }; + + // Class + Expr.find[ "CLASS" ] = support.getElementsByClassName && function( className, context ) { + if ( typeof context.getElementsByClassName !== "undefined" && documentIsHTML ) { + return context.getElementsByClassName( className ); + } + }; + + /* QSA/matchesSelector + ---------------------------------------------------------------------- */ + + // QSA and matchesSelector support + + // matchesSelector(:active) reports false when true (IE9/Opera 11.5) + rbuggyMatches = []; + + // qSa(:focus) reports false when true (Chrome 21) + // We allow this because of a bug in IE8/9 that throws an error + // whenever `document.activeElement` is accessed on an iframe + // So, we allow :focus to pass through QSA all the time to avoid the IE error + // See https://bugs.jquery.com/ticket/13378 + rbuggyQSA = []; + + if ( ( support.qsa = rnative.test( document.querySelectorAll ) ) ) { + + // Build QSA regex + // Regex strategy adopted from Diego Perini + assert( function( el ) { + + var input; + + // Select is set to empty string on purpose + // This is to test IE's treatment of not explicitly + // setting a boolean content attribute, + // since its presence should be enough + // https://bugs.jquery.com/ticket/12359 + docElem.appendChild( el ).innerHTML = "" + + ""; + + // Support: IE8, Opera 11-12.16 + // Nothing should be selected when empty strings follow ^= or $= or *= + // The test attribute must be unknown in Opera but "safe" for WinRT + // https://msdn.microsoft.com/en-us/library/ie/hh465388.aspx#attribute_section + if ( el.querySelectorAll( "[msallowcapture^='']" ).length ) { + rbuggyQSA.push( "[*^$]=" + whitespace + "*(?:''|\"\")" ); + } + + // Support: IE8 + // Boolean attributes and "value" are not treated correctly + if ( !el.querySelectorAll( "[selected]" ).length ) { + rbuggyQSA.push( "\\[" + whitespace + "*(?:value|" + booleans + ")" ); + } + + // Support: Chrome<29, Android<4.4, Safari<7.0+, iOS<7.0+, PhantomJS<1.9.8+ + if ( !el.querySelectorAll( "[id~=" + expando + "-]" ).length ) { + rbuggyQSA.push( "~=" ); + } + + // Support: IE 11+, Edge 15 - 18+ + // IE 11/Edge don't find elements on a `[name='']` query in some cases. + // Adding a temporary attribute to the document before the selection works + // around the issue. + // Interestingly, IE 10 & older don't seem to have the issue. 
+ input = document.createElement( "input" ); + input.setAttribute( "name", "" ); + el.appendChild( input ); + if ( !el.querySelectorAll( "[name='']" ).length ) { + rbuggyQSA.push( "\\[" + whitespace + "*name" + whitespace + "*=" + + whitespace + "*(?:''|\"\")" ); + } + + // Webkit/Opera - :checked should return selected option elements + // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked + // IE8 throws error here and will not see later tests + if ( !el.querySelectorAll( ":checked" ).length ) { + rbuggyQSA.push( ":checked" ); + } + + // Support: Safari 8+, iOS 8+ + // https://bugs.webkit.org/show_bug.cgi?id=136851 + // In-page `selector#id sibling-combinator selector` fails + if ( !el.querySelectorAll( "a#" + expando + "+*" ).length ) { + rbuggyQSA.push( ".#.+[+~]" ); + } + + // Support: Firefox <=3.6 - 5 only + // Old Firefox doesn't throw on a badly-escaped identifier. + el.querySelectorAll( "\\\f" ); + rbuggyQSA.push( "[\\r\\n\\f]" ); + } ); + + assert( function( el ) { + el.innerHTML = "" + + ""; + + // Support: Windows 8 Native Apps + // The type and name attributes are restricted during .innerHTML assignment + var input = document.createElement( "input" ); + input.setAttribute( "type", "hidden" ); + el.appendChild( input ).setAttribute( "name", "D" ); + + // Support: IE8 + // Enforce case-sensitivity of name attribute + if ( el.querySelectorAll( "[name=d]" ).length ) { + rbuggyQSA.push( "name" + whitespace + "*[*^$|!~]?=" ); + } + + // FF 3.5 - :enabled/:disabled and hidden elements (hidden elements are still enabled) + // IE8 throws error here and will not see later tests + if ( el.querySelectorAll( ":enabled" ).length !== 2 ) { + rbuggyQSA.push( ":enabled", ":disabled" ); + } + + // Support: IE9-11+ + // IE's :disabled selector does not pick up the children of disabled fieldsets + docElem.appendChild( el ).disabled = true; + if ( el.querySelectorAll( ":disabled" ).length !== 2 ) { + rbuggyQSA.push( ":enabled", ":disabled" ); + } + + // Support: Opera 10 - 11 only + // Opera 10-11 does not throw on post-comma invalid pseudos + el.querySelectorAll( "*,:x" ); + rbuggyQSA.push( ",.*:" ); + } ); + } + + if ( ( support.matchesSelector = rnative.test( ( matches = docElem.matches || + docElem.webkitMatchesSelector || + docElem.mozMatchesSelector || + docElem.oMatchesSelector || + docElem.msMatchesSelector ) ) ) ) { + + assert( function( el ) { + + // Check to see if it's possible to do matchesSelector + // on a disconnected node (IE 9) + support.disconnectedMatch = matches.call( el, "*" ); + + // This should fail with an exception + // Gecko does not error, returns false instead + matches.call( el, "[s!='']:x" ); + rbuggyMatches.push( "!=", pseudos ); + } ); + } + + rbuggyQSA = rbuggyQSA.length && new RegExp( rbuggyQSA.join( "|" ) ); + rbuggyMatches = rbuggyMatches.length && new RegExp( rbuggyMatches.join( "|" ) ); + + /* Contains + ---------------------------------------------------------------------- */ + hasCompare = rnative.test( docElem.compareDocumentPosition ); + + // Element contains another + // Purposefully self-exclusive + // As in, an element does not contain itself + contains = hasCompare || rnative.test( docElem.contains ) ? + function( a, b ) { + var adown = a.nodeType === 9 ? a.documentElement : a, + bup = b && b.parentNode; + return a === bup || !!( bup && bup.nodeType === 1 && ( + adown.contains ? 
+ adown.contains( bup ) : + a.compareDocumentPosition && a.compareDocumentPosition( bup ) & 16 + ) ); + } : + function( a, b ) { + if ( b ) { + while ( ( b = b.parentNode ) ) { + if ( b === a ) { + return true; + } + } + } + return false; + }; + + /* Sorting + ---------------------------------------------------------------------- */ + + // Document order sorting + sortOrder = hasCompare ? + function( a, b ) { + + // Flag for duplicate removal + if ( a === b ) { + hasDuplicate = true; + return 0; + } + + // Sort on method existence if only one input has compareDocumentPosition + var compare = !a.compareDocumentPosition - !b.compareDocumentPosition; + if ( compare ) { + return compare; + } + + // Calculate position if both inputs belong to the same document + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + compare = ( a.ownerDocument || a ) == ( b.ownerDocument || b ) ? + a.compareDocumentPosition( b ) : + + // Otherwise we know they are disconnected + 1; + + // Disconnected nodes + if ( compare & 1 || + ( !support.sortDetached && b.compareDocumentPosition( a ) === compare ) ) { + + // Choose the first element that is related to our preferred document + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( a == document || a.ownerDocument == preferredDoc && + contains( preferredDoc, a ) ) { + return -1; + } + + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( b == document || b.ownerDocument == preferredDoc && + contains( preferredDoc, b ) ) { + return 1; + } + + // Maintain original order + return sortInput ? + ( indexOf( sortInput, a ) - indexOf( sortInput, b ) ) : + 0; + } + + return compare & 4 ? -1 : 1; + } : + function( a, b ) { + + // Exit early if the nodes are identical + if ( a === b ) { + hasDuplicate = true; + return 0; + } + + var cur, + i = 0, + aup = a.parentNode, + bup = b.parentNode, + ap = [ a ], + bp = [ b ]; + + // Parentless nodes are either documents or disconnected + if ( !aup || !bup ) { + + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + /* eslint-disable eqeqeq */ + return a == document ? -1 : + b == document ? 1 : + /* eslint-enable eqeqeq */ + aup ? -1 : + bup ? 1 : + sortInput ? + ( indexOf( sortInput, a ) - indexOf( sortInput, b ) ) : + 0; + + // If the nodes are siblings, we can do a quick check + } else if ( aup === bup ) { + return siblingCheck( a, b ); + } + + // Otherwise we need full lists of their ancestors for comparison + cur = a; + while ( ( cur = cur.parentNode ) ) { + ap.unshift( cur ); + } + cur = b; + while ( ( cur = cur.parentNode ) ) { + bp.unshift( cur ); + } + + // Walk down the tree looking for a discrepancy + while ( ap[ i ] === bp[ i ] ) { + i++; + } + + return i ? + + // Do a sibling check if the nodes have a common ancestor + siblingCheck( ap[ i ], bp[ i ] ) : + + // Otherwise nodes in our document sort first + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. 
+ /* eslint-disable eqeqeq */ + ap[ i ] == preferredDoc ? -1 : + bp[ i ] == preferredDoc ? 1 : + /* eslint-enable eqeqeq */ + 0; + }; + + return document; +}; + +Sizzle.matches = function( expr, elements ) { + return Sizzle( expr, null, null, elements ); +}; + +Sizzle.matchesSelector = function( elem, expr ) { + setDocument( elem ); + + if ( support.matchesSelector && documentIsHTML && + !nonnativeSelectorCache[ expr + " " ] && + ( !rbuggyMatches || !rbuggyMatches.test( expr ) ) && + ( !rbuggyQSA || !rbuggyQSA.test( expr ) ) ) { + + try { + var ret = matches.call( elem, expr ); + + // IE 9's matchesSelector returns false on disconnected nodes + if ( ret || support.disconnectedMatch || + + // As well, disconnected nodes are said to be in a document + // fragment in IE 9 + elem.document && elem.document.nodeType !== 11 ) { + return ret; + } + } catch ( e ) { + nonnativeSelectorCache( expr, true ); + } + } + + return Sizzle( expr, document, null, [ elem ] ).length > 0; +}; + +Sizzle.contains = function( context, elem ) { + + // Set document vars if needed + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( ( context.ownerDocument || context ) != document ) { + setDocument( context ); + } + return contains( context, elem ); +}; + +Sizzle.attr = function( elem, name ) { + + // Set document vars if needed + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( ( elem.ownerDocument || elem ) != document ) { + setDocument( elem ); + } + + var fn = Expr.attrHandle[ name.toLowerCase() ], + + // Don't get fooled by Object.prototype properties (jQuery #13807) + val = fn && hasOwn.call( Expr.attrHandle, name.toLowerCase() ) ? + fn( elem, name, !documentIsHTML ) : + undefined; + + return val !== undefined ? + val : + support.attributes || !documentIsHTML ? + elem.getAttribute( name ) : + ( val = elem.getAttributeNode( name ) ) && val.specified ? 
+ val.value : + null; +}; + +Sizzle.escape = function( sel ) { + return ( sel + "" ).replace( rcssescape, fcssescape ); +}; + +Sizzle.error = function( msg ) { + throw new Error( "Syntax error, unrecognized expression: " + msg ); +}; + +/** + * Document sorting and removing duplicates + * @param {ArrayLike} results + */ +Sizzle.uniqueSort = function( results ) { + var elem, + duplicates = [], + j = 0, + i = 0; + + // Unless we *know* we can detect duplicates, assume their presence + hasDuplicate = !support.detectDuplicates; + sortInput = !support.sortStable && results.slice( 0 ); + results.sort( sortOrder ); + + if ( hasDuplicate ) { + while ( ( elem = results[ i++ ] ) ) { + if ( elem === results[ i ] ) { + j = duplicates.push( i ); + } + } + while ( j-- ) { + results.splice( duplicates[ j ], 1 ); + } + } + + // Clear input after sorting to release objects + // See https://github.com/jquery/sizzle/pull/225 + sortInput = null; + + return results; +}; + +/** + * Utility function for retrieving the text value of an array of DOM nodes + * @param {Array|Element} elem + */ +getText = Sizzle.getText = function( elem ) { + var node, + ret = "", + i = 0, + nodeType = elem.nodeType; + + if ( !nodeType ) { + + // If no nodeType, this is expected to be an array + while ( ( node = elem[ i++ ] ) ) { + + // Do not traverse comment nodes + ret += getText( node ); + } + } else if ( nodeType === 1 || nodeType === 9 || nodeType === 11 ) { + + // Use textContent for elements + // innerText usage removed for consistency of new lines (jQuery #11153) + if ( typeof elem.textContent === "string" ) { + return elem.textContent; + } else { + + // Traverse its children + for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) { + ret += getText( elem ); + } + } + } else if ( nodeType === 3 || nodeType === 4 ) { + return elem.nodeValue; + } + + // Do not include comment or processing instruction nodes + + return ret; +}; + +Expr = Sizzle.selectors = { + + // Can be adjusted by the user + cacheLength: 50, + + createPseudo: markFunction, + + match: matchExpr, + + attrHandle: {}, + + find: {}, + + relative: { + ">": { dir: "parentNode", first: true }, + " ": { dir: "parentNode" }, + "+": { dir: "previousSibling", first: true }, + "~": { dir: "previousSibling" } + }, + + preFilter: { + "ATTR": function( match ) { + match[ 1 ] = match[ 1 ].replace( runescape, funescape ); + + // Move the given value to match[3] whether quoted or unquoted + match[ 3 ] = ( match[ 3 ] || match[ 4 ] || + match[ 5 ] || "" ).replace( runescape, funescape ); + + if ( match[ 2 ] === "~=" ) { + match[ 3 ] = " " + match[ 3 ] + " "; + } + + return match.slice( 0, 4 ); + }, + + "CHILD": function( match ) { + + /* matches from matchExpr["CHILD"] + 1 type (only|nth|...) + 2 what (child|of-type) + 3 argument (even|odd|\d*|\d*n([+-]\d+)?|...) + 4 xn-component of xn+y argument ([+-]?\d*n|) + 5 sign of xn-component + 6 x of xn-component + 7 sign of y-component + 8 y of y-component + */ + match[ 1 ] = match[ 1 ].toLowerCase(); + + if ( match[ 1 ].slice( 0, 3 ) === "nth" ) { + + // nth-* requires argument + if ( !match[ 3 ] ) { + Sizzle.error( match[ 0 ] ); + } + + // numeric x and y parameters for Expr.filter.CHILD + // remember that false/true cast respectively to 0/1 + match[ 4 ] = +( match[ 4 ] ? 
+ match[ 5 ] + ( match[ 6 ] || 1 ) : + 2 * ( match[ 3 ] === "even" || match[ 3 ] === "odd" ) ); + match[ 5 ] = +( ( match[ 7 ] + match[ 8 ] ) || match[ 3 ] === "odd" ); + + // other types prohibit arguments + } else if ( match[ 3 ] ) { + Sizzle.error( match[ 0 ] ); + } + + return match; + }, + + "PSEUDO": function( match ) { + var excess, + unquoted = !match[ 6 ] && match[ 2 ]; + + if ( matchExpr[ "CHILD" ].test( match[ 0 ] ) ) { + return null; + } + + // Accept quoted arguments as-is + if ( match[ 3 ] ) { + match[ 2 ] = match[ 4 ] || match[ 5 ] || ""; + + // Strip excess characters from unquoted arguments + } else if ( unquoted && rpseudo.test( unquoted ) && + + // Get excess from tokenize (recursively) + ( excess = tokenize( unquoted, true ) ) && + + // advance to the next closing parenthesis + ( excess = unquoted.indexOf( ")", unquoted.length - excess ) - unquoted.length ) ) { + + // excess is a negative index + match[ 0 ] = match[ 0 ].slice( 0, excess ); + match[ 2 ] = unquoted.slice( 0, excess ); + } + + // Return only captures needed by the pseudo filter method (type and argument) + return match.slice( 0, 3 ); + } + }, + + filter: { + + "TAG": function( nodeNameSelector ) { + var nodeName = nodeNameSelector.replace( runescape, funescape ).toLowerCase(); + return nodeNameSelector === "*" ? + function() { + return true; + } : + function( elem ) { + return elem.nodeName && elem.nodeName.toLowerCase() === nodeName; + }; + }, + + "CLASS": function( className ) { + var pattern = classCache[ className + " " ]; + + return pattern || + ( pattern = new RegExp( "(^|" + whitespace + + ")" + className + "(" + whitespace + "|$)" ) ) && classCache( + className, function( elem ) { + return pattern.test( + typeof elem.className === "string" && elem.className || + typeof elem.getAttribute !== "undefined" && + elem.getAttribute( "class" ) || + "" + ); + } ); + }, + + "ATTR": function( name, operator, check ) { + return function( elem ) { + var result = Sizzle.attr( elem, name ); + + if ( result == null ) { + return operator === "!="; + } + if ( !operator ) { + return true; + } + + result += ""; + + /* eslint-disable max-len */ + + return operator === "=" ? result === check : + operator === "!=" ? result !== check : + operator === "^=" ? check && result.indexOf( check ) === 0 : + operator === "*=" ? check && result.indexOf( check ) > -1 : + operator === "$=" ? check && result.slice( -check.length ) === check : + operator === "~=" ? ( " " + result.replace( rwhitespace, " " ) + " " ).indexOf( check ) > -1 : + operator === "|=" ? result === check || result.slice( 0, check.length + 1 ) === check + "-" : + false; + /* eslint-enable max-len */ + + }; + }, + + "CHILD": function( type, what, _argument, first, last ) { + var simple = type.slice( 0, 3 ) !== "nth", + forward = type.slice( -4 ) !== "last", + ofType = what === "of-type"; + + return first === 1 && last === 0 ? + + // Shortcut for :nth-*(n) + function( elem ) { + return !!elem.parentNode; + } : + + function( elem, _context, xml ) { + var cache, uniqueCache, outerCache, node, nodeIndex, start, + dir = simple !== forward ? "nextSibling" : "previousSibling", + parent = elem.parentNode, + name = ofType && elem.nodeName.toLowerCase(), + useCache = !xml && !ofType, + diff = false; + + if ( parent ) { + + // :(first|last|only)-(child|of-type) + if ( simple ) { + while ( dir ) { + node = elem; + while ( ( node = node[ dir ] ) ) { + if ( ofType ? 
+ node.nodeName.toLowerCase() === name : + node.nodeType === 1 ) { + + return false; + } + } + + // Reverse direction for :only-* (if we haven't yet done so) + start = dir = type === "only" && !start && "nextSibling"; + } + return true; + } + + start = [ forward ? parent.firstChild : parent.lastChild ]; + + // non-xml :nth-child(...) stores cache data on `parent` + if ( forward && useCache ) { + + // Seek `elem` from a previously-cached index + + // ...in a gzip-friendly way + node = parent; + outerCache = node[ expando ] || ( node[ expando ] = {} ); + + // Support: IE <9 only + // Defend against cloned attroperties (jQuery gh-1709) + uniqueCache = outerCache[ node.uniqueID ] || + ( outerCache[ node.uniqueID ] = {} ); + + cache = uniqueCache[ type ] || []; + nodeIndex = cache[ 0 ] === dirruns && cache[ 1 ]; + diff = nodeIndex && cache[ 2 ]; + node = nodeIndex && parent.childNodes[ nodeIndex ]; + + while ( ( node = ++nodeIndex && node && node[ dir ] || + + // Fallback to seeking `elem` from the start + ( diff = nodeIndex = 0 ) || start.pop() ) ) { + + // When found, cache indexes on `parent` and break + if ( node.nodeType === 1 && ++diff && node === elem ) { + uniqueCache[ type ] = [ dirruns, nodeIndex, diff ]; + break; + } + } + + } else { + + // Use previously-cached element index if available + if ( useCache ) { + + // ...in a gzip-friendly way + node = elem; + outerCache = node[ expando ] || ( node[ expando ] = {} ); + + // Support: IE <9 only + // Defend against cloned attroperties (jQuery gh-1709) + uniqueCache = outerCache[ node.uniqueID ] || + ( outerCache[ node.uniqueID ] = {} ); + + cache = uniqueCache[ type ] || []; + nodeIndex = cache[ 0 ] === dirruns && cache[ 1 ]; + diff = nodeIndex; + } + + // xml :nth-child(...) + // or :nth-last-child(...) or :nth(-last)?-of-type(...) + if ( diff === false ) { + + // Use the same loop as above to seek `elem` from the start + while ( ( node = ++nodeIndex && node && node[ dir ] || + ( diff = nodeIndex = 0 ) || start.pop() ) ) { + + if ( ( ofType ? + node.nodeName.toLowerCase() === name : + node.nodeType === 1 ) && + ++diff ) { + + // Cache the index of each encountered element + if ( useCache ) { + outerCache = node[ expando ] || + ( node[ expando ] = {} ); + + // Support: IE <9 only + // Defend against cloned attroperties (jQuery gh-1709) + uniqueCache = outerCache[ node.uniqueID ] || + ( outerCache[ node.uniqueID ] = {} ); + + uniqueCache[ type ] = [ dirruns, diff ]; + } + + if ( node === elem ) { + break; + } + } + } + } + } + + // Incorporate the offset, then check against cycle size + diff -= last; + return diff === first || ( diff % first === 0 && diff / first >= 0 ); + } + }; + }, + + "PSEUDO": function( pseudo, argument ) { + + // pseudo-class names are case-insensitive + // http://www.w3.org/TR/selectors/#pseudo-classes + // Prioritize by case sensitivity in case custom pseudos are added with uppercase letters + // Remember that setFilters inherits from pseudos + var args, + fn = Expr.pseudos[ pseudo ] || Expr.setFilters[ pseudo.toLowerCase() ] || + Sizzle.error( "unsupported pseudo: " + pseudo ); + + // The user may use createPseudo to indicate that + // arguments are needed to create the filter function + // just as Sizzle does + if ( fn[ expando ] ) { + return fn( argument ); + } + + // But maintain support for old signatures + if ( fn.length > 1 ) { + args = [ pseudo, pseudo, "", argument ]; + return Expr.setFilters.hasOwnProperty( pseudo.toLowerCase() ) ? 
+ markFunction( function( seed, matches ) { + var idx, + matched = fn( seed, argument ), + i = matched.length; + while ( i-- ) { + idx = indexOf( seed, matched[ i ] ); + seed[ idx ] = !( matches[ idx ] = matched[ i ] ); + } + } ) : + function( elem ) { + return fn( elem, 0, args ); + }; + } + + return fn; + } + }, + + pseudos: { + + // Potentially complex pseudos + "not": markFunction( function( selector ) { + + // Trim the selector passed to compile + // to avoid treating leading and trailing + // spaces as combinators + var input = [], + results = [], + matcher = compile( selector.replace( rtrim, "$1" ) ); + + return matcher[ expando ] ? + markFunction( function( seed, matches, _context, xml ) { + var elem, + unmatched = matcher( seed, null, xml, [] ), + i = seed.length; + + // Match elements unmatched by `matcher` + while ( i-- ) { + if ( ( elem = unmatched[ i ] ) ) { + seed[ i ] = !( matches[ i ] = elem ); + } + } + } ) : + function( elem, _context, xml ) { + input[ 0 ] = elem; + matcher( input, null, xml, results ); + + // Don't keep the element (issue #299) + input[ 0 ] = null; + return !results.pop(); + }; + } ), + + "has": markFunction( function( selector ) { + return function( elem ) { + return Sizzle( selector, elem ).length > 0; + }; + } ), + + "contains": markFunction( function( text ) { + text = text.replace( runescape, funescape ); + return function( elem ) { + return ( elem.textContent || getText( elem ) ).indexOf( text ) > -1; + }; + } ), + + // "Whether an element is represented by a :lang() selector + // is based solely on the element's language value + // being equal to the identifier C, + // or beginning with the identifier C immediately followed by "-". + // The matching of C against the element's language value is performed case-insensitively. + // The identifier C does not have to be a valid language name." + // http://www.w3.org/TR/selectors/#lang-pseudo + "lang": markFunction( function( lang ) { + + // lang value must be a valid identifier + if ( !ridentifier.test( lang || "" ) ) { + Sizzle.error( "unsupported lang: " + lang ); + } + lang = lang.replace( runescape, funescape ).toLowerCase(); + return function( elem ) { + var elemLang; + do { + if ( ( elemLang = documentIsHTML ? 
+ elem.lang : + elem.getAttribute( "xml:lang" ) || elem.getAttribute( "lang" ) ) ) { + + elemLang = elemLang.toLowerCase(); + return elemLang === lang || elemLang.indexOf( lang + "-" ) === 0; + } + } while ( ( elem = elem.parentNode ) && elem.nodeType === 1 ); + return false; + }; + } ), + + // Miscellaneous + "target": function( elem ) { + var hash = window.location && window.location.hash; + return hash && hash.slice( 1 ) === elem.id; + }, + + "root": function( elem ) { + return elem === docElem; + }, + + "focus": function( elem ) { + return elem === document.activeElement && + ( !document.hasFocus || document.hasFocus() ) && + !!( elem.type || elem.href || ~elem.tabIndex ); + }, + + // Boolean properties + "enabled": createDisabledPseudo( false ), + "disabled": createDisabledPseudo( true ), + + "checked": function( elem ) { + + // In CSS3, :checked should return both checked and selected elements + // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked + var nodeName = elem.nodeName.toLowerCase(); + return ( nodeName === "input" && !!elem.checked ) || + ( nodeName === "option" && !!elem.selected ); + }, + + "selected": function( elem ) { + + // Accessing this property makes selected-by-default + // options in Safari work properly + if ( elem.parentNode ) { + // eslint-disable-next-line no-unused-expressions + elem.parentNode.selectedIndex; + } + + return elem.selected === true; + }, + + // Contents + "empty": function( elem ) { + + // http://www.w3.org/TR/selectors/#empty-pseudo + // :empty is negated by element (1) or content nodes (text: 3; cdata: 4; entity ref: 5), + // but not by others (comment: 8; processing instruction: 7; etc.) + // nodeType < 6 works because attributes (2) do not appear as children + for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) { + if ( elem.nodeType < 6 ) { + return false; + } + } + return true; + }, + + "parent": function( elem ) { + return !Expr.pseudos[ "empty" ]( elem ); + }, + + // Element/input types + "header": function( elem ) { + return rheader.test( elem.nodeName ); + }, + + "input": function( elem ) { + return rinputs.test( elem.nodeName ); + }, + + "button": function( elem ) { + var name = elem.nodeName.toLowerCase(); + return name === "input" && elem.type === "button" || name === "button"; + }, + + "text": function( elem ) { + var attr; + return elem.nodeName.toLowerCase() === "input" && + elem.type === "text" && + + // Support: IE<8 + // New HTML5 attribute values (e.g., "search") appear with elem.type === "text" + ( ( attr = elem.getAttribute( "type" ) ) == null || + attr.toLowerCase() === "text" ); + }, + + // Position-in-collection + "first": createPositionalPseudo( function() { + return [ 0 ]; + } ), + + "last": createPositionalPseudo( function( _matchIndexes, length ) { + return [ length - 1 ]; + } ), + + "eq": createPositionalPseudo( function( _matchIndexes, length, argument ) { + return [ argument < 0 ? argument + length : argument ]; + } ), + + "even": createPositionalPseudo( function( matchIndexes, length ) { + var i = 0; + for ( ; i < length; i += 2 ) { + matchIndexes.push( i ); + } + return matchIndexes; + } ), + + "odd": createPositionalPseudo( function( matchIndexes, length ) { + var i = 1; + for ( ; i < length; i += 2 ) { + matchIndexes.push( i ); + } + return matchIndexes; + } ), + + "lt": createPositionalPseudo( function( matchIndexes, length, argument ) { + var i = argument < 0 ? + argument + length : + argument > length ? 
+ length : + argument; + for ( ; --i >= 0; ) { + matchIndexes.push( i ); + } + return matchIndexes; + } ), + + "gt": createPositionalPseudo( function( matchIndexes, length, argument ) { + var i = argument < 0 ? argument + length : argument; + for ( ; ++i < length; ) { + matchIndexes.push( i ); + } + return matchIndexes; + } ) + } +}; + +Expr.pseudos[ "nth" ] = Expr.pseudos[ "eq" ]; + +// Add button/input type pseudos +for ( i in { radio: true, checkbox: true, file: true, password: true, image: true } ) { + Expr.pseudos[ i ] = createInputPseudo( i ); +} +for ( i in { submit: true, reset: true } ) { + Expr.pseudos[ i ] = createButtonPseudo( i ); +} + +// Easy API for creating new setFilters +function setFilters() {} +setFilters.prototype = Expr.filters = Expr.pseudos; +Expr.setFilters = new setFilters(); + +tokenize = Sizzle.tokenize = function( selector, parseOnly ) { + var matched, match, tokens, type, + soFar, groups, preFilters, + cached = tokenCache[ selector + " " ]; + + if ( cached ) { + return parseOnly ? 0 : cached.slice( 0 ); + } + + soFar = selector; + groups = []; + preFilters = Expr.preFilter; + + while ( soFar ) { + + // Comma and first run + if ( !matched || ( match = rcomma.exec( soFar ) ) ) { + if ( match ) { + + // Don't consume trailing commas as valid + soFar = soFar.slice( match[ 0 ].length ) || soFar; + } + groups.push( ( tokens = [] ) ); + } + + matched = false; + + // Combinators + if ( ( match = rcombinators.exec( soFar ) ) ) { + matched = match.shift(); + tokens.push( { + value: matched, + + // Cast descendant combinators to space + type: match[ 0 ].replace( rtrim, " " ) + } ); + soFar = soFar.slice( matched.length ); + } + + // Filters + for ( type in Expr.filter ) { + if ( ( match = matchExpr[ type ].exec( soFar ) ) && ( !preFilters[ type ] || + ( match = preFilters[ type ]( match ) ) ) ) { + matched = match.shift(); + tokens.push( { + value: matched, + type: type, + matches: match + } ); + soFar = soFar.slice( matched.length ); + } + } + + if ( !matched ) { + break; + } + } + + // Return the length of the invalid excess + // if we're just parsing + // Otherwise, throw an error or return tokens + return parseOnly ? + soFar.length : + soFar ? + Sizzle.error( selector ) : + + // Cache the tokens + tokenCache( selector, groups ).slice( 0 ); +}; + +function toSelector( tokens ) { + var i = 0, + len = tokens.length, + selector = ""; + for ( ; i < len; i++ ) { + selector += tokens[ i ].value; + } + return selector; +} + +function addCombinator( matcher, combinator, base ) { + var dir = combinator.dir, + skip = combinator.next, + key = skip || dir, + checkNonElements = base && key === "parentNode", + doneName = done++; + + return combinator.first ? 
+ + // Check against closest ancestor/preceding element + function( elem, context, xml ) { + while ( ( elem = elem[ dir ] ) ) { + if ( elem.nodeType === 1 || checkNonElements ) { + return matcher( elem, context, xml ); + } + } + return false; + } : + + // Check against all ancestor/preceding elements + function( elem, context, xml ) { + var oldCache, uniqueCache, outerCache, + newCache = [ dirruns, doneName ]; + + // We can't set arbitrary data on XML nodes, so they don't benefit from combinator caching + if ( xml ) { + while ( ( elem = elem[ dir ] ) ) { + if ( elem.nodeType === 1 || checkNonElements ) { + if ( matcher( elem, context, xml ) ) { + return true; + } + } + } + } else { + while ( ( elem = elem[ dir ] ) ) { + if ( elem.nodeType === 1 || checkNonElements ) { + outerCache = elem[ expando ] || ( elem[ expando ] = {} ); + + // Support: IE <9 only + // Defend against cloned attroperties (jQuery gh-1709) + uniqueCache = outerCache[ elem.uniqueID ] || + ( outerCache[ elem.uniqueID ] = {} ); + + if ( skip && skip === elem.nodeName.toLowerCase() ) { + elem = elem[ dir ] || elem; + } else if ( ( oldCache = uniqueCache[ key ] ) && + oldCache[ 0 ] === dirruns && oldCache[ 1 ] === doneName ) { + + // Assign to newCache so results back-propagate to previous elements + return ( newCache[ 2 ] = oldCache[ 2 ] ); + } else { + + // Reuse newcache so results back-propagate to previous elements + uniqueCache[ key ] = newCache; + + // A match means we're done; a fail means we have to keep checking + if ( ( newCache[ 2 ] = matcher( elem, context, xml ) ) ) { + return true; + } + } + } + } + } + return false; + }; +} + +function elementMatcher( matchers ) { + return matchers.length > 1 ? + function( elem, context, xml ) { + var i = matchers.length; + while ( i-- ) { + if ( !matchers[ i ]( elem, context, xml ) ) { + return false; + } + } + return true; + } : + matchers[ 0 ]; +} + +function multipleContexts( selector, contexts, results ) { + var i = 0, + len = contexts.length; + for ( ; i < len; i++ ) { + Sizzle( selector, contexts[ i ], results ); + } + return results; +} + +function condense( unmatched, map, filter, context, xml ) { + var elem, + newUnmatched = [], + i = 0, + len = unmatched.length, + mapped = map != null; + + for ( ; i < len; i++ ) { + if ( ( elem = unmatched[ i ] ) ) { + if ( !filter || filter( elem, context, xml ) ) { + newUnmatched.push( elem ); + if ( mapped ) { + map.push( i ); + } + } + } + } + + return newUnmatched; +} + +function setMatcher( preFilter, selector, matcher, postFilter, postFinder, postSelector ) { + if ( postFilter && !postFilter[ expando ] ) { + postFilter = setMatcher( postFilter ); + } + if ( postFinder && !postFinder[ expando ] ) { + postFinder = setMatcher( postFinder, postSelector ); + } + return markFunction( function( seed, results, context, xml ) { + var temp, i, elem, + preMap = [], + postMap = [], + preexisting = results.length, + + // Get initial elements from seed or context + elems = seed || multipleContexts( + selector || "*", + context.nodeType ? [ context ] : context, + [] + ), + + // Prefilter to get matcher input, preserving a map for seed-results synchronization + matcherIn = preFilter && ( seed || !selector ) ? + condense( elems, preMap, preFilter, context, xml ) : + elems, + + matcherOut = matcher ? + + // If we have a postFinder, or filtered seed, or non-seed postFilter or preexisting results, + postFinder || ( seed ? preFilter : preexisting || postFilter ) ? 
+ + // ...intermediate processing is necessary + [] : + + // ...otherwise use results directly + results : + matcherIn; + + // Find primary matches + if ( matcher ) { + matcher( matcherIn, matcherOut, context, xml ); + } + + // Apply postFilter + if ( postFilter ) { + temp = condense( matcherOut, postMap ); + postFilter( temp, [], context, xml ); + + // Un-match failing elements by moving them back to matcherIn + i = temp.length; + while ( i-- ) { + if ( ( elem = temp[ i ] ) ) { + matcherOut[ postMap[ i ] ] = !( matcherIn[ postMap[ i ] ] = elem ); + } + } + } + + if ( seed ) { + if ( postFinder || preFilter ) { + if ( postFinder ) { + + // Get the final matcherOut by condensing this intermediate into postFinder contexts + temp = []; + i = matcherOut.length; + while ( i-- ) { + if ( ( elem = matcherOut[ i ] ) ) { + + // Restore matcherIn since elem is not yet a final match + temp.push( ( matcherIn[ i ] = elem ) ); + } + } + postFinder( null, ( matcherOut = [] ), temp, xml ); + } + + // Move matched elements from seed to results to keep them synchronized + i = matcherOut.length; + while ( i-- ) { + if ( ( elem = matcherOut[ i ] ) && + ( temp = postFinder ? indexOf( seed, elem ) : preMap[ i ] ) > -1 ) { + + seed[ temp ] = !( results[ temp ] = elem ); + } + } + } + + // Add elements to results, through postFinder if defined + } else { + matcherOut = condense( + matcherOut === results ? + matcherOut.splice( preexisting, matcherOut.length ) : + matcherOut + ); + if ( postFinder ) { + postFinder( null, results, matcherOut, xml ); + } else { + push.apply( results, matcherOut ); + } + } + } ); +} + +function matcherFromTokens( tokens ) { + var checkContext, matcher, j, + len = tokens.length, + leadingRelative = Expr.relative[ tokens[ 0 ].type ], + implicitRelative = leadingRelative || Expr.relative[ " " ], + i = leadingRelative ? 1 : 0, + + // The foundational matcher ensures that elements are reachable from top-level context(s) + matchContext = addCombinator( function( elem ) { + return elem === checkContext; + }, implicitRelative, true ), + matchAnyContext = addCombinator( function( elem ) { + return indexOf( checkContext, elem ) > -1; + }, implicitRelative, true ), + matchers = [ function( elem, context, xml ) { + var ret = ( !leadingRelative && ( xml || context !== outermostContext ) ) || ( + ( checkContext = context ).nodeType ? + matchContext( elem, context, xml ) : + matchAnyContext( elem, context, xml ) ); + + // Avoid hanging onto element (issue #299) + checkContext = null; + return ret; + } ]; + + for ( ; i < len; i++ ) { + if ( ( matcher = Expr.relative[ tokens[ i ].type ] ) ) { + matchers = [ addCombinator( elementMatcher( matchers ), matcher ) ]; + } else { + matcher = Expr.filter[ tokens[ i ].type ].apply( null, tokens[ i ].matches ); + + // Return special upon seeing a positional matcher + if ( matcher[ expando ] ) { + + // Find the next relative operator (if any) for proper handling + j = ++i; + for ( ; j < len; j++ ) { + if ( Expr.relative[ tokens[ j ].type ] ) { + break; + } + } + return setMatcher( + i > 1 && elementMatcher( matchers ), + i > 1 && toSelector( + + // If the preceding token was a descendant combinator, insert an implicit any-element `*` + tokens + .slice( 0, i - 1 ) + .concat( { value: tokens[ i - 2 ].type === " " ? 
"*" : "" } ) + ).replace( rtrim, "$1" ), + matcher, + i < j && matcherFromTokens( tokens.slice( i, j ) ), + j < len && matcherFromTokens( ( tokens = tokens.slice( j ) ) ), + j < len && toSelector( tokens ) + ); + } + matchers.push( matcher ); + } + } + + return elementMatcher( matchers ); +} + +function matcherFromGroupMatchers( elementMatchers, setMatchers ) { + var bySet = setMatchers.length > 0, + byElement = elementMatchers.length > 0, + superMatcher = function( seed, context, xml, results, outermost ) { + var elem, j, matcher, + matchedCount = 0, + i = "0", + unmatched = seed && [], + setMatched = [], + contextBackup = outermostContext, + + // We must always have either seed elements or outermost context + elems = seed || byElement && Expr.find[ "TAG" ]( "*", outermost ), + + // Use integer dirruns iff this is the outermost matcher + dirrunsUnique = ( dirruns += contextBackup == null ? 1 : Math.random() || 0.1 ), + len = elems.length; + + if ( outermost ) { + + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + outermostContext = context == document || context || outermost; + } + + // Add elements passing elementMatchers directly to results + // Support: IE<9, Safari + // Tolerate NodeList properties (IE: "length"; Safari: ) matching elements by id + for ( ; i !== len && ( elem = elems[ i ] ) != null; i++ ) { + if ( byElement && elem ) { + j = 0; + + // Support: IE 11+, Edge 17 - 18+ + // IE/Edge sometimes throw a "Permission denied" error when strict-comparing + // two documents; shallow comparisons work. + // eslint-disable-next-line eqeqeq + if ( !context && elem.ownerDocument != document ) { + setDocument( elem ); + xml = !documentIsHTML; + } + while ( ( matcher = elementMatchers[ j++ ] ) ) { + if ( matcher( elem, context || document, xml ) ) { + results.push( elem ); + break; + } + } + if ( outermost ) { + dirruns = dirrunsUnique; + } + } + + // Track unmatched elements for set filters + if ( bySet ) { + + // They will have gone through all possible matchers + if ( ( elem = !matcher && elem ) ) { + matchedCount--; + } + + // Lengthen the array for every element, matched or not + if ( seed ) { + unmatched.push( elem ); + } + } + } + + // `i` is now the count of elements visited above, and adding it to `matchedCount` + // makes the latter nonnegative. + matchedCount += i; + + // Apply set filters to unmatched elements + // NOTE: This can be skipped if there are no unmatched elements (i.e., `matchedCount` + // equals `i`), unless we didn't visit _any_ elements in the above loop because we have + // no element matchers and no seed. + // Incrementing an initially-string "0" `i` allows `i` to remain a string only in that + // case, which will result in a "00" `matchedCount` that differs from `i` but is also + // numerically zero. 
+ if ( bySet && i !== matchedCount ) { + j = 0; + while ( ( matcher = setMatchers[ j++ ] ) ) { + matcher( unmatched, setMatched, context, xml ); + } + + if ( seed ) { + + // Reintegrate element matches to eliminate the need for sorting + if ( matchedCount > 0 ) { + while ( i-- ) { + if ( !( unmatched[ i ] || setMatched[ i ] ) ) { + setMatched[ i ] = pop.call( results ); + } + } + } + + // Discard index placeholder values to get only actual matches + setMatched = condense( setMatched ); + } + + // Add matches to results + push.apply( results, setMatched ); + + // Seedless set matches succeeding multiple successful matchers stipulate sorting + if ( outermost && !seed && setMatched.length > 0 && + ( matchedCount + setMatchers.length ) > 1 ) { + + Sizzle.uniqueSort( results ); + } + } + + // Override manipulation of globals by nested matchers + if ( outermost ) { + dirruns = dirrunsUnique; + outermostContext = contextBackup; + } + + return unmatched; + }; + + return bySet ? + markFunction( superMatcher ) : + superMatcher; +} + +compile = Sizzle.compile = function( selector, match /* Internal Use Only */ ) { + var i, + setMatchers = [], + elementMatchers = [], + cached = compilerCache[ selector + " " ]; + + if ( !cached ) { + + // Generate a function of recursive functions that can be used to check each element + if ( !match ) { + match = tokenize( selector ); + } + i = match.length; + while ( i-- ) { + cached = matcherFromTokens( match[ i ] ); + if ( cached[ expando ] ) { + setMatchers.push( cached ); + } else { + elementMatchers.push( cached ); + } + } + + // Cache the compiled function + cached = compilerCache( + selector, + matcherFromGroupMatchers( elementMatchers, setMatchers ) + ); + + // Save selector and tokenization + cached.selector = selector; + } + return cached; +}; + +/** + * A low-level selection function that works with Sizzle's compiled + * selector functions + * @param {String|Function} selector A selector or a pre-compiled + * selector function built with Sizzle.compile + * @param {Element} context + * @param {Array} [results] + * @param {Array} [seed] A set of elements to match against + */ +select = Sizzle.select = function( selector, context, results, seed ) { + var i, tokens, token, type, find, + compiled = typeof selector === "function" && selector, + match = !seed && tokenize( ( selector = compiled.selector || selector ) ); + + results = results || []; + + // Try to minimize operations if there is only one selector in the list and no seed + // (the latter of which guarantees us context) + if ( match.length === 1 ) { + + // Reduce context if the leading compound selector is an ID + tokens = match[ 0 ] = match[ 0 ].slice( 0 ); + if ( tokens.length > 2 && ( token = tokens[ 0 ] ).type === "ID" && + context.nodeType === 9 && documentIsHTML && Expr.relative[ tokens[ 1 ].type ] ) { + + context = ( Expr.find[ "ID" ]( token.matches[ 0 ] + .replace( runescape, funescape ), context ) || [] )[ 0 ]; + if ( !context ) { + return results; + + // Precompiled matchers will still verify ancestry, so step up a level + } else if ( compiled ) { + context = context.parentNode; + } + + selector = selector.slice( tokens.shift().value.length ); + } + + // Fetch a seed set for right-to-left matching + i = matchExpr[ "needsContext" ].test( selector ) ? 
 0 : tokens.length;
+ while ( i-- ) {
+ token = tokens[ i ];
+
+ // Abort if we hit a combinator
+ if ( Expr.relative[ ( type = token.type ) ] ) {
+ break;
+ }
+ if ( ( find = Expr.find[ type ] ) ) {
+
+ // Search, expanding context for leading sibling combinators
+ if ( ( seed = find(
+ token.matches[ 0 ].replace( runescape, funescape ),
+ rsibling.test( tokens[ 0 ].type ) && testContext( context.parentNode ) ||
+ context
+ ) ) ) {
+
+ // If seed is empty or no tokens remain, we can return early
+ tokens.splice( i, 1 );
+ selector = seed.length && toSelector( tokens );
+ if ( !selector ) {
+ push.apply( results, seed );
+ return results;
+ }
+
+ break;
+ }
+ }
+ }
+ }
+
+ // Compile and execute a filtering function if one is not provided
+ // Provide `match` to avoid retokenization if we modified the selector above
+ ( compiled || compile( selector, match ) )(
+ seed,
+ context,
+ !documentIsHTML,
+ results,
+ !context || rsibling.test( selector ) && testContext( context.parentNode ) || context
+ );
+ return results;
+};
+
+// One-time assignments
+
+// Sort stability
+support.sortStable = expando.split( "" ).sort( sortOrder ).join( "" ) === expando;
+
+// Support: Chrome 14-35+
+// Always assume duplicates if they aren't passed to the comparison function
+support.detectDuplicates = !!hasDuplicate;
+
+// Initialize against the default document
+setDocument();
+
+// Support: Webkit<537.32 - Safari 6.0.3/Chrome 25 (fixed in Chrome 27)
+// Detached nodes confoundingly follow *each other*
+support.sortDetached = assert( function( el ) {
+
+ // Should return 1, but returns 4 (following)
+ return el.compareDocumentPosition( document.createElement( "fieldset" ) ) & 1;
+} );
+
+// Support: IE<8
+// Prevent attribute/property "interpolation"
+// https://msdn.microsoft.com/en-us/library/ms536429%28VS.85%29.aspx
+if ( !assert( function( el ) {
+ el.innerHTML = "<a href='#'></a>";
+ return el.firstChild.getAttribute( "href" ) === "#";
+} ) ) {
+ addHandle( "type|href|height|width", function( elem, name, isXML ) {
+ if ( !isXML ) {
+ return elem.getAttribute( name, name.toLowerCase() === "type" ? 1 : 2 );
+ }
+ } );
+}
+
+// Support: IE<9
+// Use defaultValue in place of getAttribute("value")
+if ( !support.attributes || !assert( function( el ) {
+ el.innerHTML = "<input/>";
+ el.firstChild.setAttribute( "value", "" );
+ return el.firstChild.getAttribute( "value" ) === "";
+} ) ) {
+ addHandle( "value", function( elem, _name, isXML ) {
+ if ( !isXML && elem.nodeName.toLowerCase() === "input" ) {
+ return elem.defaultValue;
+ }
+ } );
+}
+
+// Support: IE<9
+// Use getAttributeNode to fetch booleans when getAttribute lies
+if ( !assert( function( el ) {
+ return el.getAttribute( "disabled" ) == null;
+} ) ) {
+ addHandle( booleans, function( elem, name, isXML ) {
+ var val;
+ if ( !isXML ) {
+ return elem[ name ] === true ? name.toLowerCase() :
+ ( val = elem.getAttributeNode( name ) ) && val.specified ? 
+ val.value : + null; + } + } ); +} + +return Sizzle; + +} )( window ); + + + +jQuery.find = Sizzle; +jQuery.expr = Sizzle.selectors; + +// Deprecated +jQuery.expr[ ":" ] = jQuery.expr.pseudos; +jQuery.uniqueSort = jQuery.unique = Sizzle.uniqueSort; +jQuery.text = Sizzle.getText; +jQuery.isXMLDoc = Sizzle.isXML; +jQuery.contains = Sizzle.contains; +jQuery.escapeSelector = Sizzle.escape; + + + + +var dir = function( elem, dir, until ) { + var matched = [], + truncate = until !== undefined; + + while ( ( elem = elem[ dir ] ) && elem.nodeType !== 9 ) { + if ( elem.nodeType === 1 ) { + if ( truncate && jQuery( elem ).is( until ) ) { + break; + } + matched.push( elem ); + } + } + return matched; +}; + + +var siblings = function( n, elem ) { + var matched = []; + + for ( ; n; n = n.nextSibling ) { + if ( n.nodeType === 1 && n !== elem ) { + matched.push( n ); + } + } + + return matched; +}; + + +var rneedsContext = jQuery.expr.match.needsContext; + + + +function nodeName( elem, name ) { + + return elem.nodeName && elem.nodeName.toLowerCase() === name.toLowerCase(); + +} +var rsingleTag = ( /^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i ); + + + +// Implement the identical functionality for filter and not +function winnow( elements, qualifier, not ) { + if ( isFunction( qualifier ) ) { + return jQuery.grep( elements, function( elem, i ) { + return !!qualifier.call( elem, i, elem ) !== not; + } ); + } + + // Single element + if ( qualifier.nodeType ) { + return jQuery.grep( elements, function( elem ) { + return ( elem === qualifier ) !== not; + } ); + } + + // Arraylike of elements (jQuery, arguments, Array) + if ( typeof qualifier !== "string" ) { + return jQuery.grep( elements, function( elem ) { + return ( indexOf.call( qualifier, elem ) > -1 ) !== not; + } ); + } + + // Filtered directly for both simple and complex selectors + return jQuery.filter( qualifier, elements, not ); +} + +jQuery.filter = function( expr, elems, not ) { + var elem = elems[ 0 ]; + + if ( not ) { + expr = ":not(" + expr + ")"; + } + + if ( elems.length === 1 && elem.nodeType === 1 ) { + return jQuery.find.matchesSelector( elem, expr ) ? [ elem ] : []; + } + + return jQuery.find.matches( expr, jQuery.grep( elems, function( elem ) { + return elem.nodeType === 1; + } ) ); +}; + +jQuery.fn.extend( { + find: function( selector ) { + var i, ret, + len = this.length, + self = this; + + if ( typeof selector !== "string" ) { + return this.pushStack( jQuery( selector ).filter( function() { + for ( i = 0; i < len; i++ ) { + if ( jQuery.contains( self[ i ], this ) ) { + return true; + } + } + } ) ); + } + + ret = this.pushStack( [] ); + + for ( i = 0; i < len; i++ ) { + jQuery.find( selector, self[ i ], ret ); + } + + return len > 1 ? jQuery.uniqueSort( ret ) : ret; + }, + filter: function( selector ) { + return this.pushStack( winnow( this, selector || [], false ) ); + }, + not: function( selector ) { + return this.pushStack( winnow( this, selector || [], true ) ); + }, + is: function( selector ) { + return !!winnow( + this, + + // If this is a positional/relative selector, check membership in the returned set + // so $("p:first").is("p:last") won't return true for a doc with two "p". + typeof selector === "string" && rneedsContext.test( selector ) ? 
+ jQuery( selector ) : + selector || [], + false + ).length; + } +} ); + + +// Initialize a jQuery object + + +// A central reference to the root jQuery(document) +var rootjQuery, + + // A simple way to check for HTML strings + // Prioritize #id over to avoid XSS via location.hash (#9521) + // Strict HTML recognition (#11290: must start with <) + // Shortcut simple #id case for speed + rquickExpr = /^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/, + + init = jQuery.fn.init = function( selector, context, root ) { + var match, elem; + + // HANDLE: $(""), $(null), $(undefined), $(false) + if ( !selector ) { + return this; + } + + // Method init() accepts an alternate rootjQuery + // so migrate can support jQuery.sub (gh-2101) + root = root || rootjQuery; + + // Handle HTML strings + if ( typeof selector === "string" ) { + if ( selector[ 0 ] === "<" && + selector[ selector.length - 1 ] === ">" && + selector.length >= 3 ) { + + // Assume that strings that start and end with <> are HTML and skip the regex check + match = [ null, selector, null ]; + + } else { + match = rquickExpr.exec( selector ); + } + + // Match html or make sure no context is specified for #id + if ( match && ( match[ 1 ] || !context ) ) { + + // HANDLE: $(html) -> $(array) + if ( match[ 1 ] ) { + context = context instanceof jQuery ? context[ 0 ] : context; + + // Option to run scripts is true for back-compat + // Intentionally let the error be thrown if parseHTML is not present + jQuery.merge( this, jQuery.parseHTML( + match[ 1 ], + context && context.nodeType ? context.ownerDocument || context : document, + true + ) ); + + // HANDLE: $(html, props) + if ( rsingleTag.test( match[ 1 ] ) && jQuery.isPlainObject( context ) ) { + for ( match in context ) { + + // Properties of context are called as methods if possible + if ( isFunction( this[ match ] ) ) { + this[ match ]( context[ match ] ); + + // ...and otherwise set as attributes + } else { + this.attr( match, context[ match ] ); + } + } + } + + return this; + + // HANDLE: $(#id) + } else { + elem = document.getElementById( match[ 2 ] ); + + if ( elem ) { + + // Inject the element directly into the jQuery object + this[ 0 ] = elem; + this.length = 1; + } + return this; + } + + // HANDLE: $(expr, $(...)) + } else if ( !context || context.jquery ) { + return ( context || root ).find( selector ); + + // HANDLE: $(expr, context) + // (which is just equivalent to: $(context).find(expr) + } else { + return this.constructor( context ).find( selector ); + } + + // HANDLE: $(DOMElement) + } else if ( selector.nodeType ) { + this[ 0 ] = selector; + this.length = 1; + return this; + + // HANDLE: $(function) + // Shortcut for document ready + } else if ( isFunction( selector ) ) { + return root.ready !== undefined ? 
+ root.ready( selector ) : + + // Execute immediately if ready is not present + selector( jQuery ); + } + + return jQuery.makeArray( selector, this ); + }; + +// Give the init function the jQuery prototype for later instantiation +init.prototype = jQuery.fn; + +// Initialize central reference +rootjQuery = jQuery( document ); + + +var rparentsprev = /^(?:parents|prev(?:Until|All))/, + + // Methods guaranteed to produce a unique set when starting from a unique set + guaranteedUnique = { + children: true, + contents: true, + next: true, + prev: true + }; + +jQuery.fn.extend( { + has: function( target ) { + var targets = jQuery( target, this ), + l = targets.length; + + return this.filter( function() { + var i = 0; + for ( ; i < l; i++ ) { + if ( jQuery.contains( this, targets[ i ] ) ) { + return true; + } + } + } ); + }, + + closest: function( selectors, context ) { + var cur, + i = 0, + l = this.length, + matched = [], + targets = typeof selectors !== "string" && jQuery( selectors ); + + // Positional selectors never match, since there's no _selection_ context + if ( !rneedsContext.test( selectors ) ) { + for ( ; i < l; i++ ) { + for ( cur = this[ i ]; cur && cur !== context; cur = cur.parentNode ) { + + // Always skip document fragments + if ( cur.nodeType < 11 && ( targets ? + targets.index( cur ) > -1 : + + // Don't pass non-elements to Sizzle + cur.nodeType === 1 && + jQuery.find.matchesSelector( cur, selectors ) ) ) { + + matched.push( cur ); + break; + } + } + } + } + + return this.pushStack( matched.length > 1 ? jQuery.uniqueSort( matched ) : matched ); + }, + + // Determine the position of an element within the set + index: function( elem ) { + + // No argument, return index in parent + if ( !elem ) { + return ( this[ 0 ] && this[ 0 ].parentNode ) ? this.first().prevAll().length : -1; + } + + // Index in selector + if ( typeof elem === "string" ) { + return indexOf.call( jQuery( elem ), this[ 0 ] ); + } + + // Locate the position of the desired element + return indexOf.call( this, + + // If it receives a jQuery object, the first element is used + elem.jquery ? elem[ 0 ] : elem + ); + }, + + add: function( selector, context ) { + return this.pushStack( + jQuery.uniqueSort( + jQuery.merge( this.get(), jQuery( selector, context ) ) + ) + ); + }, + + addBack: function( selector ) { + return this.add( selector == null ? + this.prevObject : this.prevObject.filter( selector ) + ); + } +} ); + +function sibling( cur, dir ) { + while ( ( cur = cur[ dir ] ) && cur.nodeType !== 1 ) {} + return cur; +} + +jQuery.each( { + parent: function( elem ) { + var parent = elem.parentNode; + return parent && parent.nodeType !== 11 ? 
parent : null; + }, + parents: function( elem ) { + return dir( elem, "parentNode" ); + }, + parentsUntil: function( elem, _i, until ) { + return dir( elem, "parentNode", until ); + }, + next: function( elem ) { + return sibling( elem, "nextSibling" ); + }, + prev: function( elem ) { + return sibling( elem, "previousSibling" ); + }, + nextAll: function( elem ) { + return dir( elem, "nextSibling" ); + }, + prevAll: function( elem ) { + return dir( elem, "previousSibling" ); + }, + nextUntil: function( elem, _i, until ) { + return dir( elem, "nextSibling", until ); + }, + prevUntil: function( elem, _i, until ) { + return dir( elem, "previousSibling", until ); + }, + siblings: function( elem ) { + return siblings( ( elem.parentNode || {} ).firstChild, elem ); + }, + children: function( elem ) { + return siblings( elem.firstChild ); + }, + contents: function( elem ) { + if ( elem.contentDocument != null && + + // Support: IE 11+ + // elements with no `data` attribute has an object + // `contentDocument` with a `null` prototype. + getProto( elem.contentDocument ) ) { + + return elem.contentDocument; + } + + // Support: IE 9 - 11 only, iOS 7 only, Android Browser <=4.3 only + // Treat the template element as a regular one in browsers that + // don't support it. + if ( nodeName( elem, "template" ) ) { + elem = elem.content || elem; + } + + return jQuery.merge( [], elem.childNodes ); + } +}, function( name, fn ) { + jQuery.fn[ name ] = function( until, selector ) { + var matched = jQuery.map( this, fn, until ); + + if ( name.slice( -5 ) !== "Until" ) { + selector = until; + } + + if ( selector && typeof selector === "string" ) { + matched = jQuery.filter( selector, matched ); + } + + if ( this.length > 1 ) { + + // Remove duplicates + if ( !guaranteedUnique[ name ] ) { + jQuery.uniqueSort( matched ); + } + + // Reverse order for parents* and prev-derivatives + if ( rparentsprev.test( name ) ) { + matched.reverse(); + } + } + + return this.pushStack( matched ); + }; +} ); +var rnothtmlwhite = ( /[^\x20\t\r\n\f]+/g ); + + + +// Convert String-formatted options into Object-formatted ones +function createOptions( options ) { + var object = {}; + jQuery.each( options.match( rnothtmlwhite ) || [], function( _, flag ) { + object[ flag ] = true; + } ); + return object; +} + +/* + * Create a callback list using the following parameters: + * + * options: an optional list of space-separated options that will change how + * the callback list behaves or a more traditional option object + * + * By default a callback list will act like an event callback list and can be + * "fired" multiple times. + * + * Possible options: + * + * once: will ensure the callback list can only be fired once (like a Deferred) + * + * memory: will keep track of previous values and will call any callback added + * after the list has been fired right away with the latest "memorized" + * values (like a Deferred) + * + * unique: will ensure a callback can only be added once (no duplicate in the list) + * + * stopOnFalse: interrupt callings when a callback returns false + * + */ +jQuery.Callbacks = function( options ) { + + // Convert options from String-formatted to Object-formatted if needed + // (we check in cache first) + options = typeof options === "string" ? 
+ createOptions( options ) : + jQuery.extend( {}, options ); + + var // Flag to know if list is currently firing + firing, + + // Last fire value for non-forgettable lists + memory, + + // Flag to know if list was already fired + fired, + + // Flag to prevent firing + locked, + + // Actual callback list + list = [], + + // Queue of execution data for repeatable lists + queue = [], + + // Index of currently firing callback (modified by add/remove as needed) + firingIndex = -1, + + // Fire callbacks + fire = function() { + + // Enforce single-firing + locked = locked || options.once; + + // Execute callbacks for all pending executions, + // respecting firingIndex overrides and runtime changes + fired = firing = true; + for ( ; queue.length; firingIndex = -1 ) { + memory = queue.shift(); + while ( ++firingIndex < list.length ) { + + // Run callback and check for early termination + if ( list[ firingIndex ].apply( memory[ 0 ], memory[ 1 ] ) === false && + options.stopOnFalse ) { + + // Jump to end and forget the data so .add doesn't re-fire + firingIndex = list.length; + memory = false; + } + } + } + + // Forget the data if we're done with it + if ( !options.memory ) { + memory = false; + } + + firing = false; + + // Clean up if we're done firing for good + if ( locked ) { + + // Keep an empty list if we have data for future add calls + if ( memory ) { + list = []; + + // Otherwise, this object is spent + } else { + list = ""; + } + } + }, + + // Actual Callbacks object + self = { + + // Add a callback or a collection of callbacks to the list + add: function() { + if ( list ) { + + // If we have memory from a past run, we should fire after adding + if ( memory && !firing ) { + firingIndex = list.length - 1; + queue.push( memory ); + } + + ( function add( args ) { + jQuery.each( args, function( _, arg ) { + if ( isFunction( arg ) ) { + if ( !options.unique || !self.has( arg ) ) { + list.push( arg ); + } + } else if ( arg && arg.length && toType( arg ) !== "string" ) { + + // Inspect recursively + add( arg ); + } + } ); + } )( arguments ); + + if ( memory && !firing ) { + fire(); + } + } + return this; + }, + + // Remove a callback from the list + remove: function() { + jQuery.each( arguments, function( _, arg ) { + var index; + while ( ( index = jQuery.inArray( arg, list, index ) ) > -1 ) { + list.splice( index, 1 ); + + // Handle firing indexes + if ( index <= firingIndex ) { + firingIndex--; + } + } + } ); + return this; + }, + + // Check if a given callback is in the list. + // If no argument is given, return whether or not list has callbacks attached. + has: function( fn ) { + return fn ? + jQuery.inArray( fn, list ) > -1 : + list.length > 0; + }, + + // Remove all callbacks from the list + empty: function() { + if ( list ) { + list = []; + } + return this; + }, + + // Disable .fire and .add + // Abort any current/pending executions + // Clear all callbacks and values + disable: function() { + locked = queue = []; + list = memory = ""; + return this; + }, + disabled: function() { + return !list; + }, + + // Disable .fire + // Also disable .add unless we have memory (since it would have no effect) + // Abort any pending executions + lock: function() { + locked = queue = []; + if ( !memory && !firing ) { + list = memory = ""; + } + return this; + }, + locked: function() { + return !!locked; + }, + + // Call all callbacks with the given context and arguments + fireWith: function( context, args ) { + if ( !locked ) { + args = args || []; + args = [ context, args.slice ? 
args.slice() : args ]; + queue.push( args ); + if ( !firing ) { + fire(); + } + } + return this; + }, + + // Call all the callbacks with the given arguments + fire: function() { + self.fireWith( this, arguments ); + return this; + }, + + // To know if the callbacks have already been called at least once + fired: function() { + return !!fired; + } + }; + + return self; +}; + + +function Identity( v ) { + return v; +} +function Thrower( ex ) { + throw ex; +} + +function adoptValue( value, resolve, reject, noValue ) { + var method; + + try { + + // Check for promise aspect first to privilege synchronous behavior + if ( value && isFunction( ( method = value.promise ) ) ) { + method.call( value ).done( resolve ).fail( reject ); + + // Other thenables + } else if ( value && isFunction( ( method = value.then ) ) ) { + method.call( value, resolve, reject ); + + // Other non-thenables + } else { + + // Control `resolve` arguments by letting Array#slice cast boolean `noValue` to integer: + // * false: [ value ].slice( 0 ) => resolve( value ) + // * true: [ value ].slice( 1 ) => resolve() + resolve.apply( undefined, [ value ].slice( noValue ) ); + } + + // For Promises/A+, convert exceptions into rejections + // Since jQuery.when doesn't unwrap thenables, we can skip the extra checks appearing in + // Deferred#then to conditionally suppress rejection. + } catch ( value ) { + + // Support: Android 4.0 only + // Strict mode functions invoked without .call/.apply get global-object context + reject.apply( undefined, [ value ] ); + } +} + +jQuery.extend( { + + Deferred: function( func ) { + var tuples = [ + + // action, add listener, callbacks, + // ... .then handlers, argument index, [final state] + [ "notify", "progress", jQuery.Callbacks( "memory" ), + jQuery.Callbacks( "memory" ), 2 ], + [ "resolve", "done", jQuery.Callbacks( "once memory" ), + jQuery.Callbacks( "once memory" ), 0, "resolved" ], + [ "reject", "fail", jQuery.Callbacks( "once memory" ), + jQuery.Callbacks( "once memory" ), 1, "rejected" ] + ], + state = "pending", + promise = { + state: function() { + return state; + }, + always: function() { + deferred.done( arguments ).fail( arguments ); + return this; + }, + "catch": function( fn ) { + return promise.then( null, fn ); + }, + + // Keep pipe for back-compat + pipe: function( /* fnDone, fnFail, fnProgress */ ) { + var fns = arguments; + + return jQuery.Deferred( function( newDefer ) { + jQuery.each( tuples, function( _i, tuple ) { + + // Map tuples (progress, done, fail) to arguments (done, fail, progress) + var fn = isFunction( fns[ tuple[ 4 ] ] ) && fns[ tuple[ 4 ] ]; + + // deferred.progress(function() { bind to newDefer or newDefer.notify }) + // deferred.done(function() { bind to newDefer or newDefer.resolve }) + // deferred.fail(function() { bind to newDefer or newDefer.reject }) + deferred[ tuple[ 1 ] ]( function() { + var returned = fn && fn.apply( this, arguments ); + if ( returned && isFunction( returned.promise ) ) { + returned.promise() + .progress( newDefer.notify ) + .done( newDefer.resolve ) + .fail( newDefer.reject ); + } else { + newDefer[ tuple[ 0 ] + "With" ]( + this, + fn ? 
[ returned ] : arguments + ); + } + } ); + } ); + fns = null; + } ).promise(); + }, + then: function( onFulfilled, onRejected, onProgress ) { + var maxDepth = 0; + function resolve( depth, deferred, handler, special ) { + return function() { + var that = this, + args = arguments, + mightThrow = function() { + var returned, then; + + // Support: Promises/A+ section 2.3.3.3.3 + // https://promisesaplus.com/#point-59 + // Ignore double-resolution attempts + if ( depth < maxDepth ) { + return; + } + + returned = handler.apply( that, args ); + + // Support: Promises/A+ section 2.3.1 + // https://promisesaplus.com/#point-48 + if ( returned === deferred.promise() ) { + throw new TypeError( "Thenable self-resolution" ); + } + + // Support: Promises/A+ sections 2.3.3.1, 3.5 + // https://promisesaplus.com/#point-54 + // https://promisesaplus.com/#point-75 + // Retrieve `then` only once + then = returned && + + // Support: Promises/A+ section 2.3.4 + // https://promisesaplus.com/#point-64 + // Only check objects and functions for thenability + ( typeof returned === "object" || + typeof returned === "function" ) && + returned.then; + + // Handle a returned thenable + if ( isFunction( then ) ) { + + // Special processors (notify) just wait for resolution + if ( special ) { + then.call( + returned, + resolve( maxDepth, deferred, Identity, special ), + resolve( maxDepth, deferred, Thrower, special ) + ); + + // Normal processors (resolve) also hook into progress + } else { + + // ...and disregard older resolution values + maxDepth++; + + then.call( + returned, + resolve( maxDepth, deferred, Identity, special ), + resolve( maxDepth, deferred, Thrower, special ), + resolve( maxDepth, deferred, Identity, + deferred.notifyWith ) + ); + } + + // Handle all other returned values + } else { + + // Only substitute handlers pass on context + // and multiple values (non-spec behavior) + if ( handler !== Identity ) { + that = undefined; + args = [ returned ]; + } + + // Process the value(s) + // Default process is resolve + ( special || deferred.resolveWith )( that, args ); + } + }, + + // Only normal processors (resolve) catch and reject exceptions + process = special ? + mightThrow : + function() { + try { + mightThrow(); + } catch ( e ) { + + if ( jQuery.Deferred.exceptionHook ) { + jQuery.Deferred.exceptionHook( e, + process.stackTrace ); + } + + // Support: Promises/A+ section 2.3.3.3.4.1 + // https://promisesaplus.com/#point-61 + // Ignore post-resolution exceptions + if ( depth + 1 >= maxDepth ) { + + // Only substitute handlers pass on context + // and multiple values (non-spec behavior) + if ( handler !== Thrower ) { + that = undefined; + args = [ e ]; + } + + deferred.rejectWith( that, args ); + } + } + }; + + // Support: Promises/A+ section 2.3.3.3.1 + // https://promisesaplus.com/#point-57 + // Re-resolve promises immediately to dodge false rejection from + // subsequent errors + if ( depth ) { + process(); + } else { + + // Call an optional hook to record the stack, in case of exception + // since it's otherwise lost when execution goes async + if ( jQuery.Deferred.getStackHook ) { + process.stackTrace = jQuery.Deferred.getStackHook(); + } + window.setTimeout( process ); + } + }; + } + + return jQuery.Deferred( function( newDefer ) { + + // progress_handlers.add( ... ) + tuples[ 0 ][ 3 ].add( + resolve( + 0, + newDefer, + isFunction( onProgress ) ? + onProgress : + Identity, + newDefer.notifyWith + ) + ); + + // fulfilled_handlers.add( ... 
) + tuples[ 1 ][ 3 ].add( + resolve( + 0, + newDefer, + isFunction( onFulfilled ) ? + onFulfilled : + Identity + ) + ); + + // rejected_handlers.add( ... ) + tuples[ 2 ][ 3 ].add( + resolve( + 0, + newDefer, + isFunction( onRejected ) ? + onRejected : + Thrower + ) + ); + } ).promise(); + }, + + // Get a promise for this deferred + // If obj is provided, the promise aspect is added to the object + promise: function( obj ) { + return obj != null ? jQuery.extend( obj, promise ) : promise; + } + }, + deferred = {}; + + // Add list-specific methods + jQuery.each( tuples, function( i, tuple ) { + var list = tuple[ 2 ], + stateString = tuple[ 5 ]; + + // promise.progress = list.add + // promise.done = list.add + // promise.fail = list.add + promise[ tuple[ 1 ] ] = list.add; + + // Handle state + if ( stateString ) { + list.add( + function() { + + // state = "resolved" (i.e., fulfilled) + // state = "rejected" + state = stateString; + }, + + // rejected_callbacks.disable + // fulfilled_callbacks.disable + tuples[ 3 - i ][ 2 ].disable, + + // rejected_handlers.disable + // fulfilled_handlers.disable + tuples[ 3 - i ][ 3 ].disable, + + // progress_callbacks.lock + tuples[ 0 ][ 2 ].lock, + + // progress_handlers.lock + tuples[ 0 ][ 3 ].lock + ); + } + + // progress_handlers.fire + // fulfilled_handlers.fire + // rejected_handlers.fire + list.add( tuple[ 3 ].fire ); + + // deferred.notify = function() { deferred.notifyWith(...) } + // deferred.resolve = function() { deferred.resolveWith(...) } + // deferred.reject = function() { deferred.rejectWith(...) } + deferred[ tuple[ 0 ] ] = function() { + deferred[ tuple[ 0 ] + "With" ]( this === deferred ? undefined : this, arguments ); + return this; + }; + + // deferred.notifyWith = list.fireWith + // deferred.resolveWith = list.fireWith + // deferred.rejectWith = list.fireWith + deferred[ tuple[ 0 ] + "With" ] = list.fireWith; + } ); + + // Make the deferred a promise + promise.promise( deferred ); + + // Call given func if any + if ( func ) { + func.call( deferred, deferred ); + } + + // All done! + return deferred; + }, + + // Deferred helper + when: function( singleValue ) { + var + + // count of uncompleted subordinates + remaining = arguments.length, + + // count of unprocessed arguments + i = remaining, + + // subordinate fulfillment data + resolveContexts = Array( i ), + resolveValues = slice.call( arguments ), + + // the primary Deferred + primary = jQuery.Deferred(), + + // subordinate callback factory + updateFunc = function( i ) { + return function( value ) { + resolveContexts[ i ] = this; + resolveValues[ i ] = arguments.length > 1 ? slice.call( arguments ) : value; + if ( !( --remaining ) ) { + primary.resolveWith( resolveContexts, resolveValues ); + } + }; + }; + + // Single- and empty arguments are adopted like Promise.resolve + if ( remaining <= 1 ) { + adoptValue( singleValue, primary.done( updateFunc( i ) ).resolve, primary.reject, + !remaining ); + + // Use .then() to unwrap secondary thenables (cf. gh-3000) + if ( primary.state() === "pending" || + isFunction( resolveValues[ i ] && resolveValues[ i ].then ) ) { + + return primary.then(); + } + } + + // Multiple arguments are aggregated like Promise.all array elements + while ( i-- ) { + adoptValue( resolveValues[ i ], updateFunc( i ), primary.reject ); + } + + return primary.promise(); + } +} ); + + +// These usually indicate a programmer mistake during development, +// warn about them ASAP rather than swallowing them by default. 
+var rerrorNames = /^(Eval|Internal|Range|Reference|Syntax|Type|URI)Error$/; + +jQuery.Deferred.exceptionHook = function( error, stack ) { + + // Support: IE 8 - 9 only + // Console exists when dev tools are open, which can happen at any time + if ( window.console && window.console.warn && error && rerrorNames.test( error.name ) ) { + window.console.warn( "jQuery.Deferred exception: " + error.message, error.stack, stack ); + } +}; + + + + +jQuery.readyException = function( error ) { + window.setTimeout( function() { + throw error; + } ); +}; + + + + +// The deferred used on DOM ready +var readyList = jQuery.Deferred(); + +jQuery.fn.ready = function( fn ) { + + readyList + .then( fn ) + + // Wrap jQuery.readyException in a function so that the lookup + // happens at the time of error handling instead of callback + // registration. + .catch( function( error ) { + jQuery.readyException( error ); + } ); + + return this; +}; + +jQuery.extend( { + + // Is the DOM ready to be used? Set to true once it occurs. + isReady: false, + + // A counter to track how many items to wait for before + // the ready event fires. See #6781 + readyWait: 1, + + // Handle when the DOM is ready + ready: function( wait ) { + + // Abort if there are pending holds or we're already ready + if ( wait === true ? --jQuery.readyWait : jQuery.isReady ) { + return; + } + + // Remember that the DOM is ready + jQuery.isReady = true; + + // If a normal DOM Ready event fired, decrement, and wait if need be + if ( wait !== true && --jQuery.readyWait > 0 ) { + return; + } + + // If there are functions bound, to execute + readyList.resolveWith( document, [ jQuery ] ); + } +} ); + +jQuery.ready.then = readyList.then; + +// The ready event handler and self cleanup method +function completed() { + document.removeEventListener( "DOMContentLoaded", completed ); + window.removeEventListener( "load", completed ); + jQuery.ready(); +} + +// Catch cases where $(document).ready() is called +// after the browser event has already occurred. +// Support: IE <=9 - 10 only +// Older IE sometimes signals "interactive" too soon +if ( document.readyState === "complete" || + ( document.readyState !== "loading" && !document.documentElement.doScroll ) ) { + + // Handle it asynchronously to allow scripts the opportunity to delay ready + window.setTimeout( jQuery.ready ); + +} else { + + // Use the handy event callback + document.addEventListener( "DOMContentLoaded", completed ); + + // A fallback to window.onload, that will always work + window.addEventListener( "load", completed ); +} + + + + +// Multifunctional method to get and set values of a collection +// The value/s can optionally be executed if it's a function +var access = function( elems, fn, key, value, chainable, emptyGet, raw ) { + var i = 0, + len = elems.length, + bulk = key == null; + + // Sets many values + if ( toType( key ) === "object" ) { + chainable = true; + for ( i in key ) { + access( elems, fn, i, key[ i ], true, emptyGet, raw ); + } + + // Sets one value + } else if ( value !== undefined ) { + chainable = true; + + if ( !isFunction( value ) ) { + raw = true; + } + + if ( bulk ) { + + // Bulk operations run against the entire set + if ( raw ) { + fn.call( elems, value ); + fn = null; + + // ...except when executing function values + } else { + bulk = fn; + fn = function( elem, _key, value ) { + return bulk.call( jQuery( elem ), value ); + }; + } + } + + if ( fn ) { + for ( ; i < len; i++ ) { + fn( + elems[ i ], key, raw ? 
+ value : + value.call( elems[ i ], i, fn( elems[ i ], key ) ) + ); + } + } + } + + if ( chainable ) { + return elems; + } + + // Gets + if ( bulk ) { + return fn.call( elems ); + } + + return len ? fn( elems[ 0 ], key ) : emptyGet; +}; + + +// Matches dashed string for camelizing +var rmsPrefix = /^-ms-/, + rdashAlpha = /-([a-z])/g; + +// Used by camelCase as callback to replace() +function fcamelCase( _all, letter ) { + return letter.toUpperCase(); +} + +// Convert dashed to camelCase; used by the css and data modules +// Support: IE <=9 - 11, Edge 12 - 15 +// Microsoft forgot to hump their vendor prefix (#9572) +function camelCase( string ) { + return string.replace( rmsPrefix, "ms-" ).replace( rdashAlpha, fcamelCase ); +} +var acceptData = function( owner ) { + + // Accepts only: + // - Node + // - Node.ELEMENT_NODE + // - Node.DOCUMENT_NODE + // - Object + // - Any + return owner.nodeType === 1 || owner.nodeType === 9 || !( +owner.nodeType ); +}; + + + + +function Data() { + this.expando = jQuery.expando + Data.uid++; +} + +Data.uid = 1; + +Data.prototype = { + + cache: function( owner ) { + + // Check if the owner object already has a cache + var value = owner[ this.expando ]; + + // If not, create one + if ( !value ) { + value = {}; + + // We can accept data for non-element nodes in modern browsers, + // but we should not, see #8335. + // Always return an empty object. + if ( acceptData( owner ) ) { + + // If it is a node unlikely to be stringify-ed or looped over + // use plain assignment + if ( owner.nodeType ) { + owner[ this.expando ] = value; + + // Otherwise secure it in a non-enumerable property + // configurable must be true to allow the property to be + // deleted when data is removed + } else { + Object.defineProperty( owner, this.expando, { + value: value, + configurable: true + } ); + } + } + } + + return value; + }, + set: function( owner, data, value ) { + var prop, + cache = this.cache( owner ); + + // Handle: [ owner, key, value ] args + // Always use camelCase key (gh-2257) + if ( typeof data === "string" ) { + cache[ camelCase( data ) ] = value; + + // Handle: [ owner, { properties } ] args + } else { + + // Copy the properties one-by-one to the cache object + for ( prop in data ) { + cache[ camelCase( prop ) ] = data[ prop ]; + } + } + return cache; + }, + get: function( owner, key ) { + return key === undefined ? + this.cache( owner ) : + + // Always use camelCase key (gh-2257) + owner[ this.expando ] && owner[ this.expando ][ camelCase( key ) ]; + }, + access: function( owner, key, value ) { + + // In cases where either: + // + // 1. No key was specified + // 2. A string key was specified, but no value provided + // + // Take the "read" path and allow the get method to determine + // which value to return, respectively either: + // + // 1. The entire cache object + // 2. The data stored at the key + // + if ( key === undefined || + ( ( key && typeof key === "string" ) && value === undefined ) ) { + + return this.get( owner, key ); + } + + // When the key is not a string, or both a key and value + // are specified, set or extend (existing objects) with either: + // + // 1. An object of properties + // 2. A key and value + // + this.set( owner, key, value ); + + // Since the "set" path can have two possible entry points + // return the expected data based on which path was taken[*] + return value !== undefined ? 
value : key; + }, + remove: function( owner, key ) { + var i, + cache = owner[ this.expando ]; + + if ( cache === undefined ) { + return; + } + + if ( key !== undefined ) { + + // Support array or space separated string of keys + if ( Array.isArray( key ) ) { + + // If key is an array of keys... + // We always set camelCase keys, so remove that. + key = key.map( camelCase ); + } else { + key = camelCase( key ); + + // If a key with the spaces exists, use it. + // Otherwise, create an array by matching non-whitespace + key = key in cache ? + [ key ] : + ( key.match( rnothtmlwhite ) || [] ); + } + + i = key.length; + + while ( i-- ) { + delete cache[ key[ i ] ]; + } + } + + // Remove the expando if there's no more data + if ( key === undefined || jQuery.isEmptyObject( cache ) ) { + + // Support: Chrome <=35 - 45 + // Webkit & Blink performance suffers when deleting properties + // from DOM nodes, so set to undefined instead + // https://bugs.chromium.org/p/chromium/issues/detail?id=378607 (bug restricted) + if ( owner.nodeType ) { + owner[ this.expando ] = undefined; + } else { + delete owner[ this.expando ]; + } + } + }, + hasData: function( owner ) { + var cache = owner[ this.expando ]; + return cache !== undefined && !jQuery.isEmptyObject( cache ); + } +}; +var dataPriv = new Data(); + +var dataUser = new Data(); + + + +// Implementation Summary +// +// 1. Enforce API surface and semantic compatibility with 1.9.x branch +// 2. Improve the module's maintainability by reducing the storage +// paths to a single mechanism. +// 3. Use the same single mechanism to support "private" and "user" data. +// 4. _Never_ expose "private" data to user code (TODO: Drop _data, _removeData) +// 5. Avoid exposing implementation details on user objects (eg. expando properties) +// 6. Provide a clear path for implementation upgrade to WeakMap in 2014 + +var rbrace = /^(?:\{[\w\W]*\}|\[[\w\W]*\])$/, + rmultiDash = /[A-Z]/g; + +function getData( data ) { + if ( data === "true" ) { + return true; + } + + if ( data === "false" ) { + return false; + } + + if ( data === "null" ) { + return null; + } + + // Only convert to a number if it doesn't change the string + if ( data === +data + "" ) { + return +data; + } + + if ( rbrace.test( data ) ) { + return JSON.parse( data ); + } + + return data; +} + +function dataAttr( elem, key, data ) { + var name; + + // If nothing was found internally, try to fetch any + // data from the HTML5 data-* attribute + if ( data === undefined && elem.nodeType === 1 ) { + name = "data-" + key.replace( rmultiDash, "-$&" ).toLowerCase(); + data = elem.getAttribute( name ); + + if ( typeof data === "string" ) { + try { + data = getData( data ); + } catch ( e ) {} + + // Make sure we set the data so it isn't changed later + dataUser.set( elem, key, data ); + } else { + data = undefined; + } + } + return data; +} + +jQuery.extend( { + hasData: function( elem ) { + return dataUser.hasData( elem ) || dataPriv.hasData( elem ); + }, + + data: function( elem, name, data ) { + return dataUser.access( elem, name, data ); + }, + + removeData: function( elem, name ) { + dataUser.remove( elem, name ); + }, + + // TODO: Now that all calls to _data and _removeData have been replaced + // with direct calls to dataPriv methods, these can be deprecated. 
+ _data: function( elem, name, data ) { + return dataPriv.access( elem, name, data ); + }, + + _removeData: function( elem, name ) { + dataPriv.remove( elem, name ); + } +} ); + +jQuery.fn.extend( { + data: function( key, value ) { + var i, name, data, + elem = this[ 0 ], + attrs = elem && elem.attributes; + + // Gets all values + if ( key === undefined ) { + if ( this.length ) { + data = dataUser.get( elem ); + + if ( elem.nodeType === 1 && !dataPriv.get( elem, "hasDataAttrs" ) ) { + i = attrs.length; + while ( i-- ) { + + // Support: IE 11 only + // The attrs elements can be null (#14894) + if ( attrs[ i ] ) { + name = attrs[ i ].name; + if ( name.indexOf( "data-" ) === 0 ) { + name = camelCase( name.slice( 5 ) ); + dataAttr( elem, name, data[ name ] ); + } + } + } + dataPriv.set( elem, "hasDataAttrs", true ); + } + } + + return data; + } + + // Sets multiple values + if ( typeof key === "object" ) { + return this.each( function() { + dataUser.set( this, key ); + } ); + } + + return access( this, function( value ) { + var data; + + // The calling jQuery object (element matches) is not empty + // (and therefore has an element appears at this[ 0 ]) and the + // `value` parameter was not undefined. An empty jQuery object + // will result in `undefined` for elem = this[ 0 ] which will + // throw an exception if an attempt to read a data cache is made. + if ( elem && value === undefined ) { + + // Attempt to get data from the cache + // The key will always be camelCased in Data + data = dataUser.get( elem, key ); + if ( data !== undefined ) { + return data; + } + + // Attempt to "discover" the data in + // HTML5 custom data-* attrs + data = dataAttr( elem, key ); + if ( data !== undefined ) { + return data; + } + + // We tried really hard, but the data doesn't exist. + return; + } + + // Set the data... 
+ this.each( function() { + + // We always store the camelCased key + dataUser.set( this, key, value ); + } ); + }, null, value, arguments.length > 1, null, true ); + }, + + removeData: function( key ) { + return this.each( function() { + dataUser.remove( this, key ); + } ); + } +} ); + + +jQuery.extend( { + queue: function( elem, type, data ) { + var queue; + + if ( elem ) { + type = ( type || "fx" ) + "queue"; + queue = dataPriv.get( elem, type ); + + // Speed up dequeue by getting out quickly if this is just a lookup + if ( data ) { + if ( !queue || Array.isArray( data ) ) { + queue = dataPriv.access( elem, type, jQuery.makeArray( data ) ); + } else { + queue.push( data ); + } + } + return queue || []; + } + }, + + dequeue: function( elem, type ) { + type = type || "fx"; + + var queue = jQuery.queue( elem, type ), + startLength = queue.length, + fn = queue.shift(), + hooks = jQuery._queueHooks( elem, type ), + next = function() { + jQuery.dequeue( elem, type ); + }; + + // If the fx queue is dequeued, always remove the progress sentinel + if ( fn === "inprogress" ) { + fn = queue.shift(); + startLength--; + } + + if ( fn ) { + + // Add a progress sentinel to prevent the fx queue from being + // automatically dequeued + if ( type === "fx" ) { + queue.unshift( "inprogress" ); + } + + // Clear up the last queue stop function + delete hooks.stop; + fn.call( elem, next, hooks ); + } + + if ( !startLength && hooks ) { + hooks.empty.fire(); + } + }, + + // Not public - generate a queueHooks object, or return the current one + _queueHooks: function( elem, type ) { + var key = type + "queueHooks"; + return dataPriv.get( elem, key ) || dataPriv.access( elem, key, { + empty: jQuery.Callbacks( "once memory" ).add( function() { + dataPriv.remove( elem, [ type + "queue", key ] ); + } ) + } ); + } +} ); + +jQuery.fn.extend( { + queue: function( type, data ) { + var setter = 2; + + if ( typeof type !== "string" ) { + data = type; + type = "fx"; + setter--; + } + + if ( arguments.length < setter ) { + return jQuery.queue( this[ 0 ], type ); + } + + return data === undefined ? 
+ this : + this.each( function() { + var queue = jQuery.queue( this, type, data ); + + // Ensure a hooks for this queue + jQuery._queueHooks( this, type ); + + if ( type === "fx" && queue[ 0 ] !== "inprogress" ) { + jQuery.dequeue( this, type ); + } + } ); + }, + dequeue: function( type ) { + return this.each( function() { + jQuery.dequeue( this, type ); + } ); + }, + clearQueue: function( type ) { + return this.queue( type || "fx", [] ); + }, + + // Get a promise resolved when queues of a certain type + // are emptied (fx is the type by default) + promise: function( type, obj ) { + var tmp, + count = 1, + defer = jQuery.Deferred(), + elements = this, + i = this.length, + resolve = function() { + if ( !( --count ) ) { + defer.resolveWith( elements, [ elements ] ); + } + }; + + if ( typeof type !== "string" ) { + obj = type; + type = undefined; + } + type = type || "fx"; + + while ( i-- ) { + tmp = dataPriv.get( elements[ i ], type + "queueHooks" ); + if ( tmp && tmp.empty ) { + count++; + tmp.empty.add( resolve ); + } + } + resolve(); + return defer.promise( obj ); + } +} ); +var pnum = ( /[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/ ).source; + +var rcssNum = new RegExp( "^(?:([+-])=|)(" + pnum + ")([a-z%]*)$", "i" ); + + +var cssExpand = [ "Top", "Right", "Bottom", "Left" ]; + +var documentElement = document.documentElement; + + + + var isAttached = function( elem ) { + return jQuery.contains( elem.ownerDocument, elem ); + }, + composed = { composed: true }; + + // Support: IE 9 - 11+, Edge 12 - 18+, iOS 10.0 - 10.2 only + // Check attachment across shadow DOM boundaries when possible (gh-3504) + // Support: iOS 10.0-10.2 only + // Early iOS 10 versions support `attachShadow` but not `getRootNode`, + // leading to errors. We need to check for `getRootNode`. + if ( documentElement.getRootNode ) { + isAttached = function( elem ) { + return jQuery.contains( elem.ownerDocument, elem ) || + elem.getRootNode( composed ) === elem.ownerDocument; + }; + } +var isHiddenWithinTree = function( elem, el ) { + + // isHiddenWithinTree might be called from jQuery#filter function; + // in that case, element will be second argument + elem = el || elem; + + // Inline style trumps all + return elem.style.display === "none" || + elem.style.display === "" && + + // Otherwise, check computed style + // Support: Firefox <=43 - 45 + // Disconnected elements can have computed display: none, so first confirm that elem is + // in the document. + isAttached( elem ) && + + jQuery.css( elem, "display" ) === "none"; + }; + + + +function adjustCSS( elem, prop, valueParts, tween ) { + var adjusted, scale, + maxIterations = 20, + currentValue = tween ? + function() { + return tween.cur(); + } : + function() { + return jQuery.css( elem, prop, "" ); + }, + initial = currentValue(), + unit = valueParts && valueParts[ 3 ] || ( jQuery.cssNumber[ prop ] ? 
"" : "px" ), + + // Starting value computation is required for potential unit mismatches + initialInUnit = elem.nodeType && + ( jQuery.cssNumber[ prop ] || unit !== "px" && +initial ) && + rcssNum.exec( jQuery.css( elem, prop ) ); + + if ( initialInUnit && initialInUnit[ 3 ] !== unit ) { + + // Support: Firefox <=54 + // Halve the iteration target value to prevent interference from CSS upper bounds (gh-2144) + initial = initial / 2; + + // Trust units reported by jQuery.css + unit = unit || initialInUnit[ 3 ]; + + // Iteratively approximate from a nonzero starting point + initialInUnit = +initial || 1; + + while ( maxIterations-- ) { + + // Evaluate and update our best guess (doubling guesses that zero out). + // Finish if the scale equals or crosses 1 (making the old*new product non-positive). + jQuery.style( elem, prop, initialInUnit + unit ); + if ( ( 1 - scale ) * ( 1 - ( scale = currentValue() / initial || 0.5 ) ) <= 0 ) { + maxIterations = 0; + } + initialInUnit = initialInUnit / scale; + + } + + initialInUnit = initialInUnit * 2; + jQuery.style( elem, prop, initialInUnit + unit ); + + // Make sure we update the tween properties later on + valueParts = valueParts || []; + } + + if ( valueParts ) { + initialInUnit = +initialInUnit || +initial || 0; + + // Apply relative offset (+=/-=) if specified + adjusted = valueParts[ 1 ] ? + initialInUnit + ( valueParts[ 1 ] + 1 ) * valueParts[ 2 ] : + +valueParts[ 2 ]; + if ( tween ) { + tween.unit = unit; + tween.start = initialInUnit; + tween.end = adjusted; + } + } + return adjusted; +} + + +var defaultDisplayMap = {}; + +function getDefaultDisplay( elem ) { + var temp, + doc = elem.ownerDocument, + nodeName = elem.nodeName, + display = defaultDisplayMap[ nodeName ]; + + if ( display ) { + return display; + } + + temp = doc.body.appendChild( doc.createElement( nodeName ) ); + display = jQuery.css( temp, "display" ); + + temp.parentNode.removeChild( temp ); + + if ( display === "none" ) { + display = "block"; + } + defaultDisplayMap[ nodeName ] = display; + + return display; +} + +function showHide( elements, show ) { + var display, elem, + values = [], + index = 0, + length = elements.length; + + // Determine new display value for elements that need to change + for ( ; index < length; index++ ) { + elem = elements[ index ]; + if ( !elem.style ) { + continue; + } + + display = elem.style.display; + if ( show ) { + + // Since we force visibility upon cascade-hidden elements, an immediate (and slow) + // check is required in this first loop unless we have a nonempty display value (either + // inline or about-to-be-restored) + if ( display === "none" ) { + values[ index ] = dataPriv.get( elem, "display" ) || null; + if ( !values[ index ] ) { + elem.style.display = ""; + } + } + if ( elem.style.display === "" && isHiddenWithinTree( elem ) ) { + values[ index ] = getDefaultDisplay( elem ); + } + } else { + if ( display !== "none" ) { + values[ index ] = "none"; + + // Remember what we're overwriting + dataPriv.set( elem, "display", display ); + } + } + } + + // Set the display of the elements in a second loop to avoid constant reflow + for ( index = 0; index < length; index++ ) { + if ( values[ index ] != null ) { + elements[ index ].style.display = values[ index ]; + } + } + + return elements; +} + +jQuery.fn.extend( { + show: function() { + return showHide( this, true ); + }, + hide: function() { + return showHide( this ); + }, + toggle: function( state ) { + if ( typeof state === "boolean" ) { + return state ? 
this.show() : this.hide(); + } + + return this.each( function() { + if ( isHiddenWithinTree( this ) ) { + jQuery( this ).show(); + } else { + jQuery( this ).hide(); + } + } ); + } +} ); +var rcheckableType = ( /^(?:checkbox|radio)$/i ); + +var rtagName = ( /<([a-z][^\/\0>\x20\t\r\n\f]*)/i ); + +var rscriptType = ( /^$|^module$|\/(?:java|ecma)script/i ); + + + +( function() { + var fragment = document.createDocumentFragment(), + div = fragment.appendChild( document.createElement( "div" ) ), + input = document.createElement( "input" ); + + // Support: Android 4.0 - 4.3 only + // Check state lost if the name is set (#11217) + // Support: Windows Web Apps (WWA) + // `name` and `type` must use .setAttribute for WWA (#14901) + input.setAttribute( "type", "radio" ); + input.setAttribute( "checked", "checked" ); + input.setAttribute( "name", "t" ); + + div.appendChild( input ); + + // Support: Android <=4.1 only + // Older WebKit doesn't clone checked state correctly in fragments + support.checkClone = div.cloneNode( true ).cloneNode( true ).lastChild.checked; + + // Support: IE <=11 only + // Make sure textarea (and checkbox) defaultValue is properly cloned + div.innerHTML = ""; + support.noCloneChecked = !!div.cloneNode( true ).lastChild.defaultValue; + + // Support: IE <=9 only + // IE <=9 replaces "; + support.option = !!div.lastChild; +} )(); + + +// We have to close these tags to support XHTML (#13200) +var wrapMap = { + + // XHTML parsers do not magically insert elements in the + // same way that tag soup parsers do. So we cannot shorten + // this by omitting or other required elements. + thead: [ 1, "", "
" ], + col: [ 2, "", "
" ], + tr: [ 2, "", "
" ], + td: [ 3, "", "
" ], + + _default: [ 0, "", "" ] +}; + +wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead; +wrapMap.th = wrapMap.td; + +// Support: IE <=9 only +if ( !support.option ) { + wrapMap.optgroup = wrapMap.option = [ 1, "" ]; +} + + +function getAll( context, tag ) { + + // Support: IE <=9 - 11 only + // Use typeof to avoid zero-argument method invocation on host objects (#15151) + var ret; + + if ( typeof context.getElementsByTagName !== "undefined" ) { + ret = context.getElementsByTagName( tag || "*" ); + + } else if ( typeof context.querySelectorAll !== "undefined" ) { + ret = context.querySelectorAll( tag || "*" ); + + } else { + ret = []; + } + + if ( tag === undefined || tag && nodeName( context, tag ) ) { + return jQuery.merge( [ context ], ret ); + } + + return ret; +} + + +// Mark scripts as having already been evaluated +function setGlobalEval( elems, refElements ) { + var i = 0, + l = elems.length; + + for ( ; i < l; i++ ) { + dataPriv.set( + elems[ i ], + "globalEval", + !refElements || dataPriv.get( refElements[ i ], "globalEval" ) + ); + } +} + + +var rhtml = /<|&#?\w+;/; + +function buildFragment( elems, context, scripts, selection, ignored ) { + var elem, tmp, tag, wrap, attached, j, + fragment = context.createDocumentFragment(), + nodes = [], + i = 0, + l = elems.length; + + for ( ; i < l; i++ ) { + elem = elems[ i ]; + + if ( elem || elem === 0 ) { + + // Add nodes directly + if ( toType( elem ) === "object" ) { + + // Support: Android <=4.0 only, PhantomJS 1 only + // push.apply(_, arraylike) throws on ancient WebKit + jQuery.merge( nodes, elem.nodeType ? [ elem ] : elem ); + + // Convert non-html into a text node + } else if ( !rhtml.test( elem ) ) { + nodes.push( context.createTextNode( elem ) ); + + // Convert html into DOM nodes + } else { + tmp = tmp || fragment.appendChild( context.createElement( "div" ) ); + + // Deserialize a standard representation + tag = ( rtagName.exec( elem ) || [ "", "" ] )[ 1 ].toLowerCase(); + wrap = wrapMap[ tag ] || wrapMap._default; + tmp.innerHTML = wrap[ 1 ] + jQuery.htmlPrefilter( elem ) + wrap[ 2 ]; + + // Descend through wrappers to the right content + j = wrap[ 0 ]; + while ( j-- ) { + tmp = tmp.lastChild; + } + + // Support: Android <=4.0 only, PhantomJS 1 only + // push.apply(_, arraylike) throws on ancient WebKit + jQuery.merge( nodes, tmp.childNodes ); + + // Remember the top-level container + tmp = fragment.firstChild; + + // Ensure the created nodes are orphaned (#12392) + tmp.textContent = ""; + } + } + } + + // Remove wrapper from fragment + fragment.textContent = ""; + + i = 0; + while ( ( elem = nodes[ i++ ] ) ) { + + // Skip elements already in the context collection (trac-4087) + if ( selection && jQuery.inArray( elem, selection ) > -1 ) { + if ( ignored ) { + ignored.push( elem ); + } + continue; + } + + attached = isAttached( elem ); + + // Append to fragment + tmp = getAll( fragment.appendChild( elem ), "script" ); + + // Preserve script evaluation history + if ( attached ) { + setGlobalEval( tmp ); + } + + // Capture executables + if ( scripts ) { + j = 0; + while ( ( elem = tmp[ j++ ] ) ) { + if ( rscriptType.test( elem.type || "" ) ) { + scripts.push( elem ); + } + } + } + } + + return fragment; +} + + +var rtypenamespace = /^([^.]*)(?:\.(.+)|)/; + +function returnTrue() { + return true; +} + +function returnFalse() { + return false; +} + +// Support: IE <=9 - 11+ +// focus() and blur() are asynchronous, except when they are no-op. 
+// So expect focus to be synchronous when the element is already active, +// and blur to be synchronous when the element is not already active. +// (focus and blur are always synchronous in other supported browsers, +// this just defines when we can count on it). +function expectSync( elem, type ) { + return ( elem === safeActiveElement() ) === ( type === "focus" ); +} + +// Support: IE <=9 only +// Accessing document.activeElement can throw unexpectedly +// https://bugs.jquery.com/ticket/13393 +function safeActiveElement() { + try { + return document.activeElement; + } catch ( err ) { } +} + +function on( elem, types, selector, data, fn, one ) { + var origFn, type; + + // Types can be a map of types/handlers + if ( typeof types === "object" ) { + + // ( types-Object, selector, data ) + if ( typeof selector !== "string" ) { + + // ( types-Object, data ) + data = data || selector; + selector = undefined; + } + for ( type in types ) { + on( elem, type, selector, data, types[ type ], one ); + } + return elem; + } + + if ( data == null && fn == null ) { + + // ( types, fn ) + fn = selector; + data = selector = undefined; + } else if ( fn == null ) { + if ( typeof selector === "string" ) { + + // ( types, selector, fn ) + fn = data; + data = undefined; + } else { + + // ( types, data, fn ) + fn = data; + data = selector; + selector = undefined; + } + } + if ( fn === false ) { + fn = returnFalse; + } else if ( !fn ) { + return elem; + } + + if ( one === 1 ) { + origFn = fn; + fn = function( event ) { + + // Can use an empty set, since event contains the info + jQuery().off( event ); + return origFn.apply( this, arguments ); + }; + + // Use same guid so caller can remove using origFn + fn.guid = origFn.guid || ( origFn.guid = jQuery.guid++ ); + } + return elem.each( function() { + jQuery.event.add( this, types, fn, data, selector ); + } ); +} + +/* + * Helper functions for managing events -- not part of the public interface. + * Props to Dean Edwards' addEvent library for many of the ideas. + */ +jQuery.event = { + + global: {}, + + add: function( elem, types, handler, data, selector ) { + + var handleObjIn, eventHandle, tmp, + events, t, handleObj, + special, handlers, type, namespaces, origType, + elemData = dataPriv.get( elem ); + + // Only attach events to objects that accept data + if ( !acceptData( elem ) ) { + return; + } + + // Caller can pass in an object of custom data in lieu of the handler + if ( handler.handler ) { + handleObjIn = handler; + handler = handleObjIn.handler; + selector = handleObjIn.selector; + } + + // Ensure that invalid selectors throw exceptions at attach time + // Evaluate against documentElement in case elem is a non-element node (e.g., document) + if ( selector ) { + jQuery.find.matchesSelector( documentElement, selector ); + } + + // Make sure that the handler has a unique ID, used to find/remove it later + if ( !handler.guid ) { + handler.guid = jQuery.guid++; + } + + // Init the element's event structure and main handler, if this is the first + if ( !( events = elemData.events ) ) { + events = elemData.events = Object.create( null ); + } + if ( !( eventHandle = elemData.handle ) ) { + eventHandle = elemData.handle = function( e ) { + + // Discard the second event of a jQuery.event.trigger() and + // when an event is called after a page has unloaded + return typeof jQuery !== "undefined" && jQuery.event.triggered !== e.type ? 
+ jQuery.event.dispatch.apply( elem, arguments ) : undefined; + }; + } + + // Handle multiple events separated by a space + types = ( types || "" ).match( rnothtmlwhite ) || [ "" ]; + t = types.length; + while ( t-- ) { + tmp = rtypenamespace.exec( types[ t ] ) || []; + type = origType = tmp[ 1 ]; + namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort(); + + // There *must* be a type, no attaching namespace-only handlers + if ( !type ) { + continue; + } + + // If event changes its type, use the special event handlers for the changed type + special = jQuery.event.special[ type ] || {}; + + // If selector defined, determine special event api type, otherwise given type + type = ( selector ? special.delegateType : special.bindType ) || type; + + // Update special based on newly reset type + special = jQuery.event.special[ type ] || {}; + + // handleObj is passed to all event handlers + handleObj = jQuery.extend( { + type: type, + origType: origType, + data: data, + handler: handler, + guid: handler.guid, + selector: selector, + needsContext: selector && jQuery.expr.match.needsContext.test( selector ), + namespace: namespaces.join( "." ) + }, handleObjIn ); + + // Init the event handler queue if we're the first + if ( !( handlers = events[ type ] ) ) { + handlers = events[ type ] = []; + handlers.delegateCount = 0; + + // Only use addEventListener if the special events handler returns false + if ( !special.setup || + special.setup.call( elem, data, namespaces, eventHandle ) === false ) { + + if ( elem.addEventListener ) { + elem.addEventListener( type, eventHandle ); + } + } + } + + if ( special.add ) { + special.add.call( elem, handleObj ); + + if ( !handleObj.handler.guid ) { + handleObj.handler.guid = handler.guid; + } + } + + // Add to the element's handler list, delegates in front + if ( selector ) { + handlers.splice( handlers.delegateCount++, 0, handleObj ); + } else { + handlers.push( handleObj ); + } + + // Keep track of which events have ever been used, for event optimization + jQuery.event.global[ type ] = true; + } + + }, + + // Detach an event or set of events from an element + remove: function( elem, types, handler, selector, mappedTypes ) { + + var j, origCount, tmp, + events, t, handleObj, + special, handlers, type, namespaces, origType, + elemData = dataPriv.hasData( elem ) && dataPriv.get( elem ); + + if ( !elemData || !( events = elemData.events ) ) { + return; + } + + // Once for each type.namespace in types; type may be omitted + types = ( types || "" ).match( rnothtmlwhite ) || [ "" ]; + t = types.length; + while ( t-- ) { + tmp = rtypenamespace.exec( types[ t ] ) || []; + type = origType = tmp[ 1 ]; + namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort(); + + // Unbind all events (on this namespace, if provided) for the element + if ( !type ) { + for ( type in events ) { + jQuery.event.remove( elem, type + types[ t ], handler, selector, true ); + } + continue; + } + + special = jQuery.event.special[ type ] || {}; + type = ( selector ? 
special.delegateType : special.bindType ) || type; + handlers = events[ type ] || []; + tmp = tmp[ 2 ] && + new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ); + + // Remove matching events + origCount = j = handlers.length; + while ( j-- ) { + handleObj = handlers[ j ]; + + if ( ( mappedTypes || origType === handleObj.origType ) && + ( !handler || handler.guid === handleObj.guid ) && + ( !tmp || tmp.test( handleObj.namespace ) ) && + ( !selector || selector === handleObj.selector || + selector === "**" && handleObj.selector ) ) { + handlers.splice( j, 1 ); + + if ( handleObj.selector ) { + handlers.delegateCount--; + } + if ( special.remove ) { + special.remove.call( elem, handleObj ); + } + } + } + + // Remove generic event handler if we removed something and no more handlers exist + // (avoids potential for endless recursion during removal of special event handlers) + if ( origCount && !handlers.length ) { + if ( !special.teardown || + special.teardown.call( elem, namespaces, elemData.handle ) === false ) { + + jQuery.removeEvent( elem, type, elemData.handle ); + } + + delete events[ type ]; + } + } + + // Remove data and the expando if it's no longer used + if ( jQuery.isEmptyObject( events ) ) { + dataPriv.remove( elem, "handle events" ); + } + }, + + dispatch: function( nativeEvent ) { + + var i, j, ret, matched, handleObj, handlerQueue, + args = new Array( arguments.length ), + + // Make a writable jQuery.Event from the native event object + event = jQuery.event.fix( nativeEvent ), + + handlers = ( + dataPriv.get( this, "events" ) || Object.create( null ) + )[ event.type ] || [], + special = jQuery.event.special[ event.type ] || {}; + + // Use the fix-ed jQuery.Event rather than the (read-only) native event + args[ 0 ] = event; + + for ( i = 1; i < arguments.length; i++ ) { + args[ i ] = arguments[ i ]; + } + + event.delegateTarget = this; + + // Call the preDispatch hook for the mapped type, and let it bail if desired + if ( special.preDispatch && special.preDispatch.call( this, event ) === false ) { + return; + } + + // Determine handlers + handlerQueue = jQuery.event.handlers.call( this, event, handlers ); + + // Run delegates first; they may want to stop propagation beneath us + i = 0; + while ( ( matched = handlerQueue[ i++ ] ) && !event.isPropagationStopped() ) { + event.currentTarget = matched.elem; + + j = 0; + while ( ( handleObj = matched.handlers[ j++ ] ) && + !event.isImmediatePropagationStopped() ) { + + // If the event is namespaced, then each handler is only invoked if it is + // specially universal or its namespaces are a superset of the event's. 
+ if ( !event.rnamespace || handleObj.namespace === false || + event.rnamespace.test( handleObj.namespace ) ) { + + event.handleObj = handleObj; + event.data = handleObj.data; + + ret = ( ( jQuery.event.special[ handleObj.origType ] || {} ).handle || + handleObj.handler ).apply( matched.elem, args ); + + if ( ret !== undefined ) { + if ( ( event.result = ret ) === false ) { + event.preventDefault(); + event.stopPropagation(); + } + } + } + } + } + + // Call the postDispatch hook for the mapped type + if ( special.postDispatch ) { + special.postDispatch.call( this, event ); + } + + return event.result; + }, + + handlers: function( event, handlers ) { + var i, handleObj, sel, matchedHandlers, matchedSelectors, + handlerQueue = [], + delegateCount = handlers.delegateCount, + cur = event.target; + + // Find delegate handlers + if ( delegateCount && + + // Support: IE <=9 + // Black-hole SVG instance trees (trac-13180) + cur.nodeType && + + // Support: Firefox <=42 + // Suppress spec-violating clicks indicating a non-primary pointer button (trac-3861) + // https://www.w3.org/TR/DOM-Level-3-Events/#event-type-click + // Support: IE 11 only + // ...but not arrow key "clicks" of radio inputs, which can have `button` -1 (gh-2343) + !( event.type === "click" && event.button >= 1 ) ) { + + for ( ; cur !== this; cur = cur.parentNode || this ) { + + // Don't check non-elements (#13208) + // Don't process clicks on disabled elements (#6911, #8165, #11382, #11764) + if ( cur.nodeType === 1 && !( event.type === "click" && cur.disabled === true ) ) { + matchedHandlers = []; + matchedSelectors = {}; + for ( i = 0; i < delegateCount; i++ ) { + handleObj = handlers[ i ]; + + // Don't conflict with Object.prototype properties (#13203) + sel = handleObj.selector + " "; + + if ( matchedSelectors[ sel ] === undefined ) { + matchedSelectors[ sel ] = handleObj.needsContext ? + jQuery( sel, this ).index( cur ) > -1 : + jQuery.find( sel, this, null, [ cur ] ).length; + } + if ( matchedSelectors[ sel ] ) { + matchedHandlers.push( handleObj ); + } + } + if ( matchedHandlers.length ) { + handlerQueue.push( { elem: cur, handlers: matchedHandlers } ); + } + } + } + } + + // Add the remaining (directly-bound) handlers + cur = this; + if ( delegateCount < handlers.length ) { + handlerQueue.push( { elem: cur, handlers: handlers.slice( delegateCount ) } ); + } + + return handlerQueue; + }, + + addProp: function( name, hook ) { + Object.defineProperty( jQuery.Event.prototype, name, { + enumerable: true, + configurable: true, + + get: isFunction( hook ) ? + function() { + if ( this.originalEvent ) { + return hook( this.originalEvent ); + } + } : + function() { + if ( this.originalEvent ) { + return this.originalEvent[ name ]; + } + }, + + set: function( value ) { + Object.defineProperty( this, name, { + enumerable: true, + configurable: true, + writable: true, + value: value + } ); + } + } ); + }, + + fix: function( originalEvent ) { + return originalEvent[ jQuery.expando ] ? + originalEvent : + new jQuery.Event( originalEvent ); + }, + + special: { + load: { + + // Prevent triggered image.load events from bubbling to window.load + noBubble: true + }, + click: { + + // Utilize native event to ensure correct state for checkable inputs + setup: function( data ) { + + // For mutual compressibility with _default, replace `this` access with a local var. + // `|| data` is dead code meant only to preserve the variable through minification. 
+ var el = this || data; + + // Claim the first handler + if ( rcheckableType.test( el.type ) && + el.click && nodeName( el, "input" ) ) { + + // dataPriv.set( el, "click", ... ) + leverageNative( el, "click", returnTrue ); + } + + // Return false to allow normal processing in the caller + return false; + }, + trigger: function( data ) { + + // For mutual compressibility with _default, replace `this` access with a local var. + // `|| data` is dead code meant only to preserve the variable through minification. + var el = this || data; + + // Force setup before triggering a click + if ( rcheckableType.test( el.type ) && + el.click && nodeName( el, "input" ) ) { + + leverageNative( el, "click" ); + } + + // Return non-false to allow normal event-path propagation + return true; + }, + + // For cross-browser consistency, suppress native .click() on links + // Also prevent it if we're currently inside a leveraged native-event stack + _default: function( event ) { + var target = event.target; + return rcheckableType.test( target.type ) && + target.click && nodeName( target, "input" ) && + dataPriv.get( target, "click" ) || + nodeName( target, "a" ); + } + }, + + beforeunload: { + postDispatch: function( event ) { + + // Support: Firefox 20+ + // Firefox doesn't alert if the returnValue field is not set. + if ( event.result !== undefined && event.originalEvent ) { + event.originalEvent.returnValue = event.result; + } + } + } + } +}; + +// Ensure the presence of an event listener that handles manually-triggered +// synthetic events by interrupting progress until reinvoked in response to +// *native* events that it fires directly, ensuring that state changes have +// already occurred before other listeners are invoked. +function leverageNative( el, type, expectSync ) { + + // Missing expectSync indicates a trigger call, which must force setup through jQuery.event.add + if ( !expectSync ) { + if ( dataPriv.get( el, type ) === undefined ) { + jQuery.event.add( el, type, returnTrue ); + } + return; + } + + // Register the controller as a special universal handler for all event namespaces + dataPriv.set( el, type, false ); + jQuery.event.add( el, type, { + namespace: false, + handler: function( event ) { + var notAsync, result, + saved = dataPriv.get( this, type ); + + if ( ( event.isTrigger & 1 ) && this[ type ] ) { + + // Interrupt processing of the outer synthetic .trigger()ed event + // Saved data should be false in such cases, but might be a leftover capture object + // from an async native handler (gh-4350) + if ( !saved.length ) { + + // Store arguments for use when handling the inner native event + // There will always be at least one argument (an event object), so this array + // will not be confused with a leftover capture object. + saved = slice.call( arguments ); + dataPriv.set( this, type, saved ); + + // Trigger the native event and capture its result + // Support: IE <=9 - 11+ + // focus() and blur() are asynchronous + notAsync = expectSync( this, type ); + this[ type ](); + result = dataPriv.get( this, type ); + if ( saved !== result || notAsync ) { + dataPriv.set( this, type, false ); + } else { + result = {}; + } + if ( saved !== result ) { + + // Cancel the outer synthetic event + event.stopImmediatePropagation(); + event.preventDefault(); + + // Support: Chrome 86+ + // In Chrome, if an element having a focusout handler is blurred by + // clicking outside of it, it invokes the handler synchronously. 
If + // that handler calls `.remove()` on the element, the data is cleared, + // leaving `result` undefined. We need to guard against this. + return result && result.value; + } + + // If this is an inner synthetic event for an event with a bubbling surrogate + // (focus or blur), assume that the surrogate already propagated from triggering the + // native event and prevent that from happening again here. + // This technically gets the ordering wrong w.r.t. to `.trigger()` (in which the + // bubbling surrogate propagates *after* the non-bubbling base), but that seems + // less bad than duplication. + } else if ( ( jQuery.event.special[ type ] || {} ).delegateType ) { + event.stopPropagation(); + } + + // If this is a native event triggered above, everything is now in order + // Fire an inner synthetic event with the original arguments + } else if ( saved.length ) { + + // ...and capture the result + dataPriv.set( this, type, { + value: jQuery.event.trigger( + + // Support: IE <=9 - 11+ + // Extend with the prototype to reset the above stopImmediatePropagation() + jQuery.extend( saved[ 0 ], jQuery.Event.prototype ), + saved.slice( 1 ), + this + ) + } ); + + // Abort handling of the native event + event.stopImmediatePropagation(); + } + } + } ); +} + +jQuery.removeEvent = function( elem, type, handle ) { + + // This "if" is needed for plain objects + if ( elem.removeEventListener ) { + elem.removeEventListener( type, handle ); + } +}; + +jQuery.Event = function( src, props ) { + + // Allow instantiation without the 'new' keyword + if ( !( this instanceof jQuery.Event ) ) { + return new jQuery.Event( src, props ); + } + + // Event object + if ( src && src.type ) { + this.originalEvent = src; + this.type = src.type; + + // Events bubbling up the document may have been marked as prevented + // by a handler lower down the tree; reflect the correct value. + this.isDefaultPrevented = src.defaultPrevented || + src.defaultPrevented === undefined && + + // Support: Android <=2.3 only + src.returnValue === false ? + returnTrue : + returnFalse; + + // Create target properties + // Support: Safari <=6 - 7 only + // Target should not be a text node (#504, #13143) + this.target = ( src.target && src.target.nodeType === 3 ) ? 
+ src.target.parentNode : + src.target; + + this.currentTarget = src.currentTarget; + this.relatedTarget = src.relatedTarget; + + // Event type + } else { + this.type = src; + } + + // Put explicitly provided properties onto the event object + if ( props ) { + jQuery.extend( this, props ); + } + + // Create a timestamp if incoming event doesn't have one + this.timeStamp = src && src.timeStamp || Date.now(); + + // Mark it as fixed + this[ jQuery.expando ] = true; +}; + +// jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding +// https://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html +jQuery.Event.prototype = { + constructor: jQuery.Event, + isDefaultPrevented: returnFalse, + isPropagationStopped: returnFalse, + isImmediatePropagationStopped: returnFalse, + isSimulated: false, + + preventDefault: function() { + var e = this.originalEvent; + + this.isDefaultPrevented = returnTrue; + + if ( e && !this.isSimulated ) { + e.preventDefault(); + } + }, + stopPropagation: function() { + var e = this.originalEvent; + + this.isPropagationStopped = returnTrue; + + if ( e && !this.isSimulated ) { + e.stopPropagation(); + } + }, + stopImmediatePropagation: function() { + var e = this.originalEvent; + + this.isImmediatePropagationStopped = returnTrue; + + if ( e && !this.isSimulated ) { + e.stopImmediatePropagation(); + } + + this.stopPropagation(); + } +}; + +// Includes all common event props including KeyEvent and MouseEvent specific props +jQuery.each( { + altKey: true, + bubbles: true, + cancelable: true, + changedTouches: true, + ctrlKey: true, + detail: true, + eventPhase: true, + metaKey: true, + pageX: true, + pageY: true, + shiftKey: true, + view: true, + "char": true, + code: true, + charCode: true, + key: true, + keyCode: true, + button: true, + buttons: true, + clientX: true, + clientY: true, + offsetX: true, + offsetY: true, + pointerId: true, + pointerType: true, + screenX: true, + screenY: true, + targetTouches: true, + toElement: true, + touches: true, + which: true +}, jQuery.event.addProp ); + +jQuery.each( { focus: "focusin", blur: "focusout" }, function( type, delegateType ) { + jQuery.event.special[ type ] = { + + // Utilize native event if possible so blur/focus sequence is correct + setup: function() { + + // Claim the first handler + // dataPriv.set( this, "focus", ... ) + // dataPriv.set( this, "blur", ... ) + leverageNative( this, type, expectSync ); + + // Return false to allow normal processing in the caller + return false; + }, + trigger: function() { + + // Force setup before trigger + leverageNative( this, type ); + + // Return non-false to allow normal event-path propagation + return true; + }, + + // Suppress native focus or blur as it's already being fired + // in leverageNative. + _default: function() { + return true; + }, + + delegateType: delegateType + }; +} ); + +// Create mouseenter/leave events using mouseover/out and event-time checks +// so that event delegation works in jQuery. +// Do the same for pointerenter/pointerleave and pointerover/pointerout +// +// Support: Safari 7 only +// Safari sends mouseenter too often; see: +// https://bugs.chromium.org/p/chromium/issues/detail?id=470258 +// for the description of the bug (it existed in older Chrome versions as well). 
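// For illustration only (a minimal sketch, not part of this file; ".item" and "hover"
// are hypothetical names and the sketch is never invoked): the mapping defined below is
// what lets a delegated mouseenter handler work even though native mouseenter does not
// bubble.
function delegatedMouseenterSketch() {
	jQuery( document ).on( "mouseenter", ".item", function() {

		// Bound internally to "mouseover"; the handle() wrapper only calls this
		// when event.relatedTarget is outside the matched element.
		jQuery( this ).addClass( "hover" );
	} );
}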
+jQuery.each( { + mouseenter: "mouseover", + mouseleave: "mouseout", + pointerenter: "pointerover", + pointerleave: "pointerout" +}, function( orig, fix ) { + jQuery.event.special[ orig ] = { + delegateType: fix, + bindType: fix, + + handle: function( event ) { + var ret, + target = this, + related = event.relatedTarget, + handleObj = event.handleObj; + + // For mouseenter/leave call the handler if related is outside the target. + // NB: No relatedTarget if the mouse left/entered the browser window + if ( !related || ( related !== target && !jQuery.contains( target, related ) ) ) { + event.type = handleObj.origType; + ret = handleObj.handler.apply( this, arguments ); + event.type = fix; + } + return ret; + } + }; +} ); + +jQuery.fn.extend( { + + on: function( types, selector, data, fn ) { + return on( this, types, selector, data, fn ); + }, + one: function( types, selector, data, fn ) { + return on( this, types, selector, data, fn, 1 ); + }, + off: function( types, selector, fn ) { + var handleObj, type; + if ( types && types.preventDefault && types.handleObj ) { + + // ( event ) dispatched jQuery.Event + handleObj = types.handleObj; + jQuery( types.delegateTarget ).off( + handleObj.namespace ? + handleObj.origType + "." + handleObj.namespace : + handleObj.origType, + handleObj.selector, + handleObj.handler + ); + return this; + } + if ( typeof types === "object" ) { + + // ( types-object [, selector] ) + for ( type in types ) { + this.off( type, selector, types[ type ] ); + } + return this; + } + if ( selector === false || typeof selector === "function" ) { + + // ( types [, fn] ) + fn = selector; + selector = undefined; + } + if ( fn === false ) { + fn = returnFalse; + } + return this.each( function() { + jQuery.event.remove( this, types, fn, selector ); + } ); + } +} ); + + +var + + // Support: IE <=10 - 11, Edge 12 - 13 only + // In IE/Edge using regex groups here causes severe slowdowns. + // See https://connect.microsoft.com/IE/feedback/details/1736512/ + rnoInnerhtml = /\s*$/g; + +// Prefer a tbody over its parent table for containing new rows +function manipulationTarget( elem, content ) { + if ( nodeName( elem, "table" ) && + nodeName( content.nodeType !== 11 ? content : content.firstChild, "tr" ) ) { + + return jQuery( elem ).children( "tbody" )[ 0 ] || elem; + } + + return elem; +} + +// Replace/restore the type attribute of script elements for safe DOM manipulation +function disableScript( elem ) { + elem.type = ( elem.getAttribute( "type" ) !== null ) + "/" + elem.type; + return elem; +} +function restoreScript( elem ) { + if ( ( elem.type || "" ).slice( 0, 5 ) === "true/" ) { + elem.type = elem.type.slice( 5 ); + } else { + elem.removeAttribute( "type" ); + } + + return elem; +} + +function cloneCopyEvent( src, dest ) { + var i, l, type, pdataOld, udataOld, udataCur, events; + + if ( dest.nodeType !== 1 ) { + return; + } + + // 1. Copy private data: events, handlers, etc. + if ( dataPriv.hasData( src ) ) { + pdataOld = dataPriv.get( src ); + events = pdataOld.events; + + if ( events ) { + dataPriv.remove( dest, "handle events" ); + + for ( type in events ) { + for ( i = 0, l = events[ type ].length; i < l; i++ ) { + jQuery.event.add( dest, type, events[ type ][ i ] ); + } + } + } + } + + // 2. 
Copy user data + if ( dataUser.hasData( src ) ) { + udataOld = dataUser.access( src ); + udataCur = jQuery.extend( {}, udataOld ); + + dataUser.set( dest, udataCur ); + } +} + +// Fix IE bugs, see support tests +function fixInput( src, dest ) { + var nodeName = dest.nodeName.toLowerCase(); + + // Fails to persist the checked state of a cloned checkbox or radio button. + if ( nodeName === "input" && rcheckableType.test( src.type ) ) { + dest.checked = src.checked; + + // Fails to return the selected option to the default selected state when cloning options + } else if ( nodeName === "input" || nodeName === "textarea" ) { + dest.defaultValue = src.defaultValue; + } +} + +function domManip( collection, args, callback, ignored ) { + + // Flatten any nested arrays + args = flat( args ); + + var fragment, first, scripts, hasScripts, node, doc, + i = 0, + l = collection.length, + iNoClone = l - 1, + value = args[ 0 ], + valueIsFunction = isFunction( value ); + + // We can't cloneNode fragments that contain checked, in WebKit + if ( valueIsFunction || + ( l > 1 && typeof value === "string" && + !support.checkClone && rchecked.test( value ) ) ) { + return collection.each( function( index ) { + var self = collection.eq( index ); + if ( valueIsFunction ) { + args[ 0 ] = value.call( this, index, self.html() ); + } + domManip( self, args, callback, ignored ); + } ); + } + + if ( l ) { + fragment = buildFragment( args, collection[ 0 ].ownerDocument, false, collection, ignored ); + first = fragment.firstChild; + + if ( fragment.childNodes.length === 1 ) { + fragment = first; + } + + // Require either new content or an interest in ignored elements to invoke the callback + if ( first || ignored ) { + scripts = jQuery.map( getAll( fragment, "script" ), disableScript ); + hasScripts = scripts.length; + + // Use the original fragment for the last item + // instead of the first because it can end up + // being emptied incorrectly in certain situations (#8070). + for ( ; i < l; i++ ) { + node = fragment; + + if ( i !== iNoClone ) { + node = jQuery.clone( node, true, true ); + + // Keep references to cloned scripts for later restoration + if ( hasScripts ) { + + // Support: Android <=4.0 only, PhantomJS 1 only + // push.apply(_, arraylike) throws on ancient WebKit + jQuery.merge( scripts, getAll( node, "script" ) ); + } + } + + callback.call( collection[ i ], node, i ); + } + + if ( hasScripts ) { + doc = scripts[ scripts.length - 1 ].ownerDocument; + + // Reenable scripts + jQuery.map( scripts, restoreScript ); + + // Evaluate executable scripts on first document insertion + for ( i = 0; i < hasScripts; i++ ) { + node = scripts[ i ]; + if ( rscriptType.test( node.type || "" ) && + !dataPriv.access( node, "globalEval" ) && + jQuery.contains( doc, node ) ) { + + if ( node.src && ( node.type || "" ).toLowerCase() !== "module" ) { + + // Optional AJAX dependency, but won't run scripts if not present + if ( jQuery._evalUrl && !node.noModule ) { + jQuery._evalUrl( node.src, { + nonce: node.nonce || node.getAttribute( "nonce" ) + }, doc ); + } + } else { + DOMEval( node.textContent.replace( rcleanScript, "" ), node, doc ); + } + } + } + } + } + } + + return collection; +} + +function remove( elem, selector, keepData ) { + var node, + nodes = selector ? 
jQuery.filter( selector, elem ) : elem, + i = 0; + + for ( ; ( node = nodes[ i ] ) != null; i++ ) { + if ( !keepData && node.nodeType === 1 ) { + jQuery.cleanData( getAll( node ) ); + } + + if ( node.parentNode ) { + if ( keepData && isAttached( node ) ) { + setGlobalEval( getAll( node, "script" ) ); + } + node.parentNode.removeChild( node ); + } + } + + return elem; +} + +jQuery.extend( { + htmlPrefilter: function( html ) { + return html; + }, + + clone: function( elem, dataAndEvents, deepDataAndEvents ) { + var i, l, srcElements, destElements, + clone = elem.cloneNode( true ), + inPage = isAttached( elem ); + + // Fix IE cloning issues + if ( !support.noCloneChecked && ( elem.nodeType === 1 || elem.nodeType === 11 ) && + !jQuery.isXMLDoc( elem ) ) { + + // We eschew Sizzle here for performance reasons: https://jsperf.com/getall-vs-sizzle/2 + destElements = getAll( clone ); + srcElements = getAll( elem ); + + for ( i = 0, l = srcElements.length; i < l; i++ ) { + fixInput( srcElements[ i ], destElements[ i ] ); + } + } + + // Copy the events from the original to the clone + if ( dataAndEvents ) { + if ( deepDataAndEvents ) { + srcElements = srcElements || getAll( elem ); + destElements = destElements || getAll( clone ); + + for ( i = 0, l = srcElements.length; i < l; i++ ) { + cloneCopyEvent( srcElements[ i ], destElements[ i ] ); + } + } else { + cloneCopyEvent( elem, clone ); + } + } + + // Preserve script evaluation history + destElements = getAll( clone, "script" ); + if ( destElements.length > 0 ) { + setGlobalEval( destElements, !inPage && getAll( elem, "script" ) ); + } + + // Return the cloned set + return clone; + }, + + cleanData: function( elems ) { + var data, elem, type, + special = jQuery.event.special, + i = 0; + + for ( ; ( elem = elems[ i ] ) !== undefined; i++ ) { + if ( acceptData( elem ) ) { + if ( ( data = elem[ dataPriv.expando ] ) ) { + if ( data.events ) { + for ( type in data.events ) { + if ( special[ type ] ) { + jQuery.event.remove( elem, type ); + + // This is a shortcut to avoid jQuery.event.remove's overhead + } else { + jQuery.removeEvent( elem, type, data.handle ); + } + } + } + + // Support: Chrome <=35 - 45+ + // Assign undefined instead of using delete, see Data#remove + elem[ dataPriv.expando ] = undefined; + } + if ( elem[ dataUser.expando ] ) { + + // Support: Chrome <=35 - 45+ + // Assign undefined instead of using delete, see Data#remove + elem[ dataUser.expando ] = undefined; + } + } + } + } +} ); + +jQuery.fn.extend( { + detach: function( selector ) { + return remove( this, selector, true ); + }, + + remove: function( selector ) { + return remove( this, selector ); + }, + + text: function( value ) { + return access( this, function( value ) { + return value === undefined ? 
+ jQuery.text( this ) : + this.empty().each( function() { + if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { + this.textContent = value; + } + } ); + }, null, value, arguments.length ); + }, + + append: function() { + return domManip( this, arguments, function( elem ) { + if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { + var target = manipulationTarget( this, elem ); + target.appendChild( elem ); + } + } ); + }, + + prepend: function() { + return domManip( this, arguments, function( elem ) { + if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { + var target = manipulationTarget( this, elem ); + target.insertBefore( elem, target.firstChild ); + } + } ); + }, + + before: function() { + return domManip( this, arguments, function( elem ) { + if ( this.parentNode ) { + this.parentNode.insertBefore( elem, this ); + } + } ); + }, + + after: function() { + return domManip( this, arguments, function( elem ) { + if ( this.parentNode ) { + this.parentNode.insertBefore( elem, this.nextSibling ); + } + } ); + }, + + empty: function() { + var elem, + i = 0; + + for ( ; ( elem = this[ i ] ) != null; i++ ) { + if ( elem.nodeType === 1 ) { + + // Prevent memory leaks + jQuery.cleanData( getAll( elem, false ) ); + + // Remove any remaining nodes + elem.textContent = ""; + } + } + + return this; + }, + + clone: function( dataAndEvents, deepDataAndEvents ) { + dataAndEvents = dataAndEvents == null ? false : dataAndEvents; + deepDataAndEvents = deepDataAndEvents == null ? dataAndEvents : deepDataAndEvents; + + return this.map( function() { + return jQuery.clone( this, dataAndEvents, deepDataAndEvents ); + } ); + }, + + html: function( value ) { + return access( this, function( value ) { + var elem = this[ 0 ] || {}, + i = 0, + l = this.length; + + if ( value === undefined && elem.nodeType === 1 ) { + return elem.innerHTML; + } + + // See if we can take a shortcut and just use innerHTML + if ( typeof value === "string" && !rnoInnerhtml.test( value ) && + !wrapMap[ ( rtagName.exec( value ) || [ "", "" ] )[ 1 ].toLowerCase() ] ) { + + value = jQuery.htmlPrefilter( value ); + + try { + for ( ; i < l; i++ ) { + elem = this[ i ] || {}; + + // Remove element nodes and prevent memory leaks + if ( elem.nodeType === 1 ) { + jQuery.cleanData( getAll( elem, false ) ); + elem.innerHTML = value; + } + } + + elem = 0; + + // If using innerHTML throws an exception, use the fallback method + } catch ( e ) {} + } + + if ( elem ) { + this.empty().append( value ); + } + }, null, value, arguments.length ); + }, + + replaceWith: function() { + var ignored = []; + + // Make the changes, replacing each non-ignored context element with the new content + return domManip( this, arguments, function( elem ) { + var parent = this.parentNode; + + if ( jQuery.inArray( this, ignored ) < 0 ) { + jQuery.cleanData( getAll( this ) ); + if ( parent ) { + parent.replaceChild( elem, this ); + } + } + + // Force callback invocation + }, ignored ); + } +} ); + +jQuery.each( { + appendTo: "append", + prependTo: "prepend", + insertBefore: "before", + insertAfter: "after", + replaceAll: "replaceWith" +}, function( name, original ) { + jQuery.fn[ name ] = function( selector ) { + var elems, + ret = [], + insert = jQuery( selector ), + last = insert.length - 1, + i = 0; + + for ( ; i <= last; i++ ) { + elems = i === last ? 
this : this.clone( true ); + jQuery( insert[ i ] )[ original ]( elems ); + + // Support: Android <=4.0 only, PhantomJS 1 only + // .get() because push.apply(_, arraylike) throws on ancient WebKit + push.apply( ret, elems.get() ); + } + + return this.pushStack( ret ); + }; +} ); +var rnumnonpx = new RegExp( "^(" + pnum + ")(?!px)[a-z%]+$", "i" ); + +var getStyles = function( elem ) { + + // Support: IE <=11 only, Firefox <=30 (#15098, #14150) + // IE throws on elements created in popups + // FF meanwhile throws on frame elements through "defaultView.getComputedStyle" + var view = elem.ownerDocument.defaultView; + + if ( !view || !view.opener ) { + view = window; + } + + return view.getComputedStyle( elem ); + }; + +var swap = function( elem, options, callback ) { + var ret, name, + old = {}; + + // Remember the old values, and insert the new ones + for ( name in options ) { + old[ name ] = elem.style[ name ]; + elem.style[ name ] = options[ name ]; + } + + ret = callback.call( elem ); + + // Revert the old values + for ( name in options ) { + elem.style[ name ] = old[ name ]; + } + + return ret; +}; + + +var rboxStyle = new RegExp( cssExpand.join( "|" ), "i" ); + + + +( function() { + + // Executing both pixelPosition & boxSizingReliable tests require only one layout + // so they're executed at the same time to save the second computation. + function computeStyleTests() { + + // This is a singleton, we need to execute it only once + if ( !div ) { + return; + } + + container.style.cssText = "position:absolute;left:-11111px;width:60px;" + + "margin-top:1px;padding:0;border:0"; + div.style.cssText = + "position:relative;display:block;box-sizing:border-box;overflow:scroll;" + + "margin:auto;border:1px;padding:1px;" + + "width:60%;top:1%"; + documentElement.appendChild( container ).appendChild( div ); + + var divStyle = window.getComputedStyle( div ); + pixelPositionVal = divStyle.top !== "1%"; + + // Support: Android 4.0 - 4.3 only, Firefox <=3 - 44 + reliableMarginLeftVal = roundPixelMeasures( divStyle.marginLeft ) === 12; + + // Support: Android 4.0 - 4.3 only, Safari <=9.1 - 10.1, iOS <=7.0 - 9.3 + // Some styles come back with percentage values, even though they shouldn't + div.style.right = "60%"; + pixelBoxStylesVal = roundPixelMeasures( divStyle.right ) === 36; + + // Support: IE 9 - 11 only + // Detect misreporting of content dimensions for box-sizing:border-box elements + boxSizingReliableVal = roundPixelMeasures( divStyle.width ) === 36; + + // Support: IE 9 only + // Detect overflow:scroll screwiness (gh-3699) + // Support: Chrome <=64 + // Don't get tricked when zoom affects offsetWidth (gh-4029) + div.style.position = "absolute"; + scrollboxSizeVal = roundPixelMeasures( div.offsetWidth / 3 ) === 12; + + documentElement.removeChild( container ); + + // Nullify the div so it wouldn't be stored in the memory and + // it will also be a sign that checks already performed + div = null; + } + + function roundPixelMeasures( measure ) { + return Math.round( parseFloat( measure ) ); + } + + var pixelPositionVal, boxSizingReliableVal, scrollboxSizeVal, pixelBoxStylesVal, + reliableTrDimensionsVal, reliableMarginLeftVal, + container = document.createElement( "div" ), + div = document.createElement( "div" ); + + // Finish early in limited (non-browser) environments + if ( !div.style ) { + return; + } + + // Support: IE <=9 - 11 only + // Style of cloned element affects source element cloned (#8908) + div.style.backgroundClip = "content-box"; + div.cloneNode( true ).style.backgroundClip = ""; + 
support.clearCloneStyle = div.style.backgroundClip === "content-box"; + + jQuery.extend( support, { + boxSizingReliable: function() { + computeStyleTests(); + return boxSizingReliableVal; + }, + pixelBoxStyles: function() { + computeStyleTests(); + return pixelBoxStylesVal; + }, + pixelPosition: function() { + computeStyleTests(); + return pixelPositionVal; + }, + reliableMarginLeft: function() { + computeStyleTests(); + return reliableMarginLeftVal; + }, + scrollboxSize: function() { + computeStyleTests(); + return scrollboxSizeVal; + }, + + // Support: IE 9 - 11+, Edge 15 - 18+ + // IE/Edge misreport `getComputedStyle` of table rows with width/height + // set in CSS while `offset*` properties report correct values. + // Behavior in IE 9 is more subtle than in newer versions & it passes + // some versions of this test; make sure not to make it pass there! + // + // Support: Firefox 70+ + // Only Firefox includes border widths + // in computed dimensions. (gh-4529) + reliableTrDimensions: function() { + var table, tr, trChild, trStyle; + if ( reliableTrDimensionsVal == null ) { + table = document.createElement( "table" ); + tr = document.createElement( "tr" ); + trChild = document.createElement( "div" ); + + table.style.cssText = "position:absolute;left:-11111px;border-collapse:separate"; + tr.style.cssText = "border:1px solid"; + + // Support: Chrome 86+ + // Height set through cssText does not get applied. + // Computed height then comes back as 0. + tr.style.height = "1px"; + trChild.style.height = "9px"; + + // Support: Android 8 Chrome 86+ + // In our bodyBackground.html iframe, + // display for all div elements is set to "inline", + // which causes a problem only in Android 8 Chrome 86. + // Ensuring the div is display: block + // gets around this issue. + trChild.style.display = "block"; + + documentElement + .appendChild( table ) + .appendChild( tr ) + .appendChild( trChild ); + + trStyle = window.getComputedStyle( tr ); + reliableTrDimensionsVal = ( parseInt( trStyle.height, 10 ) + + parseInt( trStyle.borderTopWidth, 10 ) + + parseInt( trStyle.borderBottomWidth, 10 ) ) === tr.offsetHeight; + + documentElement.removeChild( table ); + } + return reliableTrDimensionsVal; + } + } ); +} )(); + + +function curCSS( elem, name, computed ) { + var width, minWidth, maxWidth, ret, + + // Support: Firefox 51+ + // Retrieving style before computed somehow + // fixes an issue with getting wrong values + // on detached elements + style = elem.style; + + computed = computed || getStyles( elem ); + + // getPropertyValue is needed for: + // .css('filter') (IE 9 only, #12537) + // .css('--customProperty) (#3144) + if ( computed ) { + ret = computed.getPropertyValue( name ) || computed[ name ]; + + if ( ret === "" && !isAttached( elem ) ) { + ret = jQuery.style( elem, name ); + } + + // A tribute to the "awesome hack by Dean Edwards" + // Android Browser returns percentage for some values, + // but width seems to be reliably pixels. 
+ // This is against the CSSOM draft spec: + // https://drafts.csswg.org/cssom/#resolved-values + if ( !support.pixelBoxStyles() && rnumnonpx.test( ret ) && rboxStyle.test( name ) ) { + + // Remember the original values + width = style.width; + minWidth = style.minWidth; + maxWidth = style.maxWidth; + + // Put in the new values to get a computed value out + style.minWidth = style.maxWidth = style.width = ret; + ret = computed.width; + + // Revert the changed values + style.width = width; + style.minWidth = minWidth; + style.maxWidth = maxWidth; + } + } + + return ret !== undefined ? + + // Support: IE <=9 - 11 only + // IE returns zIndex value as an integer. + ret + "" : + ret; +} + + +function addGetHookIf( conditionFn, hookFn ) { + + // Define the hook, we'll check on the first run if it's really needed. + return { + get: function() { + if ( conditionFn() ) { + + // Hook not needed (or it's not possible to use it due + // to missing dependency), remove it. + delete this.get; + return; + } + + // Hook needed; redefine it so that the support test is not executed again. + return ( this.get = hookFn ).apply( this, arguments ); + } + }; +} + + +var cssPrefixes = [ "Webkit", "Moz", "ms" ], + emptyStyle = document.createElement( "div" ).style, + vendorProps = {}; + +// Return a vendor-prefixed property or undefined +function vendorPropName( name ) { + + // Check for vendor prefixed names + var capName = name[ 0 ].toUpperCase() + name.slice( 1 ), + i = cssPrefixes.length; + + while ( i-- ) { + name = cssPrefixes[ i ] + capName; + if ( name in emptyStyle ) { + return name; + } + } +} + +// Return a potentially-mapped jQuery.cssProps or vendor prefixed property +function finalPropName( name ) { + var final = jQuery.cssProps[ name ] || vendorProps[ name ]; + + if ( final ) { + return final; + } + if ( name in emptyStyle ) { + return name; + } + return vendorProps[ name ] = vendorPropName( name ) || name; +} + + +var + + // Swappable if display is none or starts with table + // except "table", "table-cell", or "table-caption" + // See here for display values: https://developer.mozilla.org/en-US/docs/CSS/display + rdisplayswap = /^(none|table(?!-c[ea]).+)/, + rcustomProp = /^--/, + cssShow = { position: "absolute", visibility: "hidden", display: "block" }, + cssNormalTransform = { + letterSpacing: "0", + fontWeight: "400" + }; + +function setPositiveNumber( _elem, value, subtract ) { + + // Any relative (+/-) values have already been + // normalized at this point + var matches = rcssNum.exec( value ); + return matches ? + + // Guard against undefined "subtract", e.g., when used as in cssHooks + Math.max( 0, matches[ 2 ] - ( subtract || 0 ) ) + ( matches[ 3 ] || "px" ) : + value; +} + +function boxModelAdjustment( elem, dimension, box, isBorderBox, styles, computedVal ) { + var i = dimension === "width" ? 1 : 0, + extra = 0, + delta = 0; + + // Adjustment may not be necessary + if ( box === ( isBorderBox ? 
"border" : "content" ) ) { + return 0; + } + + for ( ; i < 4; i += 2 ) { + + // Both box models exclude margin + if ( box === "margin" ) { + delta += jQuery.css( elem, box + cssExpand[ i ], true, styles ); + } + + // If we get here with a content-box, we're seeking "padding" or "border" or "margin" + if ( !isBorderBox ) { + + // Add padding + delta += jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); + + // For "border" or "margin", add border + if ( box !== "padding" ) { + delta += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); + + // But still keep track of it otherwise + } else { + extra += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); + } + + // If we get here with a border-box (content + padding + border), we're seeking "content" or + // "padding" or "margin" + } else { + + // For "content", subtract padding + if ( box === "content" ) { + delta -= jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); + } + + // For "content" or "padding", subtract border + if ( box !== "margin" ) { + delta -= jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); + } + } + } + + // Account for positive content-box scroll gutter when requested by providing computedVal + if ( !isBorderBox && computedVal >= 0 ) { + + // offsetWidth/offsetHeight is a rounded sum of content, padding, scroll gutter, and border + // Assuming integer scroll gutter, subtract the rest and round down + delta += Math.max( 0, Math.ceil( + elem[ "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ) ] - + computedVal - + delta - + extra - + 0.5 + + // If offsetWidth/offsetHeight is unknown, then we can't determine content-box scroll gutter + // Use an explicit zero to avoid NaN (gh-3964) + ) ) || 0; + } + + return delta; +} + +function getWidthOrHeight( elem, dimension, extra ) { + + // Start with computed style + var styles = getStyles( elem ), + + // To avoid forcing a reflow, only fetch boxSizing if we need it (gh-4322). + // Fake content-box until we know it's needed to know the true value. + boxSizingNeeded = !support.boxSizingReliable() || extra, + isBorderBox = boxSizingNeeded && + jQuery.css( elem, "boxSizing", false, styles ) === "border-box", + valueIsBorderBox = isBorderBox, + + val = curCSS( elem, dimension, styles ), + offsetProp = "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ); + + // Support: Firefox <=54 + // Return a confounding non-pixel value or feign ignorance, as appropriate. + if ( rnumnonpx.test( val ) ) { + if ( !extra ) { + return val; + } + val = "auto"; + } + + + // Support: IE 9 - 11 only + // Use offsetWidth/offsetHeight for when box sizing is unreliable. + // In those cases, the computed value can be trusted to be border-box. + if ( ( !support.boxSizingReliable() && isBorderBox || + + // Support: IE 10 - 11+, Edge 15 - 18+ + // IE/Edge misreport `getComputedStyle` of table rows with width/height + // set in CSS while `offset*` properties report correct values. + // Interestingly, in some cases IE 9 doesn't suffer from this issue. 
+ !support.reliableTrDimensions() && nodeName( elem, "tr" ) || + + // Fall back to offsetWidth/offsetHeight when value is "auto" + // This happens for inline elements with no explicit setting (gh-3571) + val === "auto" || + + // Support: Android <=4.1 - 4.3 only + // Also use offsetWidth/offsetHeight for misreported inline dimensions (gh-3602) + !parseFloat( val ) && jQuery.css( elem, "display", false, styles ) === "inline" ) && + + // Make sure the element is visible & connected + elem.getClientRects().length ) { + + isBorderBox = jQuery.css( elem, "boxSizing", false, styles ) === "border-box"; + + // Where available, offsetWidth/offsetHeight approximate border box dimensions. + // Where not available (e.g., SVG), assume unreliable box-sizing and interpret the + // retrieved value as a content box dimension. + valueIsBorderBox = offsetProp in elem; + if ( valueIsBorderBox ) { + val = elem[ offsetProp ]; + } + } + + // Normalize "" and auto + val = parseFloat( val ) || 0; + + // Adjust for the element's box model + return ( val + + boxModelAdjustment( + elem, + dimension, + extra || ( isBorderBox ? "border" : "content" ), + valueIsBorderBox, + styles, + + // Provide the current computed size to request scroll gutter calculation (gh-3589) + val + ) + ) + "px"; +} + +jQuery.extend( { + + // Add in style property hooks for overriding the default + // behavior of getting and setting a style property + cssHooks: { + opacity: { + get: function( elem, computed ) { + if ( computed ) { + + // We should always get a number back from opacity + var ret = curCSS( elem, "opacity" ); + return ret === "" ? "1" : ret; + } + } + } + }, + + // Don't automatically add "px" to these possibly-unitless properties + cssNumber: { + "animationIterationCount": true, + "columnCount": true, + "fillOpacity": true, + "flexGrow": true, + "flexShrink": true, + "fontWeight": true, + "gridArea": true, + "gridColumn": true, + "gridColumnEnd": true, + "gridColumnStart": true, + "gridRow": true, + "gridRowEnd": true, + "gridRowStart": true, + "lineHeight": true, + "opacity": true, + "order": true, + "orphans": true, + "widows": true, + "zIndex": true, + "zoom": true + }, + + // Add in properties whose names you wish to fix before + // setting or getting the value + cssProps: {}, + + // Get and set the style property on a DOM Node + style: function( elem, name, value, extra ) { + + // Don't set styles on text and comment nodes + if ( !elem || elem.nodeType === 3 || elem.nodeType === 8 || !elem.style ) { + return; + } + + // Make sure that we're working with the right name + var ret, type, hooks, + origName = camelCase( name ), + isCustomProp = rcustomProp.test( name ), + style = elem.style; + + // Make sure that we're working with the right name. We don't + // want to query the value if it is a CSS custom property + // since they are user-defined. 
+ if ( !isCustomProp ) { + name = finalPropName( origName ); + } + + // Gets hook for the prefixed version, then unprefixed version + hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; + + // Check if we're setting a value + if ( value !== undefined ) { + type = typeof value; + + // Convert "+=" or "-=" to relative numbers (#7345) + if ( type === "string" && ( ret = rcssNum.exec( value ) ) && ret[ 1 ] ) { + value = adjustCSS( elem, name, ret ); + + // Fixes bug #9237 + type = "number"; + } + + // Make sure that null and NaN values aren't set (#7116) + if ( value == null || value !== value ) { + return; + } + + // If a number was passed in, add the unit (except for certain CSS properties) + // The isCustomProp check can be removed in jQuery 4.0 when we only auto-append + // "px" to a few hardcoded values. + if ( type === "number" && !isCustomProp ) { + value += ret && ret[ 3 ] || ( jQuery.cssNumber[ origName ] ? "" : "px" ); + } + + // background-* props affect original clone's values + if ( !support.clearCloneStyle && value === "" && name.indexOf( "background" ) === 0 ) { + style[ name ] = "inherit"; + } + + // If a hook was provided, use that value, otherwise just set the specified value + if ( !hooks || !( "set" in hooks ) || + ( value = hooks.set( elem, value, extra ) ) !== undefined ) { + + if ( isCustomProp ) { + style.setProperty( name, value ); + } else { + style[ name ] = value; + } + } + + } else { + + // If a hook was provided get the non-computed value from there + if ( hooks && "get" in hooks && + ( ret = hooks.get( elem, false, extra ) ) !== undefined ) { + + return ret; + } + + // Otherwise just get the value from the style object + return style[ name ]; + } + }, + + css: function( elem, name, extra, styles ) { + var val, num, hooks, + origName = camelCase( name ), + isCustomProp = rcustomProp.test( name ); + + // Make sure that we're working with the right name. We don't + // want to modify the value if it is a CSS custom property + // since they are user-defined. + if ( !isCustomProp ) { + name = finalPropName( origName ); + } + + // Try prefixed name followed by the unprefixed name + hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; + + // If a hook was provided get the computed value from there + if ( hooks && "get" in hooks ) { + val = hooks.get( elem, true, extra ); + } + + // Otherwise, if a way to get the computed value exists, use that + if ( val === undefined ) { + val = curCSS( elem, name, styles ); + } + + // Convert "normal" to computed value + if ( val === "normal" && name in cssNormalTransform ) { + val = cssNormalTransform[ name ]; + } + + // Make numeric if forced or a qualifier was provided and val looks numeric + if ( extra === "" || extra ) { + num = parseFloat( val ); + return extra === true || isFinite( num ) ? num || 0 : val; + } + + return val; + } +} ); + +jQuery.each( [ "height", "width" ], function( _i, dimension ) { + jQuery.cssHooks[ dimension ] = { + get: function( elem, computed, extra ) { + if ( computed ) { + + // Certain elements can have dimension info if we invisibly show them + // but it must have a current display style that would benefit + return rdisplayswap.test( jQuery.css( elem, "display" ) ) && + + // Support: Safari 8+ + // Table columns in Safari have non-zero offsetWidth & zero + // getBoundingClientRect().width unless display is changed. + // Support: IE <=11 only + // Running getBoundingClientRect on a disconnected node + // in IE throws an error. 
+ ( !elem.getClientRects().length || !elem.getBoundingClientRect().width ) ? + swap( elem, cssShow, function() { + return getWidthOrHeight( elem, dimension, extra ); + } ) : + getWidthOrHeight( elem, dimension, extra ); + } + }, + + set: function( elem, value, extra ) { + var matches, + styles = getStyles( elem ), + + // Only read styles.position if the test has a chance to fail + // to avoid forcing a reflow. + scrollboxSizeBuggy = !support.scrollboxSize() && + styles.position === "absolute", + + // To avoid forcing a reflow, only fetch boxSizing if we need it (gh-3991) + boxSizingNeeded = scrollboxSizeBuggy || extra, + isBorderBox = boxSizingNeeded && + jQuery.css( elem, "boxSizing", false, styles ) === "border-box", + subtract = extra ? + boxModelAdjustment( + elem, + dimension, + extra, + isBorderBox, + styles + ) : + 0; + + // Account for unreliable border-box dimensions by comparing offset* to computed and + // faking a content-box to get border and padding (gh-3699) + if ( isBorderBox && scrollboxSizeBuggy ) { + subtract -= Math.ceil( + elem[ "offset" + dimension[ 0 ].toUpperCase() + dimension.slice( 1 ) ] - + parseFloat( styles[ dimension ] ) - + boxModelAdjustment( elem, dimension, "border", false, styles ) - + 0.5 + ); + } + + // Convert to pixels if value adjustment is needed + if ( subtract && ( matches = rcssNum.exec( value ) ) && + ( matches[ 3 ] || "px" ) !== "px" ) { + + elem.style[ dimension ] = value; + value = jQuery.css( elem, dimension ); + } + + return setPositiveNumber( elem, value, subtract ); + } + }; +} ); + +jQuery.cssHooks.marginLeft = addGetHookIf( support.reliableMarginLeft, + function( elem, computed ) { + if ( computed ) { + return ( parseFloat( curCSS( elem, "marginLeft" ) ) || + elem.getBoundingClientRect().left - + swap( elem, { marginLeft: 0 }, function() { + return elem.getBoundingClientRect().left; + } ) + ) + "px"; + } + } +); + +// These hooks are used by animate to expand properties +jQuery.each( { + margin: "", + padding: "", + border: "Width" +}, function( prefix, suffix ) { + jQuery.cssHooks[ prefix + suffix ] = { + expand: function( value ) { + var i = 0, + expanded = {}, + + // Assumes a single number if not a string + parts = typeof value === "string" ? value.split( " " ) : [ value ]; + + for ( ; i < 4; i++ ) { + expanded[ prefix + cssExpand[ i ] + suffix ] = + parts[ i ] || parts[ i - 2 ] || parts[ 0 ]; + } + + return expanded; + } + }; + + if ( prefix !== "margin" ) { + jQuery.cssHooks[ prefix + suffix ].set = setPositiveNumber; + } +} ); + +jQuery.fn.extend( { + css: function( name, value ) { + return access( this, function( elem, name, value ) { + var styles, len, + map = {}, + i = 0; + + if ( Array.isArray( name ) ) { + styles = getStyles( elem ); + len = name.length; + + for ( ; i < len; i++ ) { + map[ name[ i ] ] = jQuery.css( elem, name[ i ], false, styles ); + } + + return map; + } + + return value !== undefined ? + jQuery.style( elem, name, value ) : + jQuery.css( elem, name ); + }, name, value, arguments.length > 1 ); + } +} ); + + +function Tween( elem, options, prop, end, easing ) { + return new Tween.prototype.init( elem, options, prop, end, easing ); +} +jQuery.Tween = Tween; + +Tween.prototype = { + constructor: Tween, + init: function( elem, options, prop, end, easing, unit ) { + this.elem = elem; + this.prop = prop; + this.easing = easing || jQuery.easing._default; + this.options = options; + this.start = this.now = this.cur(); + this.end = end; + this.unit = unit || ( jQuery.cssNumber[ prop ] ? 
"" : "px" ); + }, + cur: function() { + var hooks = Tween.propHooks[ this.prop ]; + + return hooks && hooks.get ? + hooks.get( this ) : + Tween.propHooks._default.get( this ); + }, + run: function( percent ) { + var eased, + hooks = Tween.propHooks[ this.prop ]; + + if ( this.options.duration ) { + this.pos = eased = jQuery.easing[ this.easing ]( + percent, this.options.duration * percent, 0, 1, this.options.duration + ); + } else { + this.pos = eased = percent; + } + this.now = ( this.end - this.start ) * eased + this.start; + + if ( this.options.step ) { + this.options.step.call( this.elem, this.now, this ); + } + + if ( hooks && hooks.set ) { + hooks.set( this ); + } else { + Tween.propHooks._default.set( this ); + } + return this; + } +}; + +Tween.prototype.init.prototype = Tween.prototype; + +Tween.propHooks = { + _default: { + get: function( tween ) { + var result; + + // Use a property on the element directly when it is not a DOM element, + // or when there is no matching style property that exists. + if ( tween.elem.nodeType !== 1 || + tween.elem[ tween.prop ] != null && tween.elem.style[ tween.prop ] == null ) { + return tween.elem[ tween.prop ]; + } + + // Passing an empty string as a 3rd parameter to .css will automatically + // attempt a parseFloat and fallback to a string if the parse fails. + // Simple values such as "10px" are parsed to Float; + // complex values such as "rotate(1rad)" are returned as-is. + result = jQuery.css( tween.elem, tween.prop, "" ); + + // Empty strings, null, undefined and "auto" are converted to 0. + return !result || result === "auto" ? 0 : result; + }, + set: function( tween ) { + + // Use step hook for back compat. + // Use cssHook if its there. + // Use .style if available and use plain properties where available. + if ( jQuery.fx.step[ tween.prop ] ) { + jQuery.fx.step[ tween.prop ]( tween ); + } else if ( tween.elem.nodeType === 1 && ( + jQuery.cssHooks[ tween.prop ] || + tween.elem.style[ finalPropName( tween.prop ) ] != null ) ) { + jQuery.style( tween.elem, tween.prop, tween.now + tween.unit ); + } else { + tween.elem[ tween.prop ] = tween.now; + } + } + } +}; + +// Support: IE <=9 only +// Panic based approach to setting things on disconnected nodes +Tween.propHooks.scrollTop = Tween.propHooks.scrollLeft = { + set: function( tween ) { + if ( tween.elem.nodeType && tween.elem.parentNode ) { + tween.elem[ tween.prop ] = tween.now; + } + } +}; + +jQuery.easing = { + linear: function( p ) { + return p; + }, + swing: function( p ) { + return 0.5 - Math.cos( p * Math.PI ) / 2; + }, + _default: "swing" +}; + +jQuery.fx = Tween.prototype.init; + +// Back compat <1.8 extension point +jQuery.fx.step = {}; + + + + +var + fxNow, inProgress, + rfxtypes = /^(?:toggle|show|hide)$/, + rrun = /queueHooks$/; + +function schedule() { + if ( inProgress ) { + if ( document.hidden === false && window.requestAnimationFrame ) { + window.requestAnimationFrame( schedule ); + } else { + window.setTimeout( schedule, jQuery.fx.interval ); + } + + jQuery.fx.tick(); + } +} + +// Animations created synchronously will run synchronously +function createFxNow() { + window.setTimeout( function() { + fxNow = undefined; + } ); + return ( fxNow = Date.now() ); +} + +// Generate parameters to create a standard animation +function genFx( type, includeWidth ) { + var which, + i = 0, + attrs = { height: type }; + + // If we include width, step value is 1 to do all cssExpand values, + // otherwise step value is 2 to skip over Left and Right + includeWidth = includeWidth ? 
1 : 0; + for ( ; i < 4; i += 2 - includeWidth ) { + which = cssExpand[ i ]; + attrs[ "margin" + which ] = attrs[ "padding" + which ] = type; + } + + if ( includeWidth ) { + attrs.opacity = attrs.width = type; + } + + return attrs; +} + +function createTween( value, prop, animation ) { + var tween, + collection = ( Animation.tweeners[ prop ] || [] ).concat( Animation.tweeners[ "*" ] ), + index = 0, + length = collection.length; + for ( ; index < length; index++ ) { + if ( ( tween = collection[ index ].call( animation, prop, value ) ) ) { + + // We're done with this property + return tween; + } + } +} + +function defaultPrefilter( elem, props, opts ) { + var prop, value, toggle, hooks, oldfire, propTween, restoreDisplay, display, + isBox = "width" in props || "height" in props, + anim = this, + orig = {}, + style = elem.style, + hidden = elem.nodeType && isHiddenWithinTree( elem ), + dataShow = dataPriv.get( elem, "fxshow" ); + + // Queue-skipping animations hijack the fx hooks + if ( !opts.queue ) { + hooks = jQuery._queueHooks( elem, "fx" ); + if ( hooks.unqueued == null ) { + hooks.unqueued = 0; + oldfire = hooks.empty.fire; + hooks.empty.fire = function() { + if ( !hooks.unqueued ) { + oldfire(); + } + }; + } + hooks.unqueued++; + + anim.always( function() { + + // Ensure the complete handler is called before this completes + anim.always( function() { + hooks.unqueued--; + if ( !jQuery.queue( elem, "fx" ).length ) { + hooks.empty.fire(); + } + } ); + } ); + } + + // Detect show/hide animations + for ( prop in props ) { + value = props[ prop ]; + if ( rfxtypes.test( value ) ) { + delete props[ prop ]; + toggle = toggle || value === "toggle"; + if ( value === ( hidden ? "hide" : "show" ) ) { + + // Pretend to be hidden if this is a "show" and + // there is still data from a stopped show/hide + if ( value === "show" && dataShow && dataShow[ prop ] !== undefined ) { + hidden = true; + + // Ignore all other no-op show/hide data + } else { + continue; + } + } + orig[ prop ] = dataShow && dataShow[ prop ] || jQuery.style( elem, prop ); + } + } + + // Bail out if this is a no-op like .hide().hide() + propTween = !jQuery.isEmptyObject( props ); + if ( !propTween && jQuery.isEmptyObject( orig ) ) { + return; + } + + // Restrict "overflow" and "display" styles during box animations + if ( isBox && elem.nodeType === 1 ) { + + // Support: IE <=9 - 11, Edge 12 - 15 + // Record all 3 overflow attributes because IE does not infer the shorthand + // from identically-valued overflowX and overflowY and Edge just mirrors + // the overflowX value there. 
+ opts.overflow = [ style.overflow, style.overflowX, style.overflowY ]; + + // Identify a display type, preferring old show/hide data over the CSS cascade + restoreDisplay = dataShow && dataShow.display; + if ( restoreDisplay == null ) { + restoreDisplay = dataPriv.get( elem, "display" ); + } + display = jQuery.css( elem, "display" ); + if ( display === "none" ) { + if ( restoreDisplay ) { + display = restoreDisplay; + } else { + + // Get nonempty value(s) by temporarily forcing visibility + showHide( [ elem ], true ); + restoreDisplay = elem.style.display || restoreDisplay; + display = jQuery.css( elem, "display" ); + showHide( [ elem ] ); + } + } + + // Animate inline elements as inline-block + if ( display === "inline" || display === "inline-block" && restoreDisplay != null ) { + if ( jQuery.css( elem, "float" ) === "none" ) { + + // Restore the original display value at the end of pure show/hide animations + if ( !propTween ) { + anim.done( function() { + style.display = restoreDisplay; + } ); + if ( restoreDisplay == null ) { + display = style.display; + restoreDisplay = display === "none" ? "" : display; + } + } + style.display = "inline-block"; + } + } + } + + if ( opts.overflow ) { + style.overflow = "hidden"; + anim.always( function() { + style.overflow = opts.overflow[ 0 ]; + style.overflowX = opts.overflow[ 1 ]; + style.overflowY = opts.overflow[ 2 ]; + } ); + } + + // Implement show/hide animations + propTween = false; + for ( prop in orig ) { + + // General show/hide setup for this element animation + if ( !propTween ) { + if ( dataShow ) { + if ( "hidden" in dataShow ) { + hidden = dataShow.hidden; + } + } else { + dataShow = dataPriv.access( elem, "fxshow", { display: restoreDisplay } ); + } + + // Store hidden/visible for toggle so `.stop().toggle()` "reverses" + if ( toggle ) { + dataShow.hidden = !hidden; + } + + // Show elements before animating them + if ( hidden ) { + showHide( [ elem ], true ); + } + + /* eslint-disable no-loop-func */ + + anim.done( function() { + + /* eslint-enable no-loop-func */ + + // The final step of a "hide" animation is actually hiding the element + if ( !hidden ) { + showHide( [ elem ] ); + } + dataPriv.remove( elem, "fxshow" ); + for ( prop in orig ) { + jQuery.style( elem, prop, orig[ prop ] ); + } + } ); + } + + // Per-property setup + propTween = createTween( hidden ? dataShow[ prop ] : 0, prop, anim ); + if ( !( prop in dataShow ) ) { + dataShow[ prop ] = propTween.start; + if ( hidden ) { + propTween.end = propTween.start; + propTween.start = 0; + } + } + } +} + +function propFilter( props, specialEasing ) { + var index, name, easing, value, hooks; + + // camelCase, specialEasing and expand cssHook pass + for ( index in props ) { + name = camelCase( index ); + easing = specialEasing[ name ]; + value = props[ index ]; + if ( Array.isArray( value ) ) { + easing = value[ 1 ]; + value = props[ index ] = value[ 0 ]; + } + + if ( index !== name ) { + props[ name ] = value; + delete props[ index ]; + } + + hooks = jQuery.cssHooks[ name ]; + if ( hooks && "expand" in hooks ) { + value = hooks.expand( value ); + delete props[ name ]; + + // Not quite $.extend, this won't overwrite existing keys. 
+ // Reusing 'index' because we have the correct "name" + for ( index in value ) { + if ( !( index in props ) ) { + props[ index ] = value[ index ]; + specialEasing[ index ] = easing; + } + } + } else { + specialEasing[ name ] = easing; + } + } +} + +function Animation( elem, properties, options ) { + var result, + stopped, + index = 0, + length = Animation.prefilters.length, + deferred = jQuery.Deferred().always( function() { + + // Don't match elem in the :animated selector + delete tick.elem; + } ), + tick = function() { + if ( stopped ) { + return false; + } + var currentTime = fxNow || createFxNow(), + remaining = Math.max( 0, animation.startTime + animation.duration - currentTime ), + + // Support: Android 2.3 only + // Archaic crash bug won't allow us to use `1 - ( 0.5 || 0 )` (#12497) + temp = remaining / animation.duration || 0, + percent = 1 - temp, + index = 0, + length = animation.tweens.length; + + for ( ; index < length; index++ ) { + animation.tweens[ index ].run( percent ); + } + + deferred.notifyWith( elem, [ animation, percent, remaining ] ); + + // If there's more to do, yield + if ( percent < 1 && length ) { + return remaining; + } + + // If this was an empty animation, synthesize a final progress notification + if ( !length ) { + deferred.notifyWith( elem, [ animation, 1, 0 ] ); + } + + // Resolve the animation and report its conclusion + deferred.resolveWith( elem, [ animation ] ); + return false; + }, + animation = deferred.promise( { + elem: elem, + props: jQuery.extend( {}, properties ), + opts: jQuery.extend( true, { + specialEasing: {}, + easing: jQuery.easing._default + }, options ), + originalProperties: properties, + originalOptions: options, + startTime: fxNow || createFxNow(), + duration: options.duration, + tweens: [], + createTween: function( prop, end ) { + var tween = jQuery.Tween( elem, animation.opts, prop, end, + animation.opts.specialEasing[ prop ] || animation.opts.easing ); + animation.tweens.push( tween ); + return tween; + }, + stop: function( gotoEnd ) { + var index = 0, + + // If we are going to the end, we want to run all the tweens + // otherwise we skip this part + length = gotoEnd ? 
animation.tweens.length : 0; + if ( stopped ) { + return this; + } + stopped = true; + for ( ; index < length; index++ ) { + animation.tweens[ index ].run( 1 ); + } + + // Resolve when we played the last frame; otherwise, reject + if ( gotoEnd ) { + deferred.notifyWith( elem, [ animation, 1, 0 ] ); + deferred.resolveWith( elem, [ animation, gotoEnd ] ); + } else { + deferred.rejectWith( elem, [ animation, gotoEnd ] ); + } + return this; + } + } ), + props = animation.props; + + propFilter( props, animation.opts.specialEasing ); + + for ( ; index < length; index++ ) { + result = Animation.prefilters[ index ].call( animation, elem, props, animation.opts ); + if ( result ) { + if ( isFunction( result.stop ) ) { + jQuery._queueHooks( animation.elem, animation.opts.queue ).stop = + result.stop.bind( result ); + } + return result; + } + } + + jQuery.map( props, createTween, animation ); + + if ( isFunction( animation.opts.start ) ) { + animation.opts.start.call( elem, animation ); + } + + // Attach callbacks from options + animation + .progress( animation.opts.progress ) + .done( animation.opts.done, animation.opts.complete ) + .fail( animation.opts.fail ) + .always( animation.opts.always ); + + jQuery.fx.timer( + jQuery.extend( tick, { + elem: elem, + anim: animation, + queue: animation.opts.queue + } ) + ); + + return animation; +} + +jQuery.Animation = jQuery.extend( Animation, { + + tweeners: { + "*": [ function( prop, value ) { + var tween = this.createTween( prop, value ); + adjustCSS( tween.elem, prop, rcssNum.exec( value ), tween ); + return tween; + } ] + }, + + tweener: function( props, callback ) { + if ( isFunction( props ) ) { + callback = props; + props = [ "*" ]; + } else { + props = props.match( rnothtmlwhite ); + } + + var prop, + index = 0, + length = props.length; + + for ( ; index < length; index++ ) { + prop = props[ index ]; + Animation.tweeners[ prop ] = Animation.tweeners[ prop ] || []; + Animation.tweeners[ prop ].unshift( callback ); + } + }, + + prefilters: [ defaultPrefilter ], + + prefilter: function( callback, prepend ) { + if ( prepend ) { + Animation.prefilters.unshift( callback ); + } else { + Animation.prefilters.push( callback ); + } + } +} ); + +jQuery.speed = function( speed, easing, fn ) { + var opt = speed && typeof speed === "object" ? 
jQuery.extend( {}, speed ) : { + complete: fn || !fn && easing || + isFunction( speed ) && speed, + duration: speed, + easing: fn && easing || easing && !isFunction( easing ) && easing + }; + + // Go to the end state if fx are off + if ( jQuery.fx.off ) { + opt.duration = 0; + + } else { + if ( typeof opt.duration !== "number" ) { + if ( opt.duration in jQuery.fx.speeds ) { + opt.duration = jQuery.fx.speeds[ opt.duration ]; + + } else { + opt.duration = jQuery.fx.speeds._default; + } + } + } + + // Normalize opt.queue - true/undefined/null -> "fx" + if ( opt.queue == null || opt.queue === true ) { + opt.queue = "fx"; + } + + // Queueing + opt.old = opt.complete; + + opt.complete = function() { + if ( isFunction( opt.old ) ) { + opt.old.call( this ); + } + + if ( opt.queue ) { + jQuery.dequeue( this, opt.queue ); + } + }; + + return opt; +}; + +jQuery.fn.extend( { + fadeTo: function( speed, to, easing, callback ) { + + // Show any hidden elements after setting opacity to 0 + return this.filter( isHiddenWithinTree ).css( "opacity", 0 ).show() + + // Animate to the value specified + .end().animate( { opacity: to }, speed, easing, callback ); + }, + animate: function( prop, speed, easing, callback ) { + var empty = jQuery.isEmptyObject( prop ), + optall = jQuery.speed( speed, easing, callback ), + doAnimation = function() { + + // Operate on a copy of prop so per-property easing won't be lost + var anim = Animation( this, jQuery.extend( {}, prop ), optall ); + + // Empty animations, or finishing resolves immediately + if ( empty || dataPriv.get( this, "finish" ) ) { + anim.stop( true ); + } + }; + + doAnimation.finish = doAnimation; + + return empty || optall.queue === false ? + this.each( doAnimation ) : + this.queue( optall.queue, doAnimation ); + }, + stop: function( type, clearQueue, gotoEnd ) { + var stopQueue = function( hooks ) { + var stop = hooks.stop; + delete hooks.stop; + stop( gotoEnd ); + }; + + if ( typeof type !== "string" ) { + gotoEnd = clearQueue; + clearQueue = type; + type = undefined; + } + if ( clearQueue ) { + this.queue( type || "fx", [] ); + } + + return this.each( function() { + var dequeue = true, + index = type != null && type + "queueHooks", + timers = jQuery.timers, + data = dataPriv.get( this ); + + if ( index ) { + if ( data[ index ] && data[ index ].stop ) { + stopQueue( data[ index ] ); + } + } else { + for ( index in data ) { + if ( data[ index ] && data[ index ].stop && rrun.test( index ) ) { + stopQueue( data[ index ] ); + } + } + } + + for ( index = timers.length; index--; ) { + if ( timers[ index ].elem === this && + ( type == null || timers[ index ].queue === type ) ) { + + timers[ index ].anim.stop( gotoEnd ); + dequeue = false; + timers.splice( index, 1 ); + } + } + + // Start the next in the queue if the last step wasn't forced. + // Timers currently will call their complete callbacks, which + // will dequeue but only if they were gotoEnd. + if ( dequeue || !gotoEnd ) { + jQuery.dequeue( this, type ); + } + } ); + }, + finish: function( type ) { + if ( type !== false ) { + type = type || "fx"; + } + return this.each( function() { + var index, + data = dataPriv.get( this ), + queue = data[ type + "queue" ], + hooks = data[ type + "queueHooks" ], + timers = jQuery.timers, + length = queue ? 
queue.length : 0; + + // Enable finishing flag on private data + data.finish = true; + + // Empty the queue first + jQuery.queue( this, type, [] ); + + if ( hooks && hooks.stop ) { + hooks.stop.call( this, true ); + } + + // Look for any active animations, and finish them + for ( index = timers.length; index--; ) { + if ( timers[ index ].elem === this && timers[ index ].queue === type ) { + timers[ index ].anim.stop( true ); + timers.splice( index, 1 ); + } + } + + // Look for any animations in the old queue and finish them + for ( index = 0; index < length; index++ ) { + if ( queue[ index ] && queue[ index ].finish ) { + queue[ index ].finish.call( this ); + } + } + + // Turn off finishing flag + delete data.finish; + } ); + } +} ); + +jQuery.each( [ "toggle", "show", "hide" ], function( _i, name ) { + var cssFn = jQuery.fn[ name ]; + jQuery.fn[ name ] = function( speed, easing, callback ) { + return speed == null || typeof speed === "boolean" ? + cssFn.apply( this, arguments ) : + this.animate( genFx( name, true ), speed, easing, callback ); + }; +} ); + +// Generate shortcuts for custom animations +jQuery.each( { + slideDown: genFx( "show" ), + slideUp: genFx( "hide" ), + slideToggle: genFx( "toggle" ), + fadeIn: { opacity: "show" }, + fadeOut: { opacity: "hide" }, + fadeToggle: { opacity: "toggle" } +}, function( name, props ) { + jQuery.fn[ name ] = function( speed, easing, callback ) { + return this.animate( props, speed, easing, callback ); + }; +} ); + +jQuery.timers = []; +jQuery.fx.tick = function() { + var timer, + i = 0, + timers = jQuery.timers; + + fxNow = Date.now(); + + for ( ; i < timers.length; i++ ) { + timer = timers[ i ]; + + // Run the timer and safely remove it when done (allowing for external removal) + if ( !timer() && timers[ i ] === timer ) { + timers.splice( i--, 1 ); + } + } + + if ( !timers.length ) { + jQuery.fx.stop(); + } + fxNow = undefined; +}; + +jQuery.fx.timer = function( timer ) { + jQuery.timers.push( timer ); + jQuery.fx.start(); +}; + +jQuery.fx.interval = 13; +jQuery.fx.start = function() { + if ( inProgress ) { + return; + } + + inProgress = true; + schedule(); +}; + +jQuery.fx.stop = function() { + inProgress = null; +}; + +jQuery.fx.speeds = { + slow: 600, + fast: 200, + + // Default speed + _default: 400 +}; + + +// Based off of the plugin by Clint Helfers, with permission. +// https://web.archive.org/web/20100324014747/http://blindsignals.com/index.php/2009/07/jquery-delay/ +jQuery.fn.delay = function( time, type ) { + time = jQuery.fx ? 
jQuery.fx.speeds[ time ] || time : time; + type = type || "fx"; + + return this.queue( type, function( next, hooks ) { + var timeout = window.setTimeout( next, time ); + hooks.stop = function() { + window.clearTimeout( timeout ); + }; + } ); +}; + + +( function() { + var input = document.createElement( "input" ), + select = document.createElement( "select" ), + opt = select.appendChild( document.createElement( "option" ) ); + + input.type = "checkbox"; + + // Support: Android <=4.3 only + // Default value for a checkbox should be "on" + support.checkOn = input.value !== ""; + + // Support: IE <=11 only + // Must access selectedIndex to make default options select + support.optSelected = opt.selected; + + // Support: IE <=11 only + // An input loses its value after becoming a radio + input = document.createElement( "input" ); + input.value = "t"; + input.type = "radio"; + support.radioValue = input.value === "t"; +} )(); + + +var boolHook, + attrHandle = jQuery.expr.attrHandle; + +jQuery.fn.extend( { + attr: function( name, value ) { + return access( this, jQuery.attr, name, value, arguments.length > 1 ); + }, + + removeAttr: function( name ) { + return this.each( function() { + jQuery.removeAttr( this, name ); + } ); + } +} ); + +jQuery.extend( { + attr: function( elem, name, value ) { + var ret, hooks, + nType = elem.nodeType; + + // Don't get/set attributes on text, comment and attribute nodes + if ( nType === 3 || nType === 8 || nType === 2 ) { + return; + } + + // Fallback to prop when attributes are not supported + if ( typeof elem.getAttribute === "undefined" ) { + return jQuery.prop( elem, name, value ); + } + + // Attribute hooks are determined by the lowercase version + // Grab necessary hook if one is defined + if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) { + hooks = jQuery.attrHooks[ name.toLowerCase() ] || + ( jQuery.expr.match.bool.test( name ) ? boolHook : undefined ); + } + + if ( value !== undefined ) { + if ( value === null ) { + jQuery.removeAttr( elem, name ); + return; + } + + if ( hooks && "set" in hooks && + ( ret = hooks.set( elem, value, name ) ) !== undefined ) { + return ret; + } + + elem.setAttribute( name, value + "" ); + return value; + } + + if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) { + return ret; + } + + ret = jQuery.find.attr( elem, name ); + + // Non-existent attributes return null, we normalize to undefined + return ret == null ? 
undefined : ret; + }, + + attrHooks: { + type: { + set: function( elem, value ) { + if ( !support.radioValue && value === "radio" && + nodeName( elem, "input" ) ) { + var val = elem.value; + elem.setAttribute( "type", value ); + if ( val ) { + elem.value = val; + } + return value; + } + } + } + }, + + removeAttr: function( elem, value ) { + var name, + i = 0, + + // Attribute names can contain non-HTML whitespace characters + // https://html.spec.whatwg.org/multipage/syntax.html#attributes-2 + attrNames = value && value.match( rnothtmlwhite ); + + if ( attrNames && elem.nodeType === 1 ) { + while ( ( name = attrNames[ i++ ] ) ) { + elem.removeAttribute( name ); + } + } + } +} ); + +// Hooks for boolean attributes +boolHook = { + set: function( elem, value, name ) { + if ( value === false ) { + + // Remove boolean attributes when set to false + jQuery.removeAttr( elem, name ); + } else { + elem.setAttribute( name, name ); + } + return name; + } +}; + +jQuery.each( jQuery.expr.match.bool.source.match( /\w+/g ), function( _i, name ) { + var getter = attrHandle[ name ] || jQuery.find.attr; + + attrHandle[ name ] = function( elem, name, isXML ) { + var ret, handle, + lowercaseName = name.toLowerCase(); + + if ( !isXML ) { + + // Avoid an infinite loop by temporarily removing this function from the getter + handle = attrHandle[ lowercaseName ]; + attrHandle[ lowercaseName ] = ret; + ret = getter( elem, name, isXML ) != null ? + lowercaseName : + null; + attrHandle[ lowercaseName ] = handle; + } + return ret; + }; +} ); + + + + +var rfocusable = /^(?:input|select|textarea|button)$/i, + rclickable = /^(?:a|area)$/i; + +jQuery.fn.extend( { + prop: function( name, value ) { + return access( this, jQuery.prop, name, value, arguments.length > 1 ); + }, + + removeProp: function( name ) { + return this.each( function() { + delete this[ jQuery.propFix[ name ] || name ]; + } ); + } +} ); + +jQuery.extend( { + prop: function( elem, name, value ) { + var ret, hooks, + nType = elem.nodeType; + + // Don't get/set properties on text, comment and attribute nodes + if ( nType === 3 || nType === 8 || nType === 2 ) { + return; + } + + if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) { + + // Fix name and attach hooks + name = jQuery.propFix[ name ] || name; + hooks = jQuery.propHooks[ name ]; + } + + if ( value !== undefined ) { + if ( hooks && "set" in hooks && + ( ret = hooks.set( elem, value, name ) ) !== undefined ) { + return ret; + } + + return ( elem[ name ] = value ); + } + + if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) { + return ret; + } + + return elem[ name ]; + }, + + propHooks: { + tabIndex: { + get: function( elem ) { + + // Support: IE <=9 - 11 only + // elem.tabIndex doesn't always return the + // correct value when it hasn't been explicitly set + // https://web.archive.org/web/20141116233347/http://fluidproject.org/blog/2008/01/09/getting-setting-and-removing-tabindex-values-with-javascript/ + // Use proper attribute retrieval(#12072) + var tabindex = jQuery.find.attr( elem, "tabindex" ); + + if ( tabindex ) { + return parseInt( tabindex, 10 ); + } + + if ( + rfocusable.test( elem.nodeName ) || + rclickable.test( elem.nodeName ) && + elem.href + ) { + return 0; + } + + return -1; + } + } + }, + + propFix: { + "for": "htmlFor", + "class": "className" + } +} ); + +// Support: IE <=11 only +// Accessing the selectedIndex property +// forces the browser to respect setting selected +// on the option +// The getter ensures a default option is selected +// when in an 
optgroup +// eslint rule "no-unused-expressions" is disabled for this code +// since it considers such accessions noop +if ( !support.optSelected ) { + jQuery.propHooks.selected = { + get: function( elem ) { + + /* eslint no-unused-expressions: "off" */ + + var parent = elem.parentNode; + if ( parent && parent.parentNode ) { + parent.parentNode.selectedIndex; + } + return null; + }, + set: function( elem ) { + + /* eslint no-unused-expressions: "off" */ + + var parent = elem.parentNode; + if ( parent ) { + parent.selectedIndex; + + if ( parent.parentNode ) { + parent.parentNode.selectedIndex; + } + } + } + }; +} + +jQuery.each( [ + "tabIndex", + "readOnly", + "maxLength", + "cellSpacing", + "cellPadding", + "rowSpan", + "colSpan", + "useMap", + "frameBorder", + "contentEditable" +], function() { + jQuery.propFix[ this.toLowerCase() ] = this; +} ); + + + + + // Strip and collapse whitespace according to HTML spec + // https://infra.spec.whatwg.org/#strip-and-collapse-ascii-whitespace + function stripAndCollapse( value ) { + var tokens = value.match( rnothtmlwhite ) || []; + return tokens.join( " " ); + } + + +function getClass( elem ) { + return elem.getAttribute && elem.getAttribute( "class" ) || ""; +} + +function classesToArray( value ) { + if ( Array.isArray( value ) ) { + return value; + } + if ( typeof value === "string" ) { + return value.match( rnothtmlwhite ) || []; + } + return []; +} + +jQuery.fn.extend( { + addClass: function( value ) { + var classes, elem, cur, curValue, clazz, j, finalValue, + i = 0; + + if ( isFunction( value ) ) { + return this.each( function( j ) { + jQuery( this ).addClass( value.call( this, j, getClass( this ) ) ); + } ); + } + + classes = classesToArray( value ); + + if ( classes.length ) { + while ( ( elem = this[ i++ ] ) ) { + curValue = getClass( elem ); + cur = elem.nodeType === 1 && ( " " + stripAndCollapse( curValue ) + " " ); + + if ( cur ) { + j = 0; + while ( ( clazz = classes[ j++ ] ) ) { + if ( cur.indexOf( " " + clazz + " " ) < 0 ) { + cur += clazz + " "; + } + } + + // Only assign if different to avoid unneeded rendering. + finalValue = stripAndCollapse( cur ); + if ( curValue !== finalValue ) { + elem.setAttribute( "class", finalValue ); + } + } + } + } + + return this; + }, + + removeClass: function( value ) { + var classes, elem, cur, curValue, clazz, j, finalValue, + i = 0; + + if ( isFunction( value ) ) { + return this.each( function( j ) { + jQuery( this ).removeClass( value.call( this, j, getClass( this ) ) ); + } ); + } + + if ( !arguments.length ) { + return this.attr( "class", "" ); + } + + classes = classesToArray( value ); + + if ( classes.length ) { + while ( ( elem = this[ i++ ] ) ) { + curValue = getClass( elem ); + + // This expression is here for better compressibility (see addClass) + cur = elem.nodeType === 1 && ( " " + stripAndCollapse( curValue ) + " " ); + + if ( cur ) { + j = 0; + while ( ( clazz = classes[ j++ ] ) ) { + + // Remove *all* instances + while ( cur.indexOf( " " + clazz + " " ) > -1 ) { + cur = cur.replace( " " + clazz + " ", " " ); + } + } + + // Only assign if different to avoid unneeded rendering. + finalValue = stripAndCollapse( cur ); + if ( curValue !== finalValue ) { + elem.setAttribute( "class", finalValue ); + } + } + } + } + + return this; + }, + + toggleClass: function( value, stateVal ) { + var type = typeof value, + isValidValue = type === "string" || Array.isArray( value ); + + if ( typeof stateVal === "boolean" && isValidValue ) { + return stateVal ? 
this.addClass( value ) : this.removeClass( value ); + } + + if ( isFunction( value ) ) { + return this.each( function( i ) { + jQuery( this ).toggleClass( + value.call( this, i, getClass( this ), stateVal ), + stateVal + ); + } ); + } + + return this.each( function() { + var className, i, self, classNames; + + if ( isValidValue ) { + + // Toggle individual class names + i = 0; + self = jQuery( this ); + classNames = classesToArray( value ); + + while ( ( className = classNames[ i++ ] ) ) { + + // Check each className given, space separated list + if ( self.hasClass( className ) ) { + self.removeClass( className ); + } else { + self.addClass( className ); + } + } + + // Toggle whole class name + } else if ( value === undefined || type === "boolean" ) { + className = getClass( this ); + if ( className ) { + + // Store className if set + dataPriv.set( this, "__className__", className ); + } + + // If the element has a class name or if we're passed `false`, + // then remove the whole classname (if there was one, the above saved it). + // Otherwise bring back whatever was previously saved (if anything), + // falling back to the empty string if nothing was stored. + if ( this.setAttribute ) { + this.setAttribute( "class", + className || value === false ? + "" : + dataPriv.get( this, "__className__" ) || "" + ); + } + } + } ); + }, + + hasClass: function( selector ) { + var className, elem, + i = 0; + + className = " " + selector + " "; + while ( ( elem = this[ i++ ] ) ) { + if ( elem.nodeType === 1 && + ( " " + stripAndCollapse( getClass( elem ) ) + " " ).indexOf( className ) > -1 ) { + return true; + } + } + + return false; + } +} ); + + + + +var rreturn = /\r/g; + +jQuery.fn.extend( { + val: function( value ) { + var hooks, ret, valueIsFunction, + elem = this[ 0 ]; + + if ( !arguments.length ) { + if ( elem ) { + hooks = jQuery.valHooks[ elem.type ] || + jQuery.valHooks[ elem.nodeName.toLowerCase() ]; + + if ( hooks && + "get" in hooks && + ( ret = hooks.get( elem, "value" ) ) !== undefined + ) { + return ret; + } + + ret = elem.value; + + // Handle most common string cases + if ( typeof ret === "string" ) { + return ret.replace( rreturn, "" ); + } + + // Handle cases where value is null/undef or number + return ret == null ? "" : ret; + } + + return; + } + + valueIsFunction = isFunction( value ); + + return this.each( function( i ) { + var val; + + if ( this.nodeType !== 1 ) { + return; + } + + if ( valueIsFunction ) { + val = value.call( this, i, jQuery( this ).val() ); + } else { + val = value; + } + + // Treat null/undefined as ""; convert numbers to string + if ( val == null ) { + val = ""; + + } else if ( typeof val === "number" ) { + val += ""; + + } else if ( Array.isArray( val ) ) { + val = jQuery.map( val, function( value ) { + return value == null ? "" : value + ""; + } ); + } + + hooks = jQuery.valHooks[ this.type ] || jQuery.valHooks[ this.nodeName.toLowerCase() ]; + + // If set returns undefined, fall back to normal setting + if ( !hooks || !( "set" in hooks ) || hooks.set( this, val, "value" ) === undefined ) { + this.value = val; + } + } ); + } +} ); + +jQuery.extend( { + valHooks: { + option: { + get: function( elem ) { + + var val = jQuery.find.attr( elem, "value" ); + return val != null ? 
+ val : + + // Support: IE <=10 - 11 only + // option.text throws exceptions (#14686, #14858) + // Strip and collapse whitespace + // https://html.spec.whatwg.org/#strip-and-collapse-whitespace + stripAndCollapse( jQuery.text( elem ) ); + } + }, + select: { + get: function( elem ) { + var value, option, i, + options = elem.options, + index = elem.selectedIndex, + one = elem.type === "select-one", + values = one ? null : [], + max = one ? index + 1 : options.length; + + if ( index < 0 ) { + i = max; + + } else { + i = one ? index : 0; + } + + // Loop through all the selected options + for ( ; i < max; i++ ) { + option = options[ i ]; + + // Support: IE <=9 only + // IE8-9 doesn't update selected after form reset (#2551) + if ( ( option.selected || i === index ) && + + // Don't return options that are disabled or in a disabled optgroup + !option.disabled && + ( !option.parentNode.disabled || + !nodeName( option.parentNode, "optgroup" ) ) ) { + + // Get the specific value for the option + value = jQuery( option ).val(); + + // We don't need an array for one selects + if ( one ) { + return value; + } + + // Multi-Selects return an array + values.push( value ); + } + } + + return values; + }, + + set: function( elem, value ) { + var optionSet, option, + options = elem.options, + values = jQuery.makeArray( value ), + i = options.length; + + while ( i-- ) { + option = options[ i ]; + + /* eslint-disable no-cond-assign */ + + if ( option.selected = + jQuery.inArray( jQuery.valHooks.option.get( option ), values ) > -1 + ) { + optionSet = true; + } + + /* eslint-enable no-cond-assign */ + } + + // Force browsers to behave consistently when non-matching value is set + if ( !optionSet ) { + elem.selectedIndex = -1; + } + return values; + } + } + } +} ); + +// Radios and checkboxes getter/setter +jQuery.each( [ "radio", "checkbox" ], function() { + jQuery.valHooks[ this ] = { + set: function( elem, value ) { + if ( Array.isArray( value ) ) { + return ( elem.checked = jQuery.inArray( jQuery( elem ).val(), value ) > -1 ); + } + } + }; + if ( !support.checkOn ) { + jQuery.valHooks[ this ].get = function( elem ) { + return elem.getAttribute( "value" ) === null ? "on" : elem.value; + }; + } +} ); + + + + +// Return jQuery for attributes-only inclusion + + +support.focusin = "onfocusin" in window; + + +var rfocusMorph = /^(?:focusinfocus|focusoutblur)$/, + stopPropagationCallback = function( e ) { + e.stopPropagation(); + }; + +jQuery.extend( jQuery.event, { + + trigger: function( event, data, elem, onlyHandlers ) { + + var i, cur, tmp, bubbleType, ontype, handle, special, lastElement, + eventPath = [ elem || document ], + type = hasOwn.call( event, "type" ) ? event.type : event, + namespaces = hasOwn.call( event, "namespace" ) ? event.namespace.split( "." ) : []; + + cur = lastElement = tmp = elem = elem || document; + + // Don't do events on text and comment nodes + if ( elem.nodeType === 3 || elem.nodeType === 8 ) { + return; + } + + // focus/blur morphs to focusin/out; ensure we're not firing them right now + if ( rfocusMorph.test( type + jQuery.event.triggered ) ) { + return; + } + + if ( type.indexOf( "." ) > -1 ) { + + // Namespaced trigger; create a regexp to match event type in handle() + namespaces = type.split( "." ); + type = namespaces.shift(); + namespaces.sort(); + } + ontype = type.indexOf( ":" ) < 0 && "on" + type; + + // Caller can pass in a jQuery.Event object, Object, or just an event type string + event = event[ jQuery.expando ] ? 
+ event : + new jQuery.Event( type, typeof event === "object" && event ); + + // Trigger bitmask: & 1 for native handlers; & 2 for jQuery (always true) + event.isTrigger = onlyHandlers ? 2 : 3; + event.namespace = namespaces.join( "." ); + event.rnamespace = event.namespace ? + new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ) : + null; + + // Clean up the event in case it is being reused + event.result = undefined; + if ( !event.target ) { + event.target = elem; + } + + // Clone any incoming data and prepend the event, creating the handler arg list + data = data == null ? + [ event ] : + jQuery.makeArray( data, [ event ] ); + + // Allow special events to draw outside the lines + special = jQuery.event.special[ type ] || {}; + if ( !onlyHandlers && special.trigger && special.trigger.apply( elem, data ) === false ) { + return; + } + + // Determine event propagation path in advance, per W3C events spec (#9951) + // Bubble up to document, then to window; watch for a global ownerDocument var (#9724) + if ( !onlyHandlers && !special.noBubble && !isWindow( elem ) ) { + + bubbleType = special.delegateType || type; + if ( !rfocusMorph.test( bubbleType + type ) ) { + cur = cur.parentNode; + } + for ( ; cur; cur = cur.parentNode ) { + eventPath.push( cur ); + tmp = cur; + } + + // Only add window if we got to document (e.g., not plain obj or detached DOM) + if ( tmp === ( elem.ownerDocument || document ) ) { + eventPath.push( tmp.defaultView || tmp.parentWindow || window ); + } + } + + // Fire handlers on the event path + i = 0; + while ( ( cur = eventPath[ i++ ] ) && !event.isPropagationStopped() ) { + lastElement = cur; + event.type = i > 1 ? + bubbleType : + special.bindType || type; + + // jQuery handler + handle = ( dataPriv.get( cur, "events" ) || Object.create( null ) )[ event.type ] && + dataPriv.get( cur, "handle" ); + if ( handle ) { + handle.apply( cur, data ); + } + + // Native handler + handle = ontype && cur[ ontype ]; + if ( handle && handle.apply && acceptData( cur ) ) { + event.result = handle.apply( cur, data ); + if ( event.result === false ) { + event.preventDefault(); + } + } + } + event.type = type; + + // If nobody prevented the default action, do it now + if ( !onlyHandlers && !event.isDefaultPrevented() ) { + + if ( ( !special._default || + special._default.apply( eventPath.pop(), data ) === false ) && + acceptData( elem ) ) { + + // Call a native DOM method on the target with the same name as the event. 
+ // Don't do default actions on window, that's where global variables be (#6170) + if ( ontype && isFunction( elem[ type ] ) && !isWindow( elem ) ) { + + // Don't re-trigger an onFOO event when we call its FOO() method + tmp = elem[ ontype ]; + + if ( tmp ) { + elem[ ontype ] = null; + } + + // Prevent re-triggering of the same event, since we already bubbled it above + jQuery.event.triggered = type; + + if ( event.isPropagationStopped() ) { + lastElement.addEventListener( type, stopPropagationCallback ); + } + + elem[ type ](); + + if ( event.isPropagationStopped() ) { + lastElement.removeEventListener( type, stopPropagationCallback ); + } + + jQuery.event.triggered = undefined; + + if ( tmp ) { + elem[ ontype ] = tmp; + } + } + } + } + + return event.result; + }, + + // Piggyback on a donor event to simulate a different one + // Used only for `focus(in | out)` events + simulate: function( type, elem, event ) { + var e = jQuery.extend( + new jQuery.Event(), + event, + { + type: type, + isSimulated: true + } + ); + + jQuery.event.trigger( e, null, elem ); + } + +} ); + +jQuery.fn.extend( { + + trigger: function( type, data ) { + return this.each( function() { + jQuery.event.trigger( type, data, this ); + } ); + }, + triggerHandler: function( type, data ) { + var elem = this[ 0 ]; + if ( elem ) { + return jQuery.event.trigger( type, data, elem, true ); + } + } +} ); + + +// Support: Firefox <=44 +// Firefox doesn't have focus(in | out) events +// Related ticket - https://bugzilla.mozilla.org/show_bug.cgi?id=687787 +// +// Support: Chrome <=48 - 49, Safari <=9.0 - 9.1 +// focus(in | out) events fire after focus & blur events, +// which is spec violation - http://www.w3.org/TR/DOM-Level-3-Events/#events-focusevent-event-order +// Related ticket - https://bugs.chromium.org/p/chromium/issues/detail?id=449857 +if ( !support.focusin ) { + jQuery.each( { focus: "focusin", blur: "focusout" }, function( orig, fix ) { + + // Attach a single capturing handler on the document while someone wants focusin/focusout + var handler = function( event ) { + jQuery.event.simulate( fix, event.target, jQuery.event.fix( event ) ); + }; + + jQuery.event.special[ fix ] = { + setup: function() { + + // Handle: regular nodes (via `this.ownerDocument`), window + // (via `this.document`) & document (via `this`). + var doc = this.ownerDocument || this.document || this, + attaches = dataPriv.access( doc, fix ); + + if ( !attaches ) { + doc.addEventListener( orig, handler, true ); + } + dataPriv.access( doc, fix, ( attaches || 0 ) + 1 ); + }, + teardown: function() { + var doc = this.ownerDocument || this.document || this, + attaches = dataPriv.access( doc, fix ) - 1; + + if ( !attaches ) { + doc.removeEventListener( orig, handler, true ); + dataPriv.remove( doc, fix ); + + } else { + dataPriv.access( doc, fix, attaches ); + } + } + }; + } ); +} +var location = window.location; + +var nonce = { guid: Date.now() }; + +var rquery = ( /\?/ ); + + + +// Cross-browser xml parsing +jQuery.parseXML = function( data ) { + var xml, parserErrorElem; + if ( !data || typeof data !== "string" ) { + return null; + } + + // Support: IE 9 - 11 only + // IE throws on parseFromString with invalid input. + try { + xml = ( new window.DOMParser() ).parseFromString( data, "text/xml" ); + } catch ( e ) {} + + parserErrorElem = xml && xml.getElementsByTagName( "parsererror" )[ 0 ]; + if ( !xml || parserErrorElem ) { + jQuery.error( "Invalid XML: " + ( + parserErrorElem ? 
+ jQuery.map( parserErrorElem.childNodes, function( el ) { + return el.textContent; + } ).join( "\n" ) : + data + ) ); + } + return xml; +}; + + +var + rbracket = /\[\]$/, + rCRLF = /\r?\n/g, + rsubmitterTypes = /^(?:submit|button|image|reset|file)$/i, + rsubmittable = /^(?:input|select|textarea|keygen)/i; + +function buildParams( prefix, obj, traditional, add ) { + var name; + + if ( Array.isArray( obj ) ) { + + // Serialize array item. + jQuery.each( obj, function( i, v ) { + if ( traditional || rbracket.test( prefix ) ) { + + // Treat each array item as a scalar. + add( prefix, v ); + + } else { + + // Item is non-scalar (array or object), encode its numeric index. + buildParams( + prefix + "[" + ( typeof v === "object" && v != null ? i : "" ) + "]", + v, + traditional, + add + ); + } + } ); + + } else if ( !traditional && toType( obj ) === "object" ) { + + // Serialize object item. + for ( name in obj ) { + buildParams( prefix + "[" + name + "]", obj[ name ], traditional, add ); + } + + } else { + + // Serialize scalar item. + add( prefix, obj ); + } +} + +// Serialize an array of form elements or a set of +// key/values into a query string +jQuery.param = function( a, traditional ) { + var prefix, + s = [], + add = function( key, valueOrFunction ) { + + // If value is a function, invoke it and use its return value + var value = isFunction( valueOrFunction ) ? + valueOrFunction() : + valueOrFunction; + + s[ s.length ] = encodeURIComponent( key ) + "=" + + encodeURIComponent( value == null ? "" : value ); + }; + + if ( a == null ) { + return ""; + } + + // If an array was passed in, assume that it is an array of form elements. + if ( Array.isArray( a ) || ( a.jquery && !jQuery.isPlainObject( a ) ) ) { + + // Serialize the form elements + jQuery.each( a, function() { + add( this.name, this.value ); + } ); + + } else { + + // If traditional, encode the "old" way (the way 1.3.2 or older + // did it), otherwise encode params recursively. + for ( prefix in a ) { + buildParams( prefix, a[ prefix ], traditional, add ); + } + } + + // Return the resulting serialization + return s.join( "&" ); +}; + +jQuery.fn.extend( { + serialize: function() { + return jQuery.param( this.serializeArray() ); + }, + serializeArray: function() { + return this.map( function() { + + // Can add propHook for "elements" to filter or add form elements + var elements = jQuery.prop( this, "elements" ); + return elements ? 
jQuery.makeArray( elements ) : this; + } ).filter( function() { + var type = this.type; + + // Use .is( ":disabled" ) so that fieldset[disabled] works + return this.name && !jQuery( this ).is( ":disabled" ) && + rsubmittable.test( this.nodeName ) && !rsubmitterTypes.test( type ) && + ( this.checked || !rcheckableType.test( type ) ); + } ).map( function( _i, elem ) { + var val = jQuery( this ).val(); + + if ( val == null ) { + return null; + } + + if ( Array.isArray( val ) ) { + return jQuery.map( val, function( val ) { + return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; + } ); + } + + return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; + } ).get(); + } +} ); + + +var + r20 = /%20/g, + rhash = /#.*$/, + rantiCache = /([?&])_=[^&]*/, + rheaders = /^(.*?):[ \t]*([^\r\n]*)$/mg, + + // #7653, #8125, #8152: local protocol detection + rlocalProtocol = /^(?:about|app|app-storage|.+-extension|file|res|widget):$/, + rnoContent = /^(?:GET|HEAD)$/, + rprotocol = /^\/\//, + + /* Prefilters + * 1) They are useful to introduce custom dataTypes (see ajax/jsonp.js for an example) + * 2) These are called: + * - BEFORE asking for a transport + * - AFTER param serialization (s.data is a string if s.processData is true) + * 3) key is the dataType + * 4) the catchall symbol "*" can be used + * 5) execution will start with transport dataType and THEN continue down to "*" if needed + */ + prefilters = {}, + + /* Transports bindings + * 1) key is the dataType + * 2) the catchall symbol "*" can be used + * 3) selection will start with transport dataType and THEN go to "*" if needed + */ + transports = {}, + + // Avoid comment-prolog char sequence (#10098); must appease lint and evade compression + allTypes = "*/".concat( "*" ), + + // Anchor tag for parsing the document origin + originAnchor = document.createElement( "a" ); + +originAnchor.href = location.href; + +// Base "constructor" for jQuery.ajaxPrefilter and jQuery.ajaxTransport +function addToPrefiltersOrTransports( structure ) { + + // dataTypeExpression is optional and defaults to "*" + return function( dataTypeExpression, func ) { + + if ( typeof dataTypeExpression !== "string" ) { + func = dataTypeExpression; + dataTypeExpression = "*"; + } + + var dataType, + i = 0, + dataTypes = dataTypeExpression.toLowerCase().match( rnothtmlwhite ) || []; + + if ( isFunction( func ) ) { + + // For each dataType in the dataTypeExpression + while ( ( dataType = dataTypes[ i++ ] ) ) { + + // Prepend if requested + if ( dataType[ 0 ] === "+" ) { + dataType = dataType.slice( 1 ) || "*"; + ( structure[ dataType ] = structure[ dataType ] || [] ).unshift( func ); + + // Otherwise append + } else { + ( structure[ dataType ] = structure[ dataType ] || [] ).push( func ); + } + } + } + }; +} + +// Base inspection function for prefilters and transports +function inspectPrefiltersOrTransports( structure, options, originalOptions, jqXHR ) { + + var inspected = {}, + seekingTransport = ( structure === transports ); + + function inspect( dataType ) { + var selected; + inspected[ dataType ] = true; + jQuery.each( structure[ dataType ] || [], function( _, prefilterOrFactory ) { + var dataTypeOrTransport = prefilterOrFactory( options, originalOptions, jqXHR ); + if ( typeof dataTypeOrTransport === "string" && + !seekingTransport && !inspected[ dataTypeOrTransport ] ) { + + options.dataTypes.unshift( dataTypeOrTransport ); + inspect( dataTypeOrTransport ); + return false; + } else if ( seekingTransport ) { + return !( selected = dataTypeOrTransport ); + } + } 
); + return selected; + } + + return inspect( options.dataTypes[ 0 ] ) || !inspected[ "*" ] && inspect( "*" ); +} + +// A special extend for ajax options +// that takes "flat" options (not to be deep extended) +// Fixes #9887 +function ajaxExtend( target, src ) { + var key, deep, + flatOptions = jQuery.ajaxSettings.flatOptions || {}; + + for ( key in src ) { + if ( src[ key ] !== undefined ) { + ( flatOptions[ key ] ? target : ( deep || ( deep = {} ) ) )[ key ] = src[ key ]; + } + } + if ( deep ) { + jQuery.extend( true, target, deep ); + } + + return target; +} + +/* Handles responses to an ajax request: + * - finds the right dataType (mediates between content-type and expected dataType) + * - returns the corresponding response + */ +function ajaxHandleResponses( s, jqXHR, responses ) { + + var ct, type, finalDataType, firstDataType, + contents = s.contents, + dataTypes = s.dataTypes; + + // Remove auto dataType and get content-type in the process + while ( dataTypes[ 0 ] === "*" ) { + dataTypes.shift(); + if ( ct === undefined ) { + ct = s.mimeType || jqXHR.getResponseHeader( "Content-Type" ); + } + } + + // Check if we're dealing with a known content-type + if ( ct ) { + for ( type in contents ) { + if ( contents[ type ] && contents[ type ].test( ct ) ) { + dataTypes.unshift( type ); + break; + } + } + } + + // Check to see if we have a response for the expected dataType + if ( dataTypes[ 0 ] in responses ) { + finalDataType = dataTypes[ 0 ]; + } else { + + // Try convertible dataTypes + for ( type in responses ) { + if ( !dataTypes[ 0 ] || s.converters[ type + " " + dataTypes[ 0 ] ] ) { + finalDataType = type; + break; + } + if ( !firstDataType ) { + firstDataType = type; + } + } + + // Or just use first one + finalDataType = finalDataType || firstDataType; + } + + // If we found a dataType + // We add the dataType to the list if needed + // and return the corresponding response + if ( finalDataType ) { + if ( finalDataType !== dataTypes[ 0 ] ) { + dataTypes.unshift( finalDataType ); + } + return responses[ finalDataType ]; + } +} + +/* Chain conversions given the request and the original response + * Also sets the responseXXX fields on the jqXHR instance + */ +function ajaxConvert( s, response, jqXHR, isSuccess ) { + var conv2, current, conv, tmp, prev, + converters = {}, + + // Work with a copy of dataTypes in case we need to modify it for conversion + dataTypes = s.dataTypes.slice(); + + // Create converters map with lowercased keys + if ( dataTypes[ 1 ] ) { + for ( conv in s.converters ) { + converters[ conv.toLowerCase() ] = s.converters[ conv ]; + } + } + + current = dataTypes.shift(); + + // Convert to each sequential dataType + while ( current ) { + + if ( s.responseFields[ current ] ) { + jqXHR[ s.responseFields[ current ] ] = response; + } + + // Apply the dataFilter if provided + if ( !prev && isSuccess && s.dataFilter ) { + response = s.dataFilter( response, s.dataType ); + } + + prev = current; + current = dataTypes.shift(); + + if ( current ) { + + // There's only work to do if current dataType is non-auto + if ( current === "*" ) { + + current = prev; + + // Convert response if prev dataType is non-auto and differs from current + } else if ( prev !== "*" && prev !== current ) { + + // Seek a direct converter + conv = converters[ prev + " " + current ] || converters[ "* " + current ]; + + // If none found, seek a pair + if ( !conv ) { + for ( conv2 in converters ) { + + // If conv2 outputs current + tmp = conv2.split( " " ); + if ( tmp[ 1 ] === current ) { + + // If prev 
can be converted to accepted input + conv = converters[ prev + " " + tmp[ 0 ] ] || + converters[ "* " + tmp[ 0 ] ]; + if ( conv ) { + + // Condense equivalence converters + if ( conv === true ) { + conv = converters[ conv2 ]; + + // Otherwise, insert the intermediate dataType + } else if ( converters[ conv2 ] !== true ) { + current = tmp[ 0 ]; + dataTypes.unshift( tmp[ 1 ] ); + } + break; + } + } + } + } + + // Apply converter (if not an equivalence) + if ( conv !== true ) { + + // Unless errors are allowed to bubble, catch and return them + if ( conv && s.throws ) { + response = conv( response ); + } else { + try { + response = conv( response ); + } catch ( e ) { + return { + state: "parsererror", + error: conv ? e : "No conversion from " + prev + " to " + current + }; + } + } + } + } + } + } + + return { state: "success", data: response }; +} + +jQuery.extend( { + + // Counter for holding the number of active queries + active: 0, + + // Last-Modified header cache for next request + lastModified: {}, + etag: {}, + + ajaxSettings: { + url: location.href, + type: "GET", + isLocal: rlocalProtocol.test( location.protocol ), + global: true, + processData: true, + async: true, + contentType: "application/x-www-form-urlencoded; charset=UTF-8", + + /* + timeout: 0, + data: null, + dataType: null, + username: null, + password: null, + cache: null, + throws: false, + traditional: false, + headers: {}, + */ + + accepts: { + "*": allTypes, + text: "text/plain", + html: "text/html", + xml: "application/xml, text/xml", + json: "application/json, text/javascript" + }, + + contents: { + xml: /\bxml\b/, + html: /\bhtml/, + json: /\bjson\b/ + }, + + responseFields: { + xml: "responseXML", + text: "responseText", + json: "responseJSON" + }, + + // Data converters + // Keys separate source (or catchall "*") and destination types with a single space + converters: { + + // Convert anything to text + "* text": String, + + // Text to html (true = no transformation) + "text html": true, + + // Evaluate text as a json expression + "text json": JSON.parse, + + // Parse text as xml + "text xml": jQuery.parseXML + }, + + // For options that shouldn't be deep extended: + // you can add your own custom options here if + // and when you create one that shouldn't be + // deep extended (see ajaxExtend) + flatOptions: { + url: true, + context: true + } + }, + + // Creates a full fledged settings object into target + // with both ajaxSettings and settings fields. + // If target is omitted, writes into ajaxSettings. + ajaxSetup: function( target, settings ) { + return settings ? 
+ + // Building a settings object + ajaxExtend( ajaxExtend( target, jQuery.ajaxSettings ), settings ) : + + // Extending ajaxSettings + ajaxExtend( jQuery.ajaxSettings, target ); + }, + + ajaxPrefilter: addToPrefiltersOrTransports( prefilters ), + ajaxTransport: addToPrefiltersOrTransports( transports ), + + // Main method + ajax: function( url, options ) { + + // If url is an object, simulate pre-1.5 signature + if ( typeof url === "object" ) { + options = url; + url = undefined; + } + + // Force options to be an object + options = options || {}; + + var transport, + + // URL without anti-cache param + cacheURL, + + // Response headers + responseHeadersString, + responseHeaders, + + // timeout handle + timeoutTimer, + + // Url cleanup var + urlAnchor, + + // Request state (becomes false upon send and true upon completion) + completed, + + // To know if global events are to be dispatched + fireGlobals, + + // Loop variable + i, + + // uncached part of the url + uncached, + + // Create the final options object + s = jQuery.ajaxSetup( {}, options ), + + // Callbacks context + callbackContext = s.context || s, + + // Context for global events is callbackContext if it is a DOM node or jQuery collection + globalEventContext = s.context && + ( callbackContext.nodeType || callbackContext.jquery ) ? + jQuery( callbackContext ) : + jQuery.event, + + // Deferreds + deferred = jQuery.Deferred(), + completeDeferred = jQuery.Callbacks( "once memory" ), + + // Status-dependent callbacks + statusCode = s.statusCode || {}, + + // Headers (they are sent all at once) + requestHeaders = {}, + requestHeadersNames = {}, + + // Default abort message + strAbort = "canceled", + + // Fake xhr + jqXHR = { + readyState: 0, + + // Builds headers hashtable if needed + getResponseHeader: function( key ) { + var match; + if ( completed ) { + if ( !responseHeaders ) { + responseHeaders = {}; + while ( ( match = rheaders.exec( responseHeadersString ) ) ) { + responseHeaders[ match[ 1 ].toLowerCase() + " " ] = + ( responseHeaders[ match[ 1 ].toLowerCase() + " " ] || [] ) + .concat( match[ 2 ] ); + } + } + match = responseHeaders[ key.toLowerCase() + " " ]; + } + return match == null ? null : match.join( ", " ); + }, + + // Raw string + getAllResponseHeaders: function() { + return completed ? 
responseHeadersString : null; + }, + + // Caches the header + setRequestHeader: function( name, value ) { + if ( completed == null ) { + name = requestHeadersNames[ name.toLowerCase() ] = + requestHeadersNames[ name.toLowerCase() ] || name; + requestHeaders[ name ] = value; + } + return this; + }, + + // Overrides response content-type header + overrideMimeType: function( type ) { + if ( completed == null ) { + s.mimeType = type; + } + return this; + }, + + // Status-dependent callbacks + statusCode: function( map ) { + var code; + if ( map ) { + if ( completed ) { + + // Execute the appropriate callbacks + jqXHR.always( map[ jqXHR.status ] ); + } else { + + // Lazy-add the new callbacks in a way that preserves old ones + for ( code in map ) { + statusCode[ code ] = [ statusCode[ code ], map[ code ] ]; + } + } + } + return this; + }, + + // Cancel the request + abort: function( statusText ) { + var finalText = statusText || strAbort; + if ( transport ) { + transport.abort( finalText ); + } + done( 0, finalText ); + return this; + } + }; + + // Attach deferreds + deferred.promise( jqXHR ); + + // Add protocol if not provided (prefilters might expect it) + // Handle falsy url in the settings object (#10093: consistency with old signature) + // We also use the url parameter if available + s.url = ( ( url || s.url || location.href ) + "" ) + .replace( rprotocol, location.protocol + "//" ); + + // Alias method option to type as per ticket #12004 + s.type = options.method || options.type || s.method || s.type; + + // Extract dataTypes list + s.dataTypes = ( s.dataType || "*" ).toLowerCase().match( rnothtmlwhite ) || [ "" ]; + + // A cross-domain request is in order when the origin doesn't match the current origin. + if ( s.crossDomain == null ) { + urlAnchor = document.createElement( "a" ); + + // Support: IE <=8 - 11, Edge 12 - 15 + // IE throws exception on accessing the href property if url is malformed, + // e.g. 
http://example.com:80x/ + try { + urlAnchor.href = s.url; + + // Support: IE <=8 - 11 only + // Anchor's host property isn't correctly set when s.url is relative + urlAnchor.href = urlAnchor.href; + s.crossDomain = originAnchor.protocol + "//" + originAnchor.host !== + urlAnchor.protocol + "//" + urlAnchor.host; + } catch ( e ) { + + // If there is an error parsing the URL, assume it is crossDomain, + // it can be rejected by the transport if it is invalid + s.crossDomain = true; + } + } + + // Convert data if not already a string + if ( s.data && s.processData && typeof s.data !== "string" ) { + s.data = jQuery.param( s.data, s.traditional ); + } + + // Apply prefilters + inspectPrefiltersOrTransports( prefilters, s, options, jqXHR ); + + // If request was aborted inside a prefilter, stop there + if ( completed ) { + return jqXHR; + } + + // We can fire global events as of now if asked to + // Don't fire events if jQuery.event is undefined in an AMD-usage scenario (#15118) + fireGlobals = jQuery.event && s.global; + + // Watch for a new set of requests + if ( fireGlobals && jQuery.active++ === 0 ) { + jQuery.event.trigger( "ajaxStart" ); + } + + // Uppercase the type + s.type = s.type.toUpperCase(); + + // Determine if request has content + s.hasContent = !rnoContent.test( s.type ); + + // Save the URL in case we're toying with the If-Modified-Since + // and/or If-None-Match header later on + // Remove hash to simplify url manipulation + cacheURL = s.url.replace( rhash, "" ); + + // More options handling for requests with no content + if ( !s.hasContent ) { + + // Remember the hash so we can put it back + uncached = s.url.slice( cacheURL.length ); + + // If data is available and should be processed, append data to url + if ( s.data && ( s.processData || typeof s.data === "string" ) ) { + cacheURL += ( rquery.test( cacheURL ) ? "&" : "?" ) + s.data; + + // #9682: remove data so that it's not used in an eventual retry + delete s.data; + } + + // Add or update anti-cache param if needed + if ( s.cache === false ) { + cacheURL = cacheURL.replace( rantiCache, "$1" ); + uncached = ( rquery.test( cacheURL ) ? "&" : "?" ) + "_=" + ( nonce.guid++ ) + + uncached; + } + + // Put hash and anti-cache on the URL that will be requested (gh-1732) + s.url = cacheURL + uncached; + + // Change '%20' to '+' if this is encoded form body content (gh-2658) + } else if ( s.data && s.processData && + ( s.contentType || "" ).indexOf( "application/x-www-form-urlencoded" ) === 0 ) { + s.data = s.data.replace( r20, "+" ); + } + + // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode. + if ( s.ifModified ) { + if ( jQuery.lastModified[ cacheURL ] ) { + jqXHR.setRequestHeader( "If-Modified-Since", jQuery.lastModified[ cacheURL ] ); + } + if ( jQuery.etag[ cacheURL ] ) { + jqXHR.setRequestHeader( "If-None-Match", jQuery.etag[ cacheURL ] ); + } + } + + // Set the correct header, if data is being sent + if ( s.data && s.hasContent && s.contentType !== false || options.contentType ) { + jqXHR.setRequestHeader( "Content-Type", s.contentType ); + } + + // Set the Accepts header for the server, depending on the dataType + jqXHR.setRequestHeader( + "Accept", + s.dataTypes[ 0 ] && s.accepts[ s.dataTypes[ 0 ] ] ? + s.accepts[ s.dataTypes[ 0 ] ] + + ( s.dataTypes[ 0 ] !== "*" ? 
", " + allTypes + "; q=0.01" : "" ) : + s.accepts[ "*" ] + ); + + // Check for headers option + for ( i in s.headers ) { + jqXHR.setRequestHeader( i, s.headers[ i ] ); + } + + // Allow custom headers/mimetypes and early abort + if ( s.beforeSend && + ( s.beforeSend.call( callbackContext, jqXHR, s ) === false || completed ) ) { + + // Abort if not done already and return + return jqXHR.abort(); + } + + // Aborting is no longer a cancellation + strAbort = "abort"; + + // Install callbacks on deferreds + completeDeferred.add( s.complete ); + jqXHR.done( s.success ); + jqXHR.fail( s.error ); + + // Get transport + transport = inspectPrefiltersOrTransports( transports, s, options, jqXHR ); + + // If no transport, we auto-abort + if ( !transport ) { + done( -1, "No Transport" ); + } else { + jqXHR.readyState = 1; + + // Send global event + if ( fireGlobals ) { + globalEventContext.trigger( "ajaxSend", [ jqXHR, s ] ); + } + + // If request was aborted inside ajaxSend, stop there + if ( completed ) { + return jqXHR; + } + + // Timeout + if ( s.async && s.timeout > 0 ) { + timeoutTimer = window.setTimeout( function() { + jqXHR.abort( "timeout" ); + }, s.timeout ); + } + + try { + completed = false; + transport.send( requestHeaders, done ); + } catch ( e ) { + + // Rethrow post-completion exceptions + if ( completed ) { + throw e; + } + + // Propagate others as results + done( -1, e ); + } + } + + // Callback for when everything is done + function done( status, nativeStatusText, responses, headers ) { + var isSuccess, success, error, response, modified, + statusText = nativeStatusText; + + // Ignore repeat invocations + if ( completed ) { + return; + } + + completed = true; + + // Clear timeout if it exists + if ( timeoutTimer ) { + window.clearTimeout( timeoutTimer ); + } + + // Dereference transport for early garbage collection + // (no matter how long the jqXHR object will be used) + transport = undefined; + + // Cache response headers + responseHeadersString = headers || ""; + + // Set readyState + jqXHR.readyState = status > 0 ? 4 : 0; + + // Determine if successful + isSuccess = status >= 200 && status < 300 || status === 304; + + // Get response data + if ( responses ) { + response = ajaxHandleResponses( s, jqXHR, responses ); + } + + // Use a noop converter for missing script but not if jsonp + if ( !isSuccess && + jQuery.inArray( "script", s.dataTypes ) > -1 && + jQuery.inArray( "json", s.dataTypes ) < 0 ) { + s.converters[ "text script" ] = function() {}; + } + + // Convert no matter what (that way responseXXX fields are always set) + response = ajaxConvert( s, response, jqXHR, isSuccess ); + + // If successful, handle type chaining + if ( isSuccess ) { + + // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode. 
+ if ( s.ifModified ) { + modified = jqXHR.getResponseHeader( "Last-Modified" ); + if ( modified ) { + jQuery.lastModified[ cacheURL ] = modified; + } + modified = jqXHR.getResponseHeader( "etag" ); + if ( modified ) { + jQuery.etag[ cacheURL ] = modified; + } + } + + // if no content + if ( status === 204 || s.type === "HEAD" ) { + statusText = "nocontent"; + + // if not modified + } else if ( status === 304 ) { + statusText = "notmodified"; + + // If we have data, let's convert it + } else { + statusText = response.state; + success = response.data; + error = response.error; + isSuccess = !error; + } + } else { + + // Extract error from statusText and normalize for non-aborts + error = statusText; + if ( status || !statusText ) { + statusText = "error"; + if ( status < 0 ) { + status = 0; + } + } + } + + // Set data for the fake xhr object + jqXHR.status = status; + jqXHR.statusText = ( nativeStatusText || statusText ) + ""; + + // Success/Error + if ( isSuccess ) { + deferred.resolveWith( callbackContext, [ success, statusText, jqXHR ] ); + } else { + deferred.rejectWith( callbackContext, [ jqXHR, statusText, error ] ); + } + + // Status-dependent callbacks + jqXHR.statusCode( statusCode ); + statusCode = undefined; + + if ( fireGlobals ) { + globalEventContext.trigger( isSuccess ? "ajaxSuccess" : "ajaxError", + [ jqXHR, s, isSuccess ? success : error ] ); + } + + // Complete + completeDeferred.fireWith( callbackContext, [ jqXHR, statusText ] ); + + if ( fireGlobals ) { + globalEventContext.trigger( "ajaxComplete", [ jqXHR, s ] ); + + // Handle the global AJAX counter + if ( !( --jQuery.active ) ) { + jQuery.event.trigger( "ajaxStop" ); + } + } + } + + return jqXHR; + }, + + getJSON: function( url, data, callback ) { + return jQuery.get( url, data, callback, "json" ); + }, + + getScript: function( url, callback ) { + return jQuery.get( url, undefined, callback, "script" ); + } +} ); + +jQuery.each( [ "get", "post" ], function( _i, method ) { + jQuery[ method ] = function( url, data, callback, type ) { + + // Shift arguments if data argument was omitted + if ( isFunction( data ) ) { + type = type || callback; + callback = data; + data = undefined; + } + + // The url can be an options object (which then must have .url) + return jQuery.ajax( jQuery.extend( { + url: url, + type: method, + dataType: type, + data: data, + success: callback + }, jQuery.isPlainObject( url ) && url ) ); + }; +} ); + +jQuery.ajaxPrefilter( function( s ) { + var i; + for ( i in s.headers ) { + if ( i.toLowerCase() === "content-type" ) { + s.contentType = s.headers[ i ] || ""; + } + } +} ); + + +jQuery._evalUrl = function( url, options, doc ) { + return jQuery.ajax( { + url: url, + + // Make this explicit, since user can override this through ajaxSetup (#11264) + type: "GET", + dataType: "script", + cache: true, + async: false, + global: false, + + // Only evaluate the response if it is successful (gh-4126) + // dataFilter is not invoked for failure responses, so using it instead + // of the default converter is kludgy but it works. 
+ converters: { + "text script": function() {} + }, + dataFilter: function( response ) { + jQuery.globalEval( response, options, doc ); + } + } ); +}; + + +jQuery.fn.extend( { + wrapAll: function( html ) { + var wrap; + + if ( this[ 0 ] ) { + if ( isFunction( html ) ) { + html = html.call( this[ 0 ] ); + } + + // The elements to wrap the target around + wrap = jQuery( html, this[ 0 ].ownerDocument ).eq( 0 ).clone( true ); + + if ( this[ 0 ].parentNode ) { + wrap.insertBefore( this[ 0 ] ); + } + + wrap.map( function() { + var elem = this; + + while ( elem.firstElementChild ) { + elem = elem.firstElementChild; + } + + return elem; + } ).append( this ); + } + + return this; + }, + + wrapInner: function( html ) { + if ( isFunction( html ) ) { + return this.each( function( i ) { + jQuery( this ).wrapInner( html.call( this, i ) ); + } ); + } + + return this.each( function() { + var self = jQuery( this ), + contents = self.contents(); + + if ( contents.length ) { + contents.wrapAll( html ); + + } else { + self.append( html ); + } + } ); + }, + + wrap: function( html ) { + var htmlIsFunction = isFunction( html ); + + return this.each( function( i ) { + jQuery( this ).wrapAll( htmlIsFunction ? html.call( this, i ) : html ); + } ); + }, + + unwrap: function( selector ) { + this.parent( selector ).not( "body" ).each( function() { + jQuery( this ).replaceWith( this.childNodes ); + } ); + return this; + } +} ); + + +jQuery.expr.pseudos.hidden = function( elem ) { + return !jQuery.expr.pseudos.visible( elem ); +}; +jQuery.expr.pseudos.visible = function( elem ) { + return !!( elem.offsetWidth || elem.offsetHeight || elem.getClientRects().length ); +}; + + + + +jQuery.ajaxSettings.xhr = function() { + try { + return new window.XMLHttpRequest(); + } catch ( e ) {} +}; + +var xhrSuccessStatus = { + + // File protocol always yields status code 0, assume 200 + 0: 200, + + // Support: IE <=9 only + // #1450: sometimes IE returns 1223 when it should be 204 + 1223: 204 + }, + xhrSupported = jQuery.ajaxSettings.xhr(); + +support.cors = !!xhrSupported && ( "withCredentials" in xhrSupported ); +support.ajax = xhrSupported = !!xhrSupported; + +jQuery.ajaxTransport( function( options ) { + var callback, errorCallback; + + // Cross domain only allowed if supported through XMLHttpRequest + if ( support.cors || xhrSupported && !options.crossDomain ) { + return { + send: function( headers, complete ) { + var i, + xhr = options.xhr(); + + xhr.open( + options.type, + options.url, + options.async, + options.username, + options.password + ); + + // Apply custom fields if provided + if ( options.xhrFields ) { + for ( i in options.xhrFields ) { + xhr[ i ] = options.xhrFields[ i ]; + } + } + + // Override mime type if needed + if ( options.mimeType && xhr.overrideMimeType ) { + xhr.overrideMimeType( options.mimeType ); + } + + // X-Requested-With header + // For cross-domain requests, seeing as conditions for a preflight are + // akin to a jigsaw puzzle, we simply never set it to be sure. + // (it can always be set on a per-request basis or even using ajaxSetup) + // For same-domain requests, won't change header if already provided. 
+ if ( !options.crossDomain && !headers[ "X-Requested-With" ] ) { + headers[ "X-Requested-With" ] = "XMLHttpRequest"; + } + + // Set headers + for ( i in headers ) { + xhr.setRequestHeader( i, headers[ i ] ); + } + + // Callback + callback = function( type ) { + return function() { + if ( callback ) { + callback = errorCallback = xhr.onload = + xhr.onerror = xhr.onabort = xhr.ontimeout = + xhr.onreadystatechange = null; + + if ( type === "abort" ) { + xhr.abort(); + } else if ( type === "error" ) { + + // Support: IE <=9 only + // On a manual native abort, IE9 throws + // errors on any property access that is not readyState + if ( typeof xhr.status !== "number" ) { + complete( 0, "error" ); + } else { + complete( + + // File: protocol always yields status 0; see #8605, #14207 + xhr.status, + xhr.statusText + ); + } + } else { + complete( + xhrSuccessStatus[ xhr.status ] || xhr.status, + xhr.statusText, + + // Support: IE <=9 only + // IE9 has no XHR2 but throws on binary (trac-11426) + // For XHR2 non-text, let the caller handle it (gh-2498) + ( xhr.responseType || "text" ) !== "text" || + typeof xhr.responseText !== "string" ? + { binary: xhr.response } : + { text: xhr.responseText }, + xhr.getAllResponseHeaders() + ); + } + } + }; + }; + + // Listen to events + xhr.onload = callback(); + errorCallback = xhr.onerror = xhr.ontimeout = callback( "error" ); + + // Support: IE 9 only + // Use onreadystatechange to replace onabort + // to handle uncaught aborts + if ( xhr.onabort !== undefined ) { + xhr.onabort = errorCallback; + } else { + xhr.onreadystatechange = function() { + + // Check readyState before timeout as it changes + if ( xhr.readyState === 4 ) { + + // Allow onerror to be called first, + // but that will not handle a native abort + // Also, save errorCallback to a variable + // as xhr.onerror cannot be accessed + window.setTimeout( function() { + if ( callback ) { + errorCallback(); + } + } ); + } + }; + } + + // Create the abort callback + callback = callback( "abort" ); + + try { + + // Do send the request (this may raise an exception) + xhr.send( options.hasContent && options.data || null ); + } catch ( e ) { + + // #14683: Only rethrow if this hasn't been notified as an error yet + if ( callback ) { + throw e; + } + } + }, + + abort: function() { + if ( callback ) { + callback(); + } + } + }; + } +} ); + + + + +// Prevent auto-execution of scripts when no explicit dataType was provided (See gh-2432) +jQuery.ajaxPrefilter( function( s ) { + if ( s.crossDomain ) { + s.contents.script = false; + } +} ); + +// Install script dataType +jQuery.ajaxSetup( { + accepts: { + script: "text/javascript, application/javascript, " + + "application/ecmascript, application/x-ecmascript" + }, + contents: { + script: /\b(?:java|ecma)script\b/ + }, + converters: { + "text script": function( text ) { + jQuery.globalEval( text ); + return text; + } + } +} ); + +// Handle cache's special case and crossDomain +jQuery.ajaxPrefilter( "script", function( s ) { + if ( s.cache === undefined ) { + s.cache = false; + } + if ( s.crossDomain ) { + s.type = "GET"; + } +} ); + +// Bind script tag hack transport +jQuery.ajaxTransport( "script", function( s ) { + + // This transport only deals with cross domain or forced-by-attrs requests + if ( s.crossDomain || s.scriptAttrs ) { + var script, callback; + return { + send: function( _, complete ) { + script = jQuery( " + + + + + + +
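The serialization helpers defined above (buildParams and jQuery.param) are easiest to follow with a concrete call. A minimal sketch, assuming only the standard jQuery 3.x API bundled in this vendored file; the object passed in is purely illustrative:

    // Nested arrays/objects are encoded with bracketed keys (non-traditional mode),
    // matching the recursion in buildParams above.
    jQuery.param( { tags: [ "a", "b" ], filter: { name: "x" } } );
    // => "tags%5B%5D=a&tags%5B%5D=b&filter%5Bname%5D=x"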

Package apache-airflow-providers-microsoft-azure

Microsoft Azure

This is the detailed, per-version commit list for the provider package microsoft.azure.
For the high-level changelog, see the package information, including the changelog.

5.0.1

Latest change: 2022-12-09

Commit      Committed   Subject
7e776db254  2022-12-09  Make arguments 'offset' and 'length' not required (#28234)

5.0.0

Latest change: 2022-11-15

Commit      Committed   Subject
12c3c39d1a  2022-11-15  pRepare docs for November 2022 wave of Providers (#27613)
547e6e80f3  2022-11-10  Fix Azure Batch errors revealed by added typing to azure batch lib (#27601)
a50195d617  2022-11-07  Add azure, google, authentication library limits to eaager upgrade (#27535)
5cd78cf425  2022-11-06  Upgrade dependencies in order to avoid backtracking (#27531)
a16f24b5d7  2022-11-06  Remove deprecated classes in Azure provider (#27417)
59da943428  2022-11-04  Suppress any Exception in wasb task handler (#27495)
680965b2ea  2022-11-03  Look for 'extra__' instead of 'extra_' in 'get_field' (#27489)
5df1d6ec20  2022-10-28  Allow and prefer non-prefixed extra fields for remaining azure (#27220)
c49740eb25  2022-10-28  Allow and prefer non-prefixed extra fields for AzureFileShareHook (#27041)
9ab1a6a3e7  2022-10-27  Update old style typing (#26872)
78b8ea2f22  2022-10-24  Move min airflow version to 2.3.0 for all providers (#27196)
3676d3a402  2022-10-24  Allow and prefer non-prefixed extra fields for AzureDataExplorerHook (#27219)
6b9e76b7b3  2022-10-23  Allow and prefer non-prefixed extra fields for AzureDataFactoryHook (#27047)
2a34dc9e84  2022-10-23  Enable string normalization in python formatting - providers (#27205)
d51de50e5c  2022-10-22  Update WasbHook to reflect preference for unprefixed extra (#27024)
59cba36db0  2022-10-13  Update azure-storage-blob version (#25426)
32434a128a  2022-09-30  Fix separator getting added to variables_prefix when empty (#26749)

4.3.0

Latest change: 2022-09-28

Commit      Committed   Subject
f8db64c35c  2022-09-28  Update docs for September Provider's release (#26731)
24d88e8fee  2022-09-19  Add DataFlow operations to Azure DataFactory hook (#26345)
1f7b296227  2022-09-18  Auto tail file logs in Web UI (#26169)
06acf40a43  2022-09-13  Apply PEP-563 (Postponed Evaluation of Annotations) to non-core airflow (#26289)
5060785988  2022-09-09  Add network_profile param in AzureContainerInstancesOperator (#26117)
4bd0734a35  2022-09-01  Add Azure synapse operator (#26038)
afb282aee4  2022-08-27  Fix AzureBatchOperator false negative task status (#25844)
5c7c518aa0  2022-08-16  Implement Azure Service Bus Topic Create, Delete Operators (#25436)

4.2.0

Latest change: 2022-08-10

Commit      Committed   Subject
e5ac6c7cfb  2022-08-10  Prepare docs for new providers release (August 2022) (#25618)
d5f40d739f  2022-08-09  Set default wasb Azure http logging level to warning; fixes #16224 (#18896)
8bb0c4fd32  2022-07-28  Add 'test_connection' method to AzureContainerInstanceHook (#25362)
eab0167f1b  2022-07-22  Add test_connection to Azure Batch hook (#25235)
e32e9c5880  2022-07-18  Bump typing-extensions and mypy for ParamSpec (#25088)
292440d54f  2022-07-14  Implement Azure Service Bus (Update and Receive) Subscription Operator (#25029)

4.1.0

Latest change: 2022-07-13

Commit      Committed   Subject
d2459a241b  2022-07-13  Add documentation for July 2022 Provider's release (#25030)
bfd506cbfc  2022-07-13  Add 'test_connection' method to AzureCosmosDBHook (#25018)
aa8bf2cf85  2022-07-12  Implement Azure service bus subscription Operators (#24625)
b27fc0367c  2022-07-06  Add test_connection method to AzureFileShareHook (#24843)
f18c609d12  2022-07-01  Add test_connection method to Azure WasbHook (#24771)
0de31bd73a  2022-06-29  Move provider dependencies to inside provider folders (#24672)
510a6bab45  2022-06-28  Remove 'hook-class-names' from provider.yaml (#24702)
09f38ad3f6  2022-06-23  Implement Azure Service Bus Queue Operators (#24038)
9c59831ee7  2022-06-21  Update providers to use functools compat for ''cached_property'' (#24582)

4.0.0

Latest change: 2022-06-09

Commit      Committed   Subject
dcdcf3a2b8  2022-06-09  Update release notes for RC2 release of Providers for May 2022 (#24307)
717a7588bc  2022-06-07  Update package description to remove double min-airflow specification (#24292)
aeabe994b3  2022-06-07  Prepare docs for May 2022 provider's release (#24231)
c23826915d  2022-06-07  Apply per-run log templates to log handlers (#24153)
027b707d21  2022-06-05  Add explanatory note for contributors about updating Changelog (#24229)
389e858d93  2022-06-03  Pass connection extra parameters to wasb BlobServiceClient (#24154)
6e83885c95  2022-06-03  Migrate Microsoft example DAGs to new design #22452 - azure (#24141)
3393647aa6  2022-05-26  Add typing to Azure Cosmos Client Hook (#23941)
ec6761a5c0  2022-05-23  Clean up f-strings in logging calls (#23597)

3.9.0

Latest change: 2022-05-12

Commit      Committed   Subject
75c60923e0  2022-05-12  Prepare provider documentation 2022.05.11 (#23631)
8f181c1034  2022-05-08  wasb hook: user defaultAzureCredentials instead of managedIdentity (#23394)
2d109401b3  2022-05-04  Bump pre-commit hook versions (#22887)
8b6b0848a3  2022-04-23  Use new Breese for building, pulling and verifying the images. (#23104)
49e336ae03  2022-04-13  Replace usage of 'DummyOperator' with 'EmptyOperator' (#22974)
6933022e94  2022-04-10  Fix new MyPy errors in main (#22884)

3.8.0

Latest change: 2022-04-07

Commit      Committed   Subject
56ab82ed7a  2022-04-07  Prepare mid-April provider documentation. (#22819)
d3976d9b20  2022-04-04  Docs: Fix example usage for 'AzureCosmosDocumentSensor' (#22735)
7ab45d41d6  2022-03-24  Update secrets backends to use get_conn_value instead of get_conn_uri (#22348)

3.7.2

Latest change: 2022-03-22

Commit      Committed   Subject
d7dbfb7e26  2022-03-22  Add documentation for bugfix release of Providers (#22383)

3.7.1

Latest change: 2022-03-14

Commit      Committed   Subject
16adc035b1  2022-03-14  Add documentation for Classifier release for March 2022 (#22226)
c1ab8e2d7b  2022-03-14  Protect against accidental misuse of XCom.get_value() (#22244)
d08284ed25  2022-03-11  Add map_index to XCom model and interface (#22112)

3.7.0


Latest change: 2022-03-07


Commit

Committed

Subject

f5b96315fe

2022-03-07

Add documentation for Feb Providers release (#22056)

ba79adb631

2022-03-02

Make container creation configurable when uploading files via WasbHook (#20510)

f42559a773

2022-03-02

Add 'test_connection' method to 'AzureDataFactoryHook' (#21924)

08575ddd8a

2022-03-01

Change BaseOperatorLink interface to take a ti_key, not a datetime (#21798)

3c4524b4ec

2022-02-23

(AzureCosmosDBHook) Update to latest Cosmos API (#21514)

0a3ff43d41

2022-02-08

Add pre-commit check for docstring param types (#21398)


3.6.0


Latest change: 2022-02-08


Commit

Committed

Subject

d94fa37830

2022-02-08

Fixed changelog for January 2022 (delayed) provider's release (#21439)

6c3a67d4fc

2022-02-05

Add documentation for January 2021 providers release (#21257)

ddb5246bd1

2022-02-03

Refactor operator links to not create ad hoc TaskInstances (#21285)

cb73053211

2022-01-27

Add optional features in providers. (#21074)

602abe8394

2022-01-20

Remove ':type' lines now sphinx-autoapi supports typehints (#20951)

730db3fb77

2022-01-18

Remove all "fake" stub files (#20936)

f8fd0f7b4c

2022-01-13

Explain stub files are introduced for Mypy errors in examples (#20827)


3.5.0


Latest change: 2021-12-31


Commit

Committed

Subject

f77417eb0d

2021-12-31

Fix K8S changelog to be PyPI-compatible (#20614)

97496ba2b4

2021-12-31

Update documentation for provider December 2021 release (#20523)

a22d5bd076

2021-12-31

Fix mypy errors in Google Cloud provider (#20611)

83f8e178ba

2021-12-31

Even more typing in operators (template_fields/ext) (#20608)

d56e7b56bb

2021-12-30

Fix template_fields type to have MyPy friendly Sequence type (#20571)

a0821235fb

2021-12-30

Use typed Context EVERYWHERE (#20565)

3299064958

2021-12-29

Use isort on pyi files (#20556)

e63e23c582

2021-12-23

Fixing MyPy issues inside providers/microsoft (#20409)

341bf5ab1f

2021-12-22

Azure: New sftp to wasb operator (#18877)

05e4cd1c6a

2021-12-18

Add operator link to monitor Azure Data Factory pipeline runs (#20207)

2fb5e1d0ec

2021-12-15

Fix cached_property MyPy declaration and related MyPy errors (#20226)

42f133c5f6

2021-12-06

Removes InputRequired validation with azure extra (#20084)

374574b8d0

2021-12-06

Fix mypy errors in Microsoft Azure provider (#19923)


3.4.0


Latest change: 2021-11-30


Commit

Committed

Subject

853576d901

2021-11-30

Update documentation for November 2021 provider's release (#19882)

e25446a8b1

2021-11-18

Fix argument error in AzureContainerInstancesOperator (#19668)

11e73d2db1

2021-11-16

Remove unnecessary connection form customizations in Azure (#19595)

4212c49324

2021-11-14

Update Azure modules to comply with AIP-21 (#19431)

0f516458be

2021-11-08

Remove 'host' from hidden fields in 'WasbHook' (#19475)

ca679c014c

2021-11-07

use DefaultAzureCredential if login not provided for Data Factory (#19079)

490a382ed6

2021-11-04

Ensure ''catchup=False'' is used in example dags (#19396)


3.3.0


Latest change: 2021-10-29


Commit

Committed

Subject

d9567eb106

2021-10-29

Prepare documentation for October Provider's release (#19321)

61d0093054

2021-10-27

Added sas_token var to BlobServiceClient return. Updated tests (#19234)

ceb2b53a10

2021-10-20

Static start_date and default arg cleanup for Microsoft providers example DAGs (#19062)

86a2a19ad2

2021-10-17

More f-strings (#18855)

1571f80546

2021-10-14

Add pre-commit hook for common misspelling check in files (#18964)

1b75f9181f

2021-10-05

Fix changelog for Azure Provider (#18736)

181ac36db3

2021-10-05

update azure cosmos to latest version (#18695)

6d504b43ea

2021-10-04

Expanding docs on client auth for AzureKeyVaultBackend (#18659)

c8485a83bc

2021-10-03

Revert "update azure cosmos version (#18663)" (#18694)

10421c6931

2021-10-01

update azure cosmos version (#18663)


3.2.0


Latest change: 2021-09-30


Commit

Committed

Subject

840ea3efb9

2021-09-30

Update documentation for September providers release (#18613)

a458fcc573

2021-09-27

Updating miscellaneous provider DAGs to use TaskFlow API where applicable (#18278)

46484466c4

2021-09-25

Removing redundant relabeling of password conn field (#18386)

97d6892318

2021-09-25

Rename AzureDataLakeStorage to ADLS (#18493)

1d2924c94e

2021-09-24

Proper handling of Account URL custom conn field in AzureBatchHook (#18456)

11e34535e8

2021-09-19

Creating ADF pipeline run operator, sensor + ADF custom conn fields (#17885)

410e6d7967

2021-09-18

Initial commit (#18203)

2dac083ae2

2021-09-16

Fixed wasb hook attempting to create container when getting a blob client (#18287)

d119ae8f3f

2021-09-12

Rename LocalToAzureDataLakeStorageOperator to LocalFilesystemToADLSOperator (#18168)

28de326d61

2021-09-09

Rename FileToWasbOperator to LocalFilesystemToWasbOperator (#18109)


3.1.1


Latest change: 2021-08-30


Commit

Committed

Subject

0a68588479

2021-08-30

Add August 2021 Provider's documentation (#17890)

be75dcd39c

2021-08-23

Update description about the new ''connection-types'' provider meta-data

76ed2a49c6

2021-08-19

Import Hooks lazily individually in providers manager (#17682)

29aab6434f

2021-08-17

Adds secrets backend/logging/auth information to provider yaml (#17625)


3.1.0


Latest change: 2021-07-26


Commit

Committed

Subject

87f408b1e7

2021-07-26

Prepares docs for Rc2 release of July providers (#17116)

48ca9374bf

2021-07-26

Remove/refactor default_args pattern for Microsoft example DAGs (#16873)

d02ded65ea

2021-07-15

Fixed wrongly escaped characters in amazon's changelog (#17020)

b916b75079

2021-07-15

Prepare documentation for July release of providers. (#17015)

866a601b76

2021-06-28

Removes pylint from our toolchain (#16682)

caf0a8499f

2021-06-25

Add support for managed identity in WASB hook (#16628)

ffb1fcacff

2021-06-24

Fix multiple issues in Microsoft AzureContainerInstancesOperator (#15634)

a2a58d27ef

2021-06-24

Reduce log messages for happy path (#16626)


3.0.0


Latest change: 2021-06-18


Commit

Committed

Subject

bbc627a3da

2021-06-18

Prepares documentation for rc2 release of Providers (#16501)

cbf8001d76

2021-06-16

Synchronizes updated changelog after buggfix release (#16464)

1fba5402bb

2021-06-15

More documentation update for June providers release (#16405)

0c80a7d411

2021-06-11

Fixes AzureFileShare connection extras (#16388)

29b7f795d6

2021-06-07

fix wasb remote logging when blob already exists (#16280)

9c94b72d44

2021-06-07

Updated documentation for June 2021 provider release (#16294)

476d0f6e3d

2021-05-22

Bump pyupgrade v2.13.0 to v2.18.1 (#15991)

c844ff742e

2021-05-18

Fix colon spacing in ''AzureDataExplorerHook'' docstring (#15841)

37681bca00

2021-05-07

Auto-apply apply_default decorator (#15667)

3b4fdd0a7a

2021-05-06

add oracle  connection link (#15632)

b1bd59440b

2021-05-04

Add delimiter argument to WasbHook delete_file method (#15637)

0f97a3970d

2021-05-04

Rename example bucket names to use INVALID BUCKET NAME by default (#15651)

db557a8c4a

2021-05-01

Docs: Replace 'airflow' to 'apache-airflow' to install extra (#15628)


2.0.0


Latest change: 2021-05-01


Commit

Committed

Subject

807ad32ce5

2021-05-01

Prepares provider release after PIP 21 compatibility (#15576)

657384615f

2021-04-27

Fix 'logging.exception' redundancy (#14823)

d65e492a3e

2021-04-25

Removes unnecessary AzureContainerInstance connection type (#15514)

cb1344b63d

2021-04-16

Update azure connection documentation (#15352)

1a85ba9e93

2021-04-13

Add dynamic connection fields to Azure Connection (#15159)


1.3.0


Latest change: 2021-04-06


Commit

Committed

Subject

042be2e4e0

2021-04-06

Updated documentation for provider packages before April release (#15236)

9b76b94c94

2021-04-02

A bunch of template_fields_renderers additions (#15130)

a7ca1b3b0b

2021-03-26

Fix Sphinx Issues with Docstrings (#14968)

68e4c4dcb0

2021-03-20

Remove Backport Providers (#14886)

4372d45615

2021-03-12

Fix attributes for AzureDataFactory hook (#14704)


1.2.0


Latest change: 2021-03-08


Commit

Committed

Subject

b753c7fa60

2021-03-08

Prepare ad-hoc release of the four previously excluded providers (#14655)

e7bb17aeb8

2021-03-06

Use built-in 'cached_property' on Python 3.8 where possible (#14606)

630aeff72c

2021-03-02

Fix AzureDataFactoryHook failing to instantiate its connection (#14565)

589d6dec92

2021-02-27

Prepare to release the next wave of providers: (#14487)

11d03d2f63

2021-02-26

Add Azure Data Factory hook (#11015)

5bfa0f123b

2021-02-25

BugFix: Fix remote log in azure storage blob displays in one line (#14313)

ca35bd7f7f

2021-02-21

By default PIP will install all packages in .local folder (#14125)

10343ec29f

2021-02-05

Corrections in docs and tools after releasing provider RCs (#14082)


1.1.0


Latest change: 2021-02-04


Commit

Committed

Subject

88bdcfa0df

2021-02-04

Prepare to release a new wave of providers. (#14013)

ac2f72c98d

2021-02-01

Implement provider versioning tools (#13767)

94b1531230

2021-01-23

Upgrade azure blob to v12 (#12188)

a9ac2b040b

2021-01-23

Switch to f-strings using flynt. (#13732)

3fd5ef3555

2021-01-21

Add missing logos for integrations (#13717)

b2cb6ee5ba

2021-01-07

Fix Azure Data Explorer Operator (#13520)

295d66f914

2020-12-30

Fix Grammar in PIP warning (#13380)

a1e9195076

2020-12-26

add system test for azure local to adls operator (#13190)

5185d81ff9

2020-12-24

add AzureDatalakeStorageDeleteOperator (#13206)

6cf76d7ac0

2020-12-18

Fix typo in pip upgrade command :( (#13148)

5090fb0c89

2020-12-15

Add script to generate integrations.json (#13073)


1.0.0


Latest change: 2020-12-09


Commit

Committed

Subject

32971a1a2d

2020-12-09

Updates providers versions to 1.0.0 (#12955)

b40dffa085

2020-12-08

Rename remaing modules to match AIP-21 (#12917)

9b39f24780

2020-12-08

Add support for dynamic connection form fields per provider (#12558)

bd90136aaf

2020-11-30

Move operator guides to provider documentation packages (#12681)

2037303eef

2020-11-29

Adds support for Connection/Hook discovery from providers (#12466)

543d88b3a1

2020-11-28

Add example dag and system tests for azure wasb and fileshare (#12673)

6b3c6add9e

2020-11-27

Update setup.py to get non-conflicting set of dependencies (#12636)

c34ef853c8

2020-11-20

Separate out documentation building per provider  (#12444)

0080354502

2020-11-18

Update provider READMEs for 1.0.0b2 batch release (#12449)

7ca0b6f121

2020-11-18

Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438)

ae7cb4a1e2

2020-11-17

Update wrong commit hash in backport provider changes (#12390)

6889a333cf

2020-11-15

Improvements for operators and hooks ref docs (#12366)

7825e8f590

2020-11-13

Docs installation improvements (#12304)

dd2095f4a8

2020-11-10

Simplify string expressions & Use f-string (#12216)

85a18e13d9

2020-11-09

Point at pypi project pages for cross-dependency of provider packages (#12212)

59eb5de78c

2020-11-09

Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)

b2a28d1590

2020-11-09

Moves provider packages scripts to dev (#12082)

3ff7e0743a

2020-11-08

azure key vault optional lookup (#12174)

41bf172c1d

2020-11-04

Simplify string expressions (#12093)

4e8f9cc8d0

2020-11-03

Enable Black - Python Auto Formmatter (#9550)

8c42cf1b00

2020-11-03

Use PyUpgrade to use Python 3.6 features (#11447)

5a439e84eb

2020-10-26

Prepare providers release 0.0.2a1 (#11855)

872b1566a1

2020-10-25

Generated backport providers readmes/setup for 2020.10.29 (#11826)

6ce855af11

2020-10-24

Fix spelling (#11821)

349b0811c3

2020-10-20

Add D200 pydocstyle check (#11688)

f8ff217e2f

2020-10-18

Fix incorrect typing and move config args out of extra connection config to operator args (#11635)

16e7129719

2020-10-13

Added support for provider packages for Airflow 2.0 (#11487)

686e0ee7df

2020-10-11

Fix incorrect typing, remove hardcoded argument values and improve code in AzureContainerInstancesOperator (#11408)

d2754ef769

2020-10-09

Strict type check for Microsoft  (#11359)

832a7850f1

2020-10-08

Add Azure Blob Storage to GCS transfer operator (#11321)

5d007fd2ff

2020-10-08

Strict type check for azure hooks (#11342)

b0fcf67559

2020-10-07

Add AzureFileShareToGCSOperator (#10991)

c51016b0b8

2020-10-05

Add LocalToAzureDataLakeStorageOperator (#10814)

fd682fd70a

2020-10-05

fix job deletion (#11272)

4210618789

2020-10-03

Ensure target_dedicated_nodes or enable_auto_scale is set in AzureBatchOperator (#11251)

0a0e1af800

2020-10-03

Fix Broken Markdown links in Providers README TOC (#11249)

ca4238eb4d

2020-10-02

Fixed month in backport packages to October (#11242)

5220e4c384

2020-10-02

Prepare Backport release 2020.09.07 (#11238)

5093245d6f

2020-09-30

Strict type coverage for Oracle and Yandex provider  (#11198)

f3e87c5030

2020-09-22

Add D202 pydocstyle check (#11032)

f77a11d5b1

2020-09-13

Add Secrets backend for Microsoft Azure Key Vault (#10898)

9549274d11

2020-09-09

Upgrade black to 20.8b1 (#10818)

fdd9b6f65b

2020-08-25

Enable Black on Providers Packages (#10543)

3696c34c28

2020-08-24

Fix typo in the word "release" (#10528)

ee7ca128a1

2020-08-22

Fix broken Markdown refernces in Providers README (#10483)

2f552233f5

2020-08-21

Add AzureBaseHook (#9747)

cdec301254

2020-08-07

Add correct signature to all operators and sensors (#10205)

24c8e4c2d6

2020-08-06

Changes to all the constructors to remove the args argument (#10163)

aeea71274d

2020-08-02

Remove 'args' parameter from provider operator constructors (#10097)

7d24b088cd

2020-07-25

Stop using start_date in default_args in example_dags (2) (#9985)

0bf330ba86

2020-07-24

Add get_blobs_list method to WasbHook (#9950)

33f0cd2657

2020-07-22

apply_default keeps the function signature for mypy (#9784)

d3c76da952

2020-07-12

Improve type hinting to provider microsoft  (#9774)

23f80f34ad

2020-07-08

Move gcs & wasb task handlers to their respective provider packages (#9714)

d0e7db4024

2020-06-19

Fixed release number for fresh release (#9408)

12af6a0800

2020-06-19

Final cleanup for 2020.6.23rc1 release preparation (#9404)

c7e5bce57f

2020-06-19

Prepare backport release candidate for 2020.6.23rc1 (#9370)

f6bd817a3a

2020-06-16

Introduce 'transfers' packages (#9320)

0b0e4f7a4c

2020-05-26

Preparing for RC3 relase of backports (#9026)

00642a46d0

2020-05-26

Fixed name of 20 remaining wrongly named operators. (#8994)

375d1ca229

2020-05-19

Release candidate 2 for backport packages 2020.05.20 (#8898)

12c5e5d8ae

2020-05-17

Prepare release candidate for backport packages (#8891)

f3521fb0e3

2020-05-16

Regenerate readme files for backport package release (#8886)

92585ca4cb

2020-05-15

Added automated release notes generation for backport operators (#8807)

87969a350d

2020-04-09

[AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170)

d99833c9b5

2020-04-03

[AIRFLOW-4529] Add support for Azure Batch Service (#8024)

4bde99f132

2020-03-23

Make airflow/providers pylint compatible (#7802)

a83eb335e5

2020-03-23

Add call to Super call in microsoft providers (#7821)

f0e2421807

2020-02-24

[AIRFLOW-6896] AzureCosmosDBHook: Move DB call out of __init__ (#7520)

4bec1cc489

2020-02-24

[AIRFLOW-6895] AzureFileShareHook: Move DB call out of __init__ (#7519)

3320e432a1

2020-02-24

[AIRFLOW-6817] Lazy-load 'airflow.DAG' to keep user-facing API untouched (#7517)

086e307245

2020-02-23

[AIRFLOW-6890] AzureDataLakeHook: Move DB call out of __init__ (#7513)

4d03e33c11

2020-02-22

[AIRFLOW-6817] remove imports from 'airflow/__init__.py', replaced implicit imports with explicit imports, added entry to 'UPDATING.MD' - squashed/rebased (#7456)

175a160463

2020-02-19

[AIRFLOW-6828] Stop using the zope library (#7448)

1e00243014

2020-02-10

[AIRFLOW-5176] Add Azure Data Explorer (Kusto) operator (#5785)

97a429f9d0

2020-02-02

[AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)

83c037873f

2020-01-30

[AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)

057f3ae3a4

2020-01-29

[AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286)

290330ba60

2020-01-15

[AIRFLOW-6552] Move Azure classes to providers.microsoft package (#7158)

Microsoft Azure Container Registry Connection

The Microsoft Azure Container Registry connection type enables the Azure Container Registry Integrations.

Authenticating to Azure Container Registry

There is one way to connect to Azure Container Registry using Airflow.

  1. Use individual login with Azure AD, i.e. add specific credentials to the Airflow connection.

Default Connection IDs

All hooks and operators related to Microsoft Azure Container Registry use azure_container_registry_default by default.

Configuring the Connection

Login

Specify the Image Registry Username used for the initial connection.

Password

Specify the Image Registry Password used for the initial connection.

Host

Specify the Image Registry Server used for the initial connection.

When specifying the connection as an environment variable you should specify it using URI syntax.

Note that all components of the URI should be URL-encoded.

For example:

export AIRFLOW_CONN_AZURE_CONTAINER_REGISTRY_DEFAULT='azure-container-registry://username:password@myregistry.com?tenant=tenant+id&account_name=store+name'
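If you prefer to build the connection in Python instead of hand-writing the URI, the sketch below (illustrative values only; it assumes the standard airflow.models.Connection class from Airflow 2, and the conn_type string is inferred from the URI scheme above) constructs an equivalent connection and prints the URI to export:

from airflow.models import Connection

# Hypothetical registry credentials, for illustration only.
conn = Connection(
    conn_id="azure_container_registry_default",
    conn_type="azure_container_registry",  # assumed from the URI scheme above
    login="username",        # Image Registry Username
    password="password",     # Image Registry Password
    host="myregistry.com",   # Image Registry Server
)

# get_uri() returns a URL-encoded URI that can be exported as
# AIRFLOW_CONN_AZURE_CONTAINER_REGISTRY_DEFAULT.
print(conn.get_uri())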

Microsoft Azure Data Factory

The Microsoft Azure Data Factory connection type enables the Azure Data Factory Integrations.

Authenticating to Azure Data Factory

There are multiple ways to connect to Azure Data Factory using Airflow.

  1. Use token credentials, i.e. add specific credentials (client_id, secret, tenant) and subscription id to the Airflow connection.

  2. Fall back on DefaultAzureCredential. This includes a mechanism to try different options to authenticate: Managed System Identity, environment variables, authentication through Azure CLI, and so on.

Default Connection IDs

All hooks and operators related to Microsoft Azure Data Factory use azure_data_factory_default by default.

Configuring the Connection

Client ID

Specify the client_id used for the initial connection. This is needed for the token credentials authentication mechanism. It can be left out to fall back on DefaultAzureCredential.

Secret

Specify the secret used for the initial connection. This is needed for the token credentials authentication mechanism. It can be left out to fall back on DefaultAzureCredential.

Tenant ID

Specify the Azure tenant ID used for the initial connection. This is needed for the token credentials authentication mechanism. It can be left out to fall back on DefaultAzureCredential. Use the extra param tenantId to pass in the tenant ID.

Subscription ID

Specify the ID of the subscription used for the initial connection. This is needed for all authentication mechanisms. Use the extra param subscriptionId to pass in the Azure subscription ID.

Factory Name (optional)

Specify the Azure Data Factory to interface with. If not specified in the connection, this needs to be passed in directly to hooks, operators, and sensors. Use the extra param factory_name to pass in the factory name.

Resource Group Name (optional)

Specify the Azure Resource Group Name under which the desired data factory resides. If not specified in the connection, this needs to be passed in directly to hooks, operators, and sensors. Use the extra param resource_group_name to pass in the resource group name.

When specifying the connection as an environment variable you should specify it using URI syntax.

Note that all components of the URI should be URL-encoded.

Examples

export AIRFLOW_CONN_AZURE_DATA_FACTORY_DEFAULT='azure-data-factory://applicationid:serviceprincipalpassword@?tenantId=tenant+id&subscriptionId=subscription+id&resource_group_name=group+name&factory_name=factory+name'

export AIRFLOW_CONN_AZURE_DATA_FACTORY_DEFAULT='azure-data-factory://applicationid:serviceprincipalpassword@?tenantId=tenant+id&subscriptionId=subscription+id'
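As a rough sketch of how the connection is consumed (the parameter and method names here are assumptions based on the 5.x provider and should be checked against the API reference), a task could reach Data Factory through the hook using the default connection ID:

from airflow.providers.microsoft.azure.hooks.data_factory import AzureDataFactoryHook

# Uses the azure_data_factory_default connection defined above; the factory and
# resource group come either from the connection extras or from these arguments.
hook = AzureDataFactoryHook(azure_data_factory_conn_id="azure_data_factory_default")

# Hypothetical pipeline name; run_pipeline is assumed to be available on the hook.
run = hook.run_pipeline(
    pipeline_name="my_pipeline",
    resource_group_name="group name",
    factory_name="factory name",
)
print(run.run_id)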

Microsoft Azure Data Lake Connection

The Microsoft Azure Data Lake connection type enables the Azure Data Lake Integrations.

Authenticating to Azure Data Lake

There is one way to connect to Azure Data Lake using Airflow.

  1. Use token credentials, i.e. add specific credentials (client_id, secret, tenant) and account name to the Airflow connection.

Default Connection IDs

All hooks and operators related to Microsoft Azure Data Lake use azure_data_lake_default by default.

Configuring the Connection

Login

Specify the client_id used for the initial connection. This is needed for the token credentials authentication mechanism.

Password

Specify the secret used for the initial connection. This is only needed for the token credentials authentication mechanism.

Extra (optional)

Specify the extra parameters (as a json dictionary) that can be used in the Azure Data Lake connection. The following parameters are all optional:

  • tenant: Specify the tenant to use. This is needed for the token credentials authentication mechanism.

  • account_name: Specify the Azure Data Lake account name. This is sometimes called the store_name.

When specifying the connection as an environment variable you should specify it using URI syntax.

Note that all components of the URI should be URL-encoded.

For example:

export AIRFLOW_CONN_AZURE_DATA_LAKE_DEFAULT='azure-data-lake://client%20id:secret@?tenant=tenant+id&account_name=store+name'
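A quick way to produce a correctly URL-encoded URI like the one above is to let Airflow do the encoding. The sketch below uses hypothetical values and assumes airflow.models.Connection from Airflow 2; the exact query-string layout produced by get_uri() may differ slightly between Airflow versions:

import json

from airflow.models import Connection

conn = Connection(
    conn_id="azure_data_lake_default",
    conn_type="azure_data_lake",   # assumed from the URI scheme above
    login="client id",             # the client_id; encoded as client%20id in the URI
    password="secret",
    extra=json.dumps({"tenant": "tenant id", "account_name": "store name"}),
)

# Export the printed value as AIRFLOW_CONN_AZURE_DATA_LAKE_DEFAULT.
print(conn.get_uri())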

Microsoft Azure Data Explorer

The Azure Data Explorer connection type enables Azure Data Explorer (ADX) integrations in Airflow.

Authenticating to Azure Data Explorer

There are three ways to connect to Azure Data Explorer using Airflow.

  1. Use an AAD application certificate (i.e. use “AAD_APP” or “AAD_APP_CERT” as the Authentication Method in the Airflow connection).

  2. Use AAD username and password (i.e. use “AAD_CREDS” as the Authentication Method in the Airflow connection).

  3. Use an AAD device code (i.e. use “AAD_DEVICE” as the Authentication Method in the Airflow connection).

Only one authorization method can be used at a time. If you need to manage multiple credentials or keys then you should configure multiple connections.

Default Connection IDs

All hooks and operators related to Microsoft Azure Data Explorer use azure_data_explorer_default by default.

Configuring the Connection

Data Explorer Cluster URL

Specify the Data Explorer cluster URL. Needed for all authentication methods.

Authentication Method

Specify the authentication method. Available authentication methods are:

  • AAD_APP: Authentication with AAD application certificate. A Tenant ID is required when using this method. Provide the application ID and application key through the Username and Password parameters.

  • AAD_APP_CERT: Authentication with AAD application certificate. Tenant ID, Application PEM Certificate, and Application Certificate Thumbprint are required when using this method.

  • AAD_CREDS: Authentication with AAD username and password. A Tenant ID is required when using this method. The Username and Password parameters are used for authentication with AAD.

  • AAD_DEVICE: Authenticate with an AAD device code. Please note that if you choose this option, you’ll need to authenticate for every new instance that is initialized. It is highly recommended to create one instance and use it for all queries.

Username (optional)

Specify the username used for Data Explorer. Needed for the AAD_APP, AAD_APP_CERT, and AAD_CREDS authentication methods.

Password (optional)

Specify the password used for Data Explorer. Needed for the AAD_APP and AAD_CREDS authentication methods.

Tenant ID (optional)

Specify the AAD tenant. Needed for the AAD_APP, AAD_APP_CERT, and AAD_CREDS authentication methods.

Application PEM Certificate (optional)

Specify the certificate. Needed for the AAD_APP_CERT authentication method.

Application Certificate Thumbprint (optional)

Specify the thumbprint needed for use with the AAD_APP_CERT authentication method.

When specifying the connection as an environment variable you should specify it using URI syntax.

Note that all components of the URI should be URL-encoded.

For example:

export AIRFLOW_CONN_AZURE_DATA_EXPLORER_DEFAULT='azure-data-explorer://add%20username:add%20password@mycluster.com?auth_method=AAD_APP&tenant=tenant+id'
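The same connection can be assembled in Python. The sketch below uses hypothetical cluster and credential values and assumes airflow.models.Connection (Airflow 2); the auth_method and tenant extras mirror the query parameters of the URI above:

import json

from airflow.models import Connection

conn = Connection(
    conn_id="azure_data_explorer_default",
    conn_type="azure_data_explorer",   # assumed from the URI scheme above
    host="mycluster.com",              # Data Explorer cluster URL (hypothetical)
    login="add username",              # application ID when using AAD_APP
    password="add password",           # application key when using AAD_APP
    extra=json.dumps({"auth_method": "AAD_APP", "tenant": "tenant id"}),
)

print(conn.get_uri())  # export as AIRFLOW_CONN_AZURE_DATA_EXPLORER_DEFAULT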

Microsoft Azure Service Bus

The Microsoft Azure Service Bus connection type enables the Azure Service Bus Integration.

Authenticating to Azure Service Bus

There are multiple ways to authenticate and authorize access to Azure Service Bus resources; currently only Shared Access Signatures (SAS) are supported.

  1. Use a Connection String, i.e. use the Connection String field to add the connection string in the Airflow connection.

Default Connection IDs

All hooks and operators related to Microsoft Azure Service Bus use azure_service_bus_default by default.

Configuring the Connection

Connection String

Specify the Azure Service Bus connection string used for the initial connection. Please see the Azure Service Bus documentation on how to generate a connection string (“Get connection string”). Use the key connection_string to pass in the connection string.
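This page does not ship a URI example, but a connection can be sketched in Python along the same lines as the other Azure connection types. The endpoint and key below are hypothetical, and it assumes airflow.models.Connection and that the extra key connection_string is read as described above:

import json

from airflow.models import Connection

# A Service Bus connection string typically looks like
# "Endpoint=sb://<namespace>.servicebus.windows.net/;SharedAccessKeyName=...;SharedAccessKey=...".
conn = Connection(
    conn_id="azure_service_bus_default",
    conn_type="azure_service_bus",  # assumed connection type name
    extra=json.dumps(
        {
            "connection_string": "Endpoint=sb://mynamespace.servicebus.windows.net/;"
            "SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=<key>"
        }
    ),
)

print(conn.get_uri())  # can be exported as AIRFLOW_CONN_AZURE_SERVICE_BUS_DEFAULT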

+
+
+
+
+ + + +
+ +
+
+
+
+
+

Was this entry helpful?

+
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+ + + + + + +
+ +
+
+ +
+ + + + + + +
+ + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/connections/azure.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/connections/azure.html new file mode 100644 index 00000000000..15ff9f2a6a9 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/connections/azure.html @@ -0,0 +1,903 @@ + + + + + + + + + + + + Microsoft Azure Connection — apache-airflow-providers-microsoft-azure Documentation + + + + + + + + + + + + + + + + + + + +
+ + +
+ + +
+ + +
+ + +
+
+
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+
+
+
+ +
+ + +
+ + + +
+
+ + + + + +
+
+ + + + + + + + +
+ + + + +
+ + + + + + + + + + +
+
+
+
+
+ +
+
+
+

Microsoft Azure Connection

The Microsoft Azure connection type enables the Azure Integrations.

Authenticating to Azure

There are three ways to connect to Azure using Airflow.

  1. Use token credentials, i.e. add specific credentials (client_id, secret, tenant) and subscription id to the Airflow connection.

  2. Use a JSON file, i.e. create a key file on disk and link to it in the Airflow connection.

  3. Use a JSON dictionary, i.e. add a key config directly into the Airflow connection.

Only one authorization method can be used at a time. If you need to manage multiple credentials or keys then you should configure multiple connections.

Default Connection IDs

All hooks and operators related to Microsoft Azure Container Instances use azure_default by default.

Configuring the Connection

Login (optional)

Specify the client_id used for the initial connection. This is only needed for the token credentials authentication mechanism.

Password (optional)

Specify the secret used for the initial connection. This is only needed for the token credentials authentication mechanism.

Extra (optional)

Specify the extra parameters (as a json dictionary) that can be used in the Azure connection. The following parameters are all optional:

  • tenantId: Specify the tenant to use. This is only needed for the token credentials authentication mechanism.

  • subscriptionId: Specify the subscription id to use. This is only needed for the token credentials authentication mechanism.

  • key_path: If set, it uses the JSON file authentication mechanism. It specifies the path to the json file that contains the authentication information.

  • key_json: If set, it uses the JSON dictionary authentication mechanism. It specifies the json that contains the authentication information.

When specifying the connection as an environment variable you should specify it using URI syntax.

Note that all components of the URI should be URL-encoded.

For example:

export AIRFLOW_CONN_AZURE_DEFAULT='azure://?key_path=%2Fkeys%2Fkey.json'
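The %2F sequences in the example are simply a URL-encoded /. If you build the URI yourself, the quoting can be done with the standard library (a minimal sketch; the path /keys/key.json is hypothetical):

from urllib.parse import quote

key_path = "/keys/key.json"   # hypothetical path to the key file on disk

# quote() with safe="" also encodes "/", producing %2Fkeys%2Fkey.json
uri = f"azure://?key_path={quote(key_path, safe='')}"
print(uri)  # azure://?key_path=%2Fkeys%2Fkey.json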

Microsoft Azure Batch

The Microsoft Azure Batch connection type enables the Azure Batch integrations.

Authenticating to Azure Batch

There is one way to connect to Azure Batch using Airflow.

  1. Use an Azure Shared Key Credential, i.e. add shared key credentials to the Airflow connection.

Default Connection IDs

All hooks and operators related to Microsoft Azure Batch use azure_batch_default by default.

Configuring the Connection

Batch Account Name

Specify the Azure Batch Account Name used for the initial connection.

Batch Account Access Key

Specify the access key used for the initial connection.

Batch Account URL

Specify the batch account URL you would like to use.

When specifying the connection as an environment variable you should specify it using URI syntax.

Note that all components of the URI should be URL-encoded.

For example:

export AIRFLOW_CONN_AZURE_BATCH_DEFAULT='azure-batch://batch%20account:batch%20key@?account_url=mybatchaccount.com'
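Equivalently, the connection can be defined in Python rather than as a raw URI (hypothetical account values; this assumes airflow.models.Connection, with the account URL carried as a connection extra, as in the URI above):

import json

from airflow.models import Connection

conn = Connection(
    conn_id="azure_batch_default",
    conn_type="azure_batch",                                    # assumed from the URI scheme above
    login="batch account",                                      # Batch Account Name
    password="batch key",                                       # Batch Account Access Key
    extra=json.dumps({"account_url": "mybatchaccount.com"}),    # Batch Account URL
)

print(conn.get_uri())  # export as AIRFLOW_CONN_AZURE_BATCH_DEFAULT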

Microsoft Azure Container Volume Connection

The Microsoft Azure Container Volume connection type enables the Azure Container Volume Integrations.

Authenticating to Azure Container Volume

There are two ways to connect to Azure Container Volume using Airflow.

  1. Use token credentials, i.e. add specific credentials (client_id, secret) and subscription id to the Airflow connection.

  2. Use a Connection String, i.e. add the connection string to extra__azure_container_volume__connection_string in the Airflow connection.

Only one authorization method can be used at a time. If you need to manage multiple credentials or keys then you should configure multiple connections.

Default Connection IDs

All hooks and operators related to Azure Container Volume use azure_container_volume_default by default.

Configuring the Connection

Login (optional)

Specify the login used for azure blob storage. For use with Shared Key Credential and SAS Token authentication.

Password (optional)

Specify the password used for azure blob storage. For use with Active Directory (token credential) and shared key authentication.

Host (optional)

Specify the account url for anonymous public read, Active Directory, and shared access key authentication.

Extra (optional)

Specify the extra parameters (as a json dictionary) that can be used in the Azure connection. The following parameters are all optional:

  • extra__azure_container_volume__connection_string: Connection string for use with connection string authentication.

When specifying the connection as an environment variable you should specify it using URI syntax.

Note that all components of the URI should be URL-encoded.

For example connect with token credentials:

export AIRFLOW_CONN_AZURE_CONTAINER_VOLUME_DEFAULT='azure_container_volume://blob%20username:blob%20password@myblob.com'

Microsoft Azure Cosmos

The Microsoft Azure Cosmos connection type enables the Azure Cosmos Integrations.

Authenticating to Azure

There is one way to connect to Azure Cosmos using Airflow.

  1. Use Primary Keys, i.e. add specific credentials (client_id, secret, tenant) and account name to the Airflow connection.

Default Connection IDs

All hooks and operators related to Microsoft Azure Cosmos use azure_cosmos_default by default.

Configuring the Connection

Login

Specify the Cosmos Endpoint URI used for the initial connection.

Password

Specify the Cosmos Master Key Token used for the initial connection.

Extra (optional)

Specify the extra parameters (as a json dictionary) that can be used in the Azure Cosmos connection. The following parameters are all optional:

  • database_name: Specify the azure cosmos database to use.

  • collection_name: Specify the azure cosmos collection to use.

When specifying the connection as an environment variable you should specify it using URI syntax.

Note that all components of the URI should be URL-encoded.

For example:

export AIRFLOW_CONN_AZURE_COSMOS_DEFAULT='azure-cosmos://https%3A%2F%2Fairflow.azure.com:master%20key@?database_name=mydatabase&collection_name=mycollection'
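A matching connection object can also be created in Python (hypothetical endpoint, key, database and collection names; assumes airflow.models.Connection). Note how the endpoint URI ends up URL-encoded in the login position of the URI above:

import json

from airflow.models import Connection

conn = Connection(
    conn_id="azure_cosmos_default",
    conn_type="azure_cosmos",            # assumed from the URI scheme above
    login="https://airflow.azure.com",   # Cosmos Endpoint URI
    password="master key",               # Cosmos Master Key Token
    extra=json.dumps({"database_name": "mydatabase", "collection_name": "mycollection"}),
)

print(conn.get_uri())  # export as AIRFLOW_CONN_AZURE_COSMOS_DEFAULT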

Microsoft Azure File Share Connection

The Microsoft Azure File Share connection type enables the Azure File Share Integrations.

Authenticating to Azure File Share

There are three ways to connect to Azure File Share using Airflow.

  1. Use token credentials, i.e. add specific credentials (client_id, secret) and subscription id to the Airflow connection.

  2. Use a SAS Token, i.e. add a key config to sas_token in the Airflow connection.

  3. Use a Connection String, i.e. add the connection string to connection_string in the Airflow connection.

Only one authorization method can be used at a time. If you need to manage multiple credentials or keys then you should configure multiple connections.

Default Connection IDs

All hooks and operators related to Azure File Share use azure_fileshare_default by default.

Configuring the Connection

Login (optional)

Specify the login used for azure blob storage. For use with Shared Key Credential and SAS Token authentication.

Password (optional)

Specify the password used for azure blob storage. For use with Active Directory (token credential) and shared key authentication.

Host (optional)

Specify the account url for anonymous public read, Active Directory, and shared access key authentication.

Extra (optional)

Specify the extra parameters (as a json dictionary) that can be used in the Azure connection. The following parameters are all optional:

  • connection_string: Connection string for use with connection string authentication.

  • sas_token: SAS Token for use with SAS Token authentication.

  • protocol: Specify the protocol to use (default is https).

When specifying the connection as an environment variable you should specify it using URI syntax.

Note that all components of the URI should be URL-encoded.

For example, connect with a SAS token:

export AIRFLOW_CONN_AZURE_FILESHARE_DEFAULT='azure_fileshare://blob%20username@myblob.com?sas_token=token'

Microsoft Azure Synapse

The Microsoft Azure Synapse connection type enables the Azure Synapse Integrations.

Authenticating to Azure Synapse

There are multiple ways to connect to Azure Synapse using Airflow.

  1. Use token credentials, i.e. add specific credentials (client_id, secret, tenant) and subscription id to the Airflow connection.

  2. Fall back on DefaultAzureCredential. This includes a mechanism to try different options to authenticate: Managed System Identity, environment variables, authentication through Azure CLI, and so on.

Default Connection IDs

All hooks and operators related to Microsoft Azure Synapse use azure_synapse_default by default.

Configuring the Connection

Client ID

Specify the client_id used for the initial connection. This is needed for the token credentials authentication mechanism. It can be left out to fall back on DefaultAzureCredential.

Secret

Specify the secret used for the initial connection. This is needed for the token credentials authentication mechanism. It can be left out to fall back on DefaultAzureCredential.

Tenant ID

Specify the Azure tenant ID used for the initial connection. This is needed for the token credentials authentication mechanism. It can be left out to fall back on DefaultAzureCredential. Use the key extra__azure_synapse__tenantId to pass in the tenant ID.

Subscription ID

A subscription ID is required for the connection. This is needed for all authentication mechanisms. Use the key extra__azure_synapse__subscriptionId to pass in the Azure subscription ID.

Synapse Workspace URL

Specify the Azure Synapse endpoint to interface with.
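No URI example is given for this connection type, but an equivalent connection can be sketched in Python using the extras described above. The IDs and workspace URL below are hypothetical, and the conn_type string is an assumption for the azure_synapse connection type:

import json

from airflow.models import Connection

conn = Connection(
    conn_id="azure_synapse_default",
    conn_type="azure_synapse",                         # assumed connection type name
    login="application id",                            # Client ID
    password="service principal password",             # Secret
    host="https://myworkspace.dev.azuresynapse.net",   # Synapse Workspace URL (hypothetical)
    extra=json.dumps(
        {
            "extra__azure_synapse__tenantId": "tenant id",
            "extra__azure_synapse__subscriptionId": "subscription id",
        }
    ),
)

print(conn.get_uri())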

Connection Types

Microsoft Azure Blob Storage Connection

The Microsoft Azure Blob Storage connection type enables the Azure Blob Storage Integrations.

Authenticating to Azure Blob Storage

There are four ways to connect to Azure Blob Storage using Airflow.

  1. Use token credentials, i.e. add specific credentials (client_id, secret, tenant) and subscription id to the Airflow connection.

  2. Use an Azure Shared Key Credential, i.e. add shared key credentials to shared_access_key in the Airflow connection.

  3. Use a SAS Token, i.e. add a key config to sas_token in the Airflow connection.

  4. Use a Connection String, i.e. add the connection string to connection_string in the Airflow connection.

Only one authorization method can be used at a time. If you need to manage multiple credentials or keys then you should configure multiple connections.

Default Connection IDs

All hooks and operators related to Microsoft Azure Blob Storage use wasb_default by default.

Configuring the Connection

Login (optional)

Specify the login used for azure blob storage. For use with Shared Key Credential and SAS Token authentication.

Password (optional)

Specify the password used for azure blob storage. For use with Active Directory (token credential) and shared key authentication.

Host (optional)

Specify the account url for anonymous public read, Active Directory, and shared access key authentication.

Extra (optional)

Specify the extra parameters (as a json dictionary) that can be used in the Azure connection. The following parameters are all optional:

  • tenant_id: Specify the tenant to use. Needed for Active Directory (token) authentication.

  • shared_access_key: Specify the shared access key. Needed for shared access key authentication.

  • connection_string: Connection string for use with connection string authentication.

  • sas_token: SAS Token for use with SAS Token authentication.

When specifying the connection as an environment variable you should specify it using URI syntax.

Note that all components of the URI should be URL-encoded.

For example connect with token credentials:

export AIRFLOW_CONN_WASB_DEFAULT='wasb://blob%20username:blob%20password@myblob.com?tenant_id=tenant+id'
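Once the wasb_default connection exists, hooks pick it up by connection ID. The sketch below is illustrative only: the container and blob names are hypothetical, and the exact method signatures should be checked against the WasbHook API reference:

from airflow.providers.microsoft.azure.hooks.wasb import WasbHook

hook = WasbHook(wasb_conn_id="wasb_default")

# Upload a small payload and then check that the blob exists
# (hypothetical container/blob names).
hook.load_string("hello from airflow", container_name="mycontainer", blob_name="hello.txt")
print(hook.check_for_blob(container_name="mycontainer", blob_name="hello.txt"))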

apache-airflow-providers-microsoft-azure

+
+

Content

+ +
+

References

+ +
+
+
+ +
+

Commits

+ +
+
+
+

Package apache-airflow-providers-microsoft-azure

+

Microsoft Azure

+

Release: 5.0.1

+
+
+

Provider package

+

This is a provider package for the microsoft.azure provider. All classes for this provider package are in the airflow.providers.microsoft.azure python package.

+
+
+

Installation

+

You can install this package on top of an existing Airflow 2 installation (see Requirements below for the minimum Airflow version supported) via pip install apache-airflow-providers-microsoft-azure

+
+
+

Requirements

PIP package                     Version required

apache-airflow                  >=2.3.0
azure-batch                     >=8.0.0
azure-cosmos                    >=4.0.0
azure-datalake-store            >=0.0.45
azure-identity                  >=1.3.1
azure-keyvault-secrets          >=4.1.0,<5.0
azure-kusto-data                >=0.0.43,<0.1
azure-mgmt-containerinstance    >=1.5.0,<2.0
azure-mgmt-datafactory          >=1.0.0,<2.0
azure-mgmt-datalake-store       >=0.5.0
azure-mgmt-resource             >=2.2.0
azure-storage-blob              >=12.14.0
azure-storage-common            >=2.1.0
azure-storage-file              >=2.1.0
azure-servicebus                >=7.6.1; platform_machine != "aarch64"
azure-synapse-spark
adal                            >=1.2.7

+
+
+

Cross provider package dependencies

+

Those are dependencies that might be needed in order to use all the features of the package. You need to install the specified provider packages in order to use them.

+

You can install such cross-provider dependencies when installing from PyPI. For example:

+
pip install apache-airflow-providers-microsoft-azure[google]
+
+
Dependent package                       Extra

apache-airflow-providers-google         google
apache-airflow-providers-oracle         oracle
apache-airflow-providers-sftp           sftp

+
+
+

Downloading official packages

+

You can download officially released packages and verify their checksums and signatures from the Official Apache Download site.

+ +
+
+
+
+

Changelog

+
+

5.0.1

+
+

Bug Fixes

+
    +
  • Make arguments 'offset' and 'length' not required (#28234)

  • +
+
+
+
+

5.0.0

+

This release of the provider is only available for Airflow 2.3+ as explained in the Apache Airflow providers support policy.

+
+

Breaking changes

+
    +
  • In AzureFileShareHook, if both extra__azure_fileshare__foo and foo existed in the connection extra dict, the prefixed version would be used; now, the non-prefixed version will be preferred.

  • +
  • Remove deprecated classes (#27417)

  • +
  • In Azure Batch vm_size and vm_node_agent_sku_id parameters are required.

  • +
+
+
+

Misc

+
    +
  • Move min airflow version to 2.3.0 for all providers (#27196)

  • +
+
+
+

Features

+
    +
  • Add azure, google, authentication library limits to eaager upgrade (#27535)

  • +
  • Allow and prefer non-prefixed extra fields for remaining azure (#27220)

  • +
  • Allow and prefer non-prefixed extra fields for AzureFileShareHook (#27041)

  • +
  • Allow and prefer non-prefixed extra fields for AzureDataExplorerHook (#27219)

  • +
  • Allow and prefer non-prefixed extra fields for AzureDataFactoryHook (#27047)

  • +
  • Update WasbHook to reflect preference for unprefixed extra (#27024)

  • +
  • Look for 'extra__' instead of 'extra_' in 'get_field' (#27489)

  • +
+
+
+

Bug Fixes

+
    +
  • Fix Azure Batch errors revealed by added typing to azure batch lib (#27601)

  • +
  • Fix separator getting added to variables_prefix when empty (#26749)

  • +
+
+
+
+

4.3.0

+
+

Features

+
    +
  • Add DataFlow operations to Azure DataFactory hook (#26345)

  • +
  • Add network_profile param in AzureContainerInstancesOperator (#26117)

  • +
  • Add Azure synapse operator (#26038)

  • +
  • Auto tail file logs in Web UI (#26169)

  • +
  • Implement Azure Service Bus Topic Create, Delete Operators (#25436)

  • +
+
+
+

Bug Fixes

+
    +
  • Fix AzureBatchOperator false negative task status (#25844)

  • +
+
+
+
+

4.2.0

+
+

Features

+
    +
  • Add 'test_connection' method to AzureContainerInstanceHook (#25362)

  • +
  • Add test_connection to Azure Batch hook (#25235)

  • +
  • Bump typing-extensions and mypy for ParamSpec (#25088)

  • +
  • Implement Azure Service Bus (Update and Receive) Subscription Operator (#25029)

  • +
  • Set default wasb Azure http logging level to warning; fixes #16224 (#18896)

  • +
+
+
+
+

4.1.0

+
+

Features

+
    +
  • Add 'test_connection' method to AzureCosmosDBHook (#25018)

  • +
  • Add test_connection method to AzureFileShareHook (#24843)

  • +
  • Add test_connection method to Azure WasbHook (#24771)

  • +
  • Implement Azure service bus subscription Operators (#24625)

  • +
  • Implement Azure Service Bus Queue Operators (#24038)

  • +
+
+
+

Bug Fixes

+
    +
  • Update providers to use functools compat for ''cached_property'' (#24582)

  • +
+
+
+
+

4.0.0

+
+

Breaking changes

+ +
+
+

Features

+
    +
  • Pass connection extra parameters to wasb BlobServiceClient (#24154)

  • +
+
+
+

Misc

+
    +
  • Apply per-run log templates to log handlers (#24153)

  • +
  • Migrate Microsoft example DAGs to new design #22452 - azure (#24141)

  • +
  • Add typing to Azure Cosmos Client Hook (#23941)

  • +
+
+
+
+

3.9.0

+
+

Features

+
    +
  • wasb hook: user defaultAzureCredentials instead of managedIdentity (#23394)

  • +
+
+
+

Misc

+
    +
  • Replace usage of 'DummyOperator' with 'EmptyOperator' (#22974)

  • +
+
+
+
+

3.8.0

+
+

Features

+
    +
  • Update secrets backends to use get_conn_value instead of get_conn_uri (#22348)

  • +
+
+
+

Misc

+
    +
  • Docs: Fix example usage for 'AzureCosmosDocumentSensor' (#22735)

  • +
+
+
+
+

3.7.2

+
+

Bug Fixes

+
    +
  • Fix mistakenly added install_requires for all providers (#22382)

  • +
+
+
+
+

3.7.1

+
+

Misc

+
    +
  • Add Trove classifiers in PyPI (Framework :: Apache Airflow :: Provider)

  • +
+
+
+
+

3.7.0

+
+

Features

+
    +
  • Add 'test_connection' method to 'AzureDataFactoryHook' (#21924)

  • +
  • Add pre-commit check for docstring param types (#21398)

  • +
  • Make container creation configurable when uploading files via WasbHook (#20510)

  • +
+
+
+

Misc

+
    +
  • Support for Python 3.10

  • +
  • (AzureCosmosDBHook) Update to latest Cosmos API (#21514)

  • +
+
+
+
+

3.6.0

+
+

Features

+
    +
  • Add optional features in providers. (#21074)

  • +
+
+
+

Misc

+
    +
  • Refactor operator links to not create ad hoc TaskInstances (#21285)

  • +
+
+
+
+

3.5.0

+
+

Features

+
    +
  • Azure: New sftp to wasb operator (#18877)

  • +
  • Removes InputRequired validation with azure extra (#20084)

  • +
  • Add operator link to monitor Azure Data Factory pipeline runs (#20207)

  • +
+
+
+
+

3.4.0

+
+

Features

+
    +
  • Remove unnecessary connection form customizations in Azure (#19595)

  • +
  • Update Azure modules to comply with AIP-21 (#19431)

  • +
  • Remove 'host' from hidden fields in 'WasbHook' (#19475)

  • +
  • use DefaultAzureCredential if login not provided for Data Factory (#19079)

  • +
+
+
+

Bug Fixes

+
    +
  • Fix argument error in AzureContainerInstancesOperator (#19668)

  • +
+
+
+
+

3.3.0

+
+

Features

+
    +
  • update azure cosmos to latest version (#18695)

  • +
  • Added sas_token var to BlobServiceClient return. Updated tests (#19234)

  • +
  • Add pre-commit hook for common misspelling check in files (#18964)

  • +
+
+
+

Bug Fixes

+
    +
  • Fix changelog for Azure Provider (#18736)

  • +
+
+
+

Other

+
    +
  • Expanding docs on client auth for AzureKeyVaultBackend (#18659)

  • +
  • Static start_date and default arg cleanup for Microsoft providers example DAGs (#19062)

  • +
+
+
+
+

3.2.0

+
+

Features

+
    +
  • Rename AzureDataLakeStorage to ADLS (#18493)

  • +
  • Creating ADF pipeline run operator, sensor + ADF custom conn fields (#17885)

  • +
  • Rename LocalToAzureDataLakeStorageOperator to LocalFilesystemToADLSOperator (#18168)

  • +
  • Rename FileToWasbOperator to LocalFilesystemToWasbOperator (#18109)

  • +
+
+
+

Bug Fixes

+
    +
  • Fixed wasb hook attempting to create container when getting a blob client (#18287)

  • +
  • Removing redundant relabeling of password conn field (#18386)

  • +
  • Proper handling of Account URL custom conn field in AzureBatchHook (#18456)

  • +
  • Proper handling of custom conn field values in the AzureDataExplorerHook (#18203)

  • +
+
+
+
+

Main

+

Changes in operator names and import paths are listed in the following table. This is a backward-compatible change. Deprecated operators will be removed in the next major release.

Deprecated operator name: AzureDataLakeStorageListOperator
New operator name: ADLSListOperator
Deprecated path: airflow.providers.microsoft.azure.operators.adls_list
New path: airflow.providers.microsoft.azure.operators.adls

Deprecated operator name: AzureDataLakeStorageDeleteOperator
New operator name: ADLSDeleteOperator
Deprecated path: airflow.providers.microsoft.azure.operators.adls_delete
New path: airflow.providers.microsoft.azure.operators.adls

+
+
+

3.1.1

+
+

Misc

+
    +
  • Optimise connection importing for Airflow 2.2.0

  • +
  • Adds secrets backend/logging/auth information to provider yaml (#17625)

  • +
+
+
+
+

3.1.0

+
+

Features

+
    +
  • Add support for managed identity in WASB hook (#16628)

  • +
  • Reduce log messages for happy path (#16626)

  • +
+
+
+

Bug Fixes

+
    +
  • Fix multiple issues in Microsoft AzureContainerInstancesOperator (#15634)

  • +
+
+
+
+

3.0.0

+
+

Breaking changes

+
    +
  • Auto-apply apply_default decorator (#15667)

  • +
+
+

Warning

+

Due to the apply_default decorator removal, this version of the provider requires Airflow 2.1.0+. If your Airflow version is < 2.1.0 and you want to install this provider version, first upgrade Airflow to at least version 2.1.0. Otherwise your Airflow package version will be upgraded automatically and you will have to manually run airflow upgrade db to complete the migration.

+
+
    +
  • Fixes AzureFileShare connection extras (#16388)

  • +
+

Azure Container Volume and Azure File Share now have dedicated connection types with editable UI fields. You should not use the Wasb connection type any more for those connections. Names of connection ids for those hooks/operators were changed to reflect that.

+
+
+

Features

+
    +
  • add oracle  connection link (#15632)

  • +
  • Add delimiter argument to WasbHook delete_file method (#15637)

  • +
+
+
+

Bug Fixes

+
    +
  • Fix colon spacing in ``AzureDataExplorerHook docstring (#15841)``

  • +
  • fix wasb remote logging when blob already exists (#16280)

  • +
+
+
+
+

2.0.0

+
+

Breaking changes

+
    +
  • Removes unnecessary AzureContainerInstance connection type (#15514)

  • +
+

This change removes the azure_container_instance_default connection type and replaces it with azure_default. The problem was that AzureContainerInstance was not needed, as it was exactly the same as the plain “azure” connection; however, its presence caused duplication in the field names used in the UI editor for connections and generated unnecessary warnings. This version uses the plain Azure Hook and connection also for Azure Container Instance. If you already have an azure_container_instance_default connection created in your DB, it will continue to work, but the first time you edit it with the UI you will have to change its type to azure_default.

+
+
+

Features

+
    +
  • Add dynamic connection fields to Azure Connection (#15159)

  • +
+
+
+

Bug fixes

+
    +
  • Fix 'logging.exception' redundancy (#14823)

  • +
+
+
+
+

1.3.0

+
+

Features

+
    +
  • A bunch of template_fields_renderers additions (#15130)

  • +
+
+
+

Bug fixes

+
    +
  • Fix attributes for AzureDataFactory hook (#14704)

  • +
+
+
+
+

1.2.0

+
+

Features

+
    +
  • Add Azure Data Factory hook (#11015)

  • +
+
+
+

Bug fixes

+
    +
  • BugFix: Fix remote log in azure storage blob displays in one line (#14313)

  • +
  • Fix AzureDataFactoryHook failing to instantiate its connection (#14565)

  • +
+
+
+
+

1.1.0

+

Updated documentation and readme files.

+
+

Features

+
    +
  • Upgrade azure blob to v12 (#12188)

  • +
  • Fix Azure Data Explorer Operator (#13520)

  • +
  • add AzureDatalakeStorageDeleteOperator (#13206)

  • +
+
+
+
+

1.0.0

+

Initial version of the provider.


Installing from sources

+
+

Released packages

+

This page describes downloading and verifying the apache-airflow-providers-microsoft-azure provider version 5.0.1 using officially released packages. You can also install the provider package, as most Python packages, via PyPI. You can choose a different version of the provider by selecting it from the drop-down at the top-left of the page.

+

The sdist and whl packages released are the “official” sources of installation that you can use if you want to verify the origin of the packages and want to verify checksums and signatures of the packages. The packages are available via the Official Apache Software Foundation Downloads.

+

The downloads are available at:

+ +

If you want to install from the source code, you can download from the sources link above; it will contain an INSTALL file with details on how you can build and install the provider.

+
+
+

Release integrity

+

PGP signatures KEYS

+

It is essential that you verify the integrity of the downloaded files using the PGP or SHA signatures. The PGP signatures can be verified using GPG or PGP. Please download the KEYS as well as the asc signature files for the relevant distribution. It is recommended to get these files from the main distribution directory and not from the mirrors.

+
gpg -i KEYS
+
+
+

or

+
pgpk -a KEYS
+
+
+

or

+
pgp -ka KEYS
+
+
+

To verify the binaries/sources you can download the relevant asc files for them from the main distribution directory and follow the guide below.

+
gpg --verify apache-airflow-providers-********.asc apache-airflow-*********
+
+
+

or

+
pgpv apache-airflow-providers-********.asc
+
+
+

or

+
pgp apache-airflow-providers-********.asc
+
+
+

Example:

+
$ gpg --verify apache-airflow-providers-microsoft-azure-5.0.1.tar.gz.asc apache-airflow-providers-microsoft-azure-5.0.1.tar.gz
  gpg: Signature made Sat 11 Sep 12:49:54 2021 BST
  gpg:                using RSA key CDE15C6E4D3A8EC4ECF4BA4B6674E08AD7DE406F
  gpg:                issuer "kaxilnaik@apache.org"
  gpg: Good signature from "Kaxil Naik <kaxilnaik@apache.org>" [unknown]
  gpg:                 aka "Kaxil Naik <kaxilnaik@gmail.com>" [unknown]
  gpg: WARNING: The key's User ID is not certified with a trusted signature!
  gpg:          There is no indication that the signature belongs to the owner.
  Primary key fingerprint: CDE1 5C6E 4D3A 8EC4 ECF4  BA4B 6674 E08A D7DE 406F
+
+

The “Good signature from …” line is an indication that the signatures are correct. Do not worry about the “not certified with a trusted signature” warning. Most of the certificates used by release managers are self-signed, which is why you get this warning. Because you imported the key from the KEYS page in the previous step, you already know that it is a valid key.

+

For the SHA512 sum check, download the relevant sha512 file and run the following:

+
shasum -a 512 apache-airflow-providers-********  | diff - apache-airflow-providers-********.sha512
+
+
+

The SHASUM of the file should match the one provided in the .sha512 file.

+

Example:

+
shasum -a 512 apache-airflow-providers-microsoft-azure-5.0.1.tar.gz  | diff - apache-airflow-providers-microsoft-azure-5.0.1.tar.gz.sha512
+
+
+
+
+

Verifying PyPI releases

+

You can verify the provider .whl packages from PyPI by downloading the package, signature, and SHA sum files locally with the script below:

+
#!/bin/bash
+PACKAGE_VERSION=5.0.1
+PACKAGE_NAME=apache-airflow-providers-microsoft-azure
+provider_download_dir=$(mktemp -d)
+pip download --no-deps "${PACKAGE_NAME}==${PACKAGE_VERSION}" --dest "${provider_download_dir}"
+curl "https://downloads.apache.org/airflow/providers/apache_airflow_providers_microsoft_azure-5.0.1-py3-none-any.whl.asc" \
+    -L -o "${provider_download_dir}/apache_airflow_providers_microsoft_azure-5.0.1-py3-none-any.whl.asc"
+curl "https://downloads.apache.org/airflow/providers/apache_airflow_providers_microsoft_azure-5.0.1-py3-none-any.whl.sha512" \
+    -L -o "${provider_download_dir}/apache_airflow_providers_microsoft_azure-5.0.1-py3-none-any.whl.sha512"
+echo
+echo "Please verify files downloaded to ${provider_download_dir}"
+ls -la "${provider_download_dir}"
+echo
+
+
+

Once you have verified the files following the instructions from the previous chapter, you can remove the temporary folder that was created.

diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/logging.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/logging.html
new file mode 100644

diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/logging/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/logging/index.html
new file mode 100644

Writing logs to Azure Blob Storage

+

Airflow can be configured to read and write task logs in Azure Blob Storage. It uses an existing Airflow connection to read or write logs. If you don’t have a connection properly set up, this process will fail.

+

Follow the steps below to enable Azure Blob Storage logging:

+
  1. Airflow’s logging system requires a custom .py file to be located in the PYTHONPATH, so that it’s importable from Airflow. Start by creating a directory to store the config file; $AIRFLOW_HOME/config is recommended.

  2. Create empty files called $AIRFLOW_HOME/config/log_config.py and $AIRFLOW_HOME/config/__init__.py.

  3. Copy the contents of airflow/config_templates/airflow_local_settings.py into the log_config.py file created in Step 2.

  4. Customize the following portions of the template:

     # wasb buckets should start with "wasb" just to help Airflow select the correct handler
     REMOTE_BASE_LOG_FOLDER = 'wasb://<container_name>@<storage_account>.blob.core.windows.net'

     # Rename DEFAULT_LOGGING_CONFIG to LOGGING_CONFIG
     LOGGING_CONFIG = ...

  5. Make sure an Azure Blob Storage (Wasb) connection hook has been defined in Airflow. The hook should have read and write access to the Azure Blob Storage container defined above in REMOTE_BASE_LOG_FOLDER.

  6. Update $AIRFLOW_HOME/airflow.cfg to contain:

     [logging]
     remote_logging = True
     logging_config_class = log_config.LOGGING_CONFIG
     remote_log_conn_id = <name of the Azure Blob Storage connection>

  7. Restart the Airflow webserver and scheduler, and trigger (or wait for) a new task execution.

  8. Verify that logs are showing up for newly executed tasks in the container you have defined.
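Before restarting Airflow, it can be useful to confirm that the connection you will reference in remote_log_conn_id can actually reach the target container. The short sketch below does that with the provider's WasbHook; the connection id, container name, and prefix are placeholders, not values mandated by this guide.

# An optional smoke test (not part of the official setup steps): it checks that the
# connection you plan to use for remote logging can reach the log container.
# "wasb_logs_conn" and "airflow-logs" are placeholder names.
from airflow.providers.microsoft.azure.hooks.wasb import WasbHook

hook = WasbHook(wasb_conn_id="wasb_logs_conn")
# Returns True/False depending on whether any blob exists under the given prefix;
# an authentication or container error here means the connection is not set up correctly.
print(hook.check_for_prefix(container_name="airflow-logs", prefix="dag_id="))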
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/objects.inv b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/objects.inv
new file mode 100644
GIT binary patch (content omitted)

diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/adf_run_pipeline.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/adf_run_pipeline.html
new file mode 100644

Azure Data Factory Operators

+

Azure Data Factory is Azure’s cloud ETL service for scale-out serverless data integration and data transformation. It offers a code-free UI for intuitive authoring and single-pane-of-glass monitoring and management.

+
+

AzureDataFactoryRunPipelineOperator

+

Use the AzureDataFactoryRunPipelineOperator to execute a pipeline within a data factory. By default, the operator periodically checks the status of the executed pipeline run until it terminates with a “Succeeded” status. This behaviour can be disabled in favour of an asynchronous wait, typically performed with the AzureDataFactoryPipelineRunStatusSensor, by setting wait_for_termination to False.

+

Below is an example of using this operator to execute an Azure Data Factory pipeline.

+
+
+

tests/system/providers/microsoft/azure/example_adf_run_pipeline.py[source]

+
    run_pipeline1 = AzureDataFactoryRunPipelineOperator(
+        task_id="run_pipeline1",
+        pipeline_name="pipeline1",
+        parameters={"myParam": "value"},
+    )
+
+
+
+
+

Here is a different example of using this operator to execute a pipeline, but coupled with the AzureDataFactoryPipelineRunStatusSensor to perform an asynchronous wait.

+
+
+

tests/system/providers/microsoft/azure/example_adf_run_pipeline.py[source]

+
    run_pipeline2 = AzureDataFactoryRunPipelineOperator(
+        task_id="run_pipeline2",
+        pipeline_name="pipeline2",
+        wait_for_termination=False,
+    )
+
+    pipeline_run_sensor = AzureDataFactoryPipelineRunStatusSensor(
+        task_id="pipeline_run_sensor",
+        run_id=cast(str, XComArg(run_pipeline2, key="run_id")),
+    )
+
+
+
+
+
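For completeness, here is a minimal, self-contained sketch of the asynchronous pattern above with the two tasks wired together. It is illustrative only: the DAG id and pipeline name are placeholders, and it assumes the data factory details are supplied by the azure_data_factory_default connection.

# A minimal, illustrative wiring of the asynchronous pattern shown above.
# The DAG id and pipeline name are placeholders; the data factory, resource group
# and credentials are assumed to come from the azure_data_factory_default connection.
from datetime import datetime

from airflow import DAG
from airflow.models.xcom_arg import XComArg
from airflow.providers.microsoft.azure.operators.data_factory import (
    AzureDataFactoryRunPipelineOperator,
)
from airflow.providers.microsoft.azure.sensors.data_factory import (
    AzureDataFactoryPipelineRunStatusSensor,
)

with DAG(
    "example_adf_async_wait",
    start_date=datetime(2021, 1, 1),
    schedule=None,
    catchup=False,
) as dag:
    run_pipeline = AzureDataFactoryRunPipelineOperator(
        task_id="run_pipeline",
        pipeline_name="pipeline2",
        wait_for_termination=False,  # return immediately instead of polling the run
    )
    wait_for_pipeline_run = AzureDataFactoryPipelineRunStatusSensor(
        task_id="wait_for_pipeline_run",
        run_id=XComArg(run_pipeline, key="run_id"),  # run_id pushed by the operator
    )
    run_pipeline >> wait_for_pipeline_run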
+
+

Reference

+

For further information, please refer to the Microsoft documentation:

diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/adls.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/adls.html
new file mode 100644

Azure DataLake Storage Operators

+
+

Prerequisite Tasks

+
+

ADLSDeleteOperator

+

Use the ADLSDeleteOperator to remove file(s) from Azure DataLake Storage.

+

Below is an example of using this operator to delete a file from ADL.

+
+

tests/system/providers/microsoft/azure/example_adls_delete.py[source]

+
    remove_file = ADLSDeleteOperator(task_id="delete_task", path=REMOTE_FILE_PATH, recursive=True)
+
+
+
+
+
+

Reference

+

For further information, look at:

+ +
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/asb.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/asb.html
new file mode 100644

Azure Service Bus Operators

+

Azure Service Bus is a fully managed enterprise message broker with message queues and publish-subscribe topics (in a namespace). Service Bus is used to decouple applications and services from each other. The Service Bus operators in this provider perform operations on entities such as namespaces, queues, and topics.

+
+
The Service Bus REST API provides operations for working with the following resources:
  • Azure Resource Manager

  • Service Bus service
+
+
+

Azure Service Bus Queue Operators

+

Azure Service Bus queue operators help you interact with Azure Service Bus queues, covering operations such as creating and deleting a queue and sending and receiving messages.

+
+
+
+

Create Azure Service Bus Queue

+

To create an Azure Service Bus queue with specific parameters you can use AzureServiceBusCreateQueueOperator.

Below is an example of using this operator to create an Azure Service Bus queue.

+
+

tests/system/providers/microsoft/azure/example_azure_service_bus.py[source]

+
create_service_bus_queue = AzureServiceBusCreateQueueOperator(
+    task_id="create_service_bus_queue",
+    queue_name=QUEUE_NAME,
+)
+
+
+
+
+
+

Send Message to Azure Service Bus Queue

+

To send a single message, a list of messages, or a batch of messages to an Azure Service Bus queue, you can use AzureServiceBusSendMessageOperator.

Below is an example of using this operator to send messages to an Azure Service Bus queue.

+
+

tests/system/providers/microsoft/azure/example_azure_service_bus.py[source]

+
send_message_to_service_bus_queue = AzureServiceBusSendMessageOperator(
+    task_id="send_message_to_service_bus_queue",
+    message=MESSAGE,
+    queue_name=QUEUE_NAME,
+    batch=False,
+)
+
+
+
+
+
+

Receive Message Azure Service Bus Queue

+

To receive a single message, a list of messages, or a batch of messages from a queue, you can use AzureServiceBusReceiveMessageOperator.

Below is an example of using this operator to receive messages from an Azure Service Bus queue.

+
+

tests/system/providers/microsoft/azure/example_azure_service_bus.py[source]

+
receive_message_service_bus_queue = AzureServiceBusReceiveMessageOperator(
+    task_id="receive_message_service_bus_queue",
+    queue_name=QUEUE_NAME,
+    max_message_count=20,
+    max_wait_time=5,
+)
+
+
+
+
+
+

Delete Azure Service Bus Queue

+

To delete an Azure Service Bus queue you can use AzureServiceBusDeleteQueueOperator.

Below is an example of using this operator to delete an Azure Service Bus queue.

+
+

tests/system/providers/microsoft/azure/example_azure_service_bus.py[source]

+
delete_service_bus_queue = AzureServiceBusDeleteQueueOperator(
+    task_id="delete_service_bus_queue", queue_name=QUEUE_NAME, trigger_rule="all_done"
+)
+
+
+
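Putting the queue operations above together, the following minimal sketch chains create, send, receive, and delete in a single DAG. The DAG id, queue name, and message are placeholders mirroring the snippets in this section, and an azure_service_bus_default connection is assumed to exist.

# A minimal sketch chaining the queue operators documented above in one DAG.
# QUEUE_NAME, MESSAGE and the DAG id are placeholders; the Service Bus namespace
# credentials are assumed to come from the azure_service_bus_default connection.
from datetime import datetime

from airflow import DAG
from airflow.providers.microsoft.azure.operators.asb import (
    AzureServiceBusCreateQueueOperator,
    AzureServiceBusDeleteQueueOperator,
    AzureServiceBusReceiveMessageOperator,
    AzureServiceBusSendMessageOperator,
)

QUEUE_NAME = "sb_queue_example"
MESSAGE = "hello from Airflow"

with DAG("example_asb_queue", start_date=datetime(2021, 1, 1), schedule=None) as dag:
    create_queue = AzureServiceBusCreateQueueOperator(
        task_id="create_queue", queue_name=QUEUE_NAME
    )
    send_message = AzureServiceBusSendMessageOperator(
        task_id="send_message", queue_name=QUEUE_NAME, message=MESSAGE, batch=False
    )
    receive_message = AzureServiceBusReceiveMessageOperator(
        task_id="receive_message", queue_name=QUEUE_NAME, max_message_count=10
    )
    delete_queue = AzureServiceBusDeleteQueueOperator(
        task_id="delete_queue", queue_name=QUEUE_NAME, trigger_rule="all_done"
    )
    create_queue >> send_message >> receive_message >> delete_queue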
+
+

Azure Service Bus Topic Operators

+

Azure Service Bus topic operators help you interact with topics in a Service Bus namespace, covering create and delete operations for topics.

+
+
+
+

Create Azure Service Bus Topic

+

To create an Azure Service Bus topic with specific parameters you can use AzureServiceBusTopicCreateOperator.

Below is an example of using this operator to create an Azure Service Bus topic.

+
+

tests/system/providers/microsoft/azure/example_azure_service_bus.py[source]

+
create_service_bus_topic = AzureServiceBusTopicCreateOperator(
+    task_id="create_service_bus_topic", topic_name=TOPIC_NAME
+)
+
+
+
+
+
+

Delete Azure Service Bus Topic

+

To delete an Azure Service Bus topic you can use AzureServiceBusTopicDeleteOperator.

Below is an example of using this operator to delete an Azure Service Bus topic.

+
+

tests/system/providers/microsoft/azure/example_azure_service_bus.py[source]

+
delete_asb_topic = AzureServiceBusTopicDeleteOperator(
+    task_id="delete_asb_topic",
+    topic_name=TOPIC_NAME,
+)
+
+
+
+
+

Azure Service Bus Subscription Operators

+

Azure Service Bus subscription operators help you interact with topic subscriptions in a Service Bus namespace, covering create and delete operations for subscriptions under a topic.

+
+
+
+

Create Azure Service Bus Subscription

+

To create an Azure Service Bus topic subscription with specific parameters you can use AzureServiceBusSubscriptionCreateOperator.

Below is an example of using this operator to create a subscription under an Azure Service Bus topic.

+
+

tests/system/providers/microsoft/azure/example_azure_service_bus.py[source]

+
create_service_bus_subscription = AzureServiceBusSubscriptionCreateOperator(
+    task_id="create_service_bus_subscription",
+    topic_name=TOPIC_NAME,
+    subscription_name=SUBSCRIPTION_NAME,
+)
+
+
+
+
+
+

Update Azure Service Bus Subscription

+

To update an existing Azure Service Bus topic subscription with specific parameters you can use AzureServiceBusUpdateSubscriptionOperator.

Below is an example of using this operator to update an Azure Service Bus subscription.

+
+

tests/system/providers/microsoft/azure/example_azure_service_bus.py[source]

+
update_service_bus_subscription = AzureServiceBusUpdateSubscriptionOperator(
+    task_id="update_service_bus_subscription",
+    topic_name=TOPIC_NAME,
+    subscription_name=SUBSCRIPTION_NAME,
+    max_delivery_count=5,
+)
+
+
+
+
+
+

Receive Azure Service Bus Subscription Message

+

To receive a batch of messages from a Service Bus subscription under a specific topic, you can use ASBReceiveSubscriptionMessageOperator.

Below is an example of using this operator to receive messages from an Azure Service Bus subscription.

+
+

tests/system/providers/microsoft/azure/example_azure_service_bus.py[source]

+
receive_message_service_bus_subscription = ASBReceiveSubscriptionMessageOperator(
+    task_id="receive_message_service_bus_subscription",
+    topic_name=TOPIC_NAME,
+    subscription_name=SUBSCRIPTION_NAME,
+    max_message_count=10,
+)
+
+
+
+
+
+

Delete Azure Service Bus Subscription

+

To delete an Azure Service Bus topic subscription you can use AzureServiceBusSubscriptionDeleteOperator.

Below is an example of using this operator to delete a subscription under an Azure Service Bus topic.

+
+

tests/system/providers/microsoft/azure/example_azure_service_bus.py[source]

+
delete_service_bus_subscription = AzureServiceBusSubscriptionDeleteOperator(
+    task_id="delete_service_bus_subscription",
+    topic_name=TOPIC_NAME,
+    subscription_name=SUBSCRIPTION_NAME,
+    trigger_rule="all_done",
+)
+
+
+
+
+

Reference

+

For further information, please refer to the Microsoft documentation:

diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/azure_blob_to_gcs.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/azure_blob_to_gcs.html
new file mode 100644

Azure Blob Storage Transfer Operator

+

The Blob service stores text and binary data as objects in the cloud. The Blob service offers the following three resources: the storage account, containers, and blobs. Within your storage account, containers provide a way to organize sets of blobs. For more information about the service, visit the Azure Blob Storage API documentation.

+
+

Before you begin

+

Before using Blob Storage within Airflow you need to authenticate your account with a token, login, and password. Please follow the Azure instructions to do it.

+

The TOKEN should be added to the connection in Airflow in JSON format, and the Login and Password as plain text. You can check how to set up such a connection.

+

See the following example. Set values for these fields:

+
Connection Id: wasb_default
+Login: Storage Account Name
+Password: KEY1
+Extra: {"sas_token": "TOKEN"}
+
+
+
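If you prefer to create this connection from code rather than the UI or CLI, a sketch along the following lines works. It assumes the script runs somewhere with access to the Airflow metadata database; the account name, key, and SAS token below are placeholders.

# A hypothetical way to register the wasb_default connection programmatically.
# The login/password/extra values are placeholders for your own storage account
# name, account key, and SAS token.
from airflow.models import Connection
from airflow.settings import Session

wasb_conn = Connection(
    conn_id="wasb_default",
    conn_type="wasb",
    login="<storage account name>",
    password="<KEY1>",
    extra='{"sas_token": "<TOKEN>"}',
)

session = Session()
if not session.query(Connection).filter(Connection.conn_id == wasb_conn.conn_id).first():
    session.add(wasb_conn)  # skip if a connection with this id already exists
    session.commit()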
+
+

Transfer Data from Blob Storage to Google Cloud Storage

+

The operator transfers data from Azure Blob Storage to a specified bucket in Google Cloud Storage.

+

To transfer data from Azure Blob Storage to Google Cloud Storage, use AzureBlobStorageToGCSOperator.

+

Example usage:

+
+

tests/system/providers/microsoft/azure/example_azure_blob_to_gcs.py[source]

+
with DAG(
+    DAG_ID,
+    schedule=None,
+    start_date=datetime(2021, 1, 1),  # Override to match your needs
+    default_args={"container_name": AZURE_CONTAINER_NAME, "blob_name": BLOB_NAME},
+) as dag:
+
+    wait_for_blob = WasbBlobSensor(task_id="wait_for_blob")
+
+    transfer_files_to_gcs = AzureBlobStorageToGCSOperator(
+        task_id="transfer_files_to_gcs",
+        # AZURE arg
+        file_path=GCP_OBJECT_NAME,
+        # GCP args
+        bucket_name=GCP_BUCKET_NAME,
+        object_name=GCP_OBJECT_NAME,
+        filename=GCP_BUCKET_FILE_PATH,
+        gzip=False,
+        delegate_to=None,
+        impersonation_chain=None,
+    )
+
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/azure_synapse.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/azure_synapse.html
new file mode 100644

Azure Synapse Operators

+

Azure Synapse Analytics is a limitless analytics service that brings together data integration, enterprise data warehousing, and big data analytics. It gives you the freedom to query data on your terms, using either serverless or dedicated options, at scale. Azure Synapse brings these worlds together with a unified experience to ingest, explore, prepare, transform, manage, and serve data for immediate BI and machine learning needs.

+
+

AzureSynapseRunSparkBatchOperator

+

Use the AzureSynapseRunSparkBatchOperator to execute a Spark application within Synapse Analytics. By default, the operator periodically checks the status of the executed Spark job until it terminates with a “Succeeded” status.

+

Below is an example of using this operator to execute a Spark application on Azure Synapse.

+
+
+

tests/system/providers/microsoft/azure/example_azure_synapse.py[source]

+
run_spark_job = AzureSynapseRunSparkBatchOperator(
+    task_id="run_spark_job", spark_pool="provsparkpool", payload=SPARK_JOB_PAYLOAD  # type: ignore
+)
+
+
+
+
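The example above references a SPARK_JOB_PAYLOAD variable that is not shown. For illustration, a Spark batch payload generally looks like the hypothetical dictionary below; the field names follow the Livy-style batch request accepted by Synapse, and the application file, arguments, and resource sizes are placeholders, not values from this guide.

# A hypothetical Spark batch payload for AzureSynapseRunSparkBatchOperator.
# The application file, arguments and resource sizes below are placeholders.
SPARK_JOB_PAYLOAD = {
    "name": "example-spark-job",
    "file": "abfss://<container>@<storage_account>.dfs.core.windows.net/wordcount.py",
    "args": ["abfss://<container>@<storage_account>.dfs.core.windows.net/input.txt"],
    "jars": [],
    "pyFiles": [],
    "files": [],
    "conf": {},
    "numExecutors": 2,
    "executorCores": 2,
    "executorMemory": "2g",
    "driverCores": 2,
    "driverMemory": "2g",
}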
+
+
+

Reference

+

For further information, please refer to the Microsoft documentation:

diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/index.html
new file mode 100644
Microsoft Operators — apache-airflow-providers-microsoft-azure Documentation

diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/local_to_adls.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/local_to_adls.html
new file mode 100644

Upload data from Local Filesystem to Azure Data Lake

+

Azure Data Lake (ADL) makes it easy to store data of any size, shape, and speed. This page shows how to upload data from the local filesystem to ADL.

+
+

Prerequisite Tasks

+
+

LocalFilesystemToADLSOperator

+

LocalFilesystemToADLSOperator allows you to upload data from the local filesystem to ADL.

+

Below is an example of using this operator to upload a file to ADL.

+
+

tests/system/providers/microsoft/azure/example_local_to_adls.py[source]

+
    upload_file = LocalFilesystemToADLSOperator(
+        task_id="upload_task",
+        local_path=LOCAL_FILE_PATH,
+        remote_path=REMOTE_FILE_PATH,
+    )
+
+
+
+
+

Reference

+

For further information, look at:

+ +
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/sftp_to_wasb.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/operators/sftp_to_wasb.html
new file mode 100644

Azure Blob Storage Transfer Operator

+

The Blob service stores text and binary data as objects in the cloud. The Blob service offers the following three resources: the storage account, containers, and blobs. Within your storage account, containers provide a way to organize sets of blobs. For more information about the service, visit the Azure Blob Storage API documentation.

+
+

Before you begin

+

Before using Blob Storage within Airflow you need to authenticate your account with a token, login, and password. Please follow the Azure instructions to do it.

+

See the following example. Set values for these fields:

+
SFTP Conn Id: sftp_default
+WASB Conn Id: wasb_default
+
+
+
+
+

Transfer Data from SFTP Source Path to Blob Storage

+

The operator transfers data from an SFTP source path to a specified container in Azure Blob Storage.

+

To transfer files from an SFTP source path to Azure Blob Storage, use SFTPToWasbOperator. Example usage:

+
+

tests/system/providers/microsoft/azure/example_sftp_to_wasb.py[source]

+
transfer_files_to_azure = SFTPToWasbOperator(
+    task_id="transfer_files_from_sftp_to_wasb",
+    # SFTP args
+    sftp_source_path=SFTP_SRC_PATH,
+    # AZURE args
+    container_name=AZURE_CONTAINER_NAME,
+    blob_prefix=BLOB_PREFIX,
+)
+
+
+
+
+
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/py-modindex.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/py-modindex.html
new file mode 100644

Python Module Index

 
+ a
+ airflow +
    + airflow.providers.microsoft.azure +
    + airflow.providers.microsoft.azure.hooks +
    + airflow.providers.microsoft.azure.hooks.adx +
    + airflow.providers.microsoft.azure.hooks.asb +
    + airflow.providers.microsoft.azure.hooks.base_azure +
    + airflow.providers.microsoft.azure.hooks.batch +
    + airflow.providers.microsoft.azure.hooks.container_instance +
    + airflow.providers.microsoft.azure.hooks.container_registry +
    + airflow.providers.microsoft.azure.hooks.container_volume +
    + airflow.providers.microsoft.azure.hooks.cosmos +
    + airflow.providers.microsoft.azure.hooks.data_factory +
    + airflow.providers.microsoft.azure.hooks.data_lake +
    + airflow.providers.microsoft.azure.hooks.fileshare +
    + airflow.providers.microsoft.azure.hooks.synapse +
    + airflow.providers.microsoft.azure.hooks.wasb +
    + airflow.providers.microsoft.azure.log +
    + airflow.providers.microsoft.azure.log.wasb_task_handler +
    + airflow.providers.microsoft.azure.operators +
    + airflow.providers.microsoft.azure.operators.adls +
    + airflow.providers.microsoft.azure.operators.adx +
    + airflow.providers.microsoft.azure.operators.asb +
    + airflow.providers.microsoft.azure.operators.batch +
    + airflow.providers.microsoft.azure.operators.container_instances +
    + airflow.providers.microsoft.azure.operators.cosmos +
    + airflow.providers.microsoft.azure.operators.data_factory +
    + airflow.providers.microsoft.azure.operators.synapse +
    + airflow.providers.microsoft.azure.operators.wasb_delete_blob +
    + airflow.providers.microsoft.azure.secrets +
    + airflow.providers.microsoft.azure.secrets.key_vault +
    + airflow.providers.microsoft.azure.sensors +
    + airflow.providers.microsoft.azure.sensors.cosmos +
    + airflow.providers.microsoft.azure.sensors.data_factory +
    + airflow.providers.microsoft.azure.sensors.wasb +
    + airflow.providers.microsoft.azure.transfers +
    + airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs +
    + airflow.providers.microsoft.azure.transfers.local_to_adls +
    + airflow.providers.microsoft.azure.transfers.local_to_wasb +
    + airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake +
    + airflow.providers.microsoft.azure.transfers.sftp_to_wasb +
    + airflow.providers.microsoft.azure.utils +
 
+ t
+ tests +
    + tests.system.providers.microsoft.azure +
    + tests.system.providers.microsoft.azure.example_adf_run_pipeline +
    + tests.system.providers.microsoft.azure.example_adls_delete +
    + tests.system.providers.microsoft.azure.example_azure_blob_to_gcs +
    + tests.system.providers.microsoft.azure.example_azure_container_instances +
    + tests.system.providers.microsoft.azure.example_azure_cosmosdb +
    + tests.system.providers.microsoft.azure.example_azure_service_bus +
    + tests.system.providers.microsoft.azure.example_azure_synapse +
    + tests.system.providers.microsoft.azure.example_fileshare +
    + tests.system.providers.microsoft.azure.example_local_to_adls +
    + tests.system.providers.microsoft.azure.example_local_to_wasb +
    + tests.system.providers.microsoft.azure.example_sftp_to_wasb +
diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/search.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/search.html
new file mode 100644
Search — apache-airflow-providers-microsoft-azure Documentation

diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/searchindex.js b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/searchindex.js
new file mode 100644
20, 23, 28, 36, 38, 52, 66, 69, 73, 77], "valu": [1, 7, 8, 13, 19, 21, 23, 28, 52, 66, 69, 72, 76, 77], "bool": [1, 3, 8, 9, 10, 13, 16, 17, 19, 20, 23, 25, 26, 33, 35, 36, 38], "indic": [1, 8, 19, 67], "whether": [1, 3, 9, 13, 17, 19, 20, 35, 38], "subscript": [1, 19, 52, 54, 58, 60, 62, 63, 65, 66, 74], "ha": [1, 3, 8, 19, 21, 68], "support": [1, 9, 17, 19, 21, 23, 28, 35, 52, 57, 66, 77], "expir": [1, 19], "server": [1, 19, 38, 50, 53, 67], "side": [1, 19], "batch": [1, 11, 14, 19, 24, 52, 64, 66, 71], "oper": [1, 8, 12, 14, 33, 35, 37, 38, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 65, 66, 75], "enabl": [1, 19, 23, 33, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 65, 68, 77], "delete_queu": 1, "delete_subscript": 1, "subscription_nam": [1, 19, 45, 71], "topic_nam": [1, 19, 45, 71], "topic": [1, 19, 52, 66, 74], "entiti": [1, 19, 71], "under": [1, 3, 19, 20, 23, 54, 71], "servicebu": [1, 19, 66], "own": [1, 19], "rule": [1, 19, 52], "messagehook": 1, "servicebuscli": 1, "act": [1, 2], "high": [1, 52], "level": [1, 52, 66], "interfac": [1, 52, 54, 63], "get": [1, 3, 4, 6, 7, 8, 12, 20, 21, 23, 25, 28, 33, 38, 39, 52, 57, 66, 67, 72, 76], "servicebussend": 1, "servicebusreceiv": 1, "send_messag": 1, "batch_message_flag": 1, "fals": [1, 3, 6, 8, 9, 10, 13, 16, 17, 19, 20, 26, 35, 36, 38, 52, 66, 69, 71, 72], "By": [1, 23, 52, 67, 69, 73], "send": [1, 19, 74], "": [1, 4, 8, 12, 16, 20, 23, 25, 26, 35, 52, 66, 67, 68, 69, 70], "which": [1, 2, 3, 4, 5, 6, 9, 10, 12, 17, 19, 20, 21, 25, 28, 29, 33, 35, 38, 54, 71, 77], "sent": [1, 19, 28, 77], "can": [1, 7, 9, 13, 17, 19, 21, 28, 35, 38, 44, 54, 55, 56, 58, 60, 61, 62, 63, 65, 66, 67, 68, 69, 71, 72, 77], "flag": [1, 19, 23, 25], "set": [1, 3, 8, 12, 13, 19, 20, 23, 28, 33, 38, 44, 52, 58, 66, 69, 72, 76, 77], "send_list_messag": 1, "sender": 1, "send_batch_messag": 1, "receive_messag": 1, "max_message_count": [1, 19, 71], "max_wait_tim": [1, 19, 71], "receiv": [1, 19, 52, 66, 74], "onc": [1, 19, 67], "float": [1, 19], "time": [1, 3, 8, 12, 19, 20, 21, 23, 25, 56, 58, 60, 62, 65, 66], "wait": [1, 3, 8, 12, 19, 20, 23, 25, 32, 68, 69], "second": [1, 8, 12, 19, 23, 25], "first": [1, 8, 19, 33, 39, 66], "arriv": [1, 19, 32], "receive_subscription_messag": 1, "approach": 1, "optim": [1, 13], "wish": 1, "process": [1, 20, 21, 23, 25, 68], "multipl": [1, 9, 13, 19, 35, 38, 52, 54, 56, 57, 58, 60, 62, 63, 65, 66], "simultan": 1, "perform": [1, 13, 23, 38, 69, 71], "ad": [1, 13, 20, 52, 53, 66, 72], "hoc": [1, 52, 66], "singl": [1, 3, 9, 13, 35, 38, 69], "call": [1, 9, 19, 35, 52, 55, 68], "actual": [1, 9, 19, 35], "depend": [1, 19, 52], "prefetch_count": [1, 19], "incom": [1, 19], "stream": [1, 10, 13, 19], "rate": [1, 19], "fulli": [1, 19, 71], "prefetch": [1, 19], "config": [1, 8, 19, 28, 52, 58, 62, 65, 68, 77], "timeout": [1, 3, 8, 12, 19, 20, 23, 25], "until": [1, 19], "close": [1, 16, 19], "within": [1, 3, 9, 19, 20, 21, 23, 25, 35, 38, 69, 72, 73, 76], "period": [1, 19, 69, 73], "empti": [1, 19, 52, 66, 68], "azurebasehook": [2, 4, 52], "sdk_client": 2, "conn_id": [2, 5, 28, 39], "azure_default": [2, 4, 58, 66], "offer": [2, 3, 20, 69, 72, 76], "sever": 2, "mechan": [2, 54, 55, 58, 63], "librari": [2, 7, 52, 66], "upstream": 2, "ani": [2, 8, 9, 13, 16, 20, 21, 23, 25, 26, 33, 35, 52, 66, 67, 75, 77], "sdkclient": 2, "inform": [2, 17, 19, 21, 23, 33, 35, 37, 38, 52, 58, 66, 69, 70, 71, 72, 73, 75, 76], "azure_conn_id": [2, 4], "pass": [2, 13, 21, 23, 52, 54, 57, 63, 66], "dure": 2, "init": 2, "azurebatchhook": [3, 20, 52, 66], 
"azure_batch_conn_id": [3, 20], "princip": [3, 4, 5, 21, 28, 77], "start": [3, 4, 5, 8, 12, 13, 19, 20, 21, 23, 68], "azure_batch_default": [3, 20, 59], "azure_batch": 3, "configure_pool": 3, "pool_id": [3, 20], "vm_size": [3, 66], "vm_node_agent_sku_id": [3, 20, 66], "vm_publish": [3, 20], "vm_offer": [3, 20], "sku_starts_with": [3, 20], "vm_sku": [3, 20], "vm_version": [3, 20], "os_famili": [3, 20], "os_vers": [3, 20], "display_nam": 3, "target_dedicated_nod": [3, 20, 52], "use_latest_image_and_sku": [3, 20], "kwarg": [3, 9, 10, 13, 17, 18, 19, 20, 21, 22, 23, 25, 26, 28, 29, 30, 32, 33, 35, 36, 37, 38], "pool": [3, 12, 20, 25], "uniqu": [3, 19, 20, 21, 29], "identifi": [3, 8, 12, 20, 23, 25, 30, 38], "account": [3, 6, 9, 10, 13, 17, 20, 33, 52, 55, 59, 60, 61, 62, 65, 66, 72, 76], "size": [3, 4, 10, 19, 20, 75], "virtual": [3, 20], "machin": [3, 20, 73], "displai": [3, 20, 52, 66], "desir": [3, 8, 12, 20, 54], "dedic": [3, 20, 66, 73], "comput": [3, 13, 20], "node": [3, 20], "latest": [3, 20, 21, 52, 66], "verifi": [3, 20, 52, 66, 68], "vm": 3, "imag": [3, 20, 21, 52, 53], "sku": [3, 20, 21], "publish": [3, 19, 20, 71], "marketplac": [3, 20], "For": [3, 13, 17, 19, 20, 21, 23, 28, 33, 35, 38, 52, 53, 55, 56, 58, 59, 60, 61, 62, 65, 66, 67, 69, 70, 71, 72, 73, 75, 76, 77], "canon": [3, 20], "microsoftwindowsserv": [3, 20], "ubuntuserv": [3, 20], "windowsserv": [3, 20], "search": 3, "version": [3, 19, 20, 28, 52, 66, 67], "agent": [3, 20], "guest": [3, 20], "o": [3, 20, 67], "famili": [3, 20], "instal": [3, 20, 52], "create_pool": 3, "alreadi": [3, 8, 10, 13, 22, 38, 52, 66, 67, 71], "exist": [3, 4, 7, 8, 9, 10, 13, 16, 22, 26, 29, 35, 36, 38, 44, 52, 66, 68], "model": [3, 4, 8, 17, 18, 19, 20, 21, 22, 23, 25, 26, 33, 35, 36, 37, 38, 52], "pooladdparamet": 3, "wait_for_all_node_st": 3, "node_st": 3, "reach": [3, 8, 12, 23, 25], "given": [3, 13, 20, 28, 29], "state": [3, 4], "batch_model": [3, 20], "computenodest": 3, "configure_job": 3, "job_id": [3, 12, 20], "job": [3, 12, 20, 25, 52, 72, 73, 76], "create_job": 3, "jobaddparamet": 3, "configure_task": 3, "task_id": [3, 17, 21, 29, 69, 70, 71, 72, 73, 75, 76], "command_lin": 3, "container_set": 3, "task": [3, 16, 17, 20, 21, 23, 25, 52, 66, 68], "command": [3, 20, 21, 38, 52], "line": [3, 20, 52, 66], "containerconfigur": 3, "well": [3, 67], "doesn": 3, "t": [3, 8, 68], "have": [3, 13, 33, 38, 52, 66, 68, 77], "add_single_task_to_job": 3, "taskaddparamet": 3, "wait_for_job_tasks_to_complet": 3, "particular": 3, "complet": [3, 8, 20, 66], "amount": [3, 19, 20, 21], "befor": [3, 19, 20, 37, 44, 52], "out": [3, 7, 52, 54, 63, 69], "minut": [3, 19, 20], "test_connect": [3, 4, 7, 8, 10, 13, 52, 66], "test": [3, 4, 7, 8, 10, 13, 52, 66, 69, 70, 71, 72, 73, 75, 76], "azurecontainerinstancehook": [4, 52, 66], "base_azur": [4, 11, 14], "commun": [4, 5, 7, 9, 13], "order": [4, 19, 21, 52, 66], "work": [4, 33, 66, 71], "activ": [4, 23, 60, 62, 65], "directori": [4, 9, 10, 17, 35, 47, 60, 62, 65, 67, 68], "app": 4, "registr": 4, "fill": 4, "client_id": [4, 45, 54, 55, 58, 60, 61, 62, 63, 65], "login": [4, 7, 9, 13, 21, 52, 53, 55, 58, 60, 61, 62, 65, 66, 72, 76], "gener": [4, 52, 57, 66], "tenantid": [4, 54, 58], "subscriptionid": [4, 54, 58], "extra": [4, 7, 8, 9, 13, 35, 39, 52, 54, 55, 58, 60, 61, 62, 65, 66, 72], "json": [4, 52, 55, 58, 60, 61, 62, 65, 72], "azure_container_inst": 4, "create_or_upd": 4, "resource_group": [4, 21], "container_group": 4, "group": [4, 8, 21, 23, 30, 54], "mgmt": [4, 8, 66], "containerinst": [4, 66], "containergroup": 
4, "get_state_exitcode_detail": 4, "exitcod": 4, "tupl": [4, 21], "unknown": [4, 67], "get_messag": 4, "event": 4, "get_stat": 4, "get_log": 4, "tail": [4, 52, 66], "1000": 4, "log": [4, 14, 23, 28, 52, 66], "registri": [5, 21, 64], "azurecontainerregistryhook": 5, "azure_registri": 5, "azure_container_registry_conn_id": 5, "azure_container_registry_default": [5, 53], "azure_container_registri": 5, "azurecontainervolumehook": 6, "azure_container_volume_conn_id": 6, "azure_container_volume_default": [6, 60], "wrap": 6, "volum": [6, 10, 21, 64, 66], "should": [6, 7, 9, 10, 13, 19, 20, 21, 29, 30, 32, 38, 53, 54, 55, 56, 58, 59, 60, 61, 62, 65, 66, 67, 68, 72, 77], "azure_container_volum": [6, 60], "get_storagekei": 6, "file": [6, 9, 10, 13, 16, 17, 20, 26, 33, 35, 36, 37, 38, 50, 52, 58, 64, 66, 67, 68, 70, 75, 77], "storag": [6, 9, 10, 13, 16, 19, 26, 32, 33, 36, 38, 52, 60, 62, 64, 66, 74, 75], "get_file_volum": 6, "mount_nam": 6, "share_nam": [6, 10], "storage_account_nam": 6, "read_onli": 6, "integr": [7, 9, 13, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 65, 69, 73], "cosmosdb": [7, 22, 29], "azurecosmosdbhook": [7, 52, 66], "via": [7, 9, 13, 23, 52, 66, 67], "make": [7, 9, 13, 33, 44, 52, 66, 68, 75], "sure": [7, 9, 13, 44, 68], "azure_cosmo": 7, "author": [7, 9, 13, 19, 33, 56, 57, 58, 60, 62, 65, 69], "done": [7, 9, 13], "suppli": [7, 9, 13], "endpoint": [7, 61, 63], "uri": [7, 28, 53, 54, 55, 56, 58, 59, 60, 61, 62, 65, 77], "secret": [7, 9, 14, 52, 54, 55, 58, 60, 61, 62, 63, 65, 66, 77], "database_nam": [7, 22, 29, 61], "collection_nam": [7, 22, 29, 61], "collect": [7, 22, 61], "azure_cosmos_default": [7, 22, 29, 44, 61], "azure_cosmos_conn_id": [7, 22, 29], "master": [7, 61], "follow": [7, 17, 55, 58, 60, 61, 62, 65, 66, 67, 68, 71, 72, 76], "db": [7, 52, 66], "does_collection_exist": 7, "check": [7, 8, 9, 10, 12, 13, 16, 23, 25, 29, 30, 38, 39, 52, 66, 67, 69, 72, 73], "create_collect": 7, "partition_kei": 7, "does_database_exist": 7, "create_databas": 7, "delete_databas": 7, "delete_collect": 7, "upsert_docu": 7, "document": [7, 22, 29, 44, 52, 57, 66, 69, 70, 71, 72, 73, 75, 76], "document_id": [7, 29], "insert": [7, 22], "insert_docu": 7, "delete_docu": 7, "get_docu": 7, "sql_string": 7, "sql": [7, 37], "get_database_link": 7, "database_id": 7, "link": [7, 8, 23, 52, 58, 66, 67], "get_collection_link": 7, "collection_id": 7, "get_document_link": 7, "credenti": [8, 12, 28, 33, 53, 54, 55, 56, 58, 59, 60, 61, 62, 63, 65, 77], "provide_targeted_factori": 8, "func": 8, "target": [8, 13, 25, 29, 36, 38], "factori": [8, 23, 30, 52, 64, 66, 74], "decor": [8, 52, 66], "case": [8, 9, 35, 38], "isn": 8, "resource_group_nam": [8, 23, 30, 54], "factory_nam": [8, 23, 30, 54], "pipelineruninfo": 8, "typing_compat": 8, "typeddict": 8, "pipelin": [8, 23, 30, 52, 66, 69], "info": [8, 33, 52], "dictionari": [8, 17, 20, 21, 22, 23, 25, 26, 33, 35, 37, 55, 58, 60, 61, 62, 65], "run_id": [8, 30, 69], "azuredatafactorypipelinerunstatu": 8, "status": [8, 12], "queu": 8, "in_progress": 8, "inprogress": 8, "succeed": [8, 69, 73], "fail": [8, 16, 20, 23, 38, 52, 66, 68], "cancel": [8, 12], "terminal_status": [8, 12], "except": [8, 52, 66], "azuredatafactorypipelinerunexcept": 8, "get_field": [8, 39, 52, 66], "field_nam": [8, 39], "strict": [8, 52], "short": [8, 33, 39], "backcompat": [8, 39], "we": [8, 9, 35, 39], "prefix": [8, 13, 26, 28, 32, 38, 39, 52, 66], "azuredatafactoryhook": [8, 23, 30, 52, 66], "azure_data_factory_conn_id": [8, 23, 30], "azure_data_factori": 8, 
"azure_data_factory_default": [8, 54], "get_factori": 8, "adf": [8, 52, 66], "datafactori": [8, 52, 66], "update_factori": 8, "definit": 8, "doe": [8, 10, 13, 26], "create_factori": 8, "delete_factori": 8, "get_linked_servic": 8, "linked_service_nam": 8, "linkedserviceresourc": 8, "update_linked_servic": 8, "linked_servic": 8, "create_linked_servic": 8, "delete_linked_servic": 8, "get_dataset": 8, "dataset_nam": 8, "dataset": [8, 13], "datasetresourc": 8, "update_dataset": 8, "create_dataset": 8, "delete_dataset": 8, "get_dataflow": 8, "dataflow_nam": 8, "dataflow": [8, 52, 66], "update_dataflow": 8, "create_dataflow": 8, "delete_dataflow": 8, "get_pipelin": 8, "pipeline_nam": [8, 23, 69], "pipelineresourc": 8, "update_pipelin": 8, "create_pipelin": 8, "delete_pipelin": 8, "run_pipelin": 8, "createrunrespons": 8, "get_pipeline_run": 8, "pipelinerun": 8, "get_pipeline_run_statu": 8, "current": [8, 12, 21, 38, 57], "statu": [8, 12, 23, 25, 30, 52, 66, 69, 73], "wait_for_pipeline_run_statu": 8, "expected_status": [8, 12], "check_interv": [8, 12, 23, 25], "60": [8, 12, 23, 25], "24": [8, 12, 23, 25, 52], "7": [8, 12, 23, 25], "match": [8, 9, 12, 13, 26, 29, 32, 35, 52, 67, 72], "expect": [8, 12], "e": [8, 12, 13, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 65], "against": [8, 12, 37, 52], "termin": [8, 12, 23, 25, 69, 73], "boolean": [8, 19], "expected_statu": 8, "cancel_pipeline_run": 8, "get_trigg": 8, "trigger_nam": 8, "trigger": [8, 44, 68], "triggerresourc": 8, "update_trigg": 8, "create_trigg": 8, "delete_trigg": 8, "start_trigg": 8, "poller": 8, "core": [8, 9, 35, 52, 68], "poll": 8, "lropol": 8, "stop_trigg": 8, "stop": [8, 52], "rerun_trigg": 8, "rerun": [8, 23], "cancel_trigg": 8, "lake": [9, 17, 35, 37, 64, 70, 74], "azuredatalakehook": [9, 52], "rest": [9, 18, 71], "compat": [9, 12, 25, 52, 66], "webhdf": 9, "azure_data_lak": 9, "account_nam": [9, 13, 53, 55], "azure_data_lake_default": [9, 17, 35, 55], "azure_data_lake_conn_id": [9, 17, 35, 37], "user": [9, 12, 20, 21, 25, 28, 52, 66, 67, 77], "azuredlfilesystem": 9, "check_for_fil": [9, 10], "file_path": [9, 10, 13, 33, 36, 72], "path": [9, 10, 13, 17, 28, 33, 35, 36, 37, 38, 52, 58, 66, 70], "otherwis": [9, 10, 13, 16, 66], "upload_fil": [9, 41, 48, 75], "local_path": [9, 35, 75], "remote_path": [9, 35, 75], "nthread": [9, 35], "64": [9, 35], "overwrit": [9, 35, 38], "buffers": [9, 35], "4194304": [9, 35], "blocksiz": [9, 35], "upload": [9, 10, 13, 16, 20, 33, 35, 36, 38, 49, 52, 66, 74], "local": [9, 16, 33, 35, 37, 52, 67, 74], "recurs": [9, 17, 35, 70], "glob": [9, 17, 35], "pattern": [9, 35, 52], "remot": [9, 16, 35, 38, 52, 66], "root": [9, 35], "write": [9, 16, 19, 35], "thread": [9, 20, 21, 23, 25, 35], "forcibli": [9, 35], "quit": [9, 35], "regardless": [9, 35], "would": [9, 17, 28, 35, 59, 66, 77], "overwritten": [9, 16, 35], "onli": [9, 13, 19, 21, 23, 25, 35, 38, 44, 55, 56, 58, 60, 62, 65, 66], "filenam": [9, 33, 35, 37, 72], "22": [9, 35, 52], "byte": [9, 10, 13, 19, 35], "intern": [9, 35], "buffer": [9, 35], "block": [9, 35], "cannot": [9, 35], "bigger": [9, 35], "than": [9, 35], "chunk": [9, 13, 35], "smaller": [9, 35], "each": [9, 13, 20, 35, 71, 77], "download_fil": 9, "download": [9, 10, 13, 20, 33, 38, 67], "blob": [9, 13, 26, 32, 33, 36, 38, 52, 60, 62, 64, 66, 74], "specif": [9, 19, 52, 53, 54, 55, 58, 60, 61, 62, 63, 65, 71], "unless": 9, "Will": 9, "globstr": 9, "full": 9, "adl": [9, 14, 24, 52, 66, 70, 75], "remov": [9, 17, 52, 66, 67, 70], "ignore_not_found": [9, 17], "loop": [9, 17], "locat": [9, 16, 
17, 38, 68], "error": [9, 12, 16, 17, 52, 66], "found": [9, 16, 17], "azurefilesharehook": [10, 52, 66], "azure_fileshare_conn_id": 10, "azure_fileshare_default": [10, 62], "azure_fileshar": [10, 62], "fileservic": 10, "check_for_directori": 10, "directory_nam": 10, "share": [10, 57, 59, 60, 64, 65, 66], "keyword": [10, 13, 26, 32, 36, 38], "argument": [10, 13, 26, 32, 36, 38, 52, 66], "take": [10, 13, 17, 19, 23, 26, 32, 33, 35, 36, 38, 52], "file_nam": 10, "list_directories_and_fil": 10, "store": [10, 37, 53, 55, 66, 68, 72, 75, 76], "list_fil": 10, "create_shar": 10, "delete_shar": 10, "create_directori": 10, "get_fil": [10, 13], "where": [10, 13, 52], "get_file_to_path": 10, "get_file_to_stream": 10, "io": [10, 21], "filehandl": 10, "load_fil": [10, 13, 36, 38], "load": [10, 13, 36, 37, 52], "create_file_from_path": 10, "load_str": [10, 13], "string_data": [10, 13], "create_file_from_text": 10, "load_stream": 10, "open": 10, "create_file_from_stream": 10, "adx": [11, 14, 24, 56], "asb": [11, 14, 24], "container_inst": [11, 14, 24], "container_registri": [11, 14], "container_volum": [11, 14], "cosmo": [11, 14, 24, 31, 52, 64, 66], "data_factori": [11, 14, 24, 31], "data_lak": [11, 14], "fileshar": [11, 14, 21, 47, 52], "synaps": [11, 14, 24, 52, 64, 66, 74], "wasb": [11, 14, 16, 26, 31, 33, 36, 38, 52, 65, 66, 68, 76], "azuresynapsesparkbatchrunstatu": 12, "spark": [12, 25, 66, 73], "not_start": 12, "idl": [12, 19], "busi": 12, "shutting_down": 12, "kill": [12, 20, 21, 23, 25], "success": [12, 13, 26], "azuresynapsehook": [12, 25], "azure_synapse_conn_id": [12, 25], "spark_pool": [12, 25, 73], "param": [12, 52, 54, 66], "apach": [12, 67], "submit": [12, 25], "azure_synaps": 12, "azure_synapse_default": [12, 63], "run_spark_job": [12, 46, 73], "payload": [12, 19, 25, 73], "livi": [12, 25], "repres": [12, 25], "want": [12, 25, 66, 67, 77], "get_job_run_statu": 12, "wait_for_job_run_statu": 12, "cancel_job_run": 12, "protocol": [13, 62], "sa": [13, 57, 60, 62, 65], "token": [13, 33, 54, 55, 58, 60, 61, 62, 63, 65, 72, 76], "wasb_default": [13, 26, 32, 33, 36, 38, 65, 72, 76], "wasbhook": [13, 16, 26, 32, 36, 38, 52, 66], "wasb_conn_id": [13, 26, 32, 33, 36, 38], "public_read": 13, "These": [13, 23], "account_kei": 13, "addit": [13, 52, 66], "blockblockservic": 13, "constructor": [13, 52], "sas_token": [13, 52, 62, 65, 66, 72], "your_token": 13, "defaultazurecredenti": [13, 28, 52, 54, 63, 66, 77], "infrastructur": 13, "anonym": [13, 60, 62, 65], "public": [13, 60, 62, 65], "read": [13, 16, 28, 60, 62, 65, 68], "access": [13, 28, 57, 59, 60, 62, 65, 68], "blobservicecli": [13, 52, 66], "check_for_blob": [13, 26, 32], "container_nam": [13, 26, 32, 33, 36, 38, 68, 72, 76], "blob_nam": [13, 26, 32, 33, 36, 42, 72], "blobclient": 13, "get_blob_properti": 13, "check_for_prefix": [13, 32], "containercli": 13, "walk_blob": 13, "get_blobs_list": [13, 52], "includ": [13, 21, 52, 54, 63], "delimit": [13, 37, 52, 66], "filter": [13, 19], "result": 13, "whose": 13, "begin": [13, 40], "more": [13, 17, 19, 20, 21, 22, 23, 25, 26, 28, 33, 35, 37, 38, 52, 66, 72, 76, 77], "snapshot": 13, "metadata": [13, 19], "uncommittedblob": 13, "copi": [13, 38, 68], "g": 13, "csv": [13, 37], "create_contain": [13, 36, 38], "attempt": [13, 23, 36, 38, 52, 66], "prior": [13, 36, 38], "mai": [13, 36, 38, 52, 77], "yet": [13, 36, 38], "upload_blob": 13, "download_blob": 13, "read_fil": 13, "blob_typ": 13, "blockblob": 13, "length": [13, 52, 66], "either": [13, 19, 73], "pageblob": 13, "appendblob": 13, "offset": [13, 52, 
66], "storagestreamdownload": 13, "rang": 13, "section": [13, 77], "delete_contain": 13, "delete_blob": 13, "mark": 13, "blobproperti": 13, "delete_fil": [13, 38, 52, 66], "is_prefix": [13, 26], "ignore_if_miss": [13, 26], "even": [13, 26, 52], "hook": [14, 16, 19, 35, 38, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 65, 66, 68], "wasb_task_handl": [14, 15], "wasb_delete_blob": [14, 24], "key_vault": [14, 27, 77], "sensor": [14, 52, 54, 66], "transfer": [14, 52, 74], "azure_blob_to_gc": [14, 34], "local_to_adl": [14, 34], "local_to_wasb": [14, 34], "oracle_to_azure_data_lak": [14, 34], "sftp_to_wasb": [14, 34], "util": [14, 16, 23, 28], "wasbtaskhandl": 16, "base_log_fold": 16, "wasb_log_fold": 16, "wasb_contain": 16, "delete_local_copi": 16, "filename_templ": 16, "file_task_handl": 16, "filetaskhandl": 16, "logging_mixin": [16, 23, 28], "loggingmixin": [16, 23, 28], "python": [16, 17, 28, 52, 66, 67, 77], "handler": [16, 52, 66, 68], "handl": [16, 52, 66], "extend": 16, "set_context": 16, "ti": [16, 21], "task_inst": 16, "context": [16, 17, 18, 19, 20, 21, 22, 23, 25, 26, 29, 30, 32, 33, 35, 36, 37, 38, 52], "wasb_log_exist": 16, "remote_log_loc": 16, "els": 16, "wasb_read": 16, "return_error": 16, "occur": 16, "wasb_writ": 16, "append": 16, "silent": 16, "wa": [16, 23, 66], "adlsdeleteoper": [17, 66], "baseoper": [17, 18, 19, 20, 21, 22, 23, 25, 26, 33, 35, 36, 37, 38], "how": [17, 19, 23, 33, 35, 38, 57, 67, 72, 75], "look": [17, 19, 23, 33, 35, 38, 52, 66, 70, 75, 77], "guid": [17, 19, 21, 23, 33, 35, 38, 52, 67], "template_field": [17, 18, 19, 20, 21, 22, 23, 25, 26, 29, 30, 32, 33, 35, 36, 37, 38, 52], "sequenc": [17, 18, 19, 20, 21, 22, 23, 25, 26, 29, 30, 32, 33, 35, 36, 37, 38, 52], "ui_color": [17, 18, 19, 20, 22, 23, 25, 30, 35, 37], "901dd2": 17, "execut": [17, 18, 19, 20, 21, 22, 23, 25, 26, 33, 35, 36, 37, 38, 68, 69, 71, 73], "main": [17, 20, 21, 22, 23, 25, 26, 33, 35, 37, 52, 67], "deriv": [17, 20, 21, 22, 23, 25, 26, 29, 30, 32, 33, 35, 37], "render": [17, 20, 21, 22, 23, 25, 26, 33, 35, 37, 38], "jinja": [17, 20, 21, 22, 23, 25, 26, 33, 35, 37], "templat": [17, 18, 20, 21, 22, 23, 25, 26, 33, 35, 36, 37, 38, 52, 66, 68], "get_template_context": [17, 20, 21, 22, 23, 25, 26, 33, 35, 37], "adlslistoper": [17, 66], "xcom": [17, 52], "downstream": 17, "parquet": 17, "folder": [17, 38, 52, 67], "output": 17, "adls_fil": 17, "azuredataexplorerqueryoper": 18, "00a1f2": 18, "template_ext": 18, "get_hook": [18, 20], "primaryresult": 18, "v2": [18, 52], "response2": 18, "azureservicebuscreatequeueoper": [19, 71], "e4f0e8": [19, 22, 35], "azureservicebussendmessageoper": [19, 71], "Its": 19, "ref": [19, 52], "howto": 19, "azureservicebusreceivemessageoper": [19, 71], "azureservicebusdeletequeueoper": [19, 71], "azureservicebustopiccreateoper": [19, 71], "default_message_time_to_l": 19, "max_size_in_megabyt": 19, "requires_duplicate_detect": 19, "duplicate_detection_history_time_window": 19, "size_in_byt": 19, "filtering_messages_before_publish": 19, "authorization_rul": 19, "support_ord": 19, "auto_delete_on_idl": 19, "enable_partit": 19, "enable_express": 19, "user_metadata": 19, "max_message_size_in_kilobyt": 19, "datetim": [19, 23, 52, 72], "timedelta": 19, "iso": 19, "8601": 19, "span": 19, "live": 19, "durat": 19, "timetol": 19, "itself": 19, "input": [19, 21], "format": [19, 52, 72], "like": [19, 59, 71, 77], "pt300": 19, "accept": 19, "megabyt": 19, "memori": [19, 21], "alloc": [19, 21], "duplic": [19, 66], "detect": [19, 44], "structur": 19, "defin": [19, 29, 30, 32, 68], 
"histori": 19, "authorizationrul": 19, "interv": 19, "minimum": [19, 66], "partit": 19, "across": 19, "broker": [19, 71], "express": [19, 52], "hold": 19, "temporarili": 19, "persist": 19, "associ": [19, 23], "kilobyt": 19, "premium": 19, "2021": [19, 52, 67, 72], "05": [19, 52], "higher": 19, "allow": [19, 21, 52, 66, 75], "1024": 19, "while": [19, 29, 30, 32], "102400": 19, "azureservicebussubscriptioncreateoper": [19, 71], "lock_dur": 19, "requires_sess": 19, "dead_lettering_on_filter_evaluation_except": 19, "forward_to": 19, "forward_dead_lettered_messages_to": 19, "peek": 19, "lock": 19, "other": [19, 71, 77], "lockdur": 19, "concept": 19, "recipi": 19, "forward": 19, "charact": [19, 37, 52], "azureservicebusupdatesubscriptionoper": [19, 71], "asbreceivesubscriptionmessageoper": [19, 71], "azureservicebussubscriptiondeleteoper": [19, 71], "azureservicebustopicdeleteoper": [19, 71], "azurebatchoper": [20, 52, 66], "batch_pool_id": 20, "batch_pool_vm_s": 20, "batch_job_id": 20, "batch_task_command_lin": 20, "batch_task_id": 20, "batch_pool_display_nam": 20, "batch_job_display_nam": 20, "batch_job_manager_task": 20, "batch_job_preparation_task": 20, "batch_job_release_task": 20, "batch_task_display_nam": 20, "batch_task_container_set": 20, "batch_start_task": 20, "batch_max_retri": 20, "3": [20, 28], "batch_task_resource_fil": 20, "batch_task_output_fil": 20, "batch_task_user_ident": 20, "target_low_priority_nod": 20, "enable_auto_scal": [20, 52], "auto_scale_formula": 20, "use_latest_verified_vm_image_and_sku": 20, "25": [20, 52], "should_delete_job": 20, "should_delete_pool": 20, "jobmanagertask": 20, "manag": [20, 28, 52, 54, 56, 58, 60, 62, 63, 65, 66, 67, 69, 71, 73, 77], "launch": 20, "jobpreparationtask": 20, "prepar": [20, 52, 73], "jobreleasetask": 20, "releas": [20, 52, 66], "undo": 20, "chang": [20, 52], "made": [20, 67], "taskcontainerset": 20, "starttask": 20, "join": 20, "restart": [20, 21, 68], "retri": 20, "consid": 20, "resourcefil": 20, "outputfil": 20, "userident": 20, "ident": [20, 28, 33, 52, 54, 63, 66, 77], "omit": 20, "non": [20, 23, 25, 52, 66], "administr": 20, "low": 20, "prioriti": 20, "adjust": 20, "over": 20, "formula": 20, "enableautoscal": 20, "f0f0e4": 20, "on_kil": [20, 21, 23, 25], "overrid": [20, 21, 23, 25, 29, 30, 32, 72], "cleanup": [20, 21, 23, 25, 52, 66], "subprocess": [20, 21, 23, 25], "multiprocess": [20, 21, 23, 25], "clean": [20, 21, 23, 25, 52], "up": [20, 21, 23, 25, 52, 68, 77], "leav": [20, 21, 23, 25], "ghost": [20, 21, 23, 25], "behind": [20, 21, 23, 25], "clean_up": 20, "default_environment_vari": 21, "default_secured_vari": 21, "default_volum": 21, "default_memory_in_gb": 21, "default_cpu": 21, "azurecontainerinstancesoper": [21, 43, 52, 66], "ci_conn_id": 21, "registry_conn_id": 21, "region": 21, "environment_vari": 21, "secured_vari": 21, "memory_in_gb": 21, "cpu": 21, "gpu": 21, "remove_on_error": 21, "fail_if_exist": 21, "tag": 21, "os_typ": 21, "linux": 21, "restart_polici": 21, "never": 21, "ip_address": 21, "port": 21, "network_profil": [21, 52, 66], "privat": 21, "docker": 21, "wherein": 21, "parallel": 21, "pair": 21, "environ": [21, 28, 53, 54, 55, 56, 58, 59, 60, 61, 62, 63, 65, 77], "variabl": [21, 28, 38, 53, 54, 55, 56, 58, 59, 60, 61, 62, 63, 65], "environment": 21, "expos": 21, "outsid": 21, "typic": [21, 69], "mount": 21, "insid": [21, 38, 52], "container_timeout": 21, "max": 21, "system": [21, 23, 52, 54, 63, 68, 69, 70, 71, 72, 73, 75, 76], "possibl": [21, 52], "polici": [21, 66], "alwai": 21, "onfailur": 21, 
"ipaddress": 21, "ip": 21, "address": 21, "containergroupnetworkprofil": 21, "network": 21, "profil": 21, "azure_service_princip": 21, "azure_registry_us": 21, "my": 21, "d": [21, 67], "myprivateregistri": 21, "azurecr": 21, "my_contain": 21, "westeurop": 21, "model_path": 21, "my_valu": 21, "postgres_login": 21, "macro": 21, "postgres_default": 21, "postgres_password": 21, "job_guid": 21, "xcom_pul": 21, "task1": 21, "azure_container_instance_conn_id": 21, "my_storage_contain": 21, "my_fileshar": 21, "14": [21, 52, 66], "4": 21, "gpuresourc": 21, "k80": 21, "bin": [21, 67], "echo": [21, 67], "world": [21, 73], "start_contain": 21, "template_fields_render": [21, 23, 25, 37, 52, 66], "azurecosmosinsertdocumentoper": 22, "both": [22, 66], "thei": 22, "do": [22, 67, 72, 76, 77], "azuredatafactorypipelinerunlink": 23, "baseoperatorlink": [23, 52], "construct": 23, "monitor": [23, 52, 66, 69], "get_link": 23, "ti_kei": [23, 52], "extern": 23, "old": [23, 52], "signatur": [23, 52, 57, 66, 67], "function": [23, 29, 30, 32, 52, 69], "self": [23, 67], "dttm": 23, "That": 23, "still": 23, "runtim": 23, "deprec": [23, 28, 35, 52, 66], "taskinst": [23, 52, 66], "taskinstancekei": 23, "azuredatafactoryrunpipelineoper": 23, "wait_for_termin": [23, 25, 69], "reference_pipeline_run_id": 23, "is_recoveri": 23, "start_activity_nam": 23, "start_from_failur": 23, "could": 23, "disabl": [23, 69], "asynchron": [23, 25, 69], "long": 23, "azuredatafactorypipelinerunsensor": [23, 69], "correspond": 23, "recoveri": 23, "mode": 23, "referenc": 23, "groupid": 23, "In": [23, 66], "parameternam": 23, "0678d4": [23, 25], "operator_extra_link": 23, "azuresynapserunsparkbatchoper": 25, "sparkbatchjobopt": 25, "wasbdeletebloboper": 26, "check_opt": [26, 32], "azurekeyvaultbackend": [28, 52, 66, 77], "connections_prefix": [28, 77], "variables_prefix": [28, 52, 66, 77], "config_prefix": 28, "vault_url": [28, 77], "sep": [28, 67], "basesecretsbackend": 28, "retriev": 28, "vault": [28, 52], "backend": [28, 52, 66], "cfg": [28, 68, 77], "backend_kwarg": [28, 77], "azure_key_vault_uri": 28, "smtp": [28, 77], "And": 28, "hello": [28, 77], "sdk": [28, 77], "azure_tenant_id": [28, 77], "azure_client_id": [28, 77], "azure_client_secret": [28, 77], "view": 28, "null": [28, 77], "separ": [28, 52, 66], "concaten": 28, "secret_prefix": 28, "secret_id": 28, "get_conn_valu": [28, 52, 66], "serial": 28, "represent": [28, 77], "get_conn_uri": [28, 52, 66], "As": 28, "deseri": 28, "get_vari": 28, "get_config": 28, "build_path": 28, "path_prefix": 28, "build": [28, 52, 67], "valid": [28, 52, 66, 67], "also": [28, 66, 67], "replac": [28, 52, 66], "underscor": 28, "dash": 28, "easi": [28, 75], "switch": [28, 52], "between": 28, "so": [28, 68, 77], "connection_default": 28, "becom": 28, "azurecosmosdocumentsensor": [29, 44, 52, 66], "basesensoroper": [29, 30, 32], "azure_cosmos_sensor": 29, "somedatabase_nam": 29, "somecollection_nam": 29, "poke": [29, 30, 32], "azuredatafactorypipelinerunstatussensor": [30, 69], "50e6ff": 30, "wasbblobsensor": [32, 72], "wasbprefixsensor": 32, "azureblobstoragetogcsoper": [33, 72], "gcp_conn_id": 33, "google_cloud_default": 33, "bucket_nam": [33, 72], "object_nam": [33, 72], "gzip": [33, 72], "delegate_to": [33, 72], "impersonation_chain": [33, 72], "bucket": [33, 52, 68, 72], "googl": [33, 52, 66], "cloud": [33, 52, 69, 76], "fetch": 33, "compress": 33, "imperson": 33, "domain": 33, "wide": 33, "deleg": 33, "term": [33, 73], "chain": 33, "access_token": 33, "last": 33, "grant": 33, "origin": [33, 67], 
"creator": 33, "iam": 33, "role": 33, "directli": [33, 54, 58], "preced": 33, "localfilesystemtoadlsoper": [35, 52, 66], "extra_upload_opt": 35, "localtoazuredatalakestorageoper": [35, 52, 66], "arg": [35, 52, 66, 72, 76], "localfilesystemtowasboper": [36, 52, 66], "load_opt": [36, 38], "oracletoazuredatalakeoper": 37, "azure_data_lake_path": 37, "oracle_conn_id": 37, "sql_param": 37, "encod": [37, 53, 54, 55, 56, 58, 59, 60, 61, 62, 65], "utf": [37, 52], "8": 37, "quotechar": 37, "quot": 37, "quote_minim": 37, "move": [37, 38, 52, 66], "oracl": [37, 52, 66], "destin": 37, "put": 37, "strategi": 37, "unicodecsv": 37, "e08c8c": 37, "sftp": [38, 50, 52, 66], "wildcard": 38, "sftpfile": 38, "sftptowasboper": [38, 76], "sftp_source_path": [38, 76], "blob_prefix": [38, 50, 76], "sftp_conn_id": 38, "sftp_default": [38, 76], "move_object": 38, "wasb_overwrite_object": 38, "appear": 38, "end": 38, "establish": 38, "instead": [38, 52, 66], "equival": 38, "mv": 38, "oppos": 38, "cp": 38, "might": [38, 66], "resourceexistserror": 38, "source_path_contains_wildcard": 38, "dry_run": 38, "dry": 38, "just": [38, 68], "get_sftp_files_map": 38, "get_tree_behavior": 38, "extract": 38, "tree": 38, "behavior": 38, "check_wildcards_limit": 38, "sftp_hook": 38, "re": 38, "get_full_path_blob": 38, "previou": [38, 67], "copy_files_to_wasb": 38, "sftp_file": 38, "uploaded_fil": 38, "been": [38, 68], "env_id": [40, 41, 42, 43, 44, 47, 48, 49, 50], "dag_id": [40, 41, 42, 43, 44, 47, 48, 49, 50, 72], "test_run": [40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50], "local_file_path": [41, 48, 50, 75], "remote_file_path": [41, 48, 70, 75], "azure_container_nam": [42, 50, 72, 76], "gcp_bucket_file_path": [42, 72], "gcp_bucket_nam": [42, 72], "gcp_object_nam": [42, 72], "wait_for_blob": [42, 72], "dag": [43, 44, 52, 66, 72], "aci_exampl": 43, "t1": [43, 44], "highlight": 44, "usag": [44, 52, 66, 72, 76], "now": [44, 52, 66], "manual": [44, 66], "airflow": [44, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 65, 67, 68, 72, 76, 77], "example_cosmosdb_sensor": 44, "properli": [44, 68], "example_azure_cosmosdb_sensor": 44, "execution_timeout": [45, 46], "sb_mgmt_queue_test": 45, "message_list": 45, "sb_mgmt_topic_test": 45, "sb_mgmt_subscript": 45, "create_service_bus_queu": [45, 71], "airflow_hom": [46, 68], "default_arg": [46, 52, 72], "spark_job_payload": [46, 73], "myfileshar": 47, "mydirectori": 47, "create_fileshar": 47, "delete_fileshar": 47, "path_to_upload_fil": 49, "sftp_src_path": [50, 76], "sample_filenam": 50, "file_complete_path": 50, "sftp_file_complete_path": 50, "delete_sftp_fil": 50, "transfer_files_to_sftp_step": 50, "example_adf_run_pipelin": [51, 69], "example_adls_delet": [51, 70], "example_azure_blob_to_gc": [51, 72], "example_azure_container_inst": 51, "example_azure_cosmosdb": 51, "example_azure_service_bu": [51, 71], "example_azure_synaps": [51, 73], "example_fileshar": 51, "example_local_to_adl": [51, 75], "example_local_to_wasb": 51, "example_sftp_to_wasb": [51, 76], "commit": 52, "changelog": 52, "2022": 52, "12": [52, 66, 67], "09": 52, "subject": 52, "7e776db254": 52, "28234": [52, 66], "11": [52, 67], "15": 52, "12c3c39d1a": 52, "novemb": 52, "wave": 52, "27613": 52, "547e6e80f3": 52, "fix": 52, "reveal": [52, 66], "lib": [52, 66], "27601": [52, 66], "a50195d617": 52, "07": 52, "limit": [52, 66], "eaager": [52, 66], "upgrad": [52, 66], "27535": [52, 66], "5cd78cf425": 52, "06": 52, "avoid": 52, "backtrack": 52, "27531": 52, "a16f24b5d7": 52, "class": [52, 66, 77], "27417": [52, 66], "59da943428": 52, 
"04": 52, "suppress": 52, "27495": 52, "680965b2ea": 52, "03": 52, "extra__": [52, 66], "extra_": [52, 66], "27489": [52, 66], "5df1d6ec20": 52, "28": 52, "prefer": [52, 66], "remain": [52, 66], "27220": [52, 66], "c49740eb25": 52, "27041": [52, 66], "9ab1a6a3e7": 52, "27": 52, "style": 52, "26872": 52, "78b8ea2f22": 52, "min": [52, 66], "27196": [52, 66], "3676d3a402": 52, "27219": [52, 66], "6b9e76b7b3": 52, "23": 52, "27047": [52, 66], "2a34dc9e84": 52, "normal": 52, "27205": 52, "d51de50e5c": 52, "reflect": [52, 66], "unprefix": [52, 66], "27024": [52, 66], "59cba36db0": 52, "13": 52, "25426": 52, "32434a128a": 52, "30": 52, "26749": [52, 66], "f8db64c35c": 52, "septemb": 52, "26731": 52, "24d88e8fe": 52, "19": 52, "26345": [52, 66], "1f7b296227": 52, "18": 52, "auto": [52, 66], "web": [52, 66], "ui": [52, 66, 69], "26169": [52, 66], "06acf40a43": 52, "appli": [52, 66], "pep": 52, "563": 52, "postpon": 52, "evalu": 52, "annot": 52, "26289": 52, "5060785988": 52, "26117": [52, 66], "4bd0734a35": 52, "01": 52, "26038": [52, 66], "afb282aee4": 52, "08": 52, "neg": [52, 66], "25844": [52, 66], "5c7c518aa0": 52, "16": 52, "implement": [52, 66], "25436": [52, 66], "e5ac6c7cfb": 52, "august": 52, "25618": 52, "d5f40d739f": 52, "warn": [52, 66, 67], "16224": [52, 66], "18896": [52, 66], "8bb0c4fd32": 52, "25362": [52, 66], "eab0167f1b": 52, "25235": [52, 66], "e32e9c5880": 52, "bump": [52, 66], "extens": [52, 66], "mypi": [52, 66], "paramspec": [52, 66], "25088": [52, 66], "292440d54f": 52, "25029": [52, 66], "d2459a241b": 52, "juli": 52, "25030": 52, "bfd506cbfc": 52, "25018": [52, 66], "aa8bf2cf85": 52, "24625": [52, 66], "b27fc0367c": 52, "24843": [52, 66], "f18c609d12": 52, "24771": [52, 66], "0de31bd73a": 52, "29": 52, "24672": 52, "510a6bab45": 52, "yaml": [52, 66], "24702": 52, "09f38ad3f6": 52, "24038": [52, 66], "9c59831ee7": 52, "21": [52, 66], "functool": [52, 66], "cached_properti": [52, 66], "24582": [52, 66], "dcdcf3a2b8": 52, "rc2": 52, "24307": 52, "717a7588bc": 52, "descript": 52, "doubl": 52, "24292": 52, "aeabe994b3": 52, "24231": 52, "c23826915d": 52, "per": [52, 66], "24153": [52, 66], "027b707d21": 52, "explanatori": 52, "contributor": 52, "24229": 52, "389e858d93": 52, "24154": [52, 66], "6e83885c95": 52, "migrat": [52, 66], "design": [52, 66], "22452": [52, 66], "24141": [52, 66], "3393647aa6": 52, "26": 52, "23941": [52, 66], "ec6761a5c0": 52, "f": 52, "23597": 52, "75c60923e0": 52, "23631": 52, "8f181c1034": 52, "managedident": [52, 66], "23394": [52, 66], "2d109401b3": 52, "pre": [52, 66], "22887": 52, "8b6b0848a3": 52, "brees": 52, "pull": 52, "23104": 52, "49e336ae03": 52, "dummyoper": [52, 66], "emptyoper": [52, 66], "22974": [52, 66], "6933022e94": 52, "22884": 52, "56ab82ed7a": 52, "mid": 52, "april": 52, "22819": 52, "d3976d9b20": 52, "22735": [52, 66], "7ab45d41d6": 52, "22348": [52, 66], "d7dbfb7e26": 52, "bugfix": [52, 66], "22383": 52, "16adc035b1": 52, "classifi": [52, 66], "march": 52, "22226": 52, "c1ab8e2d7b": 52, "protect": 52, "accident": 52, "misus": 52, "get_valu": 52, "22244": 52, "d08284ed25": 52, "map_index": 52, "22112": 52, "f5b96315fe": 52, "feb": 52, "22056": 52, "ba79adb631": 52, "02": 52, "creation": [52, 66], "20510": [52, 66], "f42559a773": 52, "21924": [52, 66], "08575ddd8a": 52, "21798": 52, "3c4524b4ec": 52, "21514": [52, 66], "0a3ff43d41": 52, "docstr": [52, 66], "21398": [52, 66], "d94fa37830": 52, "januari": 52, "delai": 52, "21439": 52, "6c3a67d4fc": 52, "21257": 52, "ddb5246bd1": 52, "refactor": [52, 66], "21285": [52, 66], 
"cb73053211": 52, "21074": [52, 66], "602abe8394": 52, "20": [52, 71], "sphinx": 52, "autoapi": 52, "typehint": 52, "20951": 52, "730db3fb77": 52, "fake": 52, "stub": 52, "20936": 52, "f8fd0f7b4c": 52, "explain": [52, 66], "introduc": 52, "20827": 52, "31": 52, "f77417eb0d": 52, "k8": 52, "pypi": [52, 66], "20614": 52, "97496ba2b4": 52, "decemb": 52, "20523": 52, "a22d5bd076": 52, "20611": 52, "83f8e178ba": 52, "ext": 52, "20608": 52, "d56e7b56bb": 52, "friendli": 52, "20571": 52, "a0821235fb": 52, "everywher": 52, "20565": 52, "3299064958": 52, "isort": 52, "pyi": 52, "20556": 52, "e63e23c582": 52, "issu": [52, 66], "20409": 52, "341bf5ab1f": 52, "18877": [52, 66], "05e4cd1c6a": 52, "20207": [52, 66], "2fb5e1d0ec": 52, "declar": 52, "relat": [52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 65], "20226": 52, "42f133c5f6": 52, "inputrequir": [52, 66], "20084": [52, 66], "374574b8d0": 52, "19923": 52, "853576d901": 52, "19882": 52, "e25446a8b1": 52, "19668": [52, 66], "11e73d2db1": 52, "unnecessari": [52, 66], "19595": [52, 66], "4212c49324": 52, "modul": [52, 66], "compli": [52, 66], "aip": [52, 66], "19431": [52, 66], "0f516458be": 52, "host": [52, 53, 60, 62, 65, 66], "hidden": [52, 66], "19475": [52, 66], "ca679c014c": 52, "19079": [52, 66], "490a382ed6": 52, "ensur": 52, "catchup": 52, "19396": 52, "d9567eb106": 52, "octob": 52, "19321": 52, "61d0093054": 52, "var": [52, 66], "19234": [52, 66], "ceb2b53a10": 52, "start_dat": [52, 66, 72], "19062": [52, 66], "86a2a19ad2": 52, "17": 52, "18855": 52, "1571f80546": 52, "common": [52, 66], "misspel": [52, 66], "18964": [52, 66], "1b75f9181f": 52, "18736": [52, 66], "181ac36db3": 52, "18695": [52, 66], "6d504b43ea": 52, "expand": [52, 66], "auth": [52, 66], "18659": [52, 66], "c8485a83bc": 52, "revert": 52, "18663": 52, "18694": 52, "10421c6931": 52, "840ea3efb9": 52, "18613": 52, "a458fcc573": 52, "miscellan": 52, "taskflow": 52, "18278": 52, "46484466c4": 52, "redund": [52, 66], "relabel": [52, 66], "conn": [52, 66, 76], "18386": [52, 66], "97d6892318": 52, "renam": [52, 66, 68], "azuredatalakestorag": [52, 66], "18493": [52, 66], "1d2924c94e": 52, "proper": [52, 66], "18456": [52, 66], "11e34535e8": 52, "17885": [52, 66], "410e6d7967": 52, "18203": [52, 66], "2dac083ae2": 52, "18287": [52, 66], "d119ae8f3f": 52, "18168": [52, 66], "28de326d61": 52, "filetowasboper": [52, 66], "18109": [52, 66], "0a68588479": 52, "17890": 52, "be75dcd39c": 52, "meta": 52, "76ed2a49c6": 52, "import": [52, 66, 67, 68], "lazili": 52, "individu": [52, 53], "17682": 52, "29aab6434f": 52, "17625": [52, 66], "87f408b1e7": 52, "17116": 52, "48ca9374bf": 52, "16873": 52, "d02ded65ea": 52, "wrongli": 52, "escap": 52, "amazon": 52, "17020": 52, "b916b75079": 52, "17015": 52, "866a601b76": 52, "pylint": 52, "our": 52, "toolchain": 52, "16682": 52, "caf0a8499f": 52, "16628": [52, 66], "ffb1fcacff": 52, "15634": [52, 66], "a2a58d27ef": 52, "reduc": [52, 66], "happi": [52, 66], "16626": [52, 66], "bbc627a3da": 52, "16501": 52, "cbf8001d76": 52, "synchron": 52, "buggfix": 52, "16464": 52, "1fba5402bb": 52, "june": 52, "16405": 52, "0c80a7d411": 52, "azurefileshar": [52, 66], "16388": [52, 66], "29b7f795d6": 52, "16280": [52, 66], "9c94b72d44": 52, "16294": 52, "476d0f6e3d": 52, "pyupgrad": 52, "15991": 52, "c844ff742e": 52, "colon": [52, 66], "space": [52, 66], "15841": [52, 66], "37681bca00": 52, "apply_default": [52, 66], "15667": [52, 66], "3b4fdd0a7a": 52, "15632": [52, 66], "b1bd59440b": 52, "15637": [52, 66], "0f97a3970d": 52, "invalid": 52, "15651": 52, 
"db557a8c4a": 52, "15628": 52, "807ad32ce5": 52, "pip": [52, 66, 67], "15576": 52, "657384615f": 52, "14823": [52, 66], "d65e492a3": 52, "azurecontainerinst": [52, 66], "15514": [52, 66], "cb1344b63d": 52, "15352": 52, "1a85ba9e93": 52, "dynam": [52, 66], "15159": [52, 66], "042be2e4e0": 52, "15236": 52, "9b76b94c94": 52, "bunch": [52, 66], "15130": [52, 66], "a7ca1b3b0b": 52, "14968": 52, "68e4c4dcb0": 52, "backport": 52, "14886": 52, "4372d45615": 52, "attribut": [52, 66], "azuredatafactori": [52, 66], "14704": [52, 66], "b753c7fa60": 52, "four": [52, 60, 62, 65], "previous": 52, "exclud": [52, 77], "14655": 52, "e7bb17aeb8": 52, "built": 52, "14606": 52, "630aeff72c": 52, "instanti": [52, 66], "its": [52, 66], "14565": [52, 66], "589d6dec92": 52, "next": [52, 66], "14487": 52, "11d03d2f63": 52, "11015": [52, 66], "5bfa0f123b": 52, "14313": [52, 66], "ca35bd7f7f": 52, "14125": 52, "10343ec29f": 52, "correct": [52, 67, 68], "tool": 52, "rc": 52, "14082": 52, "88bdcfa0df": 52, "14013": 52, "ac2f72c98d": 52, "13767": 52, "94b1531230": 52, "v12": [52, 66], "12188": [52, 66], "a9ac2b040b": 52, "flynt": 52, "13732": 52, "3fd5ef3555": 52, "miss": 52, "logo": 52, "13717": 52, "b2cb6ee5ba": 52, "13520": [52, 66], "295d66f914": 52, "2020": 52, "grammar": 52, "13380": 52, "a1e9195076": 52, "13190": 52, "5185d81ff9": 52, "azuredatalakestoragedeleteoper": [52, 66], "13206": [52, 66], "6cf76d7ac0": 52, "typo": 52, "13148": 52, "5090fb0c89": 52, "script": [52, 67], "13073": 52, "32971a1a2d": 52, "12955": 52, "b40dffa085": 52, "rema": 52, "12917": 52, "9b39f24780": 52, "12558": 52, "bd90136aaf": 52, "12681": 52, "2037303eef": 52, "discoveri": 52, "12466": 52, "543d88b3a1": 52, "12673": 52, "6b3c6add9": 52, "setup": [52, 68], "py": [52, 68, 69, 70, 71, 72, 73, 75, 76], "conflict": 52, "12636": 52, "c34ef853c8": 52, "12444": 52, "0080354502": 52, "readm": [52, 66], "0b2": 52, "12449": 52, "7ca0b6f121": 52, "markdownlint": 52, "md003": 52, "head": 52, "header": 52, "12427": 52, "12438": 52, "ae7cb4a1e2": 52, "wrong": 52, "hash": 52, "12390": 52, "6889a333cf": 52, "improv": 52, "12366": 52, "7825e8f590": 52, "12304": 52, "dd2095f4a8": 52, "simplifi": 52, "12216": 52, "85a18e13d9": 52, "point": 52, "project": 52, "page": [52, 67, 75], "cross": 52, "12212": 52, "59eb5de78c": 52, "come": 52, "0beta1": 52, "12206": 52, "b2a28d1590": 52, "dev": 52, "12082": 52, "3ff7e0743a": 52, "lookup": 52, "12174": 52, "41bf172c1d": 52, "12093": 52, "4e8f9cc8d0": 52, "black": 52, "formmatt": 52, "9550": 52, "8c42cf1b00": 52, "11447": 52, "5a439e84eb": 52, "2a1": 52, "11855": 52, "872b1566a1": 52, "11826": 52, "6ce855af11": 52, "spell": 52, "11821": 52, "349b0811c3": 52, "d200": 52, "pydocstyl": 52, "11688": 52, "f8ff217e2f": 52, "incorrect": 52, "11635": 52, "16e7129719": 52, "11487": 52, "686e0ee7df": 52, "hardcod": 52, "11408": 52, "d2754ef769": 52, "11359": 52, "832a7850f1": 52, "gc": 52, "11321": 52, "5d007fd2ff": 52, "11342": 52, "b0fcf67559": 52, "azurefilesharetogcsoper": 52, "10991": 52, "c51016b0b8": 52, "10814": 52, "fd682fd70a": 52, "11272": 52, "4210618789": 52, "11251": 52, "0a0e1af800": 52, "broken": 52, "markdown": 52, "toc": 52, "11249": 52, "ca4238eb4d": 52, "month": 52, "11242": 52, "5220e4c384": 52, "11238": 52, "5093245d6f": 52, "coverag": 52, "yandex": 52, "11198": 52, "f3e87c5030": 52, "d202": 52, "11032": 52, "f77a11d5b1": 52, "10898": 52, "9549274d11": 52, "8b1": 52, "10818": 52, "fdd9b6f65b": 52, "10543": 52, "3696c34c28": 52, "word": 52, "10528": 52, "ee7ca128a1": 52, "refernc": 52, "10483": 52, 
"2f552233f5": 52, "9747": 52, "cdec301254": 52, "10205": 52, "24c8e4c2d6": 52, "10163": 52, "aeea71274d": 52, "10097": 52, "7d24b088cd": 52, "example_dag": 52, "9985": 52, "0bf330ba86": 52, "9950": 52, "33f0cd2657": 52, "keep": 52, "9784": 52, "d3c76da952": 52, "hint": 52, "9774": 52, "23f80f34ad": 52, "respect": 52, "9714": 52, "d0e7db4024": 52, "fresh": 52, "9408": 52, "12af6a0800": 52, "final": 52, "23rc1": 52, "9404": 52, "c7e5bce57f": 52, "candid": 52, "9370": 52, "f6bd817a3a": 52, "9320": 52, "0b0e4f7a4c": 52, "rc3": 52, "relas": 52, "9026": 52, "00642a46d0": 52, "8994": 52, "375d1ca229": 52, "8898": 52, "12c5e5d8a": 52, "8891": 52, "f3521fb0e3": 52, "regener": 52, "8886": 52, "92585ca4cb": 52, "autom": 52, "8807": 52, "87969a350d": 52, "6515": 52, "8170": 52, "d99833c9b5": 52, "4529": 52, "8024": 52, "4bde99f132": 52, "7802": 52, "a83eb335e5": 52, "super": 52, "7821": 52, "f0e2421807": 52, "6896": 52, "__init__": [52, 68], "7520": 52, "4bec1cc489": 52, "6895": 52, "7519": 52, "3320e432a1": 52, "6817": 52, "lazi": 52, "face": 52, "untouch": 52, "7517": 52, "086e307245": 52, "6890": 52, "7513": 52, "4d03e33c11": 52, "implicit": 52, "explicit": 52, "entri": 52, "md": [52, 66], "squash": 52, "rebas": 52, "7456": 52, "175a160463": 52, "6828": 52, "zope": 52, "7448": 52, "1e00243014": 52, "5176": 52, "5785": 52, "97a429f9d0": 52, "6714": 52, "magic": 52, "comment": 52, "7338": 52, "83c037873f": 52, "6674": [52, 67], "accord": 52, "7287": 52, "057f3ae3a4": 52, "6670": 52, "6669": 52, "contrib": 52, "7286": 52, "290330ba60": 52, "6552": 52, "7158": 52, "There": [53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 65, 67], "wai": [53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 65, 72, 76], "syntax": [53, 54, 55, 56, 58, 59, 60, 61, 62, 65], "compon": [53, 54, 55, 56, 58, 59, 60, 61, 62, 65], "export": [53, 54, 55, 56, 58, 59, 60, 61, 62, 65], "airflow_conn_azure_container_registry_default": 53, "myregistri": 53, "fallback": [54, 63], "try": [54, 63], "differ": [54, 63, 67, 69], "cli": [54, 63], "left": [54, 63, 67], "fall": [54, 63], "back": [54, 63], "resid": 54, "airflow_conn_azure_data_factory_default": 54, "applicationid": 54, "serviceprincipalpassword": 54, "sometim": 55, "store_nam": 55, "airflow_conn_azure_data_lake_default": 55, "20id": 55, "three": [56, 58, 72, 76], "provid": [56, 67, 69, 70, 71, 72, 73, 75, 76, 77], "airflow_conn_azure_data_explorer_default": 56, "20usernam": [56, 60, 62, 65], "20password": [56, 60, 65], "myclust": 56, "auth_method": 56, "connection_str": [57, 62, 65], "disk": 58, "key_path": 58, "key_json": 58, "airflow_conn_azure_default": 58, "2fkei": 58, "airflow_conn_azure_batch_default": 59, "20acount": 59, "20kei": [59, 61], "account_url": 59, "mybatchaccount": 59, "extra__azure_container_volume__connection_str": 60, "airflow_conn_wasp_default": [60, 62], "myblob": [60, 62, 65], "primari": [61, 67], "airflow_conn_azure_cosmos_default": 61, "3a": 61, "2f": 61, "2fairflow": 61, "mydatabas": 61, "mycollect": 61, "extra__azure_synapse__tenantid": 63, "extra__azure_synapse__subscriptionid": 63, "workspac": 63, "microsoft": [64, 67, 69, 70, 71, 72, 73, 75, 76, 77], "azur": [64, 67, 74], "shared_access_kei": 65, "tenant_id": 65, "airflow_conn_wasb_default": 65, "repositori": 66, "top": [66, 67], "below": [66, 67, 68, 69, 70, 71, 73, 75], "datalak": [66, 74], "45": 66, "keyvault": 66, "43": 66, "platform_machin": 66, "aarch64": 66, "adal": 66, "those": [66, 67], "them": 66, "checksum": [66, 67], "site": 66, "sdist": [66, 67], "asc": [66, 67], "sha512": [66, 67], "wheel": 66, 
"extra__azure_fileshare__foo": 66, "foo": 66, "github": 66, "mistakenli": 66, "install_requir": 66, "22382": 66, "trove": 66, "framework": 66, "tabl": 66, "backward": 66, "major": 66, "azuredatalakestoragelistoper": 66, "adls_list": 66, "adls_delet": 66, "optimis": 66, "due": 66, "least": 66, "edit": 66, "were": 66, "azure_container_instance_default": 66, "problem": 66, "exactli": 66, "plain": [66, 72], "howev": 66, "presenc": 66, "caus": 66, "editor": 66, "continu": 66, "describ": 67, "offici": 67, "most": 67, "select": [67, 68], "drop": 67, "down": 67, "whl": 67, "softwar": 67, "foundat": 67, "abov": [67, 68], "pgp": 67, "essenti": 67, "sha": 67, "gpg": 67, "relev": [67, 77], "distribut": 67, "mirror": 67, "pgpk": 67, "ka": 67, "binari": [67, 72, 76], "pgpv": 67, "tar": 67, "gz": 67, "sat": 67, "49": 67, "54": 67, "bst": 67, "rsa": 67, "cde15c6e4d3a8ec4ecf4ba4b6674e08ad7de406f": 67, "issuer": 67, "kaxilnaik": 67, "org": 67, "good": 67, "kaxil": 67, "naik": 67, "aka": 67, "gmail": 67, "certifi": 67, "trust": 67, "belong": 67, "owner": 67, "fingerprint": 67, "cde1": 67, "5c6e": 67, "4d3a": 67, "8ec4": 67, "ecf4": 67, "ba4b": 67, "e08a": 67, "d7de": 67, "406f": 67, "worri": 67, "sign": 67, "why": 67, "step": [67, 68], "know": 67, "sum": 67, "shasum": 67, "512": 67, "diff": 67, "bash": 67, "package_vers": 67, "package_nam": 67, "provider_download_dir": 67, "mktemp": 67, "dep": 67, "dest": 67, "curl": 67, "apache_airflow_providers_microsoft_azur": 67, "py3": 67, "l": 67, "la": 67, "instruct": [67, 72, 76], "chapter": 67, "temporari": 67, "don": 68, "pythonpath": 68, "log_config": 68, "content": 68, "config_templ": 68, "airflow_local_set": 68, "portion": 68, "remote_base_log_fold": 68, "storage_account": 68, "default_logging_config": 68, "logging_config": 68, "remote_log": 68, "logging_config_class": 68, "remote_log_conn_id": 68, "webserv": 68, "schedul": [68, 72], "show": [68, 75], "newli": 68, "etl": 69, "scale": [69, 73], "serverless": [69, 73], "transform": [69, 73], "free": 69, "intuit": 69, "pane": 69, "glass": 69, "run_pipeline1": 69, "pipeline1": 69, "myparam": 69, "here": [69, 77], "coupl": 69, "run_pipeline2": 69, "pipeline2": 69, "pipeline_run_sensor": 69, "cast": 69, "xcomarg": 69, "further": [69, 70, 71, 73, 75], "remove_fil": 70, "delete_task": 70, "enterpris": [71, 73], "subscrib": 71, "decoupl": 71, "send_message_to_service_bus_queu": 71, "receive_message_service_bus_queu": 71, "delete_service_bus_queu": 71, "trigger_rul": 71, "all_don": 71, "create_service_bus_top": 71, "delete_asb_top": 71, "create_service_bus_subscript": 71, "update_service_bus_subscript": 71, "receive_message_service_bus_subscript": 71, "delete_service_bus_subscript": 71, "text": [72, 76], "organ": [72, 76], "visit": [72, 76], "key1": 72, "transfer_files_to_gc": 72, "gcp": 72, "analyt": 73, "limitless": 73, "bring": 73, "togeth": 73, "wareh": 73, "big": 73, "give": 73, "freedom": 73, "unifi": 73, "experi": 73, "ingest": 73, "serv": 73, "immedi": 73, "bi": 73, "provsparkpool": 73, "ignor": 73, "filesystem": 74, "shape": 75, "speed": 75, "upload_task": 75, "transfer_files_to_azur": 76, "transfer_files_from_sftp_to_wasb": 76, "sampl": 77, "akv": 77, "exclus": 77, "combin": 77, "prevent": 77, "being": 77, "some": 77, "_prefix": 77, "ones": 77, "smtp_default": 77}, "objects": {"airflow.providers.microsoft": [[14, 0, 0, "-", "azure"]], "airflow.providers.microsoft.azure": [[11, 0, 0, "-", "hooks"], [15, 0, 0, "-", "log"], [24, 0, 0, "-", "operators"], [27, 0, 0, "-", "secrets"], [31, 0, 0, "-", "sensors"], [34, 
0, 0, "-", "transfers"], [39, 0, 0, "-", "utils"]], "airflow.providers.microsoft.azure.hooks": [[0, 0, 0, "-", "adx"], [1, 0, 0, "-", "asb"], [2, 0, 0, "-", "base_azure"], [3, 0, 0, "-", "batch"], [4, 0, 0, "-", "container_instance"], [5, 0, 0, "-", "container_registry"], [6, 0, 0, "-", "container_volume"], [7, 0, 0, "-", "cosmos"], [8, 0, 0, "-", "data_factory"], [9, 0, 0, "-", "data_lake"], [10, 0, 0, "-", "fileshare"], [12, 0, 0, "-", "synapse"], [13, 0, 0, "-", "wasb"]], "airflow.providers.microsoft.azure.hooks.adx": [[0, 1, 1, "", "AzureDataExplorerHook"]], "airflow.providers.microsoft.azure.hooks.adx.AzureDataExplorerHook": [[0, 2, 1, "", "conn_name_attr"], [0, 2, 1, "", "conn_type"], [0, 2, 1, "", "default_conn_name"], [0, 3, 1, "", "get_conn"], [0, 3, 1, "", "get_connection_form_widgets"], [0, 3, 1, "", "get_ui_field_behaviour"], [0, 2, 1, "", "hook_name"], [0, 3, 1, "", "run_query"]], "airflow.providers.microsoft.azure.hooks.asb": [[1, 1, 1, "", "AdminClientHook"], [1, 1, 1, "", "BaseAzureServiceBusHook"], [1, 1, 1, "", "MessageHook"]], "airflow.providers.microsoft.azure.hooks.asb.AdminClientHook": [[1, 3, 1, "", "create_queue"], [1, 3, 1, "", "delete_queue"], [1, 3, 1, "", "delete_subscription"], [1, 3, 1, "", "get_conn"]], "airflow.providers.microsoft.azure.hooks.asb.BaseAzureServiceBusHook": [[1, 2, 1, "", "conn_name_attr"], [1, 2, 1, "", "conn_type"], [1, 2, 1, "", "default_conn_name"], [1, 3, 1, "", "get_conn"], [1, 3, 1, "", "get_ui_field_behaviour"], [1, 2, 1, "", "hook_name"]], "airflow.providers.microsoft.azure.hooks.asb.MessageHook": [[1, 3, 1, "", "get_conn"], [1, 3, 1, "", "receive_message"], [1, 3, 1, "", "receive_subscription_message"], [1, 3, 1, "", "send_batch_message"], [1, 3, 1, "", "send_list_messages"], [1, 3, 1, "", "send_message"]], "airflow.providers.microsoft.azure.hooks.base_azure": [[2, 1, 1, "", "AzureBaseHook"]], "airflow.providers.microsoft.azure.hooks.base_azure.AzureBaseHook": [[2, 2, 1, "", "conn_name_attr"], [2, 2, 1, "", "conn_type"], [2, 2, 1, "", "default_conn_name"], [2, 3, 1, "", "get_conn"], [2, 3, 1, "", "get_connection_form_widgets"], [2, 3, 1, "", "get_ui_field_behaviour"], [2, 2, 1, "", "hook_name"]], "airflow.providers.microsoft.azure.hooks.batch": [[3, 1, 1, "", "AzureBatchHook"]], "airflow.providers.microsoft.azure.hooks.batch.AzureBatchHook": [[3, 3, 1, "", "add_single_task_to_job"], [3, 3, 1, "", "configure_job"], [3, 3, 1, "", "configure_pool"], [3, 3, 1, "", "configure_task"], [3, 2, 1, "", "conn_name_attr"], [3, 2, 1, "", "conn_type"], [3, 3, 1, "", "create_job"], [3, 3, 1, "", "create_pool"], [3, 2, 1, "", "default_conn_name"], [3, 3, 1, "", "get_conn"], [3, 3, 1, "", "get_connection_form_widgets"], [3, 3, 1, "", "get_ui_field_behaviour"], [3, 2, 1, "", "hook_name"], [3, 3, 1, "", "test_connection"], [3, 3, 1, "", "wait_for_all_node_state"], [3, 3, 1, "", "wait_for_job_tasks_to_complete"]], "airflow.providers.microsoft.azure.hooks.container_instance": [[4, 1, 1, "", "AzureContainerInstanceHook"]], "airflow.providers.microsoft.azure.hooks.container_instance.AzureContainerInstanceHook": [[4, 2, 1, "", "conn_name_attr"], [4, 2, 1, "", "conn_type"], [4, 3, 1, "", "create_or_update"], [4, 2, 1, "", "default_conn_name"], [4, 3, 1, "", "delete"], [4, 3, 1, "", "exists"], [4, 3, 1, "", "get_logs"], [4, 3, 1, "", "get_messages"], [4, 3, 1, "", "get_state"], [4, 3, 1, "", "get_state_exitcode_details"], [4, 2, 1, "", "hook_name"], [4, 3, 1, "", "test_connection"]], "airflow.providers.microsoft.azure.hooks.container_registry": [[5, 1, 1, "", 
"AzureContainerRegistryHook"]], "airflow.providers.microsoft.azure.hooks.container_registry.AzureContainerRegistryHook": [[5, 2, 1, "", "conn_name_attr"], [5, 2, 1, "", "conn_type"], [5, 2, 1, "", "default_conn_name"], [5, 3, 1, "", "get_conn"], [5, 3, 1, "", "get_ui_field_behaviour"], [5, 2, 1, "", "hook_name"]], "airflow.providers.microsoft.azure.hooks.container_volume": [[6, 1, 1, "", "AzureContainerVolumeHook"]], "airflow.providers.microsoft.azure.hooks.container_volume.AzureContainerVolumeHook": [[6, 2, 1, "", "conn_name_attr"], [6, 2, 1, "", "conn_type"], [6, 2, 1, "", "default_conn_name"], [6, 3, 1, "", "get_connection_form_widgets"], [6, 3, 1, "", "get_file_volume"], [6, 3, 1, "", "get_storagekey"], [6, 3, 1, "", "get_ui_field_behaviour"], [6, 2, 1, "", "hook_name"]], "airflow.providers.microsoft.azure.hooks.cosmos": [[7, 1, 1, "", "AzureCosmosDBHook"], [7, 4, 1, "", "get_collection_link"], [7, 4, 1, "", "get_database_link"], [7, 4, 1, "", "get_document_link"]], "airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook": [[7, 2, 1, "", "conn_name_attr"], [7, 2, 1, "", "conn_type"], [7, 3, 1, "", "create_collection"], [7, 3, 1, "", "create_database"], [7, 2, 1, "", "default_conn_name"], [7, 3, 1, "", "delete_collection"], [7, 3, 1, "", "delete_database"], [7, 3, 1, "", "delete_document"], [7, 3, 1, "", "does_collection_exist"], [7, 3, 1, "", "does_database_exist"], [7, 3, 1, "", "get_conn"], [7, 3, 1, "", "get_connection_form_widgets"], [7, 3, 1, "", "get_document"], [7, 3, 1, "", "get_documents"], [7, 3, 1, "", "get_ui_field_behaviour"], [7, 2, 1, "", "hook_name"], [7, 3, 1, "", "insert_documents"], [7, 3, 1, "", "test_connection"], [7, 3, 1, "", "upsert_document"]], "airflow.providers.microsoft.azure.hooks.data_factory": [[8, 1, 1, "", "AzureDataFactoryHook"], [8, 5, 1, "", "AzureDataFactoryPipelineRunException"], [8, 1, 1, "", "AzureDataFactoryPipelineRunStatus"], [8, 6, 1, "", "Credentials"], [8, 1, 1, "", "PipelineRunInfo"], [8, 4, 1, "", "get_field"], [8, 4, 1, "", "provide_targeted_factory"]], "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook": [[8, 3, 1, "", "cancel_pipeline_run"], [8, 3, 1, "", "cancel_trigger"], [8, 2, 1, "", "conn_name_attr"], [8, 2, 1, "", "conn_type"], [8, 3, 1, "", "create_dataflow"], [8, 3, 1, "", "create_dataset"], [8, 3, 1, "", "create_factory"], [8, 3, 1, "", "create_linked_service"], [8, 3, 1, "", "create_pipeline"], [8, 3, 1, "", "create_trigger"], [8, 2, 1, "", "default_conn_name"], [8, 3, 1, "", "delete_dataflow"], [8, 3, 1, "", "delete_dataset"], [8, 3, 1, "", "delete_factory"], [8, 3, 1, "", "delete_linked_service"], [8, 3, 1, "", "delete_pipeline"], [8, 3, 1, "", "delete_trigger"], [8, 3, 1, "", "get_conn"], [8, 3, 1, "", "get_connection_form_widgets"], [8, 3, 1, "", "get_dataflow"], [8, 3, 1, "", "get_dataset"], [8, 3, 1, "", "get_factory"], [8, 3, 1, "", "get_linked_service"], [8, 3, 1, "", "get_pipeline"], [8, 3, 1, "", "get_pipeline_run"], [8, 3, 1, "", "get_pipeline_run_status"], [8, 3, 1, "", "get_trigger"], [8, 3, 1, "", "get_ui_field_behaviour"], [8, 2, 1, "", "hook_name"], [8, 3, 1, "", "rerun_trigger"], [8, 3, 1, "", "run_pipeline"], [8, 3, 1, "", "start_trigger"], [8, 3, 1, "", "stop_trigger"], [8, 3, 1, "", "test_connection"], [8, 3, 1, "", "update_dataflow"], [8, 3, 1, "", "update_dataset"], [8, 3, 1, "", "update_factory"], [8, 3, 1, "", "update_linked_service"], [8, 3, 1, "", "update_pipeline"], [8, 3, 1, "", "update_trigger"], [8, 3, 1, "", "wait_for_pipeline_run_status"]], 
"airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryPipelineRunStatus": [[8, 2, 1, "", "CANCELING"], [8, 2, 1, "", "CANCELLED"], [8, 2, 1, "", "FAILED"], [8, 2, 1, "", "IN_PROGRESS"], [8, 2, 1, "", "QUEUED"], [8, 2, 1, "", "SUCCEEDED"], [8, 2, 1, "", "TERMINAL_STATUSES"]], "airflow.providers.microsoft.azure.hooks.data_factory.PipelineRunInfo": [[8, 2, 1, "", "factory_name"], [8, 2, 1, "", "resource_group_name"], [8, 2, 1, "", "run_id"]], "airflow.providers.microsoft.azure.hooks.data_lake": [[9, 1, 1, "", "AzureDataLakeHook"]], "airflow.providers.microsoft.azure.hooks.data_lake.AzureDataLakeHook": [[9, 3, 1, "", "check_for_file"], [9, 2, 1, "", "conn_name_attr"], [9, 2, 1, "", "conn_type"], [9, 2, 1, "", "default_conn_name"], [9, 3, 1, "", "download_file"], [9, 3, 1, "", "get_conn"], [9, 3, 1, "", "get_connection_form_widgets"], [9, 3, 1, "", "get_ui_field_behaviour"], [9, 2, 1, "", "hook_name"], [9, 3, 1, "", "list"], [9, 3, 1, "", "remove"], [9, 3, 1, "", "upload_file"]], "airflow.providers.microsoft.azure.hooks.fileshare": [[10, 1, 1, "", "AzureFileShareHook"]], "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook": [[10, 3, 1, "", "check_for_directory"], [10, 3, 1, "", "check_for_file"], [10, 2, 1, "", "conn_name_attr"], [10, 2, 1, "", "conn_type"], [10, 3, 1, "", "create_directory"], [10, 3, 1, "", "create_share"], [10, 2, 1, "", "default_conn_name"], [10, 3, 1, "", "delete_share"], [10, 3, 1, "", "get_conn"], [10, 3, 1, "", "get_connection_form_widgets"], [10, 3, 1, "", "get_file"], [10, 3, 1, "", "get_file_to_stream"], [10, 3, 1, "", "get_ui_field_behaviour"], [10, 2, 1, "", "hook_name"], [10, 3, 1, "", "list_directories_and_files"], [10, 3, 1, "", "list_files"], [10, 3, 1, "", "load_file"], [10, 3, 1, "", "load_stream"], [10, 3, 1, "", "load_string"], [10, 3, 1, "", "test_connection"]], "airflow.providers.microsoft.azure.hooks.synapse": [[12, 1, 1, "", "AzureSynapseHook"], [12, 1, 1, "", "AzureSynapseSparkBatchRunStatus"], [12, 6, 1, "", "Credentials"]], "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseHook": [[12, 3, 1, "", "cancel_job_run"], [12, 2, 1, "", "conn_name_attr"], [12, 2, 1, "", "conn_type"], [12, 2, 1, "", "default_conn_name"], [12, 3, 1, "", "get_conn"], [12, 3, 1, "", "get_connection_form_widgets"], [12, 3, 1, "", "get_job_run_status"], [12, 3, 1, "", "get_ui_field_behaviour"], [12, 2, 1, "", "hook_name"], [12, 3, 1, "", "run_spark_job"], [12, 3, 1, "", "wait_for_job_run_status"]], "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseSparkBatchRunStatus": [[12, 2, 1, "", "BUSY"], [12, 2, 1, "", "DEAD"], [12, 2, 1, "", "ERROR"], [12, 2, 1, "", "IDLE"], [12, 2, 1, "", "KILLED"], [12, 2, 1, "", "NOT_STARTED"], [12, 2, 1, "", "RUNNING"], [12, 2, 1, "", "SHUTTING_DOWN"], [12, 2, 1, "", "STARTING"], [12, 2, 1, "", "SUCCESS"], [12, 2, 1, "", "TERMINAL_STATUSES"]], "airflow.providers.microsoft.azure.hooks.wasb": [[13, 1, 1, "", "WasbHook"]], "airflow.providers.microsoft.azure.hooks.wasb.WasbHook": [[13, 3, 1, "", "check_for_blob"], [13, 3, 1, "", "check_for_prefix"], [13, 2, 1, "", "conn_name_attr"], [13, 2, 1, "", "conn_type"], [13, 3, 1, "", "create_container"], [13, 2, 1, "", "default_conn_name"], [13, 3, 1, "", "delete_blobs"], [13, 3, 1, "", "delete_container"], [13, 3, 1, "", "delete_file"], [13, 3, 1, "", "download"], [13, 3, 1, "", "get_blobs_list"], [13, 3, 1, "", "get_conn"], [13, 3, 1, "", "get_connection_form_widgets"], [13, 3, 1, "", "get_file"], [13, 3, 1, "", "get_ui_field_behaviour"], [13, 2, 1, "", 
"hook_name"], [13, 3, 1, "", "load_file"], [13, 3, 1, "", "load_string"], [13, 3, 1, "", "read_file"], [13, 3, 1, "", "test_connection"], [13, 3, 1, "", "upload"]], "airflow.providers.microsoft.azure.log": [[16, 0, 0, "-", "wasb_task_handler"]], "airflow.providers.microsoft.azure.log.wasb_task_handler": [[16, 1, 1, "", "WasbTaskHandler"]], "airflow.providers.microsoft.azure.log.wasb_task_handler.WasbTaskHandler": [[16, 3, 1, "", "close"], [16, 3, 1, "", "hook"], [16, 3, 1, "", "set_context"], [16, 3, 1, "", "wasb_log_exists"], [16, 3, 1, "", "wasb_read"], [16, 3, 1, "", "wasb_write"]], "airflow.providers.microsoft.azure.operators": [[17, 0, 0, "-", "adls"], [18, 0, 0, "-", "adx"], [19, 0, 0, "-", "asb"], [20, 0, 0, "-", "batch"], [21, 0, 0, "-", "container_instances"], [22, 0, 0, "-", "cosmos"], [23, 0, 0, "-", "data_factory"], [25, 0, 0, "-", "synapse"], [26, 0, 0, "-", "wasb_delete_blob"]], "airflow.providers.microsoft.azure.operators.adls": [[17, 1, 1, "", "ADLSDeleteOperator"], [17, 1, 1, "", "ADLSListOperator"]], "airflow.providers.microsoft.azure.operators.adls.ADLSDeleteOperator": [[17, 3, 1, "", "execute"], [17, 2, 1, "", "template_fields"], [17, 2, 1, "", "ui_color"]], "airflow.providers.microsoft.azure.operators.adls.ADLSListOperator": [[17, 3, 1, "", "execute"], [17, 2, 1, "", "template_fields"], [17, 2, 1, "", "ui_color"]], "airflow.providers.microsoft.azure.operators.adx": [[18, 1, 1, "", "AzureDataExplorerQueryOperator"]], "airflow.providers.microsoft.azure.operators.adx.AzureDataExplorerQueryOperator": [[18, 3, 1, "", "execute"], [18, 3, 1, "", "get_hook"], [18, 2, 1, "", "template_ext"], [18, 2, 1, "", "template_fields"], [18, 2, 1, "", "ui_color"]], "airflow.providers.microsoft.azure.operators.asb": [[19, 1, 1, "", "ASBReceiveSubscriptionMessageOperator"], [19, 1, 1, "", "AzureServiceBusCreateQueueOperator"], [19, 1, 1, "", "AzureServiceBusDeleteQueueOperator"], [19, 1, 1, "", "AzureServiceBusReceiveMessageOperator"], [19, 1, 1, "", "AzureServiceBusSendMessageOperator"], [19, 1, 1, "", "AzureServiceBusSubscriptionCreateOperator"], [19, 1, 1, "", "AzureServiceBusSubscriptionDeleteOperator"], [19, 1, 1, "", "AzureServiceBusTopicCreateOperator"], [19, 1, 1, "", "AzureServiceBusTopicDeleteOperator"], [19, 1, 1, "", "AzureServiceBusUpdateSubscriptionOperator"]], "airflow.providers.microsoft.azure.operators.asb.ASBReceiveSubscriptionMessageOperator": [[19, 3, 1, "", "execute"], [19, 2, 1, "", "template_fields"], [19, 2, 1, "", "ui_color"]], "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusCreateQueueOperator": [[19, 3, 1, "", "execute"], [19, 2, 1, "", "template_fields"], [19, 2, 1, "", "ui_color"]], "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusDeleteQueueOperator": [[19, 3, 1, "", "execute"], [19, 2, 1, "", "template_fields"], [19, 2, 1, "", "ui_color"]], "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusReceiveMessageOperator": [[19, 3, 1, "", "execute"], [19, 2, 1, "", "template_fields"], [19, 2, 1, "", "ui_color"]], "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSendMessageOperator": [[19, 3, 1, "", "execute"], [19, 2, 1, "", "template_fields"], [19, 2, 1, "", "ui_color"]], "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSubscriptionCreateOperator": [[19, 3, 1, "", "execute"], [19, 2, 1, "", "template_fields"], [19, 2, 1, "", "ui_color"]], "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSubscriptionDeleteOperator": [[19, 3, 1, "", "execute"], [19, 2, 1, "", "template_fields"], 
[19, 2, 1, "", "ui_color"]], "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusTopicCreateOperator": [[19, 3, 1, "", "execute"], [19, 2, 1, "", "template_fields"], [19, 2, 1, "", "ui_color"]], "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusTopicDeleteOperator": [[19, 3, 1, "", "execute"], [19, 2, 1, "", "template_fields"], [19, 2, 1, "", "ui_color"]], "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusUpdateSubscriptionOperator": [[19, 3, 1, "", "execute"], [19, 2, 1, "", "template_fields"], [19, 2, 1, "", "ui_color"]], "airflow.providers.microsoft.azure.operators.batch": [[20, 1, 1, "", "AzureBatchOperator"]], "airflow.providers.microsoft.azure.operators.batch.AzureBatchOperator": [[20, 3, 1, "", "clean_up"], [20, 3, 1, "", "execute"], [20, 3, 1, "", "get_hook"], [20, 3, 1, "", "on_kill"], [20, 2, 1, "", "template_fields"], [20, 2, 1, "", "ui_color"]], "airflow.providers.microsoft.azure.operators.container_instances": [[21, 1, 1, "", "AzureContainerInstancesOperator"], [21, 6, 1, "", "DEFAULT_CPU"], [21, 6, 1, "", "DEFAULT_ENVIRONMENT_VARIABLES"], [21, 6, 1, "", "DEFAULT_MEMORY_IN_GB"], [21, 6, 1, "", "DEFAULT_SECURED_VARIABLES"], [21, 6, 1, "", "DEFAULT_VOLUMES"], [21, 6, 1, "", "Volume"]], "airflow.providers.microsoft.azure.operators.container_instances.AzureContainerInstancesOperator": [[21, 3, 1, "", "execute"], [21, 3, 1, "", "on_kill"], [21, 2, 1, "", "template_fields"], [21, 2, 1, "", "template_fields_renderers"]], "airflow.providers.microsoft.azure.operators.cosmos": [[22, 1, 1, "", "AzureCosmosInsertDocumentOperator"]], "airflow.providers.microsoft.azure.operators.cosmos.AzureCosmosInsertDocumentOperator": [[22, 3, 1, "", "execute"], [22, 2, 1, "", "template_fields"], [22, 2, 1, "", "ui_color"]], "airflow.providers.microsoft.azure.operators.data_factory": [[23, 1, 1, "", "AzureDataFactoryPipelineRunLink"], [23, 1, 1, "", "AzureDataFactoryRunPipelineOperator"]], "airflow.providers.microsoft.azure.operators.data_factory.AzureDataFactoryPipelineRunLink": [[23, 3, 1, "", "get_link"], [23, 2, 1, "", "name"]], "airflow.providers.microsoft.azure.operators.data_factory.AzureDataFactoryRunPipelineOperator": [[23, 3, 1, "", "execute"], [23, 3, 1, "", "on_kill"], [23, 2, 1, "", "operator_extra_links"], [23, 2, 1, "", "template_fields"], [23, 2, 1, "", "template_fields_renderers"], [23, 2, 1, "", "ui_color"]], "airflow.providers.microsoft.azure.operators.synapse": [[25, 1, 1, "", "AzureSynapseRunSparkBatchOperator"]], "airflow.providers.microsoft.azure.operators.synapse.AzureSynapseRunSparkBatchOperator": [[25, 3, 1, "", "execute"], [25, 3, 1, "", "on_kill"], [25, 2, 1, "", "template_fields"], [25, 2, 1, "", "template_fields_renderers"], [25, 2, 1, "", "ui_color"]], "airflow.providers.microsoft.azure.operators.wasb_delete_blob": [[26, 1, 1, "", "WasbDeleteBlobOperator"]], "airflow.providers.microsoft.azure.operators.wasb_delete_blob.WasbDeleteBlobOperator": [[26, 3, 1, "", "execute"], [26, 2, 1, "", "template_fields"]], "airflow.providers.microsoft.azure.secrets": [[28, 0, 0, "-", "key_vault"]], "airflow.providers.microsoft.azure.secrets.key_vault": [[28, 1, 1, "", "AzureKeyVaultBackend"]], "airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend": [[28, 3, 1, "", "build_path"], [28, 3, 1, "", "client"], [28, 3, 1, "", "get_config"], [28, 3, 1, "", "get_conn_uri"], [28, 3, 1, "", "get_conn_value"], [28, 3, 1, "", "get_variable"]], "airflow.providers.microsoft.azure.sensors": [[29, 0, 0, "-", "cosmos"], [30, 0, 0, "-", 
"data_factory"], [32, 0, 0, "-", "wasb"]], "airflow.providers.microsoft.azure.sensors.cosmos": [[29, 1, 1, "", "AzureCosmosDocumentSensor"]], "airflow.providers.microsoft.azure.sensors.cosmos.AzureCosmosDocumentSensor": [[29, 3, 1, "", "poke"], [29, 2, 1, "", "template_fields"]], "airflow.providers.microsoft.azure.sensors.data_factory": [[30, 1, 1, "", "AzureDataFactoryPipelineRunStatusSensor"]], "airflow.providers.microsoft.azure.sensors.data_factory.AzureDataFactoryPipelineRunStatusSensor": [[30, 3, 1, "", "poke"], [30, 2, 1, "", "template_fields"], [30, 2, 1, "", "ui_color"]], "airflow.providers.microsoft.azure.sensors.wasb": [[32, 1, 1, "", "WasbBlobSensor"], [32, 1, 1, "", "WasbPrefixSensor"]], "airflow.providers.microsoft.azure.sensors.wasb.WasbBlobSensor": [[32, 3, 1, "", "poke"], [32, 2, 1, "", "template_fields"]], "airflow.providers.microsoft.azure.sensors.wasb.WasbPrefixSensor": [[32, 3, 1, "", "poke"], [32, 2, 1, "", "template_fields"]], "airflow.providers.microsoft.azure.transfers": [[33, 0, 0, "-", "azure_blob_to_gcs"], [35, 0, 0, "-", "local_to_adls"], [36, 0, 0, "-", "local_to_wasb"], [37, 0, 0, "-", "oracle_to_azure_data_lake"], [38, 0, 0, "-", "sftp_to_wasb"]], "airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs": [[33, 1, 1, "", "AzureBlobStorageToGCSOperator"]], "airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs.AzureBlobStorageToGCSOperator": [[33, 3, 1, "", "execute"], [33, 2, 1, "", "template_fields"]], "airflow.providers.microsoft.azure.transfers.local_to_adls": [[35, 1, 1, "", "LocalFilesystemToADLSOperator"], [35, 1, 1, "", "LocalToAzureDataLakeStorageOperator"]], "airflow.providers.microsoft.azure.transfers.local_to_adls.LocalFilesystemToADLSOperator": [[35, 3, 1, "", "execute"], [35, 2, 1, "", "template_fields"], [35, 2, 1, "", "ui_color"]], "airflow.providers.microsoft.azure.transfers.local_to_wasb": [[36, 1, 1, "", "LocalFilesystemToWasbOperator"]], "airflow.providers.microsoft.azure.transfers.local_to_wasb.LocalFilesystemToWasbOperator": [[36, 3, 1, "", "execute"], [36, 2, 1, "", "template_fields"]], "airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake": [[37, 1, 1, "", "OracleToAzureDataLakeOperator"]], "airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake.OracleToAzureDataLakeOperator": [[37, 3, 1, "", "execute"], [37, 2, 1, "", "template_fields"], [37, 2, 1, "", "template_fields_renderers"], [37, 2, 1, "", "ui_color"]], "airflow.providers.microsoft.azure.transfers.sftp_to_wasb": [[38, 1, 1, "", "SFTPToWasbOperator"], [38, 6, 1, "", "SftpFile"], [38, 6, 1, "", "WILDCARD"]], "airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SFTPToWasbOperator": [[38, 3, 1, "", "check_wildcards_limit"], [38, 3, 1, "", "copy_files_to_wasb"], [38, 3, 1, "", "delete_files"], [38, 3, 1, "", "dry_run"], [38, 3, 1, "", "execute"], [38, 3, 1, "", "get_full_path_blob"], [38, 3, 1, "", "get_sftp_files_map"], [38, 3, 1, "", "get_tree_behavior"], [38, 3, 1, "", "sftp_hook"], [38, 7, 1, "", "source_path_contains_wildcard"], [38, 2, 1, "", "template_fields"]], "airflow.providers.microsoft.azure.utils": [[39, 4, 1, "", "get_field"]], "tests.system.providers.microsoft": [[51, 0, 0, "-", "azure"]], "tests.system.providers.microsoft.azure": [[40, 0, 0, "-", "example_adf_run_pipeline"], [41, 0, 0, "-", "example_adls_delete"], [42, 0, 0, "-", "example_azure_blob_to_gcs"], [43, 0, 0, "-", "example_azure_container_instances"], [44, 0, 0, "-", "example_azure_cosmosdb"], [45, 0, 0, "-", "example_azure_service_bus"], [46, 0, 0, "-", 
"example_azure_synapse"], [47, 0, 0, "-", "example_fileshare"], [48, 0, 0, "-", "example_local_to_adls"], [49, 0, 0, "-", "example_local_to_wasb"], [50, 0, 0, "-", "example_sftp_to_wasb"]], "tests.system.providers.microsoft.azure.example_adf_run_pipeline": [[40, 6, 1, "", "DAG_ID"], [40, 6, 1, "", "ENV_ID"], [40, 6, 1, "", "begin"], [40, 6, 1, "", "test_run"]], "tests.system.providers.microsoft.azure.example_adls_delete": [[41, 6, 1, "", "DAG_ID"], [41, 6, 1, "", "ENV_ID"], [41, 6, 1, "", "LOCAL_FILE_PATH"], [41, 6, 1, "", "REMOTE_FILE_PATH"], [41, 6, 1, "", "test_run"], [41, 6, 1, "", "upload_file"]], "tests.system.providers.microsoft.azure.example_azure_blob_to_gcs": [[42, 6, 1, "", "AZURE_CONTAINER_NAME"], [42, 6, 1, "", "BLOB_NAME"], [42, 6, 1, "", "DAG_ID"], [42, 6, 1, "", "ENV_ID"], [42, 6, 1, "", "GCP_BUCKET_FILE_PATH"], [42, 6, 1, "", "GCP_BUCKET_NAME"], [42, 6, 1, "", "GCP_OBJECT_NAME"], [42, 6, 1, "", "test_run"], [42, 6, 1, "", "wait_for_blob"]], "tests.system.providers.microsoft.azure.example_azure_container_instances": [[43, 6, 1, "", "DAG_ID"], [43, 6, 1, "", "ENV_ID"], [43, 6, 1, "", "t1"], [43, 6, 1, "", "test_run"]], "tests.system.providers.microsoft.azure.example_azure_cosmosdb": [[44, 6, 1, "", "DAG_ID"], [44, 6, 1, "", "ENV_ID"], [44, 6, 1, "", "t1"], [44, 6, 1, "", "test_run"]], "tests.system.providers.microsoft.azure.example_azure_service_bus": [[45, 6, 1, "", "CLIENT_ID"], [45, 6, 1, "", "EXECUTION_TIMEOUT"], [45, 6, 1, "", "MESSAGE"], [45, 6, 1, "", "MESSAGE_LIST"], [45, 6, 1, "", "QUEUE_NAME"], [45, 6, 1, "", "SUBSCRIPTION_NAME"], [45, 6, 1, "", "TOPIC_NAME"], [45, 6, 1, "", "create_service_bus_queue"], [45, 6, 1, "", "test_run"]], "tests.system.providers.microsoft.azure.example_azure_synapse": [[46, 6, 1, "", "AIRFLOW_HOME"], [46, 6, 1, "", "EXECUTION_TIMEOUT"], [46, 6, 1, "", "SPARK_JOB_PAYLOAD"], [46, 6, 1, "", "default_args"], [46, 6, 1, "", "run_spark_job"], [46, 6, 1, "", "test_run"]], "tests.system.providers.microsoft.azure.example_fileshare": [[47, 6, 1, "", "DAG_ID"], [47, 6, 1, "", "DIRECTORY"], [47, 6, 1, "", "ENV_ID"], [47, 6, 1, "", "NAME"], [47, 4, 1, "", "create_fileshare"], [47, 4, 1, "", "delete_fileshare"], [47, 6, 1, "", "test_run"]], "tests.system.providers.microsoft.azure.example_local_to_adls": [[48, 6, 1, "", "DAG_ID"], [48, 6, 1, "", "ENV_ID"], [48, 6, 1, "", "LOCAL_FILE_PATH"], [48, 6, 1, "", "REMOTE_FILE_PATH"], [48, 6, 1, "", "test_run"], [48, 6, 1, "", "upload_file"]], "tests.system.providers.microsoft.azure.example_local_to_wasb": [[49, 6, 1, "", "DAG_ID"], [49, 6, 1, "", "ENV_ID"], [49, 6, 1, "", "PATH_TO_UPLOAD_FILE"], [49, 6, 1, "", "test_run"], [49, 6, 1, "", "upload"]], "tests.system.providers.microsoft.azure.example_sftp_to_wasb": [[50, 6, 1, "", "AZURE_CONTAINER_NAME"], [50, 6, 1, "", "BLOB_PREFIX"], [50, 6, 1, "", "DAG_ID"], [50, 6, 1, "", "ENV_ID"], [50, 6, 1, "", "FILE_COMPLETE_PATH"], [50, 6, 1, "", "LOCAL_FILE_PATH"], [50, 6, 1, "", "SAMPLE_FILENAME"], [50, 6, 1, "", "SFTP_FILE_COMPLETE_PATH"], [50, 6, 1, "", "SFTP_SRC_PATH"], [50, 4, 1, "", "delete_sftp_file"], [50, 6, 1, "", "test_run"], [50, 6, 1, "", "transfer_files_to_sftp_step"]]}, "objtypes": {"0": "py:module", "1": "py:class", "2": "py:attribute", "3": "py:method", "4": "py:function", "5": "py:exception", "6": "py:data", "7": "py:property"}, "objnames": {"0": ["py", "module", "Python module"], "1": ["py", "class", "Python class"], "2": ["py", "attribute", "Python attribute"], "3": ["py", "method", "Python method"], "4": ["py", "function", "Python function"], "5": 
["py", "exception", "Python exception"], "6": ["py", "data", "Python data"], "7": ["py", "property", "Python property"]}, "titleterms": {"airflow": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 52, 66], "provid": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 66], "microsoft": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 65, 66, 74], "azur": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 65, 66, 68, 69, 70, 71, 72, 73, 75, 76, 77], "hook": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13], "adx": [0, 18], "modul": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 13, 16, 17, 18, 19, 20, 21, 22, 23, 25, 26, 28, 29, 30, 32, 33, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50], "content": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 13, 16, 17, 18, 19, 20, 21, 22, 23, 25, 26, 28, 29, 30, 32, 33, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 66], "class": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 13, 16, 17, 18, 19, 20, 21, 22, 23, 25, 26, 28, 29, 30, 32, 33, 35, 36, 37, 38], "asb": [1, 19], "base_azur": 2, "batch": [3, 20, 59], "container_inst": [4, 21], "container_registri": 5, "container_volum": 6, "cosmo": [7, 22, 29, 61], "function": [7, 8, 39, 47, 50], "data_factori": [8, 23, 30], "attribut": [8, 12, 21, 38, 47, 50], "data_lak": 9, "fileshar": 10, "submodul": [11, 14, 15, 24, 27, 31, 34, 51], "synaps": [12, 25, 63, 73], "wasb": [13, 32], "subpackag": 14, "log": [15, 16, 68], "wasb_task_handl": 16, "oper": [17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 69, 70, 71, 72, 73, 74, 76], "adl": 17, "wasb_delete_blob": 26, "secret": [27, 28], "key_vault": 28, "sensor": [29, 30, 31, 32], "transfer": [33, 34, 35, 36, 37, 38, 72, 76], "azure_blob_to_gc": 33, "local_to_adl": 35, "local_to_wasb": 36, "oracle_to_azure_data_lak": 37, "sftp_to_wasb": 38, "util": 39, "test": [40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51], "system": [40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51], "example_adf_run_pipelin": 40, "example_adls_delet": 41, "example_azure_blob_to_gc": 42, "example_azure_container_inst": 43, "example_azure_cosmosdb": 44, "example_azure_service_bu": 45, "example_azure_synaps": 46, "example_fileshar": 47, "example_local_to_adl": 48, "example_local_to_wasb": 49, "example_sftp_to_wasb": 50, "packag": [52, 66, 67], "apach": [52, 66], "5": [52, 66], "0": [52, 66], "1": [52, 66], "4": [52, 66], "3": [52, 66], "2": [52, 66], "9": [52, 66], "8": [52, 66], "7": [52, 66], "6": [52, 66], "contain": [53, 60], "registri": 53, "connect": [53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 77], "authent": [53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 65], "default": [53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 65], "id": [53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 65], "configur": [53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 65], "data": [54, 55, 56, 69, 72, 75, 76], "factori": [54, 69], "exampl": 54, "lake": [55, 75], "explor": 56, "servic": [57, 71], 
"bu": [57, 71], "volum": 60, "file": 62, "share": 62, "type": 64, "blob": [65, 68, 72, 76], "storag": [65, 68, 70, 72, 76], "guid": 66, "refer": [66, 69, 70, 71, 73, 75, 77], "resourc": 66, "commit": 66, "instal": [66, 67], "requir": 66, "cross": 66, "depend": 66, "download": 66, "offici": 66, "changelog": 66, "bug": 66, "fix": 66, "break": 66, "chang": 66, "misc": 66, "featur": 66, "other": 66, "main": 66, "from": [67, 72, 75, 76], "sourc": [67, 76], "releas": 67, "integr": 67, "verifi": 67, "pypi": 67, "write": 68, "azuredatafactoryrunpipelineoper": 69, "datalak": 70, "prerequisit": [70, 75], "task": [70, 75], "adlsdeleteoper": 70, "queue": 71, "creat": 71, "send": 71, "messag": 71, "receiv": 71, "delet": 71, "topic": 71, "subscript": 71, "updat": 71, "befor": [72, 76], "you": [72, 76], "begin": [72, 76], "googl": 72, "cloud": 72, "azuresynapserunsparkbatchoper": 73, "upload": 75, "local": 75, "filesystem": 75, "localfilesystemtoadlsoper": 75, "sftp": 76, "path": 76, "kei": 77, "vault": 77, "backend": 77, "option": 77, "lookup": 77, "store": 77, "retriev": 77, "variabl": 77}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 8, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.viewcode": 1, "sphinx.ext.intersphinx": 1, "sphinx": 57}, "alltitles": {"airflow.providers.microsoft.azure.hooks.adx": [[0, "module-airflow.providers.microsoft.azure.hooks.adx"]], "Module Contents": [[0, "module-contents"], [1, "module-contents"], [2, "module-contents"], [3, "module-contents"], [4, "module-contents"], [5, "module-contents"], [6, "module-contents"], [7, "module-contents"], [8, "module-contents"], [9, "module-contents"], [10, "module-contents"], [12, "module-contents"], [13, "module-contents"], [16, "module-contents"], [17, "module-contents"], [18, "module-contents"], [19, "module-contents"], [20, "module-contents"], [21, "module-contents"], [22, "module-contents"], [23, "module-contents"], [25, "module-contents"], [26, "module-contents"], [28, "module-contents"], [29, "module-contents"], [30, "module-contents"], [32, "module-contents"], [33, "module-contents"], [35, "module-contents"], [36, "module-contents"], [37, "module-contents"], [38, "module-contents"], [39, "module-contents"], [40, "module-contents"], [41, "module-contents"], [42, "module-contents"], [43, "module-contents"], [44, "module-contents"], [45, "module-contents"], [46, "module-contents"], [47, "module-contents"], [48, "module-contents"], [49, "module-contents"], [50, "module-contents"]], "Classes": [[0, "classes"], [1, "classes"], [2, "classes"], [3, "classes"], [4, "classes"], [5, "classes"], [6, "classes"], [7, "classes"], [8, "classes"], [9, "classes"], [10, "classes"], [12, "classes"], [13, "classes"], [16, "classes"], [17, "classes"], [18, "classes"], [19, "classes"], [20, "classes"], [21, "classes"], [22, "classes"], [23, "classes"], [25, "classes"], [26, "classes"], [28, "classes"], [29, "classes"], [30, "classes"], [32, "classes"], [33, "classes"], [35, "classes"], [36, "classes"], [37, "classes"], [38, "classes"]], "airflow.providers.microsoft.azure.hooks.asb": [[1, "module-airflow.providers.microsoft.azure.hooks.asb"]], "airflow.providers.microsoft.azure.hooks.base_azure": [[2, "module-airflow.providers.microsoft.azure.hooks.base_azure"]], "airflow.providers.microsoft.azure.hooks.batch": [[3, 
"module-airflow.providers.microsoft.azure.hooks.batch"]], "airflow.providers.microsoft.azure.hooks.container_instance": [[4, "module-airflow.providers.microsoft.azure.hooks.container_instance"]], "airflow.providers.microsoft.azure.hooks.container_registry": [[5, "module-airflow.providers.microsoft.azure.hooks.container_registry"]], "airflow.providers.microsoft.azure.hooks.container_volume": [[6, "module-airflow.providers.microsoft.azure.hooks.container_volume"]], "airflow.providers.microsoft.azure.hooks.cosmos": [[7, "module-airflow.providers.microsoft.azure.hooks.cosmos"]], "Functions": [[7, "functions"], [8, "functions"], [39, "functions"], [47, "functions"], [50, "functions"]], "airflow.providers.microsoft.azure.hooks.data_factory": [[8, "module-airflow.providers.microsoft.azure.hooks.data_factory"]], "Attributes": [[8, "attributes"], [12, "attributes"], [21, "attributes"], [38, "attributes"], [47, "attributes"], [50, "attributes"]], "airflow.providers.microsoft.azure.hooks.data_lake": [[9, "module-airflow.providers.microsoft.azure.hooks.data_lake"]], "airflow.providers.microsoft.azure.hooks.fileshare": [[10, "module-airflow.providers.microsoft.azure.hooks.fileshare"]], "airflow.providers.microsoft.azure.hooks": [[11, "module-airflow.providers.microsoft.azure.hooks"]], "Submodules": [[11, "submodules"], [14, "submodules"], [15, "submodules"], [24, "submodules"], [27, "submodules"], [31, "submodules"], [34, "submodules"], [51, "submodules"]], "airflow.providers.microsoft.azure.hooks.synapse": [[12, "module-airflow.providers.microsoft.azure.hooks.synapse"]], "airflow.providers.microsoft.azure.hooks.wasb": [[13, "module-airflow.providers.microsoft.azure.hooks.wasb"]], "airflow.providers.microsoft.azure": [[14, "module-airflow.providers.microsoft.azure"]], "Subpackages": [[14, "subpackages"]], "airflow.providers.microsoft.azure.log": [[15, "module-airflow.providers.microsoft.azure.log"]], "airflow.providers.microsoft.azure.log.wasb_task_handler": [[16, "module-airflow.providers.microsoft.azure.log.wasb_task_handler"]], "airflow.providers.microsoft.azure.operators.adls": [[17, "module-airflow.providers.microsoft.azure.operators.adls"]], "airflow.providers.microsoft.azure.operators.adx": [[18, "module-airflow.providers.microsoft.azure.operators.adx"]], "airflow.providers.microsoft.azure.operators.asb": [[19, "module-airflow.providers.microsoft.azure.operators.asb"]], "airflow.providers.microsoft.azure.operators.batch": [[20, "module-airflow.providers.microsoft.azure.operators.batch"]], "airflow.providers.microsoft.azure.operators.container_instances": [[21, "module-airflow.providers.microsoft.azure.operators.container_instances"]], "airflow.providers.microsoft.azure.operators.cosmos": [[22, "module-airflow.providers.microsoft.azure.operators.cosmos"]], "airflow.providers.microsoft.azure.operators.data_factory": [[23, "module-airflow.providers.microsoft.azure.operators.data_factory"]], "airflow.providers.microsoft.azure.operators": [[24, "module-airflow.providers.microsoft.azure.operators"]], "airflow.providers.microsoft.azure.operators.synapse": [[25, "module-airflow.providers.microsoft.azure.operators.synapse"]], "airflow.providers.microsoft.azure.operators.wasb_delete_blob": [[26, "module-airflow.providers.microsoft.azure.operators.wasb_delete_blob"]], "airflow.providers.microsoft.azure.secrets": [[27, "module-airflow.providers.microsoft.azure.secrets"]], "airflow.providers.microsoft.azure.secrets.key_vault": [[28, "module-airflow.providers.microsoft.azure.secrets.key_vault"]], 
"airflow.providers.microsoft.azure.sensors.cosmos": [[29, "module-airflow.providers.microsoft.azure.sensors.cosmos"]], "airflow.providers.microsoft.azure.sensors.data_factory": [[30, "module-airflow.providers.microsoft.azure.sensors.data_factory"]], "airflow.providers.microsoft.azure.sensors": [[31, "module-airflow.providers.microsoft.azure.sensors"]], "airflow.providers.microsoft.azure.sensors.wasb": [[32, "module-airflow.providers.microsoft.azure.sensors.wasb"]], "airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs": [[33, "module-airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs"]], "airflow.providers.microsoft.azure.transfers": [[34, "module-airflow.providers.microsoft.azure.transfers"]], "airflow.providers.microsoft.azure.transfers.local_to_adls": [[35, "module-airflow.providers.microsoft.azure.transfers.local_to_adls"]], "airflow.providers.microsoft.azure.transfers.local_to_wasb": [[36, "module-airflow.providers.microsoft.azure.transfers.local_to_wasb"]], "airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake": [[37, "module-airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake"]], "airflow.providers.microsoft.azure.transfers.sftp_to_wasb": [[38, "module-airflow.providers.microsoft.azure.transfers.sftp_to_wasb"]], "airflow.providers.microsoft.azure.utils": [[39, "module-airflow.providers.microsoft.azure.utils"]], "tests.system.providers.microsoft.azure.example_adf_run_pipeline": [[40, "module-tests.system.providers.microsoft.azure.example_adf_run_pipeline"]], "tests.system.providers.microsoft.azure.example_adls_delete": [[41, "module-tests.system.providers.microsoft.azure.example_adls_delete"]], "tests.system.providers.microsoft.azure.example_azure_blob_to_gcs": [[42, "module-tests.system.providers.microsoft.azure.example_azure_blob_to_gcs"]], "tests.system.providers.microsoft.azure.example_azure_container_instances": [[43, "module-tests.system.providers.microsoft.azure.example_azure_container_instances"]], "tests.system.providers.microsoft.azure.example_azure_cosmosdb": [[44, "module-tests.system.providers.microsoft.azure.example_azure_cosmosdb"]], "tests.system.providers.microsoft.azure.example_azure_service_bus": [[45, "module-tests.system.providers.microsoft.azure.example_azure_service_bus"]], "tests.system.providers.microsoft.azure.example_azure_synapse": [[46, "module-tests.system.providers.microsoft.azure.example_azure_synapse"]], "tests.system.providers.microsoft.azure.example_fileshare": [[47, "module-tests.system.providers.microsoft.azure.example_fileshare"]], "tests.system.providers.microsoft.azure.example_local_to_adls": [[48, "module-tests.system.providers.microsoft.azure.example_local_to_adls"]], "tests.system.providers.microsoft.azure.example_local_to_wasb": [[49, "module-tests.system.providers.microsoft.azure.example_local_to_wasb"]], "tests.system.providers.microsoft.azure.example_sftp_to_wasb": [[50, "module-tests.system.providers.microsoft.azure.example_sftp_to_wasb"]], "tests.system.providers.microsoft.azure": [[51, "module-tests.system.providers.microsoft.azure"]], "Package apache-airflow-providers-microsoft-azure": [[52, "package-apache-airflow-providers-microsoft-azure"], [66, "package-apache-airflow-providers-microsoft-azure"]], "5.0.1": [[52, "id1"], [66, "id1"]], "5.0.0": [[52, "id2"], [66, "id2"]], "4.3.0": [[52, "id3"], [66, "id4"]], "4.2.0": [[52, "id5"], [66, "id7"]], "4.1.0": [[52, "id6"], [66, "id9"]], "4.0.0": [[52, "id7"], [66, "id12"]], "3.9.0": [[52, "id8"], [66, "id16"]], "3.8.0": [[52, "id9"], 
[66, "id19"]], "3.7.2": [[52, "id10"], [66, "id22"]], "3.7.1": [[52, "id11"], [66, "id24"]], "3.7.0": [[52, "id12"], [66, "id26"]], "3.6.0": [[52, "id13"], [66, "id29"]], "3.5.0": [[52, "id14"], [66, "id32"]], "3.4.0": [[52, "id16"], [66, "id34"]], "3.3.0": [[52, "id17"], [66, "id37"]], "3.2.0": [[52, "id18"], [66, "id40"]], "3.1.1": [[52, "id19"], [66, "id43"]], "3.1.0": [[52, "id20"], [66, "id45"]], "3.0.0": [[52, "id21"], [66, "id48"]], "2.0.0": [[52, "id22"], [66, "id52"]], "1.3.0": [[52, "id23"], [66, "id56"]], "1.2.0": [[52, "id24"], [66, "id59"]], "1.1.0": [[52, "id25"], [66, "id62"]], "1.0.0": [[52, "id26"], [66, "id64"]], "Microsoft Azure Container Registry Connection": [[53, "microsoft-azure-container-registry-connection"]], "Authenticating to Azure Container Registry": [[53, "authenticating-to-azure-container-registry"]], "Default Connection IDs": [[53, "default-connection-ids"], [54, "default-connection-ids"], [55, "default-connection-ids"], [56, "default-connection-ids"], [57, "default-connection-ids"], [58, "default-connection-ids"], [59, "default-connection-ids"], [60, "default-connection-ids"], [61, "default-connection-ids"], [62, "default-connection-ids"], [63, "default-connection-ids"], [65, "default-connection-ids"]], "Configuring the Connection": [[53, "configuring-the-connection"], [54, "configuring-the-connection"], [55, "configuring-the-connection"], [56, "configuring-the-connection"], [57, "configuring-the-connection"], [58, "configuring-the-connection"], [59, "configuring-the-connection"], [60, "configuring-the-connection"], [61, "configuring-the-connection"], [62, "configuring-the-connection"], [63, "configuring-the-connection"], [65, "configuring-the-connection"]], "Microsoft Azure Data Factory": [[54, "microsoft-azure-data-factory"]], "Authenticating to Azure Data Factory": [[54, "authenticating-to-azure-data-factory"]], "Examples": [[54, "examples"]], "Microsoft Azure Data Lake Connection": [[55, "microsoft-azure-data-lake-connection"]], "Authenticating to Azure Data Lake": [[55, "authenticating-to-azure-data-lake"]], "Microsoft Azure Data Explorer": [[56, "microsoft-azure-data-explorer"]], "Authenticating to Azure Data Explorer": [[56, "authenticating-to-azure-data-explorer"]], "Microsoft Azure Service Bus": [[57, "microsoft-azure-service-bus"]], "Authenticating to Azure Service Bus": [[57, "authenticating-to-azure-service-bus"]], "Microsoft Azure Connection": [[58, "microsoft-azure-connection"]], "Authenticating to Azure": [[58, "authenticating-to-azure"], [61, "authenticating-to-azure"]], "Microsoft Azure Batch": [[59, "microsoft-azure-batch"]], "Authenticating to Azure Batch": [[59, "authenticating-to-azure-batch"]], "Microsoft Azure Container Volume Connection": [[60, "microsoft-azure-container-volume-connection"]], "Authenticating to Azure Container Volume": [[60, "authenticating-to-azure-container-volume"]], "Microsoft Azure Cosmos": [[61, "microsoft-azure-cosmos"]], "Microsoft Azure File Share Connection": [[62, "microsoft-azure-file-share-connection"]], "Authenticating to Azure File Share": [[62, "authenticating-to-azure-file-share"]], "Microsoft Azure Synapse": [[63, "microsoft-azure-synapse"]], "Authenticating to Azure Synapse": [[63, "authenticating-to-azure-synapse"]], "Connection Types": [[64, "connection-types"]], "Microsoft Azure Blob Storage Connection": [[65, "microsoft-azure-blob-storage-connection"]], "Authenticating to Azure Blob Storage": [[65, "authenticating-to-azure-blob-storage"]], "apache-airflow-providers-microsoft-azure": [[66, 
"apache-airflow-providers-microsoft-azure"]], "Content": [[66, "content"]], "Guides": [[66, null]], "References": [[66, null]], "Resources": [[66, null]], "Commits": [[66, null]], "Provider package": [[66, "provider-package"]], "Installation": [[66, "installation"]], "Requirements": [[66, "requirements"]], "Cross provider package dependencies": [[66, "cross-provider-package-dependencies"]], "Downloading official packages": [[66, "downloading-official-packages"]], "Changelog": [[66, "changelog"]], "Bug Fixes": [[66, "bug-fixes"], [66, "id3"], [66, "id6"], [66, "id11"], [66, "id23"], [66, "id36"], [66, "id39"], [66, "id42"], [66, "id47"], [66, "id51"]], "Breaking changes": [[66, "breaking-changes"], [66, "id13"], [66, "id49"], [66, "id53"]], "Misc": [[66, "misc"], [66, "id15"], [66, "id18"], [66, "id21"], [66, "id25"], [66, "id28"], [66, "id31"], [66, "id44"]], "Features": [[66, "features"], [66, "id5"], [66, "id8"], [66, "id10"], [66, "id14"], [66, "id17"], [66, "id20"], [66, "id27"], [66, "id30"], [66, "id33"], [66, "id35"], [66, "id38"], [66, "id41"], [66, "id46"], [66, "id50"], [66, "id54"], [66, "id57"], [66, "id60"], [66, "id63"]], "Other": [[66, "other"]], "Main": [[66, "main"]], "Bug fixes": [[66, "id55"], [66, "id58"], [66, "id61"]], "Installing from sources": [[67, "installing-from-sources"]], "Released packages": [[67, "released-packages"]], "Release integrity": [[67, "release-integrity"]], "Verifying PyPI releases": [[67, "verifying-pypi-releases"]], "Writing logs to Azure Blob Storage": [[68, "writing-logs-to-azure-blob-storage"]], "Azure Data Factory Operators": [[69, "azure-data-factory-operators"]], "AzureDataFactoryRunPipelineOperator": [[69, "azuredatafactoryrunpipelineoperator"]], "Reference": [[69, "reference"], [70, "reference"], [71, "reference"], [73, "reference"], [75, "reference"], [77, "reference"]], "Azure DataLake Storage Operators": [[70, "azure-datalake-storage-operators"]], "Prerequisite Tasks": [[70, "prerequisite-tasks"], [75, "prerequisite-tasks"]], "ADLSDeleteOperator": [[70, "adlsdeleteoperator"]], "Azure Service Bus Operators": [[71, "azure-service-bus-operators"]], "Azure Service Bus Queue Operators": [[71, "azure-service-bus-queue-operators"]], "Create Azure Service Bus Queue": [[71, "create-azure-service-bus-queue"]], "Send Message to Azure Service Bus Queue": [[71, "send-message-to-azure-service-bus-queue"]], "Receive Message Azure Service Bus Queue": [[71, "receive-message-azure-service-bus-queue"]], "Delete Azure Service Bus Queue": [[71, "delete-azure-service-bus-queue"]], "Azure Service Bus Topic Operators": [[71, "azure-service-bus-topic-operators"]], "Create Azure Service Bus Topic": [[71, "create-azure-service-bus-topic"]], "Delete Azure Service Bus Topic": [[71, "delete-azure-service-bus-topic"]], "Azure Service Bus Subscription Operators": [[71, "azure-service-bus-subscription-operators"]], "Create Azure Service Bus Subscription": [[71, "create-azure-service-bus-subscription"]], "Update Azure Service Bus Subscription": [[71, "update-azure-service-bus-subscription"]], "Receive Azure Service Bus Subscription Message": [[71, "receive-azure-service-bus-subscription-message"]], "Delete Azure Service Bus Subscription": [[71, "delete-azure-service-bus-subscription"]], "Azure Blob Storage Transfer Operator": [[72, "azure-blob-storage-transfer-operator"], [76, "azure-blob-storage-transfer-operator"]], "Before you begin": [[72, "before-you-begin"], [76, "before-you-begin"]], "Transfer Data from Blob Storage to Google Cloud Storage": [[72, 
"transfer-data-from-blob-storage-to-google-cloud-storage"]], "Azure Synapse Operators": [[73, "azure-synapse-operators"]], "AzureSynapseRunSparkBatchOperator": [[73, "azuresynapserunsparkbatchoperator"]], "Microsoft Operators": [[74, "microsoft-operators"]], "Upload data from Local Filesystem to Azure Data Lake": [[75, "upload-data-from-local-filesystem-to-azure-data-lake"]], "LocalFilesystemToADLSOperator": [[75, "localfilesystemtoadlsoperator"]], "Transfer Data from SFTP Source Path to Blob Storage": [[76, "transfer-data-from-sftp-source-path-to-blob-storage"]], "Azure Key Vault Backend": [[77, "azure-key-vault-backend"]], "Optional lookup": [[77, "optional-lookup"]], "Storing and Retrieving Connections": [[77, "storing-and-retrieving-connections"]], "Storing and Retrieving Variables": [[77, "storing-and-retrieving-variables"]]}, "indexentries": {"azuredataexplorerhook (class in airflow.providers.microsoft.azure.hooks.adx)": [[0, "airflow.providers.microsoft.azure.hooks.adx.AzureDataExplorerHook"]], "airflow.providers.microsoft.azure.hooks.adx": [[0, "module-airflow.providers.microsoft.azure.hooks.adx"]], "conn_name_attr (airflow.providers.microsoft.azure.hooks.adx.azuredataexplorerhook attribute)": [[0, "airflow.providers.microsoft.azure.hooks.adx.AzureDataExplorerHook.conn_name_attr"]], "conn_type (airflow.providers.microsoft.azure.hooks.adx.azuredataexplorerhook attribute)": [[0, "airflow.providers.microsoft.azure.hooks.adx.AzureDataExplorerHook.conn_type"]], "default_conn_name (airflow.providers.microsoft.azure.hooks.adx.azuredataexplorerhook attribute)": [[0, "airflow.providers.microsoft.azure.hooks.adx.AzureDataExplorerHook.default_conn_name"]], "get_conn() (airflow.providers.microsoft.azure.hooks.adx.azuredataexplorerhook method)": [[0, "airflow.providers.microsoft.azure.hooks.adx.AzureDataExplorerHook.get_conn"]], "get_connection_form_widgets() (airflow.providers.microsoft.azure.hooks.adx.azuredataexplorerhook static method)": [[0, "airflow.providers.microsoft.azure.hooks.adx.AzureDataExplorerHook.get_connection_form_widgets"]], "get_ui_field_behaviour() (airflow.providers.microsoft.azure.hooks.adx.azuredataexplorerhook static method)": [[0, "airflow.providers.microsoft.azure.hooks.adx.AzureDataExplorerHook.get_ui_field_behaviour"]], "hook_name (airflow.providers.microsoft.azure.hooks.adx.azuredataexplorerhook attribute)": [[0, "airflow.providers.microsoft.azure.hooks.adx.AzureDataExplorerHook.hook_name"]], "module": [[0, "module-airflow.providers.microsoft.azure.hooks.adx"], [1, "module-airflow.providers.microsoft.azure.hooks.asb"], [2, "module-airflow.providers.microsoft.azure.hooks.base_azure"], [3, "module-airflow.providers.microsoft.azure.hooks.batch"], [4, "module-airflow.providers.microsoft.azure.hooks.container_instance"], [5, "module-airflow.providers.microsoft.azure.hooks.container_registry"], [6, "module-airflow.providers.microsoft.azure.hooks.container_volume"], [7, "module-airflow.providers.microsoft.azure.hooks.cosmos"], [8, "module-airflow.providers.microsoft.azure.hooks.data_factory"], [9, "module-airflow.providers.microsoft.azure.hooks.data_lake"], [10, "module-airflow.providers.microsoft.azure.hooks.fileshare"], [11, "module-airflow.providers.microsoft.azure.hooks"], [12, "module-airflow.providers.microsoft.azure.hooks.synapse"], [13, "module-airflow.providers.microsoft.azure.hooks.wasb"], [14, "module-airflow.providers.microsoft.azure"], [15, "module-airflow.providers.microsoft.azure.log"], [16, "module-airflow.providers.microsoft.azure.log.wasb_task_handler"], 
[17, "module-airflow.providers.microsoft.azure.operators.adls"], [18, "module-airflow.providers.microsoft.azure.operators.adx"], [19, "module-airflow.providers.microsoft.azure.operators.asb"], [20, "module-airflow.providers.microsoft.azure.operators.batch"], [21, "module-airflow.providers.microsoft.azure.operators.container_instances"], [22, "module-airflow.providers.microsoft.azure.operators.cosmos"], [23, "module-airflow.providers.microsoft.azure.operators.data_factory"], [24, "module-airflow.providers.microsoft.azure.operators"], [25, "module-airflow.providers.microsoft.azure.operators.synapse"], [26, "module-airflow.providers.microsoft.azure.operators.wasb_delete_blob"], [27, "module-airflow.providers.microsoft.azure.secrets"], [28, "module-airflow.providers.microsoft.azure.secrets.key_vault"], [29, "module-airflow.providers.microsoft.azure.sensors.cosmos"], [30, "module-airflow.providers.microsoft.azure.sensors.data_factory"], [31, "module-airflow.providers.microsoft.azure.sensors"], [32, "module-airflow.providers.microsoft.azure.sensors.wasb"], [33, "module-airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs"], [34, "module-airflow.providers.microsoft.azure.transfers"], [35, "module-airflow.providers.microsoft.azure.transfers.local_to_adls"], [36, "module-airflow.providers.microsoft.azure.transfers.local_to_wasb"], [37, "module-airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake"], [38, "module-airflow.providers.microsoft.azure.transfers.sftp_to_wasb"], [39, "module-airflow.providers.microsoft.azure.utils"], [40, "module-tests.system.providers.microsoft.azure.example_adf_run_pipeline"], [41, "module-tests.system.providers.microsoft.azure.example_adls_delete"], [42, "module-tests.system.providers.microsoft.azure.example_azure_blob_to_gcs"], [43, "module-tests.system.providers.microsoft.azure.example_azure_container_instances"], [44, "module-tests.system.providers.microsoft.azure.example_azure_cosmosdb"], [45, "module-tests.system.providers.microsoft.azure.example_azure_service_bus"], [46, "module-tests.system.providers.microsoft.azure.example_azure_synapse"], [47, "module-tests.system.providers.microsoft.azure.example_fileshare"], [48, "module-tests.system.providers.microsoft.azure.example_local_to_adls"], [49, "module-tests.system.providers.microsoft.azure.example_local_to_wasb"], [50, "module-tests.system.providers.microsoft.azure.example_sftp_to_wasb"], [51, "module-tests.system.providers.microsoft.azure"]], "run_query() (airflow.providers.microsoft.azure.hooks.adx.azuredataexplorerhook method)": [[0, "airflow.providers.microsoft.azure.hooks.adx.AzureDataExplorerHook.run_query"]], "adminclienthook (class in airflow.providers.microsoft.azure.hooks.asb)": [[1, "airflow.providers.microsoft.azure.hooks.asb.AdminClientHook"]], "baseazureservicebushook (class in airflow.providers.microsoft.azure.hooks.asb)": [[1, "airflow.providers.microsoft.azure.hooks.asb.BaseAzureServiceBusHook"]], "messagehook (class in airflow.providers.microsoft.azure.hooks.asb)": [[1, "airflow.providers.microsoft.azure.hooks.asb.MessageHook"]], "airflow.providers.microsoft.azure.hooks.asb": [[1, "module-airflow.providers.microsoft.azure.hooks.asb"]], "conn_name_attr (airflow.providers.microsoft.azure.hooks.asb.baseazureservicebushook attribute)": [[1, "airflow.providers.microsoft.azure.hooks.asb.BaseAzureServiceBusHook.conn_name_attr"]], "conn_type (airflow.providers.microsoft.azure.hooks.asb.baseazureservicebushook attribute)": [[1, 
"airflow.providers.microsoft.azure.hooks.asb.BaseAzureServiceBusHook.conn_type"]], "create_queue() (airflow.providers.microsoft.azure.hooks.asb.adminclienthook method)": [[1, "airflow.providers.microsoft.azure.hooks.asb.AdminClientHook.create_queue"]], "default_conn_name (airflow.providers.microsoft.azure.hooks.asb.baseazureservicebushook attribute)": [[1, "airflow.providers.microsoft.azure.hooks.asb.BaseAzureServiceBusHook.default_conn_name"]], "delete_queue() (airflow.providers.microsoft.azure.hooks.asb.adminclienthook method)": [[1, "airflow.providers.microsoft.azure.hooks.asb.AdminClientHook.delete_queue"]], "delete_subscription() (airflow.providers.microsoft.azure.hooks.asb.adminclienthook method)": [[1, "airflow.providers.microsoft.azure.hooks.asb.AdminClientHook.delete_subscription"]], "get_conn() (airflow.providers.microsoft.azure.hooks.asb.adminclienthook method)": [[1, "airflow.providers.microsoft.azure.hooks.asb.AdminClientHook.get_conn"]], "get_conn() (airflow.providers.microsoft.azure.hooks.asb.baseazureservicebushook method)": [[1, "airflow.providers.microsoft.azure.hooks.asb.BaseAzureServiceBusHook.get_conn"]], "get_conn() (airflow.providers.microsoft.azure.hooks.asb.messagehook method)": [[1, "airflow.providers.microsoft.azure.hooks.asb.MessageHook.get_conn"]], "get_ui_field_behaviour() (airflow.providers.microsoft.azure.hooks.asb.baseazureservicebushook static method)": [[1, "airflow.providers.microsoft.azure.hooks.asb.BaseAzureServiceBusHook.get_ui_field_behaviour"]], "hook_name (airflow.providers.microsoft.azure.hooks.asb.baseazureservicebushook attribute)": [[1, "airflow.providers.microsoft.azure.hooks.asb.BaseAzureServiceBusHook.hook_name"]], "receive_message() (airflow.providers.microsoft.azure.hooks.asb.messagehook method)": [[1, "airflow.providers.microsoft.azure.hooks.asb.MessageHook.receive_message"]], "receive_subscription_message() (airflow.providers.microsoft.azure.hooks.asb.messagehook method)": [[1, "airflow.providers.microsoft.azure.hooks.asb.MessageHook.receive_subscription_message"]], "send_batch_message() (airflow.providers.microsoft.azure.hooks.asb.messagehook static method)": [[1, "airflow.providers.microsoft.azure.hooks.asb.MessageHook.send_batch_message"]], "send_list_messages() (airflow.providers.microsoft.azure.hooks.asb.messagehook static method)": [[1, "airflow.providers.microsoft.azure.hooks.asb.MessageHook.send_list_messages"]], "send_message() (airflow.providers.microsoft.azure.hooks.asb.messagehook method)": [[1, "airflow.providers.microsoft.azure.hooks.asb.MessageHook.send_message"]], "azurebasehook (class in airflow.providers.microsoft.azure.hooks.base_azure)": [[2, "airflow.providers.microsoft.azure.hooks.base_azure.AzureBaseHook"]], "airflow.providers.microsoft.azure.hooks.base_azure": [[2, "module-airflow.providers.microsoft.azure.hooks.base_azure"]], "conn_name_attr (airflow.providers.microsoft.azure.hooks.base_azure.azurebasehook attribute)": [[2, "airflow.providers.microsoft.azure.hooks.base_azure.AzureBaseHook.conn_name_attr"]], "conn_type (airflow.providers.microsoft.azure.hooks.base_azure.azurebasehook attribute)": [[2, "airflow.providers.microsoft.azure.hooks.base_azure.AzureBaseHook.conn_type"]], "default_conn_name (airflow.providers.microsoft.azure.hooks.base_azure.azurebasehook attribute)": [[2, "airflow.providers.microsoft.azure.hooks.base_azure.AzureBaseHook.default_conn_name"]], "get_conn() (airflow.providers.microsoft.azure.hooks.base_azure.azurebasehook method)": [[2, 
"airflow.providers.microsoft.azure.hooks.base_azure.AzureBaseHook.get_conn"]], "get_connection_form_widgets() (airflow.providers.microsoft.azure.hooks.base_azure.azurebasehook static method)": [[2, "airflow.providers.microsoft.azure.hooks.base_azure.AzureBaseHook.get_connection_form_widgets"]], "get_ui_field_behaviour() (airflow.providers.microsoft.azure.hooks.base_azure.azurebasehook static method)": [[2, "airflow.providers.microsoft.azure.hooks.base_azure.AzureBaseHook.get_ui_field_behaviour"]], "hook_name (airflow.providers.microsoft.azure.hooks.base_azure.azurebasehook attribute)": [[2, "airflow.providers.microsoft.azure.hooks.base_azure.AzureBaseHook.hook_name"]], "azurebatchhook (class in airflow.providers.microsoft.azure.hooks.batch)": [[3, "airflow.providers.microsoft.azure.hooks.batch.AzureBatchHook"]], "add_single_task_to_job() (airflow.providers.microsoft.azure.hooks.batch.azurebatchhook method)": [[3, "airflow.providers.microsoft.azure.hooks.batch.AzureBatchHook.add_single_task_to_job"]], "airflow.providers.microsoft.azure.hooks.batch": [[3, "module-airflow.providers.microsoft.azure.hooks.batch"]], "configure_job() (airflow.providers.microsoft.azure.hooks.batch.azurebatchhook method)": [[3, "airflow.providers.microsoft.azure.hooks.batch.AzureBatchHook.configure_job"]], "configure_pool() (airflow.providers.microsoft.azure.hooks.batch.azurebatchhook method)": [[3, "airflow.providers.microsoft.azure.hooks.batch.AzureBatchHook.configure_pool"]], "configure_task() (airflow.providers.microsoft.azure.hooks.batch.azurebatchhook method)": [[3, "airflow.providers.microsoft.azure.hooks.batch.AzureBatchHook.configure_task"]], "conn_name_attr (airflow.providers.microsoft.azure.hooks.batch.azurebatchhook attribute)": [[3, "airflow.providers.microsoft.azure.hooks.batch.AzureBatchHook.conn_name_attr"]], "conn_type (airflow.providers.microsoft.azure.hooks.batch.azurebatchhook attribute)": [[3, "airflow.providers.microsoft.azure.hooks.batch.AzureBatchHook.conn_type"]], "create_job() (airflow.providers.microsoft.azure.hooks.batch.azurebatchhook method)": [[3, "airflow.providers.microsoft.azure.hooks.batch.AzureBatchHook.create_job"]], "create_pool() (airflow.providers.microsoft.azure.hooks.batch.azurebatchhook method)": [[3, "airflow.providers.microsoft.azure.hooks.batch.AzureBatchHook.create_pool"]], "default_conn_name (airflow.providers.microsoft.azure.hooks.batch.azurebatchhook attribute)": [[3, "airflow.providers.microsoft.azure.hooks.batch.AzureBatchHook.default_conn_name"]], "get_conn() (airflow.providers.microsoft.azure.hooks.batch.azurebatchhook method)": [[3, "airflow.providers.microsoft.azure.hooks.batch.AzureBatchHook.get_conn"]], "get_connection_form_widgets() (airflow.providers.microsoft.azure.hooks.batch.azurebatchhook static method)": [[3, "airflow.providers.microsoft.azure.hooks.batch.AzureBatchHook.get_connection_form_widgets"]], "get_ui_field_behaviour() (airflow.providers.microsoft.azure.hooks.batch.azurebatchhook static method)": [[3, "airflow.providers.microsoft.azure.hooks.batch.AzureBatchHook.get_ui_field_behaviour"]], "hook_name (airflow.providers.microsoft.azure.hooks.batch.azurebatchhook attribute)": [[3, "airflow.providers.microsoft.azure.hooks.batch.AzureBatchHook.hook_name"]], "test_connection() (airflow.providers.microsoft.azure.hooks.batch.azurebatchhook method)": [[3, "airflow.providers.microsoft.azure.hooks.batch.AzureBatchHook.test_connection"]], "wait_for_all_node_state() (airflow.providers.microsoft.azure.hooks.batch.azurebatchhook method)": [[3, 
"airflow.providers.microsoft.azure.hooks.batch.AzureBatchHook.wait_for_all_node_state"]], "wait_for_job_tasks_to_complete() (airflow.providers.microsoft.azure.hooks.batch.azurebatchhook method)": [[3, "airflow.providers.microsoft.azure.hooks.batch.AzureBatchHook.wait_for_job_tasks_to_complete"]], "azurecontainerinstancehook (class in airflow.providers.microsoft.azure.hooks.container_instance)": [[4, "airflow.providers.microsoft.azure.hooks.container_instance.AzureContainerInstanceHook"]], "airflow.providers.microsoft.azure.hooks.container_instance": [[4, "module-airflow.providers.microsoft.azure.hooks.container_instance"]], "conn_name_attr (airflow.providers.microsoft.azure.hooks.container_instance.azurecontainerinstancehook attribute)": [[4, "airflow.providers.microsoft.azure.hooks.container_instance.AzureContainerInstanceHook.conn_name_attr"]], "conn_type (airflow.providers.microsoft.azure.hooks.container_instance.azurecontainerinstancehook attribute)": [[4, "airflow.providers.microsoft.azure.hooks.container_instance.AzureContainerInstanceHook.conn_type"]], "create_or_update() (airflow.providers.microsoft.azure.hooks.container_instance.azurecontainerinstancehook method)": [[4, "airflow.providers.microsoft.azure.hooks.container_instance.AzureContainerInstanceHook.create_or_update"]], "default_conn_name (airflow.providers.microsoft.azure.hooks.container_instance.azurecontainerinstancehook attribute)": [[4, "airflow.providers.microsoft.azure.hooks.container_instance.AzureContainerInstanceHook.default_conn_name"]], "delete() (airflow.providers.microsoft.azure.hooks.container_instance.azurecontainerinstancehook method)": [[4, "airflow.providers.microsoft.azure.hooks.container_instance.AzureContainerInstanceHook.delete"]], "exists() (airflow.providers.microsoft.azure.hooks.container_instance.azurecontainerinstancehook method)": [[4, "airflow.providers.microsoft.azure.hooks.container_instance.AzureContainerInstanceHook.exists"]], "get_logs() (airflow.providers.microsoft.azure.hooks.container_instance.azurecontainerinstancehook method)": [[4, "airflow.providers.microsoft.azure.hooks.container_instance.AzureContainerInstanceHook.get_logs"]], "get_messages() (airflow.providers.microsoft.azure.hooks.container_instance.azurecontainerinstancehook method)": [[4, "airflow.providers.microsoft.azure.hooks.container_instance.AzureContainerInstanceHook.get_messages"]], "get_state() (airflow.providers.microsoft.azure.hooks.container_instance.azurecontainerinstancehook method)": [[4, "airflow.providers.microsoft.azure.hooks.container_instance.AzureContainerInstanceHook.get_state"]], "get_state_exitcode_details() (airflow.providers.microsoft.azure.hooks.container_instance.azurecontainerinstancehook method)": [[4, "airflow.providers.microsoft.azure.hooks.container_instance.AzureContainerInstanceHook.get_state_exitcode_details"]], "hook_name (airflow.providers.microsoft.azure.hooks.container_instance.azurecontainerinstancehook attribute)": [[4, "airflow.providers.microsoft.azure.hooks.container_instance.AzureContainerInstanceHook.hook_name"]], "test_connection() (airflow.providers.microsoft.azure.hooks.container_instance.azurecontainerinstancehook method)": [[4, "airflow.providers.microsoft.azure.hooks.container_instance.AzureContainerInstanceHook.test_connection"]], "azurecontainerregistryhook (class in airflow.providers.microsoft.azure.hooks.container_registry)": [[5, "airflow.providers.microsoft.azure.hooks.container_registry.AzureContainerRegistryHook"]], 
"airflow.providers.microsoft.azure.hooks.container_registry": [[5, "module-airflow.providers.microsoft.azure.hooks.container_registry"]], "conn_name_attr (airflow.providers.microsoft.azure.hooks.container_registry.azurecontainerregistryhook attribute)": [[5, "airflow.providers.microsoft.azure.hooks.container_registry.AzureContainerRegistryHook.conn_name_attr"]], "conn_type (airflow.providers.microsoft.azure.hooks.container_registry.azurecontainerregistryhook attribute)": [[5, "airflow.providers.microsoft.azure.hooks.container_registry.AzureContainerRegistryHook.conn_type"]], "default_conn_name (airflow.providers.microsoft.azure.hooks.container_registry.azurecontainerregistryhook attribute)": [[5, "airflow.providers.microsoft.azure.hooks.container_registry.AzureContainerRegistryHook.default_conn_name"]], "get_conn() (airflow.providers.microsoft.azure.hooks.container_registry.azurecontainerregistryhook method)": [[5, "airflow.providers.microsoft.azure.hooks.container_registry.AzureContainerRegistryHook.get_conn"]], "get_ui_field_behaviour() (airflow.providers.microsoft.azure.hooks.container_registry.azurecontainerregistryhook static method)": [[5, "airflow.providers.microsoft.azure.hooks.container_registry.AzureContainerRegistryHook.get_ui_field_behaviour"]], "hook_name (airflow.providers.microsoft.azure.hooks.container_registry.azurecontainerregistryhook attribute)": [[5, "airflow.providers.microsoft.azure.hooks.container_registry.AzureContainerRegistryHook.hook_name"]], "azurecontainervolumehook (class in airflow.providers.microsoft.azure.hooks.container_volume)": [[6, "airflow.providers.microsoft.azure.hooks.container_volume.AzureContainerVolumeHook"]], "airflow.providers.microsoft.azure.hooks.container_volume": [[6, "module-airflow.providers.microsoft.azure.hooks.container_volume"]], "conn_name_attr (airflow.providers.microsoft.azure.hooks.container_volume.azurecontainervolumehook attribute)": [[6, "airflow.providers.microsoft.azure.hooks.container_volume.AzureContainerVolumeHook.conn_name_attr"]], "conn_type (airflow.providers.microsoft.azure.hooks.container_volume.azurecontainervolumehook attribute)": [[6, "airflow.providers.microsoft.azure.hooks.container_volume.AzureContainerVolumeHook.conn_type"]], "default_conn_name (airflow.providers.microsoft.azure.hooks.container_volume.azurecontainervolumehook attribute)": [[6, "airflow.providers.microsoft.azure.hooks.container_volume.AzureContainerVolumeHook.default_conn_name"]], "get_connection_form_widgets() (airflow.providers.microsoft.azure.hooks.container_volume.azurecontainervolumehook static method)": [[6, "airflow.providers.microsoft.azure.hooks.container_volume.AzureContainerVolumeHook.get_connection_form_widgets"]], "get_file_volume() (airflow.providers.microsoft.azure.hooks.container_volume.azurecontainervolumehook method)": [[6, "airflow.providers.microsoft.azure.hooks.container_volume.AzureContainerVolumeHook.get_file_volume"]], "get_storagekey() (airflow.providers.microsoft.azure.hooks.container_volume.azurecontainervolumehook method)": [[6, "airflow.providers.microsoft.azure.hooks.container_volume.AzureContainerVolumeHook.get_storagekey"]], "get_ui_field_behaviour() (airflow.providers.microsoft.azure.hooks.container_volume.azurecontainervolumehook static method)": [[6, "airflow.providers.microsoft.azure.hooks.container_volume.AzureContainerVolumeHook.get_ui_field_behaviour"]], "hook_name (airflow.providers.microsoft.azure.hooks.container_volume.azurecontainervolumehook attribute)": [[6, 
"airflow.providers.microsoft.azure.hooks.container_volume.AzureContainerVolumeHook.hook_name"]], "azurecosmosdbhook (class in airflow.providers.microsoft.azure.hooks.cosmos)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook"]], "airflow.providers.microsoft.azure.hooks.cosmos": [[7, "module-airflow.providers.microsoft.azure.hooks.cosmos"]], "conn_name_attr (airflow.providers.microsoft.azure.hooks.cosmos.azurecosmosdbhook attribute)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook.conn_name_attr"]], "conn_type (airflow.providers.microsoft.azure.hooks.cosmos.azurecosmosdbhook attribute)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook.conn_type"]], "create_collection() (airflow.providers.microsoft.azure.hooks.cosmos.azurecosmosdbhook method)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook.create_collection"]], "create_database() (airflow.providers.microsoft.azure.hooks.cosmos.azurecosmosdbhook method)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook.create_database"]], "default_conn_name (airflow.providers.microsoft.azure.hooks.cosmos.azurecosmosdbhook attribute)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook.default_conn_name"]], "delete_collection() (airflow.providers.microsoft.azure.hooks.cosmos.azurecosmosdbhook method)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook.delete_collection"]], "delete_database() (airflow.providers.microsoft.azure.hooks.cosmos.azurecosmosdbhook method)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook.delete_database"]], "delete_document() (airflow.providers.microsoft.azure.hooks.cosmos.azurecosmosdbhook method)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook.delete_document"]], "does_collection_exist() (airflow.providers.microsoft.azure.hooks.cosmos.azurecosmosdbhook method)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook.does_collection_exist"]], "does_database_exist() (airflow.providers.microsoft.azure.hooks.cosmos.azurecosmosdbhook method)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook.does_database_exist"]], "get_collection_link() (in module airflow.providers.microsoft.azure.hooks.cosmos)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.get_collection_link"]], "get_conn() (airflow.providers.microsoft.azure.hooks.cosmos.azurecosmosdbhook method)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook.get_conn"]], "get_connection_form_widgets() (airflow.providers.microsoft.azure.hooks.cosmos.azurecosmosdbhook static method)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook.get_connection_form_widgets"]], "get_database_link() (in module airflow.providers.microsoft.azure.hooks.cosmos)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.get_database_link"]], "get_document() (airflow.providers.microsoft.azure.hooks.cosmos.azurecosmosdbhook method)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook.get_document"]], "get_document_link() (in module airflow.providers.microsoft.azure.hooks.cosmos)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.get_document_link"]], "get_documents() (airflow.providers.microsoft.azure.hooks.cosmos.azurecosmosdbhook method)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook.get_documents"]], "get_ui_field_behaviour() 
(airflow.providers.microsoft.azure.hooks.cosmos.azurecosmosdbhook static method)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook.get_ui_field_behaviour"]], "hook_name (airflow.providers.microsoft.azure.hooks.cosmos.azurecosmosdbhook attribute)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook.hook_name"]], "insert_documents() (airflow.providers.microsoft.azure.hooks.cosmos.azurecosmosdbhook method)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook.insert_documents"]], "test_connection() (airflow.providers.microsoft.azure.hooks.cosmos.azurecosmosdbhook method)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook.test_connection"]], "upsert_document() (airflow.providers.microsoft.azure.hooks.cosmos.azurecosmosdbhook method)": [[7, "airflow.providers.microsoft.azure.hooks.cosmos.AzureCosmosDBHook.upsert_document"]], "azuredatafactoryhook (class in airflow.providers.microsoft.azure.hooks.data_factory)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook"]], "azuredatafactorypipelinerunexception": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryPipelineRunException"]], "azuredatafactorypipelinerunstatus (class in airflow.providers.microsoft.azure.hooks.data_factory)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryPipelineRunStatus"]], "canceling (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactorypipelinerunstatus attribute)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryPipelineRunStatus.CANCELING"]], "cancelled (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactorypipelinerunstatus attribute)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryPipelineRunStatus.CANCELLED"]], "credentials (in module airflow.providers.microsoft.azure.hooks.data_factory)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.Credentials"]], "failed (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactorypipelinerunstatus attribute)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryPipelineRunStatus.FAILED"]], "in_progress (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactorypipelinerunstatus attribute)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryPipelineRunStatus.IN_PROGRESS"]], "pipelineruninfo (class in airflow.providers.microsoft.azure.hooks.data_factory)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.PipelineRunInfo"]], "queued (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactorypipelinerunstatus attribute)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryPipelineRunStatus.QUEUED"]], "succeeded (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactorypipelinerunstatus attribute)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryPipelineRunStatus.SUCCEEDED"]], "terminal_statuses (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactorypipelinerunstatus attribute)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryPipelineRunStatus.TERMINAL_STATUSES"]], "airflow.providers.microsoft.azure.hooks.data_factory": [[8, "module-airflow.providers.microsoft.azure.hooks.data_factory"]], "cancel_pipeline_run() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, 
"airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.cancel_pipeline_run"]], "cancel_trigger() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.cancel_trigger"]], "conn_name_attr (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook attribute)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.conn_name_attr"]], "conn_type (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook attribute)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.conn_type"]], "create_dataflow() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.create_dataflow"]], "create_dataset() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.create_dataset"]], "create_factory() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.create_factory"]], "create_linked_service() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.create_linked_service"]], "create_pipeline() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.create_pipeline"]], "create_trigger() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.create_trigger"]], "default_conn_name (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook attribute)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.default_conn_name"]], "delete_dataflow() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.delete_dataflow"]], "delete_dataset() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.delete_dataset"]], "delete_factory() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.delete_factory"]], "delete_linked_service() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.delete_linked_service"]], "delete_pipeline() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.delete_pipeline"]], "delete_trigger() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.delete_trigger"]], "factory_name (airflow.providers.microsoft.azure.hooks.data_factory.pipelineruninfo attribute)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.PipelineRunInfo.factory_name"]], 
"get_conn() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.get_conn"]], "get_connection_form_widgets() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook static method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.get_connection_form_widgets"]], "get_dataflow() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.get_dataflow"]], "get_dataset() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.get_dataset"]], "get_factory() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.get_factory"]], "get_field() (in module airflow.providers.microsoft.azure.hooks.data_factory)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.get_field"]], "get_linked_service() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.get_linked_service"]], "get_pipeline() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.get_pipeline"]], "get_pipeline_run() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.get_pipeline_run"]], "get_pipeline_run_status() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.get_pipeline_run_status"]], "get_trigger() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.get_trigger"]], "get_ui_field_behaviour() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook static method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.get_ui_field_behaviour"]], "hook_name (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook attribute)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.hook_name"]], "provide_targeted_factory() (in module airflow.providers.microsoft.azure.hooks.data_factory)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.provide_targeted_factory"]], "rerun_trigger() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.rerun_trigger"]], "resource_group_name (airflow.providers.microsoft.azure.hooks.data_factory.pipelineruninfo attribute)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.PipelineRunInfo.resource_group_name"]], "run_id (airflow.providers.microsoft.azure.hooks.data_factory.pipelineruninfo attribute)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.PipelineRunInfo.run_id"]], "run_pipeline() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, 
"airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.run_pipeline"]], "start_trigger() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.start_trigger"]], "stop_trigger() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.stop_trigger"]], "test_connection() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.test_connection"]], "update_dataflow() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.update_dataflow"]], "update_dataset() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.update_dataset"]], "update_factory() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.update_factory"]], "update_linked_service() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.update_linked_service"]], "update_pipeline() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.update_pipeline"]], "update_trigger() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.update_trigger"]], "wait_for_pipeline_run_status() (airflow.providers.microsoft.azure.hooks.data_factory.azuredatafactoryhook method)": [[8, "airflow.providers.microsoft.azure.hooks.data_factory.AzureDataFactoryHook.wait_for_pipeline_run_status"]], "azuredatalakehook (class in airflow.providers.microsoft.azure.hooks.data_lake)": [[9, "airflow.providers.microsoft.azure.hooks.data_lake.AzureDataLakeHook"]], "airflow.providers.microsoft.azure.hooks.data_lake": [[9, "module-airflow.providers.microsoft.azure.hooks.data_lake"]], "check_for_file() (airflow.providers.microsoft.azure.hooks.data_lake.azuredatalakehook method)": [[9, "airflow.providers.microsoft.azure.hooks.data_lake.AzureDataLakeHook.check_for_file"]], "conn_name_attr (airflow.providers.microsoft.azure.hooks.data_lake.azuredatalakehook attribute)": [[9, "airflow.providers.microsoft.azure.hooks.data_lake.AzureDataLakeHook.conn_name_attr"]], "conn_type (airflow.providers.microsoft.azure.hooks.data_lake.azuredatalakehook attribute)": [[9, "airflow.providers.microsoft.azure.hooks.data_lake.AzureDataLakeHook.conn_type"]], "default_conn_name (airflow.providers.microsoft.azure.hooks.data_lake.azuredatalakehook attribute)": [[9, "airflow.providers.microsoft.azure.hooks.data_lake.AzureDataLakeHook.default_conn_name"]], "download_file() (airflow.providers.microsoft.azure.hooks.data_lake.azuredatalakehook method)": [[9, "airflow.providers.microsoft.azure.hooks.data_lake.AzureDataLakeHook.download_file"]], "get_conn() (airflow.providers.microsoft.azure.hooks.data_lake.azuredatalakehook method)": [[9, 
"airflow.providers.microsoft.azure.hooks.data_lake.AzureDataLakeHook.get_conn"]], "get_connection_form_widgets() (airflow.providers.microsoft.azure.hooks.data_lake.azuredatalakehook static method)": [[9, "airflow.providers.microsoft.azure.hooks.data_lake.AzureDataLakeHook.get_connection_form_widgets"]], "get_ui_field_behaviour() (airflow.providers.microsoft.azure.hooks.data_lake.azuredatalakehook static method)": [[9, "airflow.providers.microsoft.azure.hooks.data_lake.AzureDataLakeHook.get_ui_field_behaviour"]], "hook_name (airflow.providers.microsoft.azure.hooks.data_lake.azuredatalakehook attribute)": [[9, "airflow.providers.microsoft.azure.hooks.data_lake.AzureDataLakeHook.hook_name"]], "list() (airflow.providers.microsoft.azure.hooks.data_lake.azuredatalakehook method)": [[9, "airflow.providers.microsoft.azure.hooks.data_lake.AzureDataLakeHook.list"]], "remove() (airflow.providers.microsoft.azure.hooks.data_lake.azuredatalakehook method)": [[9, "airflow.providers.microsoft.azure.hooks.data_lake.AzureDataLakeHook.remove"]], "upload_file() (airflow.providers.microsoft.azure.hooks.data_lake.azuredatalakehook method)": [[9, "airflow.providers.microsoft.azure.hooks.data_lake.AzureDataLakeHook.upload_file"]], "azurefilesharehook (class in airflow.providers.microsoft.azure.hooks.fileshare)": [[10, "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook"]], "airflow.providers.microsoft.azure.hooks.fileshare": [[10, "module-airflow.providers.microsoft.azure.hooks.fileshare"]], "check_for_directory() (airflow.providers.microsoft.azure.hooks.fileshare.azurefilesharehook method)": [[10, "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook.check_for_directory"]], "check_for_file() (airflow.providers.microsoft.azure.hooks.fileshare.azurefilesharehook method)": [[10, "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook.check_for_file"]], "conn_name_attr (airflow.providers.microsoft.azure.hooks.fileshare.azurefilesharehook attribute)": [[10, "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook.conn_name_attr"]], "conn_type (airflow.providers.microsoft.azure.hooks.fileshare.azurefilesharehook attribute)": [[10, "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook.conn_type"]], "create_directory() (airflow.providers.microsoft.azure.hooks.fileshare.azurefilesharehook method)": [[10, "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook.create_directory"]], "create_share() (airflow.providers.microsoft.azure.hooks.fileshare.azurefilesharehook method)": [[10, "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook.create_share"]], "default_conn_name (airflow.providers.microsoft.azure.hooks.fileshare.azurefilesharehook attribute)": [[10, "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook.default_conn_name"]], "delete_share() (airflow.providers.microsoft.azure.hooks.fileshare.azurefilesharehook method)": [[10, "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook.delete_share"]], "get_conn() (airflow.providers.microsoft.azure.hooks.fileshare.azurefilesharehook method)": [[10, "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook.get_conn"]], "get_connection_form_widgets() (airflow.providers.microsoft.azure.hooks.fileshare.azurefilesharehook static method)": [[10, "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook.get_connection_form_widgets"]], "get_file() (airflow.providers.microsoft.azure.hooks.fileshare.azurefilesharehook 
method)": [[10, "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook.get_file"]], "get_file_to_stream() (airflow.providers.microsoft.azure.hooks.fileshare.azurefilesharehook method)": [[10, "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook.get_file_to_stream"]], "get_ui_field_behaviour() (airflow.providers.microsoft.azure.hooks.fileshare.azurefilesharehook static method)": [[10, "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook.get_ui_field_behaviour"]], "hook_name (airflow.providers.microsoft.azure.hooks.fileshare.azurefilesharehook attribute)": [[10, "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook.hook_name"]], "list_directories_and_files() (airflow.providers.microsoft.azure.hooks.fileshare.azurefilesharehook method)": [[10, "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook.list_directories_and_files"]], "list_files() (airflow.providers.microsoft.azure.hooks.fileshare.azurefilesharehook method)": [[10, "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook.list_files"]], "load_file() (airflow.providers.microsoft.azure.hooks.fileshare.azurefilesharehook method)": [[10, "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook.load_file"]], "load_stream() (airflow.providers.microsoft.azure.hooks.fileshare.azurefilesharehook method)": [[10, "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook.load_stream"]], "load_string() (airflow.providers.microsoft.azure.hooks.fileshare.azurefilesharehook method)": [[10, "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook.load_string"]], "test_connection() (airflow.providers.microsoft.azure.hooks.fileshare.azurefilesharehook method)": [[10, "airflow.providers.microsoft.azure.hooks.fileshare.AzureFileShareHook.test_connection"]], "airflow.providers.microsoft.azure.hooks": [[11, "module-airflow.providers.microsoft.azure.hooks"]], "azuresynapsehook (class in airflow.providers.microsoft.azure.hooks.synapse)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseHook"]], "azuresynapsesparkbatchrunstatus (class in airflow.providers.microsoft.azure.hooks.synapse)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseSparkBatchRunStatus"]], "busy (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsesparkbatchrunstatus attribute)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseSparkBatchRunStatus.BUSY"]], "credentials (in module airflow.providers.microsoft.azure.hooks.synapse)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.Credentials"]], "dead (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsesparkbatchrunstatus attribute)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseSparkBatchRunStatus.DEAD"]], "error (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsesparkbatchrunstatus attribute)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseSparkBatchRunStatus.ERROR"]], "idle (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsesparkbatchrunstatus attribute)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseSparkBatchRunStatus.IDLE"]], "killed (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsesparkbatchrunstatus attribute)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseSparkBatchRunStatus.KILLED"]], "not_started (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsesparkbatchrunstatus attribute)": [[12, 
"airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseSparkBatchRunStatus.NOT_STARTED"]], "running (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsesparkbatchrunstatus attribute)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseSparkBatchRunStatus.RUNNING"]], "shutting_down (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsesparkbatchrunstatus attribute)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseSparkBatchRunStatus.SHUTTING_DOWN"]], "starting (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsesparkbatchrunstatus attribute)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseSparkBatchRunStatus.STARTING"]], "success (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsesparkbatchrunstatus attribute)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseSparkBatchRunStatus.SUCCESS"]], "terminal_statuses (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsesparkbatchrunstatus attribute)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseSparkBatchRunStatus.TERMINAL_STATUSES"]], "airflow.providers.microsoft.azure.hooks.synapse": [[12, "module-airflow.providers.microsoft.azure.hooks.synapse"]], "cancel_job_run() (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsehook method)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseHook.cancel_job_run"]], "conn_name_attr (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsehook attribute)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseHook.conn_name_attr"]], "conn_type (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsehook attribute)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseHook.conn_type"]], "default_conn_name (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsehook attribute)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseHook.default_conn_name"]], "get_conn() (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsehook method)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseHook.get_conn"]], "get_connection_form_widgets() (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsehook static method)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseHook.get_connection_form_widgets"]], "get_job_run_status() (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsehook method)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseHook.get_job_run_status"]], "get_ui_field_behaviour() (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsehook static method)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseHook.get_ui_field_behaviour"]], "hook_name (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsehook attribute)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseHook.hook_name"]], "run_spark_job() (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsehook method)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseHook.run_spark_job"]], "wait_for_job_run_status() (airflow.providers.microsoft.azure.hooks.synapse.azuresynapsehook method)": [[12, "airflow.providers.microsoft.azure.hooks.synapse.AzureSynapseHook.wait_for_job_run_status"]], "wasbhook (class in airflow.providers.microsoft.azure.hooks.wasb)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook"]], "airflow.providers.microsoft.azure.hooks.wasb": [[13, 
"module-airflow.providers.microsoft.azure.hooks.wasb"]], "check_for_blob() (airflow.providers.microsoft.azure.hooks.wasb.wasbhook method)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook.check_for_blob"]], "check_for_prefix() (airflow.providers.microsoft.azure.hooks.wasb.wasbhook method)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook.check_for_prefix"]], "conn_name_attr (airflow.providers.microsoft.azure.hooks.wasb.wasbhook attribute)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook.conn_name_attr"]], "conn_type (airflow.providers.microsoft.azure.hooks.wasb.wasbhook attribute)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook.conn_type"]], "create_container() (airflow.providers.microsoft.azure.hooks.wasb.wasbhook method)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook.create_container"]], "default_conn_name (airflow.providers.microsoft.azure.hooks.wasb.wasbhook attribute)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook.default_conn_name"]], "delete_blobs() (airflow.providers.microsoft.azure.hooks.wasb.wasbhook method)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook.delete_blobs"]], "delete_container() (airflow.providers.microsoft.azure.hooks.wasb.wasbhook method)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook.delete_container"]], "delete_file() (airflow.providers.microsoft.azure.hooks.wasb.wasbhook method)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook.delete_file"]], "download() (airflow.providers.microsoft.azure.hooks.wasb.wasbhook method)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook.download"]], "get_blobs_list() (airflow.providers.microsoft.azure.hooks.wasb.wasbhook method)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook.get_blobs_list"]], "get_conn() (airflow.providers.microsoft.azure.hooks.wasb.wasbhook method)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook.get_conn"]], "get_connection_form_widgets() (airflow.providers.microsoft.azure.hooks.wasb.wasbhook static method)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook.get_connection_form_widgets"]], "get_file() (airflow.providers.microsoft.azure.hooks.wasb.wasbhook method)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook.get_file"]], "get_ui_field_behaviour() (airflow.providers.microsoft.azure.hooks.wasb.wasbhook static method)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook.get_ui_field_behaviour"]], "hook_name (airflow.providers.microsoft.azure.hooks.wasb.wasbhook attribute)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook.hook_name"]], "load_file() (airflow.providers.microsoft.azure.hooks.wasb.wasbhook method)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook.load_file"]], "load_string() (airflow.providers.microsoft.azure.hooks.wasb.wasbhook method)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook.load_string"]], "read_file() (airflow.providers.microsoft.azure.hooks.wasb.wasbhook method)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook.read_file"]], "test_connection() (airflow.providers.microsoft.azure.hooks.wasb.wasbhook method)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook.test_connection"]], "upload() (airflow.providers.microsoft.azure.hooks.wasb.wasbhook method)": [[13, "airflow.providers.microsoft.azure.hooks.wasb.WasbHook.upload"]], "airflow.providers.microsoft.azure": [[14, "module-airflow.providers.microsoft.azure"]], 
"airflow.providers.microsoft.azure.log": [[15, "module-airflow.providers.microsoft.azure.log"]], "wasbtaskhandler (class in airflow.providers.microsoft.azure.log.wasb_task_handler)": [[16, "airflow.providers.microsoft.azure.log.wasb_task_handler.WasbTaskHandler"]], "airflow.providers.microsoft.azure.log.wasb_task_handler": [[16, "module-airflow.providers.microsoft.azure.log.wasb_task_handler"]], "close() (airflow.providers.microsoft.azure.log.wasb_task_handler.wasbtaskhandler method)": [[16, "airflow.providers.microsoft.azure.log.wasb_task_handler.WasbTaskHandler.close"]], "hook() (airflow.providers.microsoft.azure.log.wasb_task_handler.wasbtaskhandler method)": [[16, "airflow.providers.microsoft.azure.log.wasb_task_handler.WasbTaskHandler.hook"]], "set_context() (airflow.providers.microsoft.azure.log.wasb_task_handler.wasbtaskhandler method)": [[16, "airflow.providers.microsoft.azure.log.wasb_task_handler.WasbTaskHandler.set_context"]], "wasb_log_exists() (airflow.providers.microsoft.azure.log.wasb_task_handler.wasbtaskhandler method)": [[16, "airflow.providers.microsoft.azure.log.wasb_task_handler.WasbTaskHandler.wasb_log_exists"]], "wasb_read() (airflow.providers.microsoft.azure.log.wasb_task_handler.wasbtaskhandler method)": [[16, "airflow.providers.microsoft.azure.log.wasb_task_handler.WasbTaskHandler.wasb_read"]], "wasb_write() (airflow.providers.microsoft.azure.log.wasb_task_handler.wasbtaskhandler method)": [[16, "airflow.providers.microsoft.azure.log.wasb_task_handler.WasbTaskHandler.wasb_write"]], "adlsdeleteoperator (class in airflow.providers.microsoft.azure.operators.adls)": [[17, "airflow.providers.microsoft.azure.operators.adls.ADLSDeleteOperator"]], "adlslistoperator (class in airflow.providers.microsoft.azure.operators.adls)": [[17, "airflow.providers.microsoft.azure.operators.adls.ADLSListOperator"]], "airflow.providers.microsoft.azure.operators.adls": [[17, "module-airflow.providers.microsoft.azure.operators.adls"]], "execute() (airflow.providers.microsoft.azure.operators.adls.adlsdeleteoperator method)": [[17, "airflow.providers.microsoft.azure.operators.adls.ADLSDeleteOperator.execute"]], "execute() (airflow.providers.microsoft.azure.operators.adls.adlslistoperator method)": [[17, "airflow.providers.microsoft.azure.operators.adls.ADLSListOperator.execute"]], "template_fields (airflow.providers.microsoft.azure.operators.adls.adlsdeleteoperator attribute)": [[17, "airflow.providers.microsoft.azure.operators.adls.ADLSDeleteOperator.template_fields"]], "template_fields (airflow.providers.microsoft.azure.operators.adls.adlslistoperator attribute)": [[17, "airflow.providers.microsoft.azure.operators.adls.ADLSListOperator.template_fields"]], "ui_color (airflow.providers.microsoft.azure.operators.adls.adlsdeleteoperator attribute)": [[17, "airflow.providers.microsoft.azure.operators.adls.ADLSDeleteOperator.ui_color"]], "ui_color (airflow.providers.microsoft.azure.operators.adls.adlslistoperator attribute)": [[17, "airflow.providers.microsoft.azure.operators.adls.ADLSListOperator.ui_color"]], "azuredataexplorerqueryoperator (class in airflow.providers.microsoft.azure.operators.adx)": [[18, "airflow.providers.microsoft.azure.operators.adx.AzureDataExplorerQueryOperator"]], "airflow.providers.microsoft.azure.operators.adx": [[18, "module-airflow.providers.microsoft.azure.operators.adx"]], "execute() (airflow.providers.microsoft.azure.operators.adx.azuredataexplorerqueryoperator method)": [[18, 
"airflow.providers.microsoft.azure.operators.adx.AzureDataExplorerQueryOperator.execute"]], "get_hook() (airflow.providers.microsoft.azure.operators.adx.azuredataexplorerqueryoperator method)": [[18, "airflow.providers.microsoft.azure.operators.adx.AzureDataExplorerQueryOperator.get_hook"]], "template_ext (airflow.providers.microsoft.azure.operators.adx.azuredataexplorerqueryoperator attribute)": [[18, "airflow.providers.microsoft.azure.operators.adx.AzureDataExplorerQueryOperator.template_ext"]], "template_fields (airflow.providers.microsoft.azure.operators.adx.azuredataexplorerqueryoperator attribute)": [[18, "airflow.providers.microsoft.azure.operators.adx.AzureDataExplorerQueryOperator.template_fields"]], "ui_color (airflow.providers.microsoft.azure.operators.adx.azuredataexplorerqueryoperator attribute)": [[18, "airflow.providers.microsoft.azure.operators.adx.AzureDataExplorerQueryOperator.ui_color"]], "asbreceivesubscriptionmessageoperator (class in airflow.providers.microsoft.azure.operators.asb)": [[19, "airflow.providers.microsoft.azure.operators.asb.ASBReceiveSubscriptionMessageOperator"]], "azureservicebuscreatequeueoperator (class in airflow.providers.microsoft.azure.operators.asb)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusCreateQueueOperator"]], "azureservicebusdeletequeueoperator (class in airflow.providers.microsoft.azure.operators.asb)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusDeleteQueueOperator"]], "azureservicebusreceivemessageoperator (class in airflow.providers.microsoft.azure.operators.asb)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusReceiveMessageOperator"]], "azureservicebussendmessageoperator (class in airflow.providers.microsoft.azure.operators.asb)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSendMessageOperator"]], "azureservicebussubscriptioncreateoperator (class in airflow.providers.microsoft.azure.operators.asb)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSubscriptionCreateOperator"]], "azureservicebussubscriptiondeleteoperator (class in airflow.providers.microsoft.azure.operators.asb)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSubscriptionDeleteOperator"]], "azureservicebustopiccreateoperator (class in airflow.providers.microsoft.azure.operators.asb)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusTopicCreateOperator"]], "azureservicebustopicdeleteoperator (class in airflow.providers.microsoft.azure.operators.asb)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusTopicDeleteOperator"]], "azureservicebusupdatesubscriptionoperator (class in airflow.providers.microsoft.azure.operators.asb)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusUpdateSubscriptionOperator"]], "airflow.providers.microsoft.azure.operators.asb": [[19, "module-airflow.providers.microsoft.azure.operators.asb"]], "execute() (airflow.providers.microsoft.azure.operators.asb.asbreceivesubscriptionmessageoperator method)": [[19, "airflow.providers.microsoft.azure.operators.asb.ASBReceiveSubscriptionMessageOperator.execute"]], "execute() (airflow.providers.microsoft.azure.operators.asb.azureservicebuscreatequeueoperator method)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusCreateQueueOperator.execute"]], "execute() (airflow.providers.microsoft.azure.operators.asb.azureservicebusdeletequeueoperator method)": [[19, 
"airflow.providers.microsoft.azure.operators.asb.AzureServiceBusDeleteQueueOperator.execute"]], "execute() (airflow.providers.microsoft.azure.operators.asb.azureservicebusreceivemessageoperator method)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusReceiveMessageOperator.execute"]], "execute() (airflow.providers.microsoft.azure.operators.asb.azureservicebussendmessageoperator method)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSendMessageOperator.execute"]], "execute() (airflow.providers.microsoft.azure.operators.asb.azureservicebussubscriptioncreateoperator method)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSubscriptionCreateOperator.execute"]], "execute() (airflow.providers.microsoft.azure.operators.asb.azureservicebussubscriptiondeleteoperator method)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSubscriptionDeleteOperator.execute"]], "execute() (airflow.providers.microsoft.azure.operators.asb.azureservicebustopiccreateoperator method)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusTopicCreateOperator.execute"]], "execute() (airflow.providers.microsoft.azure.operators.asb.azureservicebustopicdeleteoperator method)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusTopicDeleteOperator.execute"]], "execute() (airflow.providers.microsoft.azure.operators.asb.azureservicebusupdatesubscriptionoperator method)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusUpdateSubscriptionOperator.execute"]], "template_fields (airflow.providers.microsoft.azure.operators.asb.asbreceivesubscriptionmessageoperator attribute)": [[19, "airflow.providers.microsoft.azure.operators.asb.ASBReceiveSubscriptionMessageOperator.template_fields"]], "template_fields (airflow.providers.microsoft.azure.operators.asb.azureservicebuscreatequeueoperator attribute)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusCreateQueueOperator.template_fields"]], "template_fields (airflow.providers.microsoft.azure.operators.asb.azureservicebusdeletequeueoperator attribute)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusDeleteQueueOperator.template_fields"]], "template_fields (airflow.providers.microsoft.azure.operators.asb.azureservicebusreceivemessageoperator attribute)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusReceiveMessageOperator.template_fields"]], "template_fields (airflow.providers.microsoft.azure.operators.asb.azureservicebussendmessageoperator attribute)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSendMessageOperator.template_fields"]], "template_fields (airflow.providers.microsoft.azure.operators.asb.azureservicebussubscriptioncreateoperator attribute)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSubscriptionCreateOperator.template_fields"]], "template_fields (airflow.providers.microsoft.azure.operators.asb.azureservicebussubscriptiondeleteoperator attribute)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSubscriptionDeleteOperator.template_fields"]], "template_fields (airflow.providers.microsoft.azure.operators.asb.azureservicebustopiccreateoperator attribute)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusTopicCreateOperator.template_fields"]], "template_fields (airflow.providers.microsoft.azure.operators.asb.azureservicebustopicdeleteoperator attribute)": [[19, 
"airflow.providers.microsoft.azure.operators.asb.AzureServiceBusTopicDeleteOperator.template_fields"]], "template_fields (airflow.providers.microsoft.azure.operators.asb.azureservicebusupdatesubscriptionoperator attribute)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusUpdateSubscriptionOperator.template_fields"]], "ui_color (airflow.providers.microsoft.azure.operators.asb.asbreceivesubscriptionmessageoperator attribute)": [[19, "airflow.providers.microsoft.azure.operators.asb.ASBReceiveSubscriptionMessageOperator.ui_color"]], "ui_color (airflow.providers.microsoft.azure.operators.asb.azureservicebuscreatequeueoperator attribute)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusCreateQueueOperator.ui_color"]], "ui_color (airflow.providers.microsoft.azure.operators.asb.azureservicebusdeletequeueoperator attribute)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusDeleteQueueOperator.ui_color"]], "ui_color (airflow.providers.microsoft.azure.operators.asb.azureservicebusreceivemessageoperator attribute)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusReceiveMessageOperator.ui_color"]], "ui_color (airflow.providers.microsoft.azure.operators.asb.azureservicebussendmessageoperator attribute)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSendMessageOperator.ui_color"]], "ui_color (airflow.providers.microsoft.azure.operators.asb.azureservicebussubscriptioncreateoperator attribute)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSubscriptionCreateOperator.ui_color"]], "ui_color (airflow.providers.microsoft.azure.operators.asb.azureservicebussubscriptiondeleteoperator attribute)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusSubscriptionDeleteOperator.ui_color"]], "ui_color (airflow.providers.microsoft.azure.operators.asb.azureservicebustopiccreateoperator attribute)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusTopicCreateOperator.ui_color"]], "ui_color (airflow.providers.microsoft.azure.operators.asb.azureservicebustopicdeleteoperator attribute)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusTopicDeleteOperator.ui_color"]], "ui_color (airflow.providers.microsoft.azure.operators.asb.azureservicebusupdatesubscriptionoperator attribute)": [[19, "airflow.providers.microsoft.azure.operators.asb.AzureServiceBusUpdateSubscriptionOperator.ui_color"]], "azurebatchoperator (class in airflow.providers.microsoft.azure.operators.batch)": [[20, "airflow.providers.microsoft.azure.operators.batch.AzureBatchOperator"]], "airflow.providers.microsoft.azure.operators.batch": [[20, "module-airflow.providers.microsoft.azure.operators.batch"]], "clean_up() (airflow.providers.microsoft.azure.operators.batch.azurebatchoperator method)": [[20, "airflow.providers.microsoft.azure.operators.batch.AzureBatchOperator.clean_up"]], "execute() (airflow.providers.microsoft.azure.operators.batch.azurebatchoperator method)": [[20, "airflow.providers.microsoft.azure.operators.batch.AzureBatchOperator.execute"]], "get_hook() (airflow.providers.microsoft.azure.operators.batch.azurebatchoperator method)": [[20, "airflow.providers.microsoft.azure.operators.batch.AzureBatchOperator.get_hook"]], "on_kill() (airflow.providers.microsoft.azure.operators.batch.azurebatchoperator method)": [[20, "airflow.providers.microsoft.azure.operators.batch.AzureBatchOperator.on_kill"]], "template_fields 
(airflow.providers.microsoft.azure.operators.batch.azurebatchoperator attribute)": [[20, "airflow.providers.microsoft.azure.operators.batch.AzureBatchOperator.template_fields"]], "ui_color (airflow.providers.microsoft.azure.operators.batch.azurebatchoperator attribute)": [[20, "airflow.providers.microsoft.azure.operators.batch.AzureBatchOperator.ui_color"]], "azurecontainerinstancesoperator (class in airflow.providers.microsoft.azure.operators.container_instances)": [[21, "airflow.providers.microsoft.azure.operators.container_instances.AzureContainerInstancesOperator"]], "default_cpu (in module airflow.providers.microsoft.azure.operators.container_instances)": [[21, "airflow.providers.microsoft.azure.operators.container_instances.DEFAULT_CPU"]], "default_environment_variables (in module airflow.providers.microsoft.azure.operators.container_instances)": [[21, "airflow.providers.microsoft.azure.operators.container_instances.DEFAULT_ENVIRONMENT_VARIABLES"]], "default_memory_in_gb (in module airflow.providers.microsoft.azure.operators.container_instances)": [[21, "airflow.providers.microsoft.azure.operators.container_instances.DEFAULT_MEMORY_IN_GB"]], "default_secured_variables (in module airflow.providers.microsoft.azure.operators.container_instances)": [[21, "airflow.providers.microsoft.azure.operators.container_instances.DEFAULT_SECURED_VARIABLES"]], "default_volumes (in module airflow.providers.microsoft.azure.operators.container_instances)": [[21, "airflow.providers.microsoft.azure.operators.container_instances.DEFAULT_VOLUMES"]], "volume (in module airflow.providers.microsoft.azure.operators.container_instances)": [[21, "airflow.providers.microsoft.azure.operators.container_instances.Volume"]], "airflow.providers.microsoft.azure.operators.container_instances": [[21, "module-airflow.providers.microsoft.azure.operators.container_instances"]], "execute() (airflow.providers.microsoft.azure.operators.container_instances.azurecontainerinstancesoperator method)": [[21, "airflow.providers.microsoft.azure.operators.container_instances.AzureContainerInstancesOperator.execute"]], "on_kill() (airflow.providers.microsoft.azure.operators.container_instances.azurecontainerinstancesoperator method)": [[21, "airflow.providers.microsoft.azure.operators.container_instances.AzureContainerInstancesOperator.on_kill"]], "template_fields (airflow.providers.microsoft.azure.operators.container_instances.azurecontainerinstancesoperator attribute)": [[21, "airflow.providers.microsoft.azure.operators.container_instances.AzureContainerInstancesOperator.template_fields"]], "template_fields_renderers (airflow.providers.microsoft.azure.operators.container_instances.azurecontainerinstancesoperator attribute)": [[21, "airflow.providers.microsoft.azure.operators.container_instances.AzureContainerInstancesOperator.template_fields_renderers"]], "azurecosmosinsertdocumentoperator (class in airflow.providers.microsoft.azure.operators.cosmos)": [[22, "airflow.providers.microsoft.azure.operators.cosmos.AzureCosmosInsertDocumentOperator"]], "airflow.providers.microsoft.azure.operators.cosmos": [[22, "module-airflow.providers.microsoft.azure.operators.cosmos"]], "execute() (airflow.providers.microsoft.azure.operators.cosmos.azurecosmosinsertdocumentoperator method)": [[22, "airflow.providers.microsoft.azure.operators.cosmos.AzureCosmosInsertDocumentOperator.execute"]], "template_fields (airflow.providers.microsoft.azure.operators.cosmos.azurecosmosinsertdocumentoperator attribute)": [[22, 
"airflow.providers.microsoft.azure.operators.cosmos.AzureCosmosInsertDocumentOperator.template_fields"]], "ui_color (airflow.providers.microsoft.azure.operators.cosmos.azurecosmosinsertdocumentoperator attribute)": [[22, "airflow.providers.microsoft.azure.operators.cosmos.AzureCosmosInsertDocumentOperator.ui_color"]], "azuredatafactorypipelinerunlink (class in airflow.providers.microsoft.azure.operators.data_factory)": [[23, "airflow.providers.microsoft.azure.operators.data_factory.AzureDataFactoryPipelineRunLink"]], "azuredatafactoryrunpipelineoperator (class in airflow.providers.microsoft.azure.operators.data_factory)": [[23, "airflow.providers.microsoft.azure.operators.data_factory.AzureDataFactoryRunPipelineOperator"]], "airflow.providers.microsoft.azure.operators.data_factory": [[23, "module-airflow.providers.microsoft.azure.operators.data_factory"]], "execute() (airflow.providers.microsoft.azure.operators.data_factory.azuredatafactoryrunpipelineoperator method)": [[23, "airflow.providers.microsoft.azure.operators.data_factory.AzureDataFactoryRunPipelineOperator.execute"]], "get_link() (airflow.providers.microsoft.azure.operators.data_factory.azuredatafactorypipelinerunlink method)": [[23, "airflow.providers.microsoft.azure.operators.data_factory.AzureDataFactoryPipelineRunLink.get_link"]], "name (airflow.providers.microsoft.azure.operators.data_factory.azuredatafactorypipelinerunlink attribute)": [[23, "airflow.providers.microsoft.azure.operators.data_factory.AzureDataFactoryPipelineRunLink.name"]], "on_kill() (airflow.providers.microsoft.azure.operators.data_factory.azuredatafactoryrunpipelineoperator method)": [[23, "airflow.providers.microsoft.azure.operators.data_factory.AzureDataFactoryRunPipelineOperator.on_kill"]], "operator_extra_links (airflow.providers.microsoft.azure.operators.data_factory.azuredatafactoryrunpipelineoperator attribute)": [[23, "airflow.providers.microsoft.azure.operators.data_factory.AzureDataFactoryRunPipelineOperator.operator_extra_links"]], "template_fields (airflow.providers.microsoft.azure.operators.data_factory.azuredatafactoryrunpipelineoperator attribute)": [[23, "airflow.providers.microsoft.azure.operators.data_factory.AzureDataFactoryRunPipelineOperator.template_fields"]], "template_fields_renderers (airflow.providers.microsoft.azure.operators.data_factory.azuredatafactoryrunpipelineoperator attribute)": [[23, "airflow.providers.microsoft.azure.operators.data_factory.AzureDataFactoryRunPipelineOperator.template_fields_renderers"]], "ui_color (airflow.providers.microsoft.azure.operators.data_factory.azuredatafactoryrunpipelineoperator attribute)": [[23, "airflow.providers.microsoft.azure.operators.data_factory.AzureDataFactoryRunPipelineOperator.ui_color"]], "airflow.providers.microsoft.azure.operators": [[24, "module-airflow.providers.microsoft.azure.operators"]], "azuresynapserunsparkbatchoperator (class in airflow.providers.microsoft.azure.operators.synapse)": [[25, "airflow.providers.microsoft.azure.operators.synapse.AzureSynapseRunSparkBatchOperator"]], "airflow.providers.microsoft.azure.operators.synapse": [[25, "module-airflow.providers.microsoft.azure.operators.synapse"]], "execute() (airflow.providers.microsoft.azure.operators.synapse.azuresynapserunsparkbatchoperator method)": [[25, "airflow.providers.microsoft.azure.operators.synapse.AzureSynapseRunSparkBatchOperator.execute"]], "on_kill() (airflow.providers.microsoft.azure.operators.synapse.azuresynapserunsparkbatchoperator method)": [[25, 
"airflow.providers.microsoft.azure.operators.synapse.AzureSynapseRunSparkBatchOperator.on_kill"]], "template_fields (airflow.providers.microsoft.azure.operators.synapse.azuresynapserunsparkbatchoperator attribute)": [[25, "airflow.providers.microsoft.azure.operators.synapse.AzureSynapseRunSparkBatchOperator.template_fields"]], "template_fields_renderers (airflow.providers.microsoft.azure.operators.synapse.azuresynapserunsparkbatchoperator attribute)": [[25, "airflow.providers.microsoft.azure.operators.synapse.AzureSynapseRunSparkBatchOperator.template_fields_renderers"]], "ui_color (airflow.providers.microsoft.azure.operators.synapse.azuresynapserunsparkbatchoperator attribute)": [[25, "airflow.providers.microsoft.azure.operators.synapse.AzureSynapseRunSparkBatchOperator.ui_color"]], "wasbdeletebloboperator (class in airflow.providers.microsoft.azure.operators.wasb_delete_blob)": [[26, "airflow.providers.microsoft.azure.operators.wasb_delete_blob.WasbDeleteBlobOperator"]], "airflow.providers.microsoft.azure.operators.wasb_delete_blob": [[26, "module-airflow.providers.microsoft.azure.operators.wasb_delete_blob"]], "execute() (airflow.providers.microsoft.azure.operators.wasb_delete_blob.wasbdeletebloboperator method)": [[26, "airflow.providers.microsoft.azure.operators.wasb_delete_blob.WasbDeleteBlobOperator.execute"]], "template_fields (airflow.providers.microsoft.azure.operators.wasb_delete_blob.wasbdeletebloboperator attribute)": [[26, "airflow.providers.microsoft.azure.operators.wasb_delete_blob.WasbDeleteBlobOperator.template_fields"]], "airflow.providers.microsoft.azure.secrets": [[27, "module-airflow.providers.microsoft.azure.secrets"]], "azurekeyvaultbackend (class in airflow.providers.microsoft.azure.secrets.key_vault)": [[28, "airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend"]], "airflow.providers.microsoft.azure.secrets.key_vault": [[28, "module-airflow.providers.microsoft.azure.secrets.key_vault"]], "build_path() (airflow.providers.microsoft.azure.secrets.key_vault.azurekeyvaultbackend static method)": [[28, "airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend.build_path"]], "client() (airflow.providers.microsoft.azure.secrets.key_vault.azurekeyvaultbackend method)": [[28, "airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend.client"]], "get_config() (airflow.providers.microsoft.azure.secrets.key_vault.azurekeyvaultbackend method)": [[28, "airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend.get_config"]], "get_conn_uri() (airflow.providers.microsoft.azure.secrets.key_vault.azurekeyvaultbackend method)": [[28, "airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend.get_conn_uri"]], "get_conn_value() (airflow.providers.microsoft.azure.secrets.key_vault.azurekeyvaultbackend method)": [[28, "airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend.get_conn_value"]], "get_variable() (airflow.providers.microsoft.azure.secrets.key_vault.azurekeyvaultbackend method)": [[28, "airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend.get_variable"]], "azurecosmosdocumentsensor (class in airflow.providers.microsoft.azure.sensors.cosmos)": [[29, "airflow.providers.microsoft.azure.sensors.cosmos.AzureCosmosDocumentSensor"]], "airflow.providers.microsoft.azure.sensors.cosmos": [[29, "module-airflow.providers.microsoft.azure.sensors.cosmos"]], "poke() (airflow.providers.microsoft.azure.sensors.cosmos.azurecosmosdocumentsensor method)": [[29, 
"airflow.providers.microsoft.azure.sensors.cosmos.AzureCosmosDocumentSensor.poke"]], "template_fields (airflow.providers.microsoft.azure.sensors.cosmos.azurecosmosdocumentsensor attribute)": [[29, "airflow.providers.microsoft.azure.sensors.cosmos.AzureCosmosDocumentSensor.template_fields"]], "azuredatafactorypipelinerunstatussensor (class in airflow.providers.microsoft.azure.sensors.data_factory)": [[30, "airflow.providers.microsoft.azure.sensors.data_factory.AzureDataFactoryPipelineRunStatusSensor"]], "airflow.providers.microsoft.azure.sensors.data_factory": [[30, "module-airflow.providers.microsoft.azure.sensors.data_factory"]], "poke() (airflow.providers.microsoft.azure.sensors.data_factory.azuredatafactorypipelinerunstatussensor method)": [[30, "airflow.providers.microsoft.azure.sensors.data_factory.AzureDataFactoryPipelineRunStatusSensor.poke"]], "template_fields (airflow.providers.microsoft.azure.sensors.data_factory.azuredatafactorypipelinerunstatussensor attribute)": [[30, "airflow.providers.microsoft.azure.sensors.data_factory.AzureDataFactoryPipelineRunStatusSensor.template_fields"]], "ui_color (airflow.providers.microsoft.azure.sensors.data_factory.azuredatafactorypipelinerunstatussensor attribute)": [[30, "airflow.providers.microsoft.azure.sensors.data_factory.AzureDataFactoryPipelineRunStatusSensor.ui_color"]], "airflow.providers.microsoft.azure.sensors": [[31, "module-airflow.providers.microsoft.azure.sensors"]], "wasbblobsensor (class in airflow.providers.microsoft.azure.sensors.wasb)": [[32, "airflow.providers.microsoft.azure.sensors.wasb.WasbBlobSensor"]], "wasbprefixsensor (class in airflow.providers.microsoft.azure.sensors.wasb)": [[32, "airflow.providers.microsoft.azure.sensors.wasb.WasbPrefixSensor"]], "airflow.providers.microsoft.azure.sensors.wasb": [[32, "module-airflow.providers.microsoft.azure.sensors.wasb"]], "poke() (airflow.providers.microsoft.azure.sensors.wasb.wasbblobsensor method)": [[32, "airflow.providers.microsoft.azure.sensors.wasb.WasbBlobSensor.poke"]], "poke() (airflow.providers.microsoft.azure.sensors.wasb.wasbprefixsensor method)": [[32, "airflow.providers.microsoft.azure.sensors.wasb.WasbPrefixSensor.poke"]], "template_fields (airflow.providers.microsoft.azure.sensors.wasb.wasbblobsensor attribute)": [[32, "airflow.providers.microsoft.azure.sensors.wasb.WasbBlobSensor.template_fields"]], "template_fields (airflow.providers.microsoft.azure.sensors.wasb.wasbprefixsensor attribute)": [[32, "airflow.providers.microsoft.azure.sensors.wasb.WasbPrefixSensor.template_fields"]], "azureblobstoragetogcsoperator (class in airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs)": [[33, "airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs.AzureBlobStorageToGCSOperator"]], "airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs": [[33, "module-airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs"]], "execute() (airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs.azureblobstoragetogcsoperator method)": [[33, "airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs.AzureBlobStorageToGCSOperator.execute"]], "template_fields (airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs.azureblobstoragetogcsoperator attribute)": [[33, "airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs.AzureBlobStorageToGCSOperator.template_fields"]], "airflow.providers.microsoft.azure.transfers": [[34, "module-airflow.providers.microsoft.azure.transfers"]], "localfilesystemtoadlsoperator (class in 
airflow.providers.microsoft.azure.transfers.local_to_adls)": [[35, "airflow.providers.microsoft.azure.transfers.local_to_adls.LocalFilesystemToADLSOperator"]], "localtoazuredatalakestorageoperator (class in airflow.providers.microsoft.azure.transfers.local_to_adls)": [[35, "airflow.providers.microsoft.azure.transfers.local_to_adls.LocalToAzureDataLakeStorageOperator"]], "airflow.providers.microsoft.azure.transfers.local_to_adls": [[35, "module-airflow.providers.microsoft.azure.transfers.local_to_adls"]], "execute() (airflow.providers.microsoft.azure.transfers.local_to_adls.localfilesystemtoadlsoperator method)": [[35, "airflow.providers.microsoft.azure.transfers.local_to_adls.LocalFilesystemToADLSOperator.execute"]], "template_fields (airflow.providers.microsoft.azure.transfers.local_to_adls.localfilesystemtoadlsoperator attribute)": [[35, "airflow.providers.microsoft.azure.transfers.local_to_adls.LocalFilesystemToADLSOperator.template_fields"]], "ui_color (airflow.providers.microsoft.azure.transfers.local_to_adls.localfilesystemtoadlsoperator attribute)": [[35, "airflow.providers.microsoft.azure.transfers.local_to_adls.LocalFilesystemToADLSOperator.ui_color"]], "localfilesystemtowasboperator (class in airflow.providers.microsoft.azure.transfers.local_to_wasb)": [[36, "airflow.providers.microsoft.azure.transfers.local_to_wasb.LocalFilesystemToWasbOperator"]], "airflow.providers.microsoft.azure.transfers.local_to_wasb": [[36, "module-airflow.providers.microsoft.azure.transfers.local_to_wasb"]], "execute() (airflow.providers.microsoft.azure.transfers.local_to_wasb.localfilesystemtowasboperator method)": [[36, "airflow.providers.microsoft.azure.transfers.local_to_wasb.LocalFilesystemToWasbOperator.execute"]], "template_fields (airflow.providers.microsoft.azure.transfers.local_to_wasb.localfilesystemtowasboperator attribute)": [[36, "airflow.providers.microsoft.azure.transfers.local_to_wasb.LocalFilesystemToWasbOperator.template_fields"]], "oracletoazuredatalakeoperator (class in airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake)": [[37, "airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake.OracleToAzureDataLakeOperator"]], "airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake": [[37, "module-airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake"]], "execute() (airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake.oracletoazuredatalakeoperator method)": [[37, "airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake.OracleToAzureDataLakeOperator.execute"]], "template_fields (airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake.oracletoazuredatalakeoperator attribute)": [[37, "airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake.OracleToAzureDataLakeOperator.template_fields"]], "template_fields_renderers (airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake.oracletoazuredatalakeoperator attribute)": [[37, "airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake.OracleToAzureDataLakeOperator.template_fields_renderers"]], "ui_color (airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake.oracletoazuredatalakeoperator attribute)": [[37, "airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake.OracleToAzureDataLakeOperator.ui_color"]], "sftptowasboperator (class in airflow.providers.microsoft.azure.transfers.sftp_to_wasb)": [[38, "airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SFTPToWasbOperator"]], 
"sftpfile (in module airflow.providers.microsoft.azure.transfers.sftp_to_wasb)": [[38, "airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SftpFile"]], "wildcard (in module airflow.providers.microsoft.azure.transfers.sftp_to_wasb)": [[38, "airflow.providers.microsoft.azure.transfers.sftp_to_wasb.WILDCARD"]], "airflow.providers.microsoft.azure.transfers.sftp_to_wasb": [[38, "module-airflow.providers.microsoft.azure.transfers.sftp_to_wasb"]], "check_wildcards_limit() (airflow.providers.microsoft.azure.transfers.sftp_to_wasb.sftptowasboperator method)": [[38, "airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SFTPToWasbOperator.check_wildcards_limit"]], "copy_files_to_wasb() (airflow.providers.microsoft.azure.transfers.sftp_to_wasb.sftptowasboperator method)": [[38, "airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SFTPToWasbOperator.copy_files_to_wasb"]], "delete_files() (airflow.providers.microsoft.azure.transfers.sftp_to_wasb.sftptowasboperator method)": [[38, "airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SFTPToWasbOperator.delete_files"]], "dry_run() (airflow.providers.microsoft.azure.transfers.sftp_to_wasb.sftptowasboperator method)": [[38, "airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SFTPToWasbOperator.dry_run"]], "execute() (airflow.providers.microsoft.azure.transfers.sftp_to_wasb.sftptowasboperator method)": [[38, "airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SFTPToWasbOperator.execute"]], "get_full_path_blob() (airflow.providers.microsoft.azure.transfers.sftp_to_wasb.sftptowasboperator method)": [[38, "airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SFTPToWasbOperator.get_full_path_blob"]], "get_sftp_files_map() (airflow.providers.microsoft.azure.transfers.sftp_to_wasb.sftptowasboperator method)": [[38, "airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SFTPToWasbOperator.get_sftp_files_map"]], "get_tree_behavior() (airflow.providers.microsoft.azure.transfers.sftp_to_wasb.sftptowasboperator method)": [[38, "airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SFTPToWasbOperator.get_tree_behavior"]], "sftp_hook() (airflow.providers.microsoft.azure.transfers.sftp_to_wasb.sftptowasboperator method)": [[38, "airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SFTPToWasbOperator.sftp_hook"]], "source_path_contains_wildcard (airflow.providers.microsoft.azure.transfers.sftp_to_wasb.sftptowasboperator property)": [[38, "airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SFTPToWasbOperator.source_path_contains_wildcard"]], "template_fields (airflow.providers.microsoft.azure.transfers.sftp_to_wasb.sftptowasboperator attribute)": [[38, "airflow.providers.microsoft.azure.transfers.sftp_to_wasb.SFTPToWasbOperator.template_fields"]], "airflow.providers.microsoft.azure.utils": [[39, "module-airflow.providers.microsoft.azure.utils"]], "get_field() (in module airflow.providers.microsoft.azure.utils)": [[39, "airflow.providers.microsoft.azure.utils.get_field"]], "dag_id (in module tests.system.providers.microsoft.azure.example_adf_run_pipeline)": [[40, "tests.system.providers.microsoft.azure.example_adf_run_pipeline.DAG_ID"]], "env_id (in module tests.system.providers.microsoft.azure.example_adf_run_pipeline)": [[40, "tests.system.providers.microsoft.azure.example_adf_run_pipeline.ENV_ID"]], "begin (in module tests.system.providers.microsoft.azure.example_adf_run_pipeline)": [[40, "tests.system.providers.microsoft.azure.example_adf_run_pipeline.begin"]], "test_run (in module 
tests.system.providers.microsoft.azure.example_adf_run_pipeline)": [[40, "tests.system.providers.microsoft.azure.example_adf_run_pipeline.test_run"]], "tests.system.providers.microsoft.azure.example_adf_run_pipeline": [[40, "module-tests.system.providers.microsoft.azure.example_adf_run_pipeline"]], "dag_id (in module tests.system.providers.microsoft.azure.example_adls_delete)": [[41, "tests.system.providers.microsoft.azure.example_adls_delete.DAG_ID"]], "env_id (in module tests.system.providers.microsoft.azure.example_adls_delete)": [[41, "tests.system.providers.microsoft.azure.example_adls_delete.ENV_ID"]], "local_file_path (in module tests.system.providers.microsoft.azure.example_adls_delete)": [[41, "tests.system.providers.microsoft.azure.example_adls_delete.LOCAL_FILE_PATH"]], "remote_file_path (in module tests.system.providers.microsoft.azure.example_adls_delete)": [[41, "tests.system.providers.microsoft.azure.example_adls_delete.REMOTE_FILE_PATH"]], "test_run (in module tests.system.providers.microsoft.azure.example_adls_delete)": [[41, "tests.system.providers.microsoft.azure.example_adls_delete.test_run"]], "tests.system.providers.microsoft.azure.example_adls_delete": [[41, "module-tests.system.providers.microsoft.azure.example_adls_delete"]], "upload_file (in module tests.system.providers.microsoft.azure.example_adls_delete)": [[41, "tests.system.providers.microsoft.azure.example_adls_delete.upload_file"]], "azure_container_name (in module tests.system.providers.microsoft.azure.example_azure_blob_to_gcs)": [[42, "tests.system.providers.microsoft.azure.example_azure_blob_to_gcs.AZURE_CONTAINER_NAME"]], "blob_name (in module tests.system.providers.microsoft.azure.example_azure_blob_to_gcs)": [[42, "tests.system.providers.microsoft.azure.example_azure_blob_to_gcs.BLOB_NAME"]], "dag_id (in module tests.system.providers.microsoft.azure.example_azure_blob_to_gcs)": [[42, "tests.system.providers.microsoft.azure.example_azure_blob_to_gcs.DAG_ID"]], "env_id (in module tests.system.providers.microsoft.azure.example_azure_blob_to_gcs)": [[42, "tests.system.providers.microsoft.azure.example_azure_blob_to_gcs.ENV_ID"]], "gcp_bucket_file_path (in module tests.system.providers.microsoft.azure.example_azure_blob_to_gcs)": [[42, "tests.system.providers.microsoft.azure.example_azure_blob_to_gcs.GCP_BUCKET_FILE_PATH"]], "gcp_bucket_name (in module tests.system.providers.microsoft.azure.example_azure_blob_to_gcs)": [[42, "tests.system.providers.microsoft.azure.example_azure_blob_to_gcs.GCP_BUCKET_NAME"]], "gcp_object_name (in module tests.system.providers.microsoft.azure.example_azure_blob_to_gcs)": [[42, "tests.system.providers.microsoft.azure.example_azure_blob_to_gcs.GCP_OBJECT_NAME"]], "test_run (in module tests.system.providers.microsoft.azure.example_azure_blob_to_gcs)": [[42, "tests.system.providers.microsoft.azure.example_azure_blob_to_gcs.test_run"]], "tests.system.providers.microsoft.azure.example_azure_blob_to_gcs": [[42, "module-tests.system.providers.microsoft.azure.example_azure_blob_to_gcs"]], "wait_for_blob (in module tests.system.providers.microsoft.azure.example_azure_blob_to_gcs)": [[42, "tests.system.providers.microsoft.azure.example_azure_blob_to_gcs.wait_for_blob"]], "dag_id (in module tests.system.providers.microsoft.azure.example_azure_container_instances)": [[43, "tests.system.providers.microsoft.azure.example_azure_container_instances.DAG_ID"]], "env_id (in module tests.system.providers.microsoft.azure.example_azure_container_instances)": [[43, 
"tests.system.providers.microsoft.azure.example_azure_container_instances.ENV_ID"]], "t1 (in module tests.system.providers.microsoft.azure.example_azure_container_instances)": [[43, "tests.system.providers.microsoft.azure.example_azure_container_instances.t1"]], "test_run (in module tests.system.providers.microsoft.azure.example_azure_container_instances)": [[43, "tests.system.providers.microsoft.azure.example_azure_container_instances.test_run"]], "tests.system.providers.microsoft.azure.example_azure_container_instances": [[43, "module-tests.system.providers.microsoft.azure.example_azure_container_instances"]], "dag_id (in module tests.system.providers.microsoft.azure.example_azure_cosmosdb)": [[44, "tests.system.providers.microsoft.azure.example_azure_cosmosdb.DAG_ID"]], "env_id (in module tests.system.providers.microsoft.azure.example_azure_cosmosdb)": [[44, "tests.system.providers.microsoft.azure.example_azure_cosmosdb.ENV_ID"]], "t1 (in module tests.system.providers.microsoft.azure.example_azure_cosmosdb)": [[44, "tests.system.providers.microsoft.azure.example_azure_cosmosdb.t1"]], "test_run (in module tests.system.providers.microsoft.azure.example_azure_cosmosdb)": [[44, "tests.system.providers.microsoft.azure.example_azure_cosmosdb.test_run"]], "tests.system.providers.microsoft.azure.example_azure_cosmosdb": [[44, "module-tests.system.providers.microsoft.azure.example_azure_cosmosdb"]], "client_id (in module tests.system.providers.microsoft.azure.example_azure_service_bus)": [[45, "tests.system.providers.microsoft.azure.example_azure_service_bus.CLIENT_ID"]], "execution_timeout (in module tests.system.providers.microsoft.azure.example_azure_service_bus)": [[45, "tests.system.providers.microsoft.azure.example_azure_service_bus.EXECUTION_TIMEOUT"]], "message (in module tests.system.providers.microsoft.azure.example_azure_service_bus)": [[45, "tests.system.providers.microsoft.azure.example_azure_service_bus.MESSAGE"]], "message_list (in module tests.system.providers.microsoft.azure.example_azure_service_bus)": [[45, "tests.system.providers.microsoft.azure.example_azure_service_bus.MESSAGE_LIST"]], "queue_name (in module tests.system.providers.microsoft.azure.example_azure_service_bus)": [[45, "tests.system.providers.microsoft.azure.example_azure_service_bus.QUEUE_NAME"]], "subscription_name (in module tests.system.providers.microsoft.azure.example_azure_service_bus)": [[45, "tests.system.providers.microsoft.azure.example_azure_service_bus.SUBSCRIPTION_NAME"]], "topic_name (in module tests.system.providers.microsoft.azure.example_azure_service_bus)": [[45, "tests.system.providers.microsoft.azure.example_azure_service_bus.TOPIC_NAME"]], "create_service_bus_queue (in module tests.system.providers.microsoft.azure.example_azure_service_bus)": [[45, "tests.system.providers.microsoft.azure.example_azure_service_bus.create_service_bus_queue"]], "test_run (in module tests.system.providers.microsoft.azure.example_azure_service_bus)": [[45, "tests.system.providers.microsoft.azure.example_azure_service_bus.test_run"]], "tests.system.providers.microsoft.azure.example_azure_service_bus": [[45, "module-tests.system.providers.microsoft.azure.example_azure_service_bus"]], "airflow_home (in module tests.system.providers.microsoft.azure.example_azure_synapse)": [[46, "tests.system.providers.microsoft.azure.example_azure_synapse.AIRFLOW_HOME"]], "execution_timeout (in module tests.system.providers.microsoft.azure.example_azure_synapse)": [[46, 
"tests.system.providers.microsoft.azure.example_azure_synapse.EXECUTION_TIMEOUT"]], "spark_job_payload (in module tests.system.providers.microsoft.azure.example_azure_synapse)": [[46, "tests.system.providers.microsoft.azure.example_azure_synapse.SPARK_JOB_PAYLOAD"]], "default_args (in module tests.system.providers.microsoft.azure.example_azure_synapse)": [[46, "tests.system.providers.microsoft.azure.example_azure_synapse.default_args"]], "run_spark_job (in module tests.system.providers.microsoft.azure.example_azure_synapse)": [[46, "tests.system.providers.microsoft.azure.example_azure_synapse.run_spark_job"]], "test_run (in module tests.system.providers.microsoft.azure.example_azure_synapse)": [[46, "tests.system.providers.microsoft.azure.example_azure_synapse.test_run"]], "tests.system.providers.microsoft.azure.example_azure_synapse": [[46, "module-tests.system.providers.microsoft.azure.example_azure_synapse"]], "dag_id (in module tests.system.providers.microsoft.azure.example_fileshare)": [[47, "tests.system.providers.microsoft.azure.example_fileshare.DAG_ID"]], "directory (in module tests.system.providers.microsoft.azure.example_fileshare)": [[47, "tests.system.providers.microsoft.azure.example_fileshare.DIRECTORY"]], "env_id (in module tests.system.providers.microsoft.azure.example_fileshare)": [[47, "tests.system.providers.microsoft.azure.example_fileshare.ENV_ID"]], "name (in module tests.system.providers.microsoft.azure.example_fileshare)": [[47, "tests.system.providers.microsoft.azure.example_fileshare.NAME"]], "create_fileshare() (in module tests.system.providers.microsoft.azure.example_fileshare)": [[47, "tests.system.providers.microsoft.azure.example_fileshare.create_fileshare"]], "delete_fileshare() (in module tests.system.providers.microsoft.azure.example_fileshare)": [[47, "tests.system.providers.microsoft.azure.example_fileshare.delete_fileshare"]], "test_run (in module tests.system.providers.microsoft.azure.example_fileshare)": [[47, "tests.system.providers.microsoft.azure.example_fileshare.test_run"]], "tests.system.providers.microsoft.azure.example_fileshare": [[47, "module-tests.system.providers.microsoft.azure.example_fileshare"]], "dag_id (in module tests.system.providers.microsoft.azure.example_local_to_adls)": [[48, "tests.system.providers.microsoft.azure.example_local_to_adls.DAG_ID"]], "env_id (in module tests.system.providers.microsoft.azure.example_local_to_adls)": [[48, "tests.system.providers.microsoft.azure.example_local_to_adls.ENV_ID"]], "local_file_path (in module tests.system.providers.microsoft.azure.example_local_to_adls)": [[48, "tests.system.providers.microsoft.azure.example_local_to_adls.LOCAL_FILE_PATH"]], "remote_file_path (in module tests.system.providers.microsoft.azure.example_local_to_adls)": [[48, "tests.system.providers.microsoft.azure.example_local_to_adls.REMOTE_FILE_PATH"]], "test_run (in module tests.system.providers.microsoft.azure.example_local_to_adls)": [[48, "tests.system.providers.microsoft.azure.example_local_to_adls.test_run"]], "tests.system.providers.microsoft.azure.example_local_to_adls": [[48, "module-tests.system.providers.microsoft.azure.example_local_to_adls"]], "upload_file (in module tests.system.providers.microsoft.azure.example_local_to_adls)": [[48, "tests.system.providers.microsoft.azure.example_local_to_adls.upload_file"]], "dag_id (in module tests.system.providers.microsoft.azure.example_local_to_wasb)": [[49, "tests.system.providers.microsoft.azure.example_local_to_wasb.DAG_ID"]], "env_id (in module 
tests.system.providers.microsoft.azure.example_local_to_wasb)": [[49, "tests.system.providers.microsoft.azure.example_local_to_wasb.ENV_ID"]], "path_to_upload_file (in module tests.system.providers.microsoft.azure.example_local_to_wasb)": [[49, "tests.system.providers.microsoft.azure.example_local_to_wasb.PATH_TO_UPLOAD_FILE"]], "test_run (in module tests.system.providers.microsoft.azure.example_local_to_wasb)": [[49, "tests.system.providers.microsoft.azure.example_local_to_wasb.test_run"]], "tests.system.providers.microsoft.azure.example_local_to_wasb": [[49, "module-tests.system.providers.microsoft.azure.example_local_to_wasb"]], "upload (in module tests.system.providers.microsoft.azure.example_local_to_wasb)": [[49, "tests.system.providers.microsoft.azure.example_local_to_wasb.upload"]], "azure_container_name (in module tests.system.providers.microsoft.azure.example_sftp_to_wasb)": [[50, "tests.system.providers.microsoft.azure.example_sftp_to_wasb.AZURE_CONTAINER_NAME"]], "blob_prefix (in module tests.system.providers.microsoft.azure.example_sftp_to_wasb)": [[50, "tests.system.providers.microsoft.azure.example_sftp_to_wasb.BLOB_PREFIX"]], "dag_id (in module tests.system.providers.microsoft.azure.example_sftp_to_wasb)": [[50, "tests.system.providers.microsoft.azure.example_sftp_to_wasb.DAG_ID"]], "env_id (in module tests.system.providers.microsoft.azure.example_sftp_to_wasb)": [[50, "tests.system.providers.microsoft.azure.example_sftp_to_wasb.ENV_ID"]], "file_complete_path (in module tests.system.providers.microsoft.azure.example_sftp_to_wasb)": [[50, "tests.system.providers.microsoft.azure.example_sftp_to_wasb.FILE_COMPLETE_PATH"]], "local_file_path (in module tests.system.providers.microsoft.azure.example_sftp_to_wasb)": [[50, "tests.system.providers.microsoft.azure.example_sftp_to_wasb.LOCAL_FILE_PATH"]], "sample_filename (in module tests.system.providers.microsoft.azure.example_sftp_to_wasb)": [[50, "tests.system.providers.microsoft.azure.example_sftp_to_wasb.SAMPLE_FILENAME"]], "sftp_file_complete_path (in module tests.system.providers.microsoft.azure.example_sftp_to_wasb)": [[50, "tests.system.providers.microsoft.azure.example_sftp_to_wasb.SFTP_FILE_COMPLETE_PATH"]], "sftp_src_path (in module tests.system.providers.microsoft.azure.example_sftp_to_wasb)": [[50, "tests.system.providers.microsoft.azure.example_sftp_to_wasb.SFTP_SRC_PATH"]], "delete_sftp_file() (in module tests.system.providers.microsoft.azure.example_sftp_to_wasb)": [[50, "tests.system.providers.microsoft.azure.example_sftp_to_wasb.delete_sftp_file"]], "test_run (in module tests.system.providers.microsoft.azure.example_sftp_to_wasb)": [[50, "tests.system.providers.microsoft.azure.example_sftp_to_wasb.test_run"]], "tests.system.providers.microsoft.azure.example_sftp_to_wasb": [[50, "module-tests.system.providers.microsoft.azure.example_sftp_to_wasb"]], "transfer_files_to_sftp_step (in module tests.system.providers.microsoft.azure.example_sftp_to_wasb)": [[50, "tests.system.providers.microsoft.azure.example_sftp_to_wasb.transfer_files_to_sftp_step"]], "tests.system.providers.microsoft.azure": [[51, "module-tests.system.providers.microsoft.azure"]], "pythonpath": [[68, "index-0"]], "environment variable": [[68, "index-0"]]}}) \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/secrets-backends/azure-key-vault.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/secrets-backends/azure-key-vault.html new file mode 100644 index 00000000000..a8f9bc432a4 --- 
/dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/secrets-backends/azure-key-vault.html @@ -0,0 +1,892 @@ + + + + + + + + + + + + Azure Key Vault Backend — apache-airflow-providers-microsoft-azure Documentation + + + + + + + + + + + + + + + + + + + +

Azure Key Vault Backend

+

To enable Azure Key Vault as a secrets backend, specify AzureKeyVaultBackend as the backend in the [secrets] section of airflow.cfg.

+

Here is a sample configuration:

+
[secrets]
+backend = airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend
+backend_kwargs = {"connections_prefix": "airflow-connections", "variables_prefix": "airflow-variables", "vault_url": "https://example-akv-resource-name.vault.azure.net/"}
+
+
+

For client authentication, the DefaultAzureCredential from the Azure Python SDK is used as the credential provider, which supports service principal, managed identity and user credentials.

+

For example, to specify a service principal with a secret, you can set the environment variables AZURE_TENANT_ID, AZURE_CLIENT_ID and AZURE_CLIENT_SECRET.
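
As an illustration only (a minimal sketch, not part of the provider itself), the Python snippet below shows that DefaultAzureCredential picks those environment variables up automatically; nothing here needs to be written into airflow.cfg or backend_kwargs.

import os

from azure.identity import DefaultAzureCredential

# DefaultAzureCredential tries several credential sources in order; when the
# three variables below are exported it authenticates as that service
# principal, so no credentials have to appear in the Airflow configuration.
for name in ("AZURE_TENANT_ID", "AZURE_CLIENT_ID", "AZURE_CLIENT_SECRET"):
    assert os.environ.get(name), f"{name} is not set"

credential = DefaultAzureCredential()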

+
+

Optional lookup

+

Optionally, you can look up connections, variables, or config individually or in any combination, excluding the rest. This prevents requests for the excluded types from being sent to Azure Key Vault.

+

If you want to look up some types and not others in Azure Key Vault, you may do so by setting the relevant *_prefix parameter of the ones to be excluded to null.

+

For example, if you want to set the connections_prefix parameter to "airflow-connections" and not look up variables, your configuration file should look like this:

+
[secrets]
+backend = airflow.providers.microsoft.azure.secrets.key_vault.AzureKeyVaultBackend
+backend_kwargs = {"connections_prefix": "airflow-connections", "variables_prefix": null, "vault_url": "https://example-akv-resource-name.vault.azure.net/"}
+
+
+
+
+

Storing and Retrieving Connections

+

If you have set connections_prefix to airflow-connections, then for a connection id of smtp_default you would store your connection at airflow-connections-smtp-default.

+

The value of the secret must be the connection URI representation of the connection object.
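
As a hedged illustration, the sketch below creates that secret directly with the azure-keyvault-secrets SDK (one of several ways to do it); the SMTP host, user and password are placeholders.

from azure.identity import DefaultAzureCredential
from azure.keyvault.secrets import SecretClient

client = SecretClient(
    vault_url="https://example-akv-resource-name.vault.azure.net/",
    credential=DefaultAzureCredential(),
)

# The secret name combines connections_prefix, the "-" separator and the
# connection id written with dashes, exactly as described above; the value
# is the connection URI representation of the connection.
client.set_secret(
    "airflow-connections-smtp-default",
    "smtp://user:password@smtp.example.com:587",
)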

+
+
+

Storing and Retrieving Variables

+

If you have set variables_prefix to airflow-variables, then for a Variable key of hello you would store your Variable at airflow-variables-hello.
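
A minimal sketch of how the configured backend would resolve that Variable, assuming the provider package is installed and the credentials discussed above are available; get_variable() is the lookup used when Airflow resolves Variable.get("hello").

from airflow.providers.microsoft.azure.secrets.key_vault import AzureKeyVaultBackend

backend = AzureKeyVaultBackend(
    variables_prefix="airflow-variables",
    vault_url="https://example-akv-resource-name.vault.azure.net/",
)

# Resolves the secret named airflow-variables-hello and prints its value,
# or None if no such secret exists in the vault.
print(backend.get_variable("hello"))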

+
+
+

Reference

+

For more details on client authentication, refer to the DefaultAzureCredential class reference.

+
+ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/secrets-backends/index.html b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/secrets-backends/index.html new file mode 100644 index 00000000000..d322df185d0 --- /dev/null +++ b/docs-archive/apache-airflow-providers-microsoft-azure/5.0.1/secrets-backends/index.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers-microsoft-azure/stable.txt b/docs-archive/apache-airflow-providers-microsoft-azure/stable.txt index 28cbf7c0aae..32f3eaad0d9 100644 --- a/docs-archive/apache-airflow-providers-microsoft-azure/stable.txt +++ b/docs-archive/apache-airflow-providers-microsoft-azure/stable.txt @@ -1 +1 @@ -5.0.0 \ No newline at end of file +5.0.1 \ No newline at end of file diff --git a/docs-archive/apache-airflow-providers/core-extensions/connections.html b/docs-archive/apache-airflow-providers/core-extensions/connections.html index 1e0a653c521..cd2bf38ae96 100644 --- a/docs-archive/apache-airflow-providers/core-extensions/connections.html +++ b/docs-archive/apache-airflow-providers/core-extensions/connections.html @@ -793,19 +793,19 @@

Jenkins

Microsoft Azure

diff --git a/docs-archive/apache-airflow-providers/core-extensions/extra-links.html b/docs-archive/apache-airflow-providers/core-extensions/extra-links.html index 581a5c8421c..18a0c0bcab2 100644 --- a/docs-archive/apache-airflow-providers/core-extensions/extra-links.html +++ b/docs-archive/apache-airflow-providers/core-extensions/extra-links.html @@ -691,7 +691,7 @@

Google

Microsoft Azure

diff --git a/docs-archive/apache-airflow-providers/core-extensions/logging.html b/docs-archive/apache-airflow-providers/core-extensions/logging.html index aac948412bc..fcad67a7289 100644 --- a/docs-archive/apache-airflow-providers/core-extensions/logging.html +++ b/docs-archive/apache-airflow-providers/core-extensions/logging.html @@ -595,9 +595,9 @@

Google

diff --git a/docs-archive/apache-airflow-providers/core-extensions/secrets-backends.html b/docs-archive/apache-airflow-providers/core-extensions/secrets-backends.html index d33b8dc0d24..fd665f2812f 100644 --- a/docs-archive/apache-airflow-providers/core-extensions/secrets-backends.html +++ b/docs-archive/apache-airflow-providers/core-extensions/secrets-backends.html @@ -591,7 +591,7 @@

Hashicorp

Microsoft Azure

diff --git a/docs-archive/apache-airflow-providers/operators-and-hooks-ref/azure.html b/docs-archive/apache-airflow-providers/operators-and-hooks-ref/azure.html index 5fef7120582..82c0ca5bc95 100644 --- a/docs-archive/apache-airflow-providers/operators-and-hooks-ref/azure.html +++ b/docs-archive/apache-airflow-providers/operators-and-hooks-ref/azure.html @@ -564,7 +564,7 @@

Azure: Microsoft Azure

Airflow has limited support for Microsoft Azure.

-

Some hooks are based on airflow.providers.microsoft.azure.hooks.base_azure +

Some hooks are based on airflow.providers.microsoft.azure.hooks.base_azure which authenticate Azure’s Python SDK Clients.

Services

@@ -573,7 +573,7 @@

Services

Hooks
-

airflow.providers.microsoft.azure.hooks.base_azure.

+

airflow.providers.microsoft.azure.hooks.base_azure.

Provider

apache-airflow-providers-microsoft-azure

@@ -587,10 +587,10 @@

Microsoft Azure

Operators
-

airflow.providers.microsoft.azure.operators.batch.

+

airflow.providers.microsoft.azure.operators.batch.

Hooks
-

airflow.providers.microsoft.azure.hooks.batch.

+

airflow.providers.microsoft.azure.hooks.batch.

Provider

apache-airflow-providers-microsoft-azure

@@ -604,10 +604,10 @@

Microsoft Azure BatchMicrosoft Azure Blob Storage

Operators
-

airflow.providers.microsoft.azure.operators.wasb_delete_blob.

+

airflow.providers.microsoft.azure.operators.wasb_delete_blob.

Hooks
-

airflow.providers.microsoft.azure.hooks.wasb.

+

airflow.providers.microsoft.azure.hooks.wasb.

Provider

apache-airflow-providers-microsoft-azure

@@ -621,10 +621,10 @@

Microsoft Azure Blob Storage

Operators
-

airflow.providers.microsoft.azure.operators.container_instances.

+

airflow.providers.microsoft.azure.operators.container_instances.

Hooks
-

airflow.providers.microsoft.azure.hooks.container_volume, airflow.providers.microsoft.azure.hooks.container_registry, airflow.providers.microsoft.azure.hooks.container_instance.

+

airflow.providers.microsoft.azure.hooks.container_volume, airflow.providers.microsoft.azure.hooks.container_registry, airflow.providers.microsoft.azure.hooks.container_instance.

Provider

apache-airflow-providers-microsoft-azure

@@ -638,10 +638,10 @@

Microsoft Azure Container Instances

Operators
-

airflow.providers.microsoft.azure.operators.cosmos.

+

airflow.providers.microsoft.azure.operators.cosmos.

Hooks
-

airflow.providers.microsoft.azure.hooks.cosmos.

+

airflow.providers.microsoft.azure.hooks.cosmos.

Provider

apache-airflow-providers-microsoft-azure

@@ -655,10 +655,10 @@

Microsoft Azure Cosmos DB

Operators
-

airflow.providers.microsoft.azure.operators.adx.

+

airflow.providers.microsoft.azure.operators.adx.

Hooks
-

airflow.providers.microsoft.azure.hooks.adx.

+

airflow.providers.microsoft.azure.hooks.adx.

Provider

apache-airflow-providers-microsoft-azure

@@ -672,13 +672,13 @@

Microsoft Azure Data Explorer

Operators
-

airflow.providers.microsoft.azure.operators.data_factory.

+

airflow.providers.microsoft.azure.operators.data_factory.

Hooks
-

airflow.providers.microsoft.azure.hooks.data_factory.

+

airflow.providers.microsoft.azure.hooks.data_factory.

Guides
-

Azure Data Factory Operators.

+

Azure Data Factory Operators.

Provider

apache-airflow-providers-microsoft-azure

@@ -692,13 +692,13 @@

Microsoft Azure Data Factory

Operators
-

airflow.providers.microsoft.azure.operators.adls.

+

airflow.providers.microsoft.azure.operators.adls.

Hooks
-

airflow.providers.microsoft.azure.hooks.data_lake.

+

airflow.providers.microsoft.azure.hooks.data_lake.

Guides
-

Azure DataLake Storage Operators.

+

Azure DataLake Storage Operators.

Provider

apache-airflow-providers-microsoft-azure

@@ -712,7 +712,7 @@

Microsoft Azure Data Lake Storage

Hooks
-

airflow.providers.microsoft.azure.hooks.fileshare.

+

airflow.providers.microsoft.azure.hooks.fileshare.

Provider

apache-airflow-providers-microsoft-azure

@@ -726,13 +726,13 @@

Microsoft Azure FileShare

Operators
-

airflow.providers.microsoft.azure.operators.asb.

+

airflow.providers.microsoft.azure.operators.asb.

Hooks
-

airflow.providers.microsoft.azure.hooks.asb.

+

airflow.providers.microsoft.azure.hooks.asb.

Guides
-

Azure Service Bus Operators.

+

Azure Service Bus Operators.

Provider

apache-airflow-providers-microsoft-azure

@@ -746,13 +746,13 @@

Microsoft Azure Service Bus

Operators
-

airflow.providers.microsoft.azure.operators.synapse.

+

airflow.providers.microsoft.azure.operators.synapse.

Hooks
-

airflow.providers.microsoft.azure.hooks.synapse.

+

airflow.providers.microsoft.azure.hooks.synapse.

Guides
-

Azure Synapse Operators.

+

Azure Synapse Operators.

Provider

apache-airflow-providers-microsoft-azure

@@ -810,10 +810,10 @@

Local to Microsoft Azure Data Lake Storage

Microsoft Azure Data Lake Storage

Operator guide
-

Upload data from Local Filesystem to Azure Data Lake

+

Upload data from Local Filesystem to Azure Data Lake

Python API
-

airflow.providers.microsoft.azure.transfers.local_to_adls

+

airflow.providers.microsoft.azure.transfers.local_to_adls

Provider

apache-airflow-providers-microsoft-azure

@@ -830,7 +830,7 @@

Oracle to Microsoft Azure Data Lake Storage

Microsoft Azure Data Lake Storage

Python API
-

airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake

+

airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake

Provider

apache-airflow-providers-microsoft-azure

@@ -844,7 +844,7 @@

Local to Microsoft Azure Blob Storage

Microsoft Azure Blob Storage

Python API
-

airflow.providers.microsoft.azure.transfers.local_to_wasb

+

airflow.providers.microsoft.azure.transfers.local_to_wasb

Provider

apache-airflow-providers-microsoft-azure

@@ -861,10 +861,10 @@

Microsoft Azure Blob Storage to Google Cloud Storage (GCS)

Google Cloud Storage (GCS)

Operator guide
-

Azure Blob Storage Transfer Operator

+

Azure Blob Storage Transfer Operator

Python API
-

airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs

+

airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs

Provider

apache-airflow-providers-microsoft-azure

@@ -881,10 +881,10 @@

SSH File Transfer Protocol (SFTP) to Microsoft Azure Blob Storage

Microsoft Azure Blob Storage

Operator guide
-

Azure Blob Storage Transfer Operator

+

Azure Blob Storage Transfer Operator

Python API
-

airflow.providers.microsoft.azure.transfers.sftp_to_wasb

+

airflow.providers.microsoft.azure.transfers.sftp_to_wasb

Provider

apache-airflow-providers-microsoft-azure

diff --git a/docs-archive/apache-airflow-providers/operators-and-hooks-ref/google.html b/docs-archive/apache-airflow-providers/operators-and-hooks-ref/google.html index 13bbd6d53c9..028d0026ee3 100644 --- a/docs-archive/apache-airflow-providers/operators-and-hooks-ref/google.html +++ b/docs-archive/apache-airflow-providers/operators-and-hooks-ref/google.html @@ -2010,10 +2010,10 @@

Microsoft Azure Blob Storage to Google Cloud Storage (GCS)

Google Cloud Storage (GCS)

Operator guide
-

Azure Blob Storage Transfer Operator

+

Azure Blob Storage Transfer Operator

Python API
-

airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs

+

airflow.providers.microsoft.azure.transfers.azure_blob_to_gcs

Provider

apache-airflow-providers-microsoft-azure

diff --git a/docs-archive/apache-airflow-providers/operators-and-hooks-ref/protocol.html b/docs-archive/apache-airflow-providers/operators-and-hooks-ref/protocol.html index fa6a6e94bf6..45e919ecaf7 100644 --- a/docs-archive/apache-airflow-providers/operators-and-hooks-ref/protocol.html +++ b/docs-archive/apache-airflow-providers/operators-and-hooks-ref/protocol.html @@ -570,14 +570,17 @@

Protocols

File Transfer Protocol (FTP)

-
Hooks
-

airflow.providers.ftp.hooks.ftp.

+
Operators
+

airflow.providers.ftp.operators.ftp.

-
Provider
-

apache-airflow-providers-ftp

+
Hooks
+

airflow.providers.ftp.hooks.ftp.

-
Product documentation
-

File Transfer Protocol (FTP)

+
Provider
+

apache-airflow-providers-ftp

+
+
Product documentation
+

File Transfer Protocol (FTP)

@@ -922,10 +925,10 @@

SSH File Transfer Protocol (SFTP) to Microsoft Azure Blob Storage

Microsoft Azure Blob Storage

Operator guide
-

Azure Blob Storage Transfer Operator

+

Azure Blob Storage Transfer Operator

Python API
-

airflow.providers.microsoft.azure.transfers.sftp_to_wasb

+

airflow.providers.microsoft.azure.transfers.sftp_to_wasb

Provider

apache-airflow-providers-microsoft-azure

diff --git a/docs-archive/apache-airflow-providers/operators-and-hooks-ref/software.html b/docs-archive/apache-airflow-providers/operators-and-hooks-ref/software.html index 1ad1f3dab55..c9d19d7c57d 100644 --- a/docs-archive/apache-airflow-providers/operators-and-hooks-ref/software.html +++ b/docs-archive/apache-airflow-providers/operators-and-hooks-ref/software.html @@ -1403,7 +1403,7 @@

Oracle to Microsoft Azure Data Lake Storage

Microsoft Azure Data Lake Storage

Python API
-

airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake

+

airflow.providers.microsoft.azure.transfers.oracle_to_azure_data_lake

Provider

apache-airflow-providers-microsoft-azure

diff --git a/docs-archive/apache-airflow-providers/packages-ref.html b/docs-archive/apache-airflow-providers/packages-ref.html index 94cb4afb044..09f89da8ecc 100644 --- a/docs-archive/apache-airflow-providers/packages-ref.html +++ b/docs-archive/apache-airflow-providers/packages-ref.html @@ -942,7 +942,7 @@

Providers packages reference
Available versions
-

4.1.1, 4.1.0, 4.0.1, 4.0.0, 3.1.0, 3.0.0, 2.3.3, 2.3.2, 2.3.1, 2.3.0, 2.2.0, 2.1.0, 2.0.3, 2.0.2, 2.0.1, 2.0.0, 1.0.3, 1.0.2, 1.0.1, 1.0.0.

+

5.0.0, 4.1.1, 4.1.0, 4.0.1, 4.0.0, 3.1.0, 3.0.0, 2.3.3, 2.3.2, 2.3.1, 2.3.0, 2.2.0, 2.1.0, 2.0.3, 2.0.2, 2.0.1, 2.0.0, 1.0.3, 1.0.2, 1.0.1, 1.0.0.

Reference

PyPI Repository

@@ -1113,7 +1113,7 @@

Providers packages reference
Available versions
-

1.1.0, 1.0.0.

+

2.0.0, 1.1.0, 1.0.0.

Reference

PyPI Repository

@@ -1579,13 +1579,13 @@

Providers packages reference
Available versions
-

5.0.0, 4.3.0, 4.2.0, 4.1.0, 4.0.0, 3.9.0, 3.8.0, 3.7.2, 3.7.1, 3.7.0, 3.6.0, 3.5.0, 3.4.0, 3.3.0, 3.2.0, 3.1.1, 3.1.0, 3.0.0, 2.0.0, 1.3.0, 1.2.0, 1.1.0, 1.0.0.

+

5.0.1, 5.0.0, 4.3.0, 4.2.0, 4.1.0, 4.0.0, 3.9.0, 3.8.0, 3.7.2, 3.7.1, 3.7.0, 3.6.0, 3.5.0, 3.4.0, 3.3.0, 3.2.0, 3.1.1, 3.1.0, 3.0.0, 2.0.0, 1.3.0, 1.2.0, 1.1.0, 1.0.0.

Reference

PyPI Repository

Python API Reference
-

airflow.providers.microsoft.azure

+

airflow.providers.microsoft.azure

diff --git a/docs-archive/apache-airflow-providers/searchindex.js b/docs-archive/apache-airflow-providers/searchindex.js index 573a63f39c9..2451c5b6260 100644 --- a/docs-archive/apache-airflow-providers/searchindex.js +++ b/docs-archive/apache-airflow-providers/searchindex.js @@ -1 +1 @@ -Search.setIndex({"docnames": ["core-extensions/auth-backends", "core-extensions/connections", "core-extensions/extra-links", "core-extensions/index", "core-extensions/logging", "core-extensions/secrets-backends", "howto/create-update-providers", "index", "installing-from-pypi", "installing-from-sources", "operators-and-hooks-ref/apache", "operators-and-hooks-ref/aws", "operators-and-hooks-ref/azure", "operators-and-hooks-ref/google", "operators-and-hooks-ref/index", "operators-and-hooks-ref/protocol", "operators-and-hooks-ref/services", "operators-and-hooks-ref/software", "packages-ref"], "filenames": ["core-extensions/auth-backends.rst", "core-extensions/connections.rst", "core-extensions/extra-links.rst", "core-extensions/index.rst", "core-extensions/logging.rst", "core-extensions/secrets-backends.rst", "howto/create-update-providers.rst", "index.rst", "installing-from-pypi.rst", "installing-from-sources.rst", "operators-and-hooks-ref/apache.rst", "operators-and-hooks-ref/aws.rst", "operators-and-hooks-ref/azure.rst", "operators-and-hooks-ref/google.rst", "operators-and-hooks-ref/index.rst", "operators-and-hooks-ref/protocol.rst", "operators-and-hooks-ref/services.rst", "operators-and-hooks-ref/software.rst", "packages-ref.rst"], "titles": ["Auth backends", "Connections", "Extra Links", "Core Extensions", "Writing logs", "Secret backends", "Community Providers", "Provider packages", "Installation from PyPI", "Installing Providers from Sources", "ASF: Apache Software Foundation", "AWS: Amazon Web Services", "Azure: Microsoft Azure", "Google", "Operators and Hooks Reference", "Protocol integrations", "Services", "Software integrations", "Providers packages reference"], "terms": {"thi": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18], "i": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18], "summari": [0, 1, 2, 4, 5, 7], "all": [0, 1, 2, 4, 5, 6, 7, 11, 13], "apach": [0, 2, 3, 4, 5, 6, 7, 8, 9, 12, 14, 16], "airflow": [0, 1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17], "commun": [0, 1, 2, 4, 5, 15], "provid": [0, 1, 2, 3, 4, 5, 8, 10, 11, 12, 13, 15, 16, 17], "implement": [0, 1, 2, 3, 4, 5, 6, 7], "authent": [0, 7, 12], "expos": [0, 1, 2, 4, 5, 7], "via": [0, 1, 2, 4, 5, 7, 8, 9, 18], "manag": [0, 1, 2, 4, 5, 7, 8, 9, 14, 18], "": [0, 5, 6, 7, 9, 12, 18], "web": [0, 7, 14, 18], "server": [0, 3, 7, 9, 14, 18], "api": [0, 7, 10, 11, 12, 14, 15, 16, 17, 18], "base": [0, 6, 11, 12, 13], "flask": 0, "applic": 0, "builder": 0, "capabl": [0, 5, 7, 18], "you": [0, 1, 2, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 15, 16, 17], "can": [0, 1, 2, 3, 4, 5, 6, 7, 9, 13], "read": [0, 5, 6, 7], "more": [0, 6, 7, 8], "about": [0, 6, 7, 9], "those": [0, 1, 4, 5, 6, 7, 8], "fab": 0, "secur": [0, 14, 18], "doc": [0, 6], "also": [0, 1, 2, 4, 5, 6, 7, 9, 13], "take": [0, 5], "look": [0, 5, 6, 7, 8], "avail": [0, 2, 4, 5, 6, 7, 14, 18], "core": [0, 4, 5, 14], "webserv": 0, "see": [0, 1, 2, 4, 5, 6, 7, 8, 13], "google_openid": 0, "devel": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18], "2": [0, 1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18], "5": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18], "0": [0, 1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 12, 13, 
14, 15, 16, 17, 18], "dev0": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18], "an": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18], "experiment": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18], "featur": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18], "extend": [1, 2, 3], "custom": [1, 2, 5, 6], "each": [1, 2, 6, 7], "defin": [1, 2, 6, 7], "own": [1, 2, 5, 6], "paramet": [1, 6], "ui": [1, 6, 7], "field": [1, 7], "behaviour": [1, 7], "when": [1, 6, 7, 8], "type": [1, 6, 13], "us": [1, 3, 8, 9, 13, 15, 17], "automat": [1, 6, 7], "creat": [1, 10], "hook": [1, 6, 7, 10, 11, 12, 15, 16, 17], "specif": [1, 3, 7], "The": [1, 2, 6, 7, 9], "explain": [1, 2, 6, 7], "airbytehook": 1, "oss": 1, "osshook": 1, "aw": [1, 7, 10, 13, 14, 15, 16, 17, 18], "awsgenerichook": 1, "emr": [1, 14], "emrhook": 1, "redshift": [1, 14], "redshiftsqlhook": 1, "cassandrahook": 1, "drillhook": 1, "druiddbapihook": 1, "hdfshook": 1, "hive_cli": 1, "hiveclihook": 1, "hiveserver2": 1, "hiveserver2hook": 1, "hive_metastor": 1, "hivemetastorehook": 1, "livyhook": 1, "pig_cli": 1, "pigclihook": 1, "spark_jdbc": [1, 10], "sparkjdbchook": 1, "spark_sql": [1, 10], "sparksqlhook": 1, "sparksubmithook": 1, "sqoophook": 1, "arangodbhook": 1, "asanahook": 1, "jirahook": 1, "cloudanthook": 1, "kuberneteshook": 1, "databrickshook": 1, "dbt_cloud": 1, "dbtcloudhook": 1, "dingdinghook": 1, "discordwebhookhook": 1, "dockerhook": 1, "elasticsearchhook": 1, "exasolhook": 1, "facebook_soci": 1, "facebookadsreportinghook": 1, "ftphook": 1, "githubhook": 1, "google_cloud_platform": [1, 7], "googlebasehook": [1, 13], "dataprep": 1, "googledataprephook": 1, "gcpcloudsql": 1, "cloudsqlhook": 1, "gcpcloudsqldb": 1, "cloudsqldatabasehook": 1, "gcpbigqueri": 1, "bigqueryhook": 1, "gcpssh": 1, "computeenginesshhook": 1, "leveldb": [1, 14, 18], "leveldbhook": 1, "grpchook": 1, "vault": [1, 14, 18], "vaulthook": 1, "httphook": 1, "imaphook": 1, "influxdbhook": 1, "jdbchook": 1, "jenkinshook": 1, "azurebasehook": 1, "azure_data_explor": 1, "azuredataexplorerhook": 1, "azure_batch": 1, "azurebatchhook": 1, "azure_cosmo": 1, "azurecosmosdbhook": 1, "azure_data_lak": 1, "azuredatalakehook": 1, "azure_fileshar": 1, "azurefilesharehook": 1, "azure_container_volum": 1, "azurecontainervolumehook": 1, "azure_container_inst": 1, "azurecontainerinstancehook": 1, "wasb": [1, 12], "wasbhook": 1, "azure_data_factori": 1, "azuredatafactoryhook": 1, "azure_container_registri": 1, "azurecontainerregistryhook": 1, "azure_service_bu": 1, "baseazureservicebushook": 1, "azure_synaps": 1, "azuresynapsehook": 1, "mssqlhook": 1, "mongo": [1, 17], "mongohook": 1, "mysqlhook": 1, "neo4jhook": 1, "odbchook": 1, "opsgeniealerthook": 1, "oraclehook": 1, "pagerdutyhook": 1, "pagerduty_ev": [1, 16], "pagerdutyeventshook": 1, "postgr": [1, 17], "postgreshook": 1, "prestohook": 1, "qubolehook": 1, "redishook": 1, "salesforcehook": 1, "sambahook": 1, "segmenthook": 1, "sftphook": 1, "slackhook": 1, "slackwebhook": 1, "slackwebhookhook": 1, "snowflakehook": 1, "sqlitehook": 1, "sshhook": 1, "tableauhook": 1, "tabularhook": 1, "trinohook": 1, "verticahook": 1, "yandexcloud": 1, "yandexcloudbasehook": 1, "zendeskhook": 1, "oper": [2, 6, 7, 10, 11, 12, 15, 16, 17], "For": [2, 6, 7, 9], "its": [2, 5, 7], "redirect": 2, "user": [2, 6, 7, 9], "extern": [2, 6, 7], "system": [2, 7, 14, 18], "button": 2, "task": [2, 4, 7], "page": [2, 7, 8, 9], "ar": [2, 6, 7, 8, 9, 11, 12, 13, 14], "here": [2, 3, 4, 5, 7, 14, 18], 
"batchjobdefinitionlink": 2, "batchjobdetailslink": 2, "batchjobqueuelink": 2, "emrclusterlink": 2, "cloudwatcheventslink": 2, "databricksjobrunlink": 2, "dbtcloudrunjoboperatorlink": 2, "bigqueryconsolelink": 2, "bigqueryconsoleindexablelink": 2, "aiplatformconsolelink": 2, "dataformrepositorylink": 2, "dataformworkspacelink": 2, "dataformworkflowinvocationlink": 2, "datafusioninstancelink": 2, "datafusionpipelinelink": 2, "datafusionpipelineslink": 2, "cloudsqlinstancelink": 2, "cloudsqlinstancedatabaselink": 2, "dataplextasklink": 2, "dataplextaskslink": 2, "dataplexlakelink": 2, "bigquerydatasetlink": 2, "bigquerytablelink": 2, "bigquerydatatransferconfiglink": 2, "computeinstancedetailslink": 2, "computeinstancetemplatedetailslink": 2, "computeinstancegroupmanagerdetailslink": 2, "cloudtasksqueuelink": 2, "cloudtaskslink": 2, "datacatalogentrygrouplink": 2, "datacatalogentrylink": 2, "datacatalogtagtemplatelink": 2, "dataproclink": 2, "dataproclistlink": 2, "dataprocmetastoredetailedlink": 2, "dataprocmetastorelink": 2, "dataprepflowlink": 2, "dataprepjobgrouplink": 2, "vertexaimodellink": 2, "vertexaimodellistlink": 2, "vertexaimodelexportlink": 2, "vertexaitraininglink": 2, "vertexaitrainingpipelineslink": 2, "vertexaidatasetlink": 2, "vertexaidatasetlistlink": 2, "vertexaihyperparametertuningjoblistlink": 2, "vertexaibatchpredictionjoblink": 2, "vertexaibatchpredictionjoblistlink": 2, "vertexaiendpointlink": 2, "vertexaiendpointlistlink": 2, "workflowsworkflowdetailslink": 2, "workflowslistofworkflowslink": 2, "workflowsexecutionlink": 2, "cloudcomposerenvironmentlink": 2, "cloudcomposerenvironmentslink": 2, "dataflowjoblink": 2, "clouddatastoreimportexportlink": 2, "clouddatastoreentitieslink": 2, "bigtableinstancelink": 2, "bigtableclusterlink": 2, "bigtabletableslink": 2, "spannerdatabaselink": 2, "spannerinstancelink": 2, "stackdrivernotificationslink": 2, "stackdriverpolicieslink": 2, "kubernetesengineclusterlink": 2, "kubernetesenginepodlink": 2, "pubsubsubscriptionlink": 2, "pubsubtopiclink": 2, "memcachedinstancedetailslink": 2, "memcachedinstancelistlink": 2, "redisinstancedetailslink": 2, "redisinstancelistlink": 2, "cloudbuildlink": 2, "cloudbuildlistlink": 2, "cloudbuildtriggerslistlink": 2, "cloudbuildtriggerdetailslink": 2, "lifescienceslink": 2, "cloudfunctionsdetailslink": 2, "cloudfunctionslistlink": 2, "cloudstoragetransferlistlink": 2, "cloudstoragetransferjoblink": 2, "cloudstoragetransferdetailslink": 2, "clouddlpdeidentifytemplateslistlink": 2, "clouddlpdeidentifytemplatedetailslink": 2, "clouddlpjobtriggerslistlink": 2, "clouddlpjobtriggerdetailslink": 2, "clouddlpjobslistlink": 2, "clouddlpjobdetailslink": 2, "clouddlpinspecttemplateslistlink": 2, "clouddlpinspecttemplatedetailslink": 2, "clouddlpinfotypeslistlink": 2, "clouddlpinfotypedetailslink": 2, "clouddlppossibleinfotypeslistlink": 2, "mlenginemodellink": 2, "mlenginemodelslistlink": 2, "mlenginejobdetailslink": 2, "mlenginejobslistlink": 2, "mlenginemodelversiondetailslink": 2, "storagelink": 2, "filedetailslink": 2, "azuredatafactorypipelinerunlink": 2, "qdslink": 2, "list": [3, 6, 7, 14, 18], "function": [3, 14], "thei": [3, 6, 7, 8, 18], "certain": [3, 7], "auth": 3, "backend": 3, "googl": [3, 6, 7, 9, 14], "connect": [3, 5, 6, 9, 13, 14, 18], "airbyt": [3, 9, 14], "alibaba": [3, 9], "amazon": [3, 7, 9, 14], "cassandra": [3, 9, 14], "drill": [3, 9, 14], "druid": [3, 9, 14], "hdf": [3, 9, 14], "hive": [3, 9, 14], "livi": [3, 9, 14], "pig": [3, 9, 14], "spark": [3, 9, 14], "sqoop": [3, 9, 14], 
"arangodb": [3, 9, 14], "asana": [3, 9, 14], "atlassian": [3, 9, 14], "jira": [3, 9, 14], "ibm": [3, 9, 14, 18], "cloudant": [3, 9, 14], "kubernet": [3, 9, 14], "databrick": [3, 9, 14], "dbt": [3, 9], "cloud": [3, 4, 7, 9, 14], "dingd": [3, 9, 14], "discord": [3, 9, 14], "docker": [3, 6, 9, 14], "elasticsearch": [3, 9, 14], "exasol": [3, 9, 14], "facebook": [3, 9, 14], "file": [3, 6, 7, 9, 14, 18], "transfer": [3, 6, 7, 9, 14, 18], "protocol": [3, 9, 14, 18], "ftp": [3, 9, 14], "github": [3, 6, 9, 14], "grpc": [3, 9, 14], "hashicorp": [3, 9, 14], "hypertext": [3, 9, 14, 18], "http": [3, 6, 7, 8, 9, 14], "internet": [3, 9, 14, 18], "messag": [3, 9, 14, 18], "access": [3, 6, 7, 9, 14, 18], "imap": [3, 9, 14], "influxdb": [3, 9, 14], "java": [3, 9, 14, 18], "databas": [3, 5, 7, 9, 14, 18], "jdbc": [3, 9, 14], "jenkin": [3, 9, 14], "microsoft": [3, 9, 14], "azur": [3, 9, 14], "sql": [3, 9, 14], "mssql": [3, 7, 9, 14], "mongodb": [3, 9, 14, 18], "mysql": [3, 9, 14], "neo4j": [3, 6, 9, 14], "odbc": [3, 9, 14], "opsgeni": [3, 9, 14], "oracl": [3, 9, 14], "pagerduti": [3, 9, 14], "postgresql": [3, 9, 14, 18], "presto": [3, 9, 14], "qubol": [3, 9, 14], "redi": [3, 9, 14], "salesforc": [3, 9, 14], "samba": [3, 9, 14], "segment": [3, 9, 14], "sftp": [3, 9, 14], "slack": [3, 9, 14], "snowflak": [3, 9, 14], "sqlite": [3, 9, 14], "ssh": [3, 9, 14], "tableau": [3, 9, 14], "tabular": [3, 9, 14], "trino": [3, 9, 14], "vertica": [3, 9, 14], "yandex": [3, 9, 14], "zendesk": [3, 9, 14], "extra": [3, 6], "link": [3, 6], "write": [3, 7], "log": [3, 6, 14], "secret": 3, "option": [4, 7], "osstaskhandl": 4, "s3taskhandl": 4, "cloudwatchtaskhandl": 4, "elasticsearchtaskhandl": 4, "platform": [4, 14, 18], "gcstaskhandl": 4, "stackdrivertaskhandl": 4, "blob": [4, 14], "storag": [4, 14], "wasbtaskhandl": 4, "ha": [5, 6, 7, 11, 12, 13], "variabl": [5, 7], "configur": [5, 6, 7, 13], "from": [5, 6, 7, 10, 11, 12, 13], "rather": [5, 6, 7], "than": [5, 6, 7], "while": [5, 6, 7, 8], "store": 5, "inform": [5, 6, 7], "possibl": [5, 6, 7], "mani": [5, 7], "enterpris": 5, "alreadi": [5, 6, 9], "have": [5, 7], "some": [5, 6, 7, 8, 12, 13], "tap": 5, "servic": [5, 6, 7, 14, 18], "integr": [5, 7, 10, 11, 12, 13, 14, 16, 18], "ones": [5, 6], "secretsmanagerbackend": 5, "systemsmanagerparameterstorebackend": 5, "cloudsecretmanagerbackend": 5, "vaultbackend": 5, "azurekeyvaultbackend": 5, "gather": 6, "necessari": 6, "step": [6, 7, 9, 14], "guidelin": 6, "exist": [6, 7], "should": [6, 7, 8, 9], "awar": 6, "mai": 6, "distinct": 6, "cover": [6, 7], "guid": [6, 9, 10, 11, 12, 13, 15, 16, 17], "sequenc": 6, "describ": [6, 7, 8], "wa": [6, 7], "design": [6, 7], "meet": 6, "most": [6, 7, 9], "linear": 6, "flow": 6, "order": [6, 7], "develop": [6, 7, 10], "anoth": 6, "recommend": [6, 9], "help": 6, "work": [6, 7], "similar": 6, "your": [6, 8], "That": [6, 7], "wai": [6, 7], "set": 6, "up": 6, "other": [6, 7, 8, 14], "depend": [6, 7], "first": [6, 7], "need": [6, 7, 8], "local": [6, 7, 14], "environ": [6, 7], "contribut": [6, 7], "quick": 6, "start": [6, 7], "did": 6, "yet": 6, "we": [6, 7], "breez": 6, "easili": [6, 7], "abl": [6, 7], "one": [6, 7, 9], "execut": [6, 18], "ci": 6, "workflow": [6, 8], "abov": [6, 7], "contain": [6, 7, 14], "These": [6, 10, 11, 12, 13, 15, 16, 17], "intern": [6, 7], "volum": 6, "In": [6, 7], "chang": [6, 7], "made": [6, 9], "id": [6, 7, 9], "appli": [6, 7], "insid": 6, "carri": 6, "out": 6, "quickli": 6, "our": [6, 7], "exampl": [6, 7, 8, 9, 13], "name": [6, 7], "new_provid": 6, "placehold": 6, "must": 6, 
"like": [6, 7, 8], "version": [6, 7, 8, 9, 18], "now": 6, "project": [6, 7], "below": [6, 7, 8, 9], "structur": [6, 7], "understand": 6, "compon": [6, 7], "If": [6, 7, 8], "still": 6, "doubt": 6, "build": [6, 7], "open": 6, "issu": 6, "so": [6, 7], "__init__": 6, "py": [6, 7], "example_dag": 6, "example_": 6, "test_": 6, "_system": 6, "consid": [6, 7], "ll": 6, "run": [6, 7, 9, 18], "my": [6, 7], "root": 6, "fafd8d630e46": 6, "opt": 6, "python": [6, 7, 8, 9, 10, 11, 12, 13, 15, 16, 17, 18], "m": 6, "pytest": 6, "import": [6, 7, 9], "part": [6, 7], "occur": 6, "pre": 6, "commit": 6, "instal": [6, 18], "rst": [6, 7], "setup": [6, 7], "spelling_wordlist": 6, "txt": [6, 8], "packag": [6, 8], "ref": 6, "logo": 6, "png": 6, "index": [6, 7], "yaml": 6, "changelog": 6, "releas": [6, 7, 14], "There": [6, 7, 9], "chanc": 6, "common": [6, 9, 14], "english": 6, "word": 6, "case": [6, 7], "add": [6, 7], "begin": 6, "capit": 6, "lowercas": 6, "second": 6, "block": 6, "namespac": 6, "nextdoor": 6, "line": [6, 7], "none": 6, "notfound": 6, "nullabl": 6, "neq": 6, "networkuri": 6, "nginx": 6, "nobr": 6, "nodash": 6, "under": [6, 7], "kei": [6, 9], "doesn": 6, "t": [6, 7], "ani": [6, 7], "empti": 6, "It": [6, 7, 9], "addit": [6, 7], "_howto": 6, "newprovideroper": 6, "class": [6, 7], "do": [6, 7, 8, 9], "someth": [6, 7], "amaz": 6, "requir": [6, 7, 8], "connection_id": 6, "awesom": 6, "exampleinclud": 6, "languag": 6, "after": [6, 7], "howto_operator_": 6, "end": [6, 7], "befor": [6, 7], "purpos": [6, 7], "check": [6, 7, 9, 16], "complet": 6, "best": [6, 7], "descript": [6, 7], "io": 6, "__": 6, "1": [6, 9, 18], "url": [6, 7], "www": 6, "tag": 6, "modul": [6, 7], "newproviderhook": 6, "deprec": [6, 7], "onli": [6, 7, 8], "behavior": 6, "howev": [6, 7], "support": [6, 8, 10, 11, 12, 13], "target": [6, 7, 10, 11, 12, 13, 15, 16, 17], "arrai": [6, 7], "allow": [6, 7, 10, 11, 12, 13, 15, 16, 17], "optim": [6, 7], "individu": [6, 7], "handl": [6, 7], "both": [6, 7], "definit": [6, 7], "two": [6, 7], "command": [6, 7, 8], "serv": 6, "accomplish": 6, "ensur": 6, "main": [6, 7, 9], "involv": 6, "filter": 6, "3": [6, 7, 8, 18], "might": [6, 7], "which": [6, 7, 12], "librari": 6, "Such": 6, "typic": [6, 8], "result": 6, "importerror": 6, "error": 6, "silent": 6, "ignor": 6, "pollut": 6, "fals": 6, "warn": [6, 9], "veri": [6, 7], "bad": 6, "pattern": 6, "tend": 6, "turn": [6, 7], "blind": 6, "spot": 6, "avoid": 6, "encourag": 6, "until": 6, "had": 6, "mechan": [6, 7], "select": [6, 9], "known": 6, "come": [6, 7, 8], "actual": 6, "lead": 6, "even": 6, "without": [6, 7], "give": [6, 7], "clue": 6, "miss": 6, "except": 6, "optionalproviderfeatureexcept": 6, "been": [6, 7], "introduc": [6, 7], "signal": 6, "providersmanag": 6, "would": [6, 7], "remain": 6, "compat": [6, 7], "continu": 6, "throw": 6, "plyvel": 6, "condit": 6, "keep": [6, 7], "try": [6, 7], "db": [6, 14], "airflowexcept": 6, "basehook": 6, "e": [6, 7], "As": [6, 7], "airflowoptionalproviderfeatureexcept": 6, "backward": [6, 7], "cannot": 6, "origin": 6, "rais": 6, "remov": [6, 7], "ad": [6, 7, 14, 18], "assign": 6, "uniqu": [6, 7], "mean": [6, 7], "want": [6, 7], "retriev": [6, 7], "calcul": 6, "alwai": [6, 7], "ti_kei": 6, "pass": 6, "get_valu": 6, "earlier": [6, 7], "note": [6, 7], "def": [6, 7], "get_link": 6, "self": [6, 9], "baseoper": 6, "dttm": 6, "datetim": 6, "taskinstancekei": 6, "job_id": 6, "els": 6, "assert": 6, "get_on": 6, "dag_id": 6, "dag": [6, 7, 13], "task_id": 6, "execution_d": 6, "len": 6, "bigquery_job_details_link_fmt": 6, "format": 
[6, 7, 8], "achiev": 6, "instanc": [6, 14], "pokereturnvalu": 6, "object": [6, 7], "poke": 6, "method": 6, "sensorwithxcomvalu": 6, "basesensoroper": 6, "context": 6, "union": 6, "bool": 6, "is_don": 6, "true": 6, "stop": [6, 7], "xcom_valu": 6, "push": 6, "To": [6, 9, 13, 16, 17, 18], "explicitli": 6, "ti": 6, "xcom_push": 6, "xcom_kei": 6, "built": 7, "modular": 7, "schedul": 7, "basic": 7, "call": 7, "sensor": 7, "multitud": 7, "new": 7, "separ": 7, "interfac": [7, 15], "given": 7, "60": 7, "free": 7, "exactli": 7, "same": [7, 8], "written": 7, "share": [7, 8], "full": 7, "refer": 7, "solid": 7, "discov": 7, "onc": 7, "re": 7, "becom": 7, "extens": [7, 13], "public": 7, "privat": 7, "form": 7, "deliv": 7, "visibl": 7, "detail": 7, "view": 7, "By": [7, 9], "default": 7, "save": 7, "make": 7, "them": [7, 8], "where": 7, "remot": [7, 9, 14, 18], "logger": 7, "were": 7, "latest": 7, "particular": 7, "downgrad": 7, "previou": [7, 9], "problem": 7, "impact": 7, "increment": 7, "independ": [7, 14], "valid": [7, 9], "updat": 7, "follow": [7, 9], "usual": 7, "test": 7, "capac": 7, "matter": 7, "third": 7, "parti": 7, "chapter": 7, "point": 7, "multipl": 7, "semver": 7, "scheme": 7, "gener": 7, "approach": 7, "unless": 7, "good": [7, 9], "reason": 7, "recent": 7, "x": 7, "vari": 7, "per": 7, "limit": [7, 12], "constrain": 7, "includ": [7, 8, 18], "correspond": 7, "togeth": 7, "correct": [7, 9], "constraint": [7, 8], "appropri": [7, 8], "cross": 7, "well": [7, 9], "simpli": 7, "enabl": [7, 18], "often": 7, "between": 7, "differ": [7, 9], "again": 7, "kind": 7, "break": 7, "document": [7, 10, 11, 12, 13, 14, 15, 16, 17], "everi": 7, "could": 7, "back": 7, "port": 7, "last": 7, "done": 7, "march": 7, "17": 7, "2021": [7, 9], "longer": 7, "sinc": 7, "reach": 7, "Of": 7, "life": 7, "june": 7, "standard": [7, 15], "moreov": 7, "mention": 7, "just": 7, "right": [7, 8], "meta": 7, "data": [7, 10, 14, 15, 16], "entri": 7, "apache_airflow_provid": 7, "callabl": 7, "return": 7, "dictionari": 7, "discover": 7, "json": 7, "schema": 7, "displai": [7, 14], "cli": 7, "human": 7, "friendli": 7, "revers": 7, "chronolog": 7, "current": [7, 8], "taken": 7, "provider_info": 7, "replac": 7, "queri": 7, "verifi": [7, 9], "properli": 7, "recogn": 7, "whether": 7, "sub": 7, "convent": 7, "possibli": 7, "beyond": 7, "practic": 7, "time": 7, "autom": 7, "semi": 7, "verif": 7, "go": 7, "reli": 7, "manual": 7, "advis": 7, "stage": 7, "choic": 7, "either": 7, "round": 7, "probabl": 7, "safer": 7, "older": 7, "lightli": 7, "fine": 7, "am": 7, "scope": 7, "later": 7, "incompat": 7, "speak": 7, "major": 7, "modif": 7, "long": 7, "anyth": 7, "special": 7, "besid": 7, "anyon": 7, "who": 7, "what": [7, 18], "cfg": 7, "tell": 7, "get": [7, 9], "metadata": 7, "sure": 7, "pypi": [7, 9, 18], "compliant": 7, "runtim": 7, "sever": 7, "org": [7, 9], "draft": 7, "07": 7, "properti": 7, "repositori": [7, 18], "string": 7, "item": 7, "favour": 7, "perform": [7, 10, 11, 12, 13, 15, 16, 17], "deprecatedvers": 7, "map": 7, "handler": 7, "decor": 7, "taskflow": 7, "path": 7, "entry_point": 7, "get_provider_info": 7, "myproviderpackag": 7, "somemodul": 7, "sourc": [7, 10, 11, 12, 13, 15, 16, 17], "sourcehook": 7, "hood": 7, "least": 7, "three": 7, "itself": 7, "venv": 7, "pip": [7, 8, 18], "relat": [7, 11], "g": 7, "folder": 7, "normal": 7, "doe": 7, "loop": 7, "through": 7, "section": 7, "valu": 7, "statement": 7, "translat": 7, "being": 7, "get_ui_field_behaviour": 7, "get_connection_form_widget": 7, "attribut": 7, "conn_typ": 7, "hook_nam": 7, 
"quit": 7, "number": 7, "intent": 7, "pr": 7, "But": 7, "conflict": 7, "prefer": 7, "choos": [7, 9], "domain": 7, "question": 7, "glad": 7, "ask": 7, "_default": 7, "few": 7, "google_cloud_default": 7, "aws_default": 7, "succe": 7, "cours": 7, "better": 7, "mail": 7, "accept": 7, "invest": 7, "enough": 7, "fulli": 7, "peopl": 7, "posit": 7, "think": 7, "match": [7, 9], "expect": 7, "prerequisit": 7, "discuss": 7, "devlist": 7, "team": 7, "publish": [7, 8], "whatev": 7, "find": 7, "advertis": 7, "absolut": 7, "ecosystem": 7, "area": 7, "websit": 7, "non": 7, "feel": 7, "evalu": 7, "merg": 7, "charg": 7, "outsid": 7, "control": 7, "commerci": 7, "busi": 7, "around": 7, "softwar": [7, 14], "never": 7, "3rd": 7, "12": [7, 9], "stabl": 7, "becaus": 7, "don": 7, "anymor": 7, "know": [7, 9], "highest": 7, "offici": 8, "success": 8, "poetri": 8, "especi": 8, "v": 8, "wish": 8, "convert": 8, "celeri": [8, 9], "raw": 8, "githubusercont": 8, "com": [8, 9], "7": [8, 18], "how": [8, 13, 16, 17], "upgrad": 8, "beam": [9, 14], "kylin": [9, 14], "pinot": [9, 14], "datadog": [9, 14], "powershel": [9, 14, 18], "psrp": [9, 15], "window": [9, 14, 18], "winrm": [9, 14], "openfaa": [9, 14], "papermil": [9, 14], "plexu": [9, 14], "sendgrid": 9, "singular": [9, 14], "telegram": [9, 14], "drop": 9, "down": 9, "top": 9, "left": 9, "pgp": 9, "signatur": 9, "essenti": 9, "download": [9, 13], "sha": 9, "gpg": 9, "pleas": 9, "asc": 9, "relev": 9, "distribut": [9, 14, 18], "directori": 9, "mirror": 9, "pgpk": 9, "ka": 9, "binari": 9, "pgpv": 9, "tar": 9, "gz": 9, "sat": 9, "11": 9, "sep": 9, "49": 9, "54": 9, "bst": 9, "rsa": 9, "cde15c6e4d3a8ec4ecf4ba4b6674e08ad7de406f": 9, "issuer": 9, "kaxilnaik": 9, "kaxil": 9, "naik": 9, "unknown": 9, "aka": 9, "gmail": 9, "certifi": 9, "trust": 9, "indic": 9, "belong": 9, "owner": 9, "primari": 9, "fingerprint": 9, "cde1": 9, "5c6e": 9, "4d3a": 9, "8ec4": 9, "ecf4": 9, "ba4b": 9, "6674": 9, "e08a": 9, "d7de": 9, "406f": 9, "worri": 9, "certif": 9, "sign": 9, "why": 9, "sha512": 9, "sum": 9, "shasum": 9, "512": 9, "diff": 9, "variou": [10, 11, 12, 13, 15, 16, 17], "within": [10, 11, 12, 13, 15, 16], "product": [10, 11, 12, 13, 15, 16, 17], "druid_check": 10, "hive_stat": 10, "kylin_cub": 10, "spark_submit": 10, "spark_jdbc_script": 10, "copi": [10, 11, 12, 13, 15, 17], "hive_to_dynamodb": [10, 11], "hive_to_druid": 10, "vertica_to_h": [10, 17], "hive_to_mysql": [10, 17], "hive_to_samba": [10, 15], "s3_to_hiv": [10, 11], "mysql_to_h": [10, 17], "mssql_to_hiv": [10, 17], "cassandra_to_gc": [10, 13], "base_aw": 11, "cloud_form": 11, "comput": 11, "elasticache_replication_group": 11, "redshift_sql": 11, "redshift_clust": 11, "redshift_data": 11, "secrets_manag": 11, "batch_client": 11, "batch_wait": 11, "dm": 11, "glue_crawl": 11, "glue_catalog": 11, "aws_lambda": 11, "lambda_funct": 11, "step_funct": 11, "dynamodb_to_s3": 11, "gcs_to_s3": [11, 13], "glacier_to_gc": [11, 13], "google_api_to_s3": 11, "attach": [11, 15], "imap_attachment_to_s3": [11, 15], "mongo_to_s3": [11, 17], "redshift_to_s3": 11, "s3_to_redshift": 11, "s3_to_sftp": [11, 15], "sftp_to_s3": [11, 15], "s3_to_ftp": [11, 15], "exasol_to_s3": [11, 17], "ftp_to_s3": [11, 15], "salesforce_to_s3": [11, 16], "filesystem": [11, 12, 13], "local_to_s3": 11, "sql_to_s3": [11, 17], "s3_to_gc": [11, 13], "s3_to_mysql": [11, 17], "s3tosnowflakeoper": [11, 16], "s3_to_snowflak": [11, 16], "copy_into_snowflak": [11, 12, 13, 16], "base_azur": 12, "sdk": 12, "client": 12, "wasb_delete_blob": 12, "container_inst": 12, "container_volum": 
12, "container_registri": 12, "adx": 12, "data_factori": 12, "adl": 12, "data_lak": 12, "datalak": 12, "asb": 12, "azure_fileshare_to_gc": [12, 13], "adls_to_gc": [12, 13], "upload": [12, 13], "local_to_adl": 12, "oracle_to_azure_data_lak": [12, 17], "local_to_wasb": 12, "azure_blob_to_gc": [12, 13], "sftp_to_wasb": [12, 15], "base_googl": 13, "discovery_api": 13, "analyz": 13, "code": 13, "bigquery_dt": 13, "marketing_platform": 13, "campaign_manag": 13, "cloud_build": 13, "firebas": [13, 18], "life_sci": 13, "cloud_memorystor": 13, "memcach": 13, "natural_languag": 13, "os_login": 13, "pubsub": 13, "secret_manag": 13, "speech_to_text": 13, "cloud_sql": 13, "cloud_storage_transfer_servic": 13, "text_to_speech": 13, "translate_speech": 13, "video_intellig": 13, "compute_ssh": 13, "datacatalog": 13, "datafus": 13, "dataproc_metastor": 13, "gdm": 13, "kubernetes_engin": 13, "mlengin": 13, "vertex_ai": 13, "dataset": 13, "custom_job": 13, "auto_ml": 13, "batch_prediction_job": 13, "endpoint_servic": 13, "hyperparameter_tuning_job": 13, "model_servic": 13, "vertexai": 13, "presto_to_gc": [13, 17], "trino_to_gc": [13, 17], "sql_to_gc": [13, 17], "suit": [13, 17, 18], "gcs_to_gdriv": 13, "gdrive_to_gc": 13, "mssql_to_gc": [13, 17], "calendar_to_gc": 13, "sheet": [13, 17], "sheets_to_gc": 13, "gcs_to_sftp": [13, 15], "postgres_to_gc": [13, 17], "bigquery_to_mysql": [13, 17], "bigquery_to_mssql": [13, 17], "gcs_to_bigqueri": 13, "gcs_to_gc": 13, "facebook_ads_to_gc": [13, 16], "sftp_to_gc": [13, 15], "bigquery_to_bigqueri": 13, "mysql_to_gc": [13, 17], "oracle_to_gc": [13, 17], "gcs_to_sheet": 13, "local_to_gc": 13, "bigquery_to_gc": 13, "gcs_to_loc": 13, "salesforce_to_gc": [13, 16], "ads_to_gc": 13, "gcs_to_presto": [13, 17], "gcs_to_trino": [13, 17], "analyt": 13, "display_video": 13, "search_ad": 13, "cloud_compos": 13, "A": 14, "asf": 14, "foundat": 14, "hadoop": [14, 18], "webhdf": [14, 18], "dynamodb": 14, "simpl": 14, "s3": 14, "gc": 14, "appflow": 14, "athena": 14, "cloudform": 14, "cloudwatch": 14, "datasync": 14, "ec2": 14, "ec": 14, "elast": 14, "ek": 14, "elasticach": 14, "glacier": 14, "kinesi": 14, "firehos": 14, "quicksight": 14, "rd": 14, "sagemak": 14, "secretsmanag": 14, "email": 14, "se": 14, "notif": 14, "sn": 14, "queue": 14, "sq": 14, "batch": 14, "migrat": 14, "glue": 14, "lambda": 14, "token": 14, "st": 14, "cosmo": 14, "explor": 14, "factori": 14, "lake": 14, "fileshar": 14, "bu": 14, "synaps": 14, "market": [14, 18], "analytics360": 14, "video": 14, "360": 14, "search": 14, "calendar": 14, "compos": 14, "dataform": 14, "discoveri": 14, "drive": 14, "spreadsheet": 14, "shell": [14, 18], "repo": 14, "dataproc": 14, "swarm": 14, "bigqueri": 14, "jdbcoper": 15, "airbytetriggersyncoper": 16, "databricks_bas": 16, "databrickssubmitrunoper": 16, "databricksrunnowoper": 16, "databricks_repo": 16, "databricksreposcreateoper": 16, "databricksreposupdateoper": 16, "databricksreposdeleteoper": 16, "databricks_sql": 16, "databrickssqloper": 16, "databrickscopyintooper": 16, "discord_webhook": 16, "opsgeniecreatealertoper": 16, "job": 16, "qubole_check": 16, "salesforce_apex_rest": 16, "bulk": 16, "salesforceapexrestoper": 16, "salesforcebulkoper": 16, "segment_track_ev": 16, "slack_webhook": 16, "snowflakeoper": 16, "tableauoper": 16, "telegramoper": 16, "yandexcloud_dataproc": 16, "proc": 16, "sqltoslackoper": [16, 17], "sql_to_slack": [16, 17], "snowflaketoslackoper": 16, "snowflake_to_slack": 16, "asana_task": 17, "asanacreatetaskoper": 17, "docker_swarm": 17, "jenkins_job_trigg": 
17, "cncf": 17, "kubernetes_pod": 17, "spark_kubernet": 17, "kubernetespodoper": 17, "mssqloper": 17, "mysqloper": 17, "neo4joper": 17, "postgresoper": 17, "redis_publish": 17, "sqliteoper": 17, "trinooper": 17, "sql_to_sheet": 17, "vertica_to_mysql": 17, "presto_to_mysql": 17, "trino_to_mysql": 17, "oracle_to_oracl": 17, "4": 18, "6": 18, "gcp": 18, "workspac": 18, "formerli": 18, "8": 18, "9": 18}, "objects": {"": [[18, 0, 1, "std-provider-apache-airflow-providers-airbyte", "apache-airflow-providers-airbyte"], [18, 0, 1, "std-provider-apache-airflow-providers-alibaba", "apache-airflow-providers-alibaba"], [18, 0, 1, "std-provider-apache-airflow-providers-amazon", "apache-airflow-providers-amazon"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-beam", "apache-airflow-providers-apache-beam"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-cassandra", "apache-airflow-providers-apache-cassandra"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-drill", "apache-airflow-providers-apache-drill"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-druid", "apache-airflow-providers-apache-druid"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-hdfs", "apache-airflow-providers-apache-hdfs"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-hive", "apache-airflow-providers-apache-hive"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-kylin", "apache-airflow-providers-apache-kylin"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-livy", "apache-airflow-providers-apache-livy"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-pig", "apache-airflow-providers-apache-pig"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-pinot", "apache-airflow-providers-apache-pinot"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-spark", "apache-airflow-providers-apache-spark"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-sqoop", "apache-airflow-providers-apache-sqoop"], [18, 0, 1, "std-provider-apache-airflow-providers-arangodb", "apache-airflow-providers-arangodb"], [18, 0, 1, "std-provider-apache-airflow-providers-asana", "apache-airflow-providers-asana"], [18, 0, 1, "std-provider-apache-airflow-providers-atlassian-jira", "apache-airflow-providers-atlassian-jira"], [18, 0, 1, "std-provider-apache-airflow-providers-celery", "apache-airflow-providers-celery"], [18, 0, 1, "std-provider-apache-airflow-providers-cloudant", "apache-airflow-providers-cloudant"], [18, 0, 1, "std-provider-apache-airflow-providers-cncf-kubernetes", "apache-airflow-providers-cncf-kubernetes"], [18, 0, 1, "std-provider-apache-airflow-providers-common-sql", "apache-airflow-providers-common-sql"], [18, 0, 1, "std-provider-apache-airflow-providers-databricks", "apache-airflow-providers-databricks"], [18, 0, 1, "std-provider-apache-airflow-providers-datadog", "apache-airflow-providers-datadog"], [18, 0, 1, "std-provider-apache-airflow-providers-dbt-cloud", "apache-airflow-providers-dbt-cloud"], [18, 0, 1, "std-provider-apache-airflow-providers-dingding", "apache-airflow-providers-dingding"], [18, 0, 1, "std-provider-apache-airflow-providers-discord", "apache-airflow-providers-discord"], [18, 0, 1, "std-provider-apache-airflow-providers-docker", "apache-airflow-providers-docker"], [18, 0, 1, "std-provider-apache-airflow-providers-elasticsearch", "apache-airflow-providers-elasticsearch"], [18, 0, 1, "std-provider-apache-airflow-providers-exasol", "apache-airflow-providers-exasol"], [18, 0, 1, 
"std-provider-apache-airflow-providers-facebook", "apache-airflow-providers-facebook"], [18, 0, 1, "std-provider-apache-airflow-providers-ftp", "apache-airflow-providers-ftp"], [18, 0, 1, "std-provider-apache-airflow-providers-github", "apache-airflow-providers-github"], [18, 0, 1, "std-provider-apache-airflow-providers-google", "apache-airflow-providers-google"], [18, 0, 1, "std-provider-apache-airflow-providers-grpc", "apache-airflow-providers-grpc"], [18, 0, 1, "std-provider-apache-airflow-providers-hashicorp", "apache-airflow-providers-hashicorp"], [18, 0, 1, "std-provider-apache-airflow-providers-http", "apache-airflow-providers-http"], [18, 0, 1, "std-provider-apache-airflow-providers-imap", "apache-airflow-providers-imap"], [18, 0, 1, "std-provider-apache-airflow-providers-influxdb", "apache-airflow-providers-influxdb"], [18, 0, 1, "std-provider-apache-airflow-providers-jdbc", "apache-airflow-providers-jdbc"], [18, 0, 1, "std-provider-apache-airflow-providers-jenkins", "apache-airflow-providers-jenkins"], [18, 0, 1, "std-provider-apache-airflow-providers-microsoft-azure", "apache-airflow-providers-microsoft-azure"], [18, 0, 1, "std-provider-apache-airflow-providers-microsoft-mssql", "apache-airflow-providers-microsoft-mssql"], [18, 0, 1, "std-provider-apache-airflow-providers-microsoft-psrp", "apache-airflow-providers-microsoft-psrp"], [18, 0, 1, "std-provider-apache-airflow-providers-microsoft-winrm", "apache-airflow-providers-microsoft-winrm"], [18, 0, 1, "std-provider-apache-airflow-providers-mongo", "apache-airflow-providers-mongo"], [18, 0, 1, "std-provider-apache-airflow-providers-mysql", "apache-airflow-providers-mysql"], [18, 0, 1, "std-provider-apache-airflow-providers-neo4j", "apache-airflow-providers-neo4j"], [18, 0, 1, "std-provider-apache-airflow-providers-odbc", "apache-airflow-providers-odbc"], [18, 0, 1, "std-provider-apache-airflow-providers-openfaas", "apache-airflow-providers-openfaas"], [18, 0, 1, "std-provider-apache-airflow-providers-opsgenie", "apache-airflow-providers-opsgenie"], [18, 0, 1, "std-provider-apache-airflow-providers-oracle", "apache-airflow-providers-oracle"], [18, 0, 1, "std-provider-apache-airflow-providers-pagerduty", "apache-airflow-providers-pagerduty"], [18, 0, 1, "std-provider-apache-airflow-providers-papermill", "apache-airflow-providers-papermill"], [18, 0, 1, "std-provider-apache-airflow-providers-plexus", "apache-airflow-providers-plexus"], [18, 0, 1, "std-provider-apache-airflow-providers-postgres", "apache-airflow-providers-postgres"], [18, 0, 1, "std-provider-apache-airflow-providers-presto", "apache-airflow-providers-presto"], [18, 0, 1, "std-provider-apache-airflow-providers-qubole", "apache-airflow-providers-qubole"], [18, 0, 1, "std-provider-apache-airflow-providers-redis", "apache-airflow-providers-redis"], [18, 0, 1, "std-provider-apache-airflow-providers-salesforce", "apache-airflow-providers-salesforce"], [18, 0, 1, "std-provider-apache-airflow-providers-samba", "apache-airflow-providers-samba"], [18, 0, 1, "std-provider-apache-airflow-providers-segment", "apache-airflow-providers-segment"], [18, 0, 1, "std-provider-apache-airflow-providers-sendgrid", "apache-airflow-providers-sendgrid"], [18, 0, 1, "std-provider-apache-airflow-providers-sftp", "apache-airflow-providers-sftp"], [18, 0, 1, "std-provider-apache-airflow-providers-singularity", "apache-airflow-providers-singularity"], [18, 0, 1, "std-provider-apache-airflow-providers-slack", "apache-airflow-providers-slack"], [18, 0, 1, 
"std-provider-apache-airflow-providers-snowflake", "apache-airflow-providers-snowflake"], [18, 0, 1, "std-provider-apache-airflow-providers-sqlite", "apache-airflow-providers-sqlite"], [18, 0, 1, "std-provider-apache-airflow-providers-ssh", "apache-airflow-providers-ssh"], [18, 0, 1, "std-provider-apache-airflow-providers-tableau", "apache-airflow-providers-tableau"], [18, 0, 1, "std-provider-apache-airflow-providers-tabular", "apache-airflow-providers-tabular"], [18, 0, 1, "std-provider-apache-airflow-providers-telegram", "apache-airflow-providers-telegram"], [18, 0, 1, "std-provider-apache-airflow-providers-trino", "apache-airflow-providers-trino"], [18, 0, 1, "std-provider-apache-airflow-providers-vertica", "apache-airflow-providers-vertica"], [18, 0, 1, "std-provider-apache-airflow-providers-yandex", "apache-airflow-providers-yandex"], [18, 0, 1, "std-provider-apache-airflow-providers-zendesk", "apache-airflow-providers-zendesk"]]}, "objtypes": {"0": "std:provider"}, "objnames": {"0": ["std", "provider", "provider"]}, "titleterms": {"auth": [0, 7], "backend": [0, 5, 7], "googl": [0, 1, 2, 4, 5, 10, 11, 12, 13, 15, 16, 17, 18], "connect": [1, 7, 15], "airbyt": [1, 16, 18], "alibaba": [1, 4, 18], "amazon": [1, 2, 4, 5, 10, 11, 13, 15, 16, 17, 18], "apach": [1, 10, 11, 13, 15, 17, 18], "cassandra": [1, 10, 13, 18], "drill": [1, 10, 18], "druid": [1, 10, 18], "hdf": [1, 10, 18], "hive": [1, 10, 11, 15, 17, 18], "livi": [1, 10, 18], "pig": [1, 10, 18], "spark": [1, 10, 18], "sqoop": [1, 10, 18], "arangodb": [1, 17, 18], "asana": [1, 17, 18], "atlassian": [1, 17, 18], "jira": [1, 17, 18], "ibm": [1, 16], "cloudant": [1, 16, 18], "kubernet": [1, 11, 13, 17, 18], "databrick": [1, 2, 16, 18], "dbt": [1, 2, 18], "cloud": [1, 2, 10, 11, 12, 13, 15, 16, 17, 18], "dingd": [1, 16, 18], "discord": [1, 16, 18], "docker": [1, 17, 18], "elasticsearch": [1, 4, 17, 18], "exasol": [1, 11, 17, 18], "facebook": [1, 13, 16, 18], "file": [1, 10, 11, 12, 13, 15], "transfer": [1, 10, 11, 12, 13, 15, 16, 17], "protocol": [1, 11, 12, 13, 15], "ftp": [1, 11, 15, 18], "github": [1, 17, 18], "grpc": [1, 15, 18], "hashicorp": [1, 5, 17, 18], "hypertext": [1, 15], "http": [1, 15, 18], "internet": [1, 11, 15], "messag": [1, 11, 15], "access": [1, 11, 15], "imap": [1, 11, 15, 18], "influxdb": [1, 17, 18], "java": [1, 15], "databas": [1, 11, 15], "jdbc": [1, 15, 18], "jenkin": [1, 17, 18], "microsoft": [1, 2, 4, 5, 10, 12, 13, 15, 16, 17, 18], "azur": [1, 2, 4, 5, 12, 13, 15, 16, 17, 18], "sql": [1, 10, 11, 13, 16, 17, 18], "server": [1, 10, 13, 17], "mssql": [1, 10, 13, 17, 18], "mongodb": [1, 11, 17], "mysql": [1, 10, 11, 13, 17, 18], "neo4j": [1, 17, 18], "odbc": [1, 15, 18], "opsgeni": [1, 16, 18], "oracl": [1, 12, 13, 17, 18], "pagerduti": [1, 16, 18], "postgresql": [1, 13, 17], "presto": [1, 13, 17, 18], "qubol": [1, 2, 16, 18], "redi": [1, 17, 18], "salesforc": [1, 11, 13, 16, 18], "samba": [1, 10, 15, 18], "segment": [1, 16, 18], "sftp": [1, 11, 12, 13, 15, 18], "slack": [1, 16, 17, 18], "snowflak": [1, 11, 12, 13, 16, 18], "sqlite": [1, 17, 18], "ssh": [1, 11, 12, 13, 15, 18], "tableau": [1, 16, 18], "tabular": [1, 17, 18], "trino": [1, 13, 17, 18], "vertica": [1, 10, 17, 18], "yandex": [1, 16, 18], "zendesk": [1, 17, 18], "extra": [2, 7], "link": [2, 7], "core": [3, 7], "extens": 3, "write": 4, "log": [4, 7, 11], "secret": [5, 7, 13], "commun": [6, 7], "provid": [6, 7, 9, 18], "how": [6, 7], "creat": [6, 7], "new": 6, "initi": 6, "code": 6, "unit": 6, "test": 6, "integr": [6, 9, 15, 17], "document": 6, "option": 
6, "featur": 6, "us": [6, 7], "dynam": 6, "task": [6, 13], "map": 6, "have": 6, "sensor": 6, "return": 6, "xcom": 6, "valu": 6, "updat": 6, "packag": [7, 9, 18], "extend": 7, "airflow": [7, 18], "function": [7, 11, 13], "custom": 7, "instal": [7, 8, 9], "upgrad": 7, "type": 7, "maintain": 7, "your": 7, "own": 7, "faq": 7, "2": 7, "0": 7, "backport": 7, "1": 7, "10": 7, "from": [8, 9], "pypi": 8, "tool": 8, "sourc": 9, "releas": 9, "asf": 10, "softwar": [10, 17], "foundat": 10, "beam": [10, 18], "kylin": [10, 18], "pinot": [10, 18], "hadoop": 10, "distribut": 10, "system": 10, "webhdf": 10, "dynamodb": [10, 11], "simpl": [10, 11, 13, 15, 16, 17], "storag": [10, 11, 12, 13, 15, 16, 17], "servic": [10, 11, 12, 13, 15, 16, 17], "s3": [10, 11, 13, 15, 16, 17], "gc": [10, 11, 12, 13, 15, 16, 17], "aw": 11, "web": 11, "appflow": 11, "athena": 11, "cloudform": 11, "cloudwatch": 11, "datasync": 11, "ec2": 11, "ec": 11, "elast": 11, "ek": 11, "elasticach": 11, "emr": 11, "glacier": [11, 13], "kinesi": 11, "data": [11, 12, 13, 17], "firehos": 11, "quicksight": 11, "rd": 11, "redshift": 11, "sagemak": 11, "secretsmanag": 11, "email": 11, "se": 11, "notif": 11, "sn": 11, "queue": 11, "sq": 11, "batch": [11, 12], "migrat": 11, "glue": 11, "lambda": 11, "secur": [11, 15], "token": 11, "st": 11, "step": 11, "local": [11, 12, 13], "common": [11, 13, 16, 17, 18], "blob": [12, 13, 15, 16], "contain": 12, "instanc": 12, "cosmo": 12, "db": 12, "explor": 12, "factori": 12, "lake": [12, 13, 17], "fileshar": [12, 13], "bu": 12, "synaps": 12, "automl": 13, "bigqueri": [13, 17], "bigtabl": 13, "campaign": 13, "manag": [13, 15], "build": 13, "loss": 13, "prevent": 13, "dlp": 13, "firestor": 13, "kei": 13, "km": 13, "life": 13, "scienc": 13, "memorystor": 13, "natur": 13, "languag": 13, "o": 13, "login": 13, "pub": 13, "sub": 13, "spanner": 13, "speech": 13, "text": 13, "stackdriv": 13, "translat": 13, "video": 13, "intellig": 13, "vision": 13, "workflow": 13, "comput": 13, "engin": 13, "catalog": 13, "fusion": 13, "dataflow": 13, "dataplex": 13, "dataprep": 13, "dataproc": [13, 16], "metastor": 13, "datastor": 13, "deploy": 13, "looker": 13, "machin": 13, "learn": 13, "vertex": 13, "ai": 13, "drive": 13, "calendar": 13, "spreadsheet": [13, 17], "ad": [13, 16], "market": 13, "platform": 13, "analytics360": 13, "displai": 13, "360": 13, "search": 13, "other": 13, "oper": [13, 14], "hook": [13, 14], "compos": 13, "dataform": 13, "discoveri": 13, "api": 13, "leveldb": 13, "refer": [14, 18], "shell": 15, "window": 15, "powershel": 15, "remot": 15, "winrm": [15, 18], "repo": 16, "datadog": [16, 18], "plexu": [16, 18], "telegram": [16, 18], "swarm": 17, "vault": 17, "openfaa": [17, 18], "papermil": [17, 18], "singular": [17, 18], "celeri": 18, "cncf": 18, "psrp": 18, "mongo": 18, "postgr": 18, "sendgrid": 18}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 8, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.viewcode": 1, "sphinx.ext.intersphinx": 1, "sphinx": 57}, "alltitles": {"Auth backends": [[0, "auth-backends"], [7, "auth-backends"]], "Google": [[0, "google"], [1, "google"], [2, "google"], [4, "google"], [5, "google"], [13, "google"], [13, "id3"]], "Connections": [[1, "connections"]], "Airbyte": [[1, "airbyte"], [16, "airbyte"]], "Alibaba": [[1, "alibaba"], [4, "alibaba"]], "Amazon": [[1, "amazon"], [2, "amazon"], [4, 
"amazon"], [5, "amazon"]], "Apache Cassandra": [[1, "apache-cassandra"], [10, "apache-cassandra"]], "Apache Drill": [[1, "apache-drill"], [10, "apache-drill"]], "Apache Druid": [[1, "apache-druid"], [10, "apache-druid"]], "Apache HDFS": [[1, "apache-hdfs"]], "Apache Hive": [[1, "apache-hive"], [10, "apache-hive"]], "Apache Livy": [[1, "apache-livy"], [10, "apache-livy"]], "Apache Pig": [[1, "apache-pig"], [10, "apache-pig"]], "Apache Spark": [[1, "apache-spark"], [10, "apache-spark"]], "Apache Sqoop": [[1, "apache-sqoop"], [10, "apache-sqoop"]], "ArangoDB": [[1, "arangodb"], [17, "arangodb"]], "Asana": [[1, "asana"], [17, "asana"]], "Atlassian Jira": [[1, "atlassian-jira"], [17, "atlassian-jira"]], "IBM Cloudant": [[1, "ibm-cloudant"], [16, "ibm-cloudant"]], "Kubernetes": [[1, "kubernetes"], [17, "kubernetes"]], "Databricks": [[1, "databricks"], [2, "databricks"], [16, "databricks"]], "dbt Cloud": [[1, "dbt-cloud"], [2, "dbt-cloud"]], "Dingding": [[1, "dingding"], [16, "dingding"]], "Discord": [[1, "discord"], [16, "discord"]], "Docker": [[1, "docker"], [17, "docker"]], "Elasticsearch": [[1, "elasticsearch"], [4, "elasticsearch"], [17, "elasticsearch"]], "Exasol": [[1, "exasol"], [17, "exasol"]], "Facebook": [[1, "facebook"]], "File Transfer Protocol (FTP)": [[1, "file-transfer-protocol-ftp"], [15, "file-transfer-protocol-ftp"]], "Github": [[1, "github"], [17, "github"]], "gRPC": [[1, "grpc"], [15, "grpc"]], "Hashicorp": [[1, "hashicorp"], [5, "hashicorp"]], "Hypertext Transfer Protocol (HTTP)": [[1, "hypertext-transfer-protocol-http"], [15, "hypertext-transfer-protocol-http"]], "Internet Message Access Protocol (IMAP)": [[1, "internet-message-access-protocol-imap"], [15, "internet-message-access-protocol-imap"]], "Influxdb": [[1, "influxdb"], [17, "influxdb"]], "Java Database Connectivity (JDBC)": [[1, "java-database-connectivity-jdbc"], [15, "java-database-connectivity-jdbc"]], "Jenkins": [[1, "jenkins"], [17, "jenkins"]], "Microsoft Azure": [[1, "microsoft-azure"], [2, "microsoft-azure"], [4, "microsoft-azure"], [5, "microsoft-azure"], [12, "microsoft-azure"]], "Microsoft SQL Server (MSSQL)": [[1, "microsoft-sql-server-mssql"], [17, "microsoft-sql-server-mssql"]], "MongoDB": [[1, "mongodb"], [17, "mongodb"]], "MySQL": [[1, "mysql"], [17, "mysql"]], "Neo4j": [[1, "neo4j"], [17, "neo4j"]], "ODBC": [[1, "odbc"], [15, "odbc"]], "Opsgenie": [[1, "opsgenie"], [16, "opsgenie"]], "Oracle": [[1, "oracle"], [17, "oracle"]], "Pagerduty": [[1, "pagerduty"], [16, "pagerduty"]], "PostgreSQL": [[1, "postgresql"], [17, "postgresql"]], "Presto": [[1, "presto"], [17, "presto"]], "Qubole": [[1, "qubole"], [2, "qubole"], [16, "qubole"]], "Redis": [[1, "redis"], [17, "redis"]], "Salesforce": [[1, "salesforce"], [16, "salesforce"]], "Samba": [[1, "samba"], [15, "samba"]], "Segment": [[1, "segment"], [16, "segment"]], "SFTP": [[1, "sftp"]], "Slack": [[1, "slack"], [16, "slack"]], "Snowflake": [[1, "snowflake"], [16, "snowflake"]], "SQLite": [[1, "sqlite"], [17, "sqlite"]], "SSH": [[1, "ssh"]], "Tableau": [[1, "tableau"], [16, "tableau"]], "Tabular": [[1, "tabular"], [17, "tabular"]], "Trino": [[1, "trino"], [17, "trino"]], "Vertica": [[1, "vertica"], [17, "vertica"]], "Yandex": [[1, "yandex"]], "Zendesk": [[1, "zendesk"], [17, "zendesk"]], "Extra Links": [[2, "extra-links"]], "Core Extensions": [[3, "core-extensions"]], "Writing logs": [[4, "writing-logs"]], "Secret backends": [[5, "secret-backends"], [7, "secret-backends"]], "Community Providers": [[6, "community-providers"]], "How-to creating a new 
community provider": [[6, "how-to-creating-a-new-community-provider"]], "Initial Code and Unit Tests": [[6, "initial-code-and-unit-tests"]], "Integration tests": [[6, "integration-tests"]], "Documentation": [[6, "documentation"]], "Optional provider features": [[6, "optional-provider-features"]], "Using Providers with dynamic task mapping": [[6, "using-providers-with-dynamic-task-mapping"]], "Having sensors return XCOM values": [[6, "having-sensors-return-xcom-values"]], "How-to Update a community provider": [[6, "how-to-update-a-community-provider"]], "Provider packages": [[7, "provider-packages"]], "Extending Airflow core functionality": [[7, "extending-airflow-core-functionality"]], "Custom connections": [[7, "custom-connections"]], "Extra links": [[7, "extra-links"]], "Logging": [[7, "logging"]], "Installing and upgrading providers": [[7, "installing-and-upgrading-providers"]], "Types of providers": [[7, "types-of-providers"]], "Community maintained providers": [[7, "community-maintained-providers"]], "Custom provider packages": [[7, "custom-provider-packages"]], "How to create your own provider": [[7, "how-to-create-your-own-provider"]], "FAQ for Airflow and Providers": [[7, "faq-for-airflow-and-providers"]], "Upgrading Airflow 2.0 and Providers": [[7, "upgrading-airflow-2-0-and-providers"]], "Customizing Provider Packages": [[7, "customizing-provider-packages"]], "Creating your own providers": [[7, "creating-your-own-providers"]], "Using Backport Providers in Airflow 1.10": [[7, "using-backport-providers-in-airflow-1-10"]], "Installation from PyPI": [[8, "installation-from-pypi"]], "Installation tools": [[8, "installation-tools"]], "Installing Providers from Sources": [[9, "installing-providers-from-sources"]], "Released packages": [[9, "released-packages"]], "Release integrity": [[9, "release-integrity"]], "ASF: Apache Software Foundation": [[10, "asf-apache-software-foundation"]], "Software": [[10, "software"], [17, "software"]], "Apache Beam": [[10, "apache-beam"]], "Apache Kylin": [[10, "apache-kylin"]], "Apache Pinot": [[10, "apache-pinot"]], "Hadoop Distributed File System (HDFS)": [[10, "hadoop-distributed-file-system-hdfs"]], "WebHDFS": [[10, "webhdfs"]], "Transfers": [[10, "transfers"], [11, "transfers"], [12, "transfers"], [13, "transfers"], [15, "transfers"], [16, "transfers"], [17, "transfers"]], "Apache Hive to Amazon DynamoDB": [[10, "apache-hive-to-amazon-dynamodb"], [11, "apache-hive-to-amazon-dynamodb"]], "Apache Hive to Apache Druid": [[10, "apache-hive-to-apache-druid"]], "Vertica to Apache Hive": [[10, "vertica-to-apache-hive"], [17, "vertica-to-apache-hive"]], "Apache Hive to MySQL": [[10, "apache-hive-to-mysql"], [17, "apache-hive-to-mysql"]], "Apache Hive to Samba": [[10, "apache-hive-to-samba"], [15, "apache-hive-to-samba"]], "Amazon Simple Storage Service (S3) to Apache Hive": [[10, "amazon-simple-storage-service-s3-to-apache-hive"], [11, "amazon-simple-storage-service-s3-to-apache-hive"]], "MySQL to Apache Hive": [[10, "mysql-to-apache-hive"], [17, "mysql-to-apache-hive"]], "Microsoft SQL Server (MSSQL) to Apache Hive": [[10, "microsoft-sql-server-mssql-to-apache-hive"], [17, "microsoft-sql-server-mssql-to-apache-hive"]], "Apache Cassandra to Google Cloud Storage (GCS)": [[10, "apache-cassandra-to-google-cloud-storage-gcs"], [13, "apache-cassandra-to-google-cloud-storage-gcs"]], "AWS: Amazon Web Services": [[11, "aws-amazon-web-services"]], "Services": [[11, "services"], [12, "services"], [13, "services"], [16, "services"], [16, "id1"]], "Amazon Appflow": 
[[11, "amazon-appflow"]], "Amazon Athena": [[11, "amazon-athena"]], "Amazon CloudFormation": [[11, "amazon-cloudformation"]], "Amazon CloudWatch Logs": [[11, "amazon-cloudwatch-logs"]], "Amazon DataSync": [[11, "amazon-datasync"]], "Amazon DynamoDB": [[11, "amazon-dynamodb"]], "Amazon EC2": [[11, "amazon-ec2"]], "Amazon ECS": [[11, "amazon-ecs"]], "Amazon Elastic Kubernetes Service (EKS)": [[11, "amazon-elastic-kubernetes-service-eks"]], "Amazon ElastiCache": [[11, "amazon-elasticache"]], "Amazon EMR": [[11, "amazon-emr"]], "Amazon EMR on EKS": [[11, "amazon-emr-on-eks"]], "Amazon Glacier": [[11, "amazon-glacier"]], "Amazon Kinesis Data Firehose": [[11, "amazon-kinesis-data-firehose"]], "Amazon QuickSight": [[11, "amazon-quicksight"]], "Amazon RDS": [[11, "amazon-rds"]], "Amazon Redshift": [[11, "amazon-redshift"]], "Amazon SageMaker": [[11, "amazon-sagemaker"]], "Amazon SecretsManager": [[11, "amazon-secretsmanager"]], "Amazon Simple Email Service (SES)": [[11, "amazon-simple-email-service-ses"]], "Amazon Simple Notification Service (SNS)": [[11, "amazon-simple-notification-service-sns"]], "Amazon Simple Queue Service (SQS)": [[11, "amazon-simple-queue-service-sqs"]], "Amazon Simple Storage Service (S3)": [[11, "amazon-simple-storage-service-s3"]], "Amazon Web Services": [[11, "amazon-web-services"]], "AWS Batch": [[11, "aws-batch"]], "AWS Database Migration Service": [[11, "aws-database-migration-service"]], "AWS Glue": [[11, "aws-glue"]], "AWS Lambda": [[11, "aws-lambda"]], "AWS Security Token Service (STS)": [[11, "aws-security-token-service-sts"]], "AWS Step Functions": [[11, "aws-step-functions"]], "Amazon DynamoDB to Amazon Simple Storage Service (S3)": [[11, "amazon-dynamodb-to-amazon-simple-storage-service-s3"]], "Google Cloud Storage (GCS) to Amazon Simple Storage Service (S3)": [[11, "google-cloud-storage-gcs-to-amazon-simple-storage-service-s3"], [13, "google-cloud-storage-gcs-to-amazon-simple-storage-service-s3"]], "Amazon Glacier to Google Cloud Storage (GCS)": [[11, "amazon-glacier-to-google-cloud-storage-gcs"], [13, "amazon-glacier-to-google-cloud-storage-gcs"]], "Google to Amazon Simple Storage Service (S3)": [[11, "google-to-amazon-simple-storage-service-s3"]], "Internet Message Access Protocol (IMAP) to Amazon Simple Storage Service (S3)": [[11, "internet-message-access-protocol-imap-to-amazon-simple-storage-service-s3"], [15, "internet-message-access-protocol-imap-to-amazon-simple-storage-service-s3"]], "MongoDB to Amazon Simple Storage Service (S3)": [[11, "mongodb-to-amazon-simple-storage-service-s3"], [17, "mongodb-to-amazon-simple-storage-service-s3"]], "Amazon Redshift to Amazon Simple Storage Service (S3)": [[11, "amazon-redshift-to-amazon-simple-storage-service-s3"]], "Amazon Simple Storage Service (S3) to Amazon Redshift": [[11, "amazon-simple-storage-service-s3-to-amazon-redshift"]], "Amazon Simple Storage Service (S3) to SSH File Transfer Protocol (SFTP)": [[11, "amazon-simple-storage-service-s3-to-ssh-file-transfer-protocol-sftp"], [15, "amazon-simple-storage-service-s3-to-ssh-file-transfer-protocol-sftp"]], "SSH File Transfer Protocol (SFTP) to Amazon Simple Storage Service (S3)": [[11, "ssh-file-transfer-protocol-sftp-to-amazon-simple-storage-service-s3"], [15, "ssh-file-transfer-protocol-sftp-to-amazon-simple-storage-service-s3"]], "Amazon Simple Storage Service (S3) to File Transfer Protocol (FTP)": [[11, "amazon-simple-storage-service-s3-to-file-transfer-protocol-ftp"], [15, "amazon-simple-storage-service-s3-to-file-transfer-protocol-ftp"]], "Exasol to 
Amazon Simple Storage Service (S3)": [[11, "exasol-to-amazon-simple-storage-service-s3"], [17, "exasol-to-amazon-simple-storage-service-s3"]], "File Transfer Protocol (FTP) to Amazon Simple Storage Service (S3)": [[11, "file-transfer-protocol-ftp-to-amazon-simple-storage-service-s3"], [15, "file-transfer-protocol-ftp-to-amazon-simple-storage-service-s3"]], "Salesforce to Amazon Simple Storage Service (S3)": [[11, "salesforce-to-amazon-simple-storage-service-s3"], [16, "salesforce-to-amazon-simple-storage-service-s3"]], "Local to Amazon Simple Storage Service (S3)": [[11, "local-to-amazon-simple-storage-service-s3"]], "Common SQL to Amazon Simple Storage Service (S3)": [[11, "common-sql-to-amazon-simple-storage-service-s3"], [17, "common-sql-to-amazon-simple-storage-service-s3"]], "Amazon Simple Storage Service (S3) to Google Cloud Storage (GCS)": [[11, "amazon-simple-storage-service-s3-to-google-cloud-storage-gcs"], [13, "amazon-simple-storage-service-s3-to-google-cloud-storage-gcs"]], "Amazon Simple Storage Service (S3) to MySQL": [[11, "amazon-simple-storage-service-s3-to-mysql"], [17, "amazon-simple-storage-service-s3-to-mysql"]], "Amazon Simple Storage Service (S3) to Snowflake": [[11, "amazon-simple-storage-service-s3-to-snowflake"], [11, "id1"], [16, "amazon-simple-storage-service-s3-to-snowflake"], [16, "id2"]], "Azure: Microsoft Azure": [[12, "azure-microsoft-azure"]], "Microsoft Azure Batch": [[12, "microsoft-azure-batch"]], "Microsoft Azure Blob Storage": [[12, "microsoft-azure-blob-storage"]], "Microsoft Azure Container Instances": [[12, "microsoft-azure-container-instances"]], "Microsoft Azure Cosmos DB": [[12, "microsoft-azure-cosmos-db"]], "Microsoft Azure Data Explorer": [[12, "microsoft-azure-data-explorer"]], "Microsoft Azure Data Factory": [[12, "microsoft-azure-data-factory"]], "Microsoft Azure Data Lake Storage": [[12, "microsoft-azure-data-lake-storage"]], "Microsoft Azure FileShare": [[12, "microsoft-azure-fileshare"]], "Microsoft Azure Service Bus": [[12, "microsoft-azure-service-bus"]], "Microsoft Azure Synapse": [[12, "microsoft-azure-synapse"]], "Microsoft Azure FileShare to Google Cloud Storage (GCS)": [[12, "microsoft-azure-fileshare-to-google-cloud-storage-gcs"], [13, "microsoft-azure-fileshare-to-google-cloud-storage-gcs"]], "Microsoft Azure Data Lake Storage to Google Cloud Storage (GCS)": [[12, "microsoft-azure-data-lake-storage-to-google-cloud-storage-gcs"], [13, "microsoft-azure-data-lake-storage-to-google-cloud-storage-gcs"]], "Local to Microsoft Azure Data Lake Storage": [[12, "local-to-microsoft-azure-data-lake-storage"]], "Oracle to Microsoft Azure Data Lake Storage": [[12, "oracle-to-microsoft-azure-data-lake-storage"], [17, "oracle-to-microsoft-azure-data-lake-storage"]], "Local to Microsoft Azure Blob Storage": [[12, "local-to-microsoft-azure-blob-storage"]], "Microsoft Azure Blob Storage to Google Cloud Storage (GCS)": [[12, "microsoft-azure-blob-storage-to-google-cloud-storage-gcs"], [13, "microsoft-azure-blob-storage-to-google-cloud-storage-gcs"]], "SSH File Transfer Protocol (SFTP) to Microsoft Azure Blob Storage": [[12, "ssh-file-transfer-protocol-sftp-to-microsoft-azure-blob-storage"], [15, "ssh-file-transfer-protocol-sftp-to-microsoft-azure-blob-storage"]], "Microsoft Azure Blob Storage to Snowflake": [[12, "microsoft-azure-blob-storage-to-snowflake"], [16, "microsoft-azure-blob-storage-to-snowflake"]], "Google Cloud": [[13, "google-cloud"]], "Google AutoML": [[13, "google-automl"]], "Google BigQuery": [[13, "google-bigquery"]], "Google 
BigQuery Data Transfer Service": [[13, "google-bigquery-data-transfer-service"]], "Google Bigtable": [[13, "google-bigtable"]], "Google Campaign Manager": [[13, "google-campaign-manager"]], "Google Cloud Build": [[13, "google-cloud-build"]], "Google Cloud Data Loss Prevention (DLP)": [[13, "google-cloud-data-loss-prevention-dlp"]], "Google Cloud Firestore": [[13, "google-cloud-firestore"]], "Google Cloud Functions": [[13, "google-cloud-functions"]], "Google Cloud Key Management Service (KMS)": [[13, "google-cloud-key-management-service-kms"]], "Google Cloud Life Sciences": [[13, "google-cloud-life-sciences"]], "Google Cloud Memorystore": [[13, "google-cloud-memorystore"]], "Google Cloud Natural Language": [[13, "google-cloud-natural-language"]], "Google Cloud OS Login": [[13, "google-cloud-os-login"]], "Google Cloud Pub/Sub": [[13, "google-cloud-pub-sub"]], "Google Cloud Secret Manager": [[13, "google-cloud-secret-manager"]], "Google Cloud Spanner": [[13, "google-cloud-spanner"]], "Google Cloud Speech-to-Text": [[13, "google-cloud-speech-to-text"]], "Google Cloud SQL": [[13, "google-cloud-sql"]], "Google Cloud Stackdriver": [[13, "google-cloud-stackdriver"]], "Google Cloud Storage (GCS)": [[13, "google-cloud-storage-gcs"]], "Google Cloud Storage Transfer Service": [[13, "google-cloud-storage-transfer-service"]], "Google Cloud Tasks": [[13, "google-cloud-tasks"]], "Google Cloud Text-to-Speech": [[13, "google-cloud-text-to-speech"]], "Google Cloud Translation": [[13, "google-cloud-translation"]], "Google Cloud Video Intelligence": [[13, "google-cloud-video-intelligence"]], "Google Cloud Vision": [[13, "google-cloud-vision"]], "Google Cloud Workflows": [[13, "google-cloud-workflows"]], "Google Compute Engine": [[13, "google-compute-engine"]], "Google Data Catalog": [[13, "google-data-catalog"]], "Google Data Fusion": [[13, "google-data-fusion"]], "Google Dataflow": [[13, "google-dataflow"]], "Google Dataplex": [[13, "google-dataplex"]], "Google Dataprep": [[13, "google-dataprep"]], "Google Dataproc": [[13, "google-dataproc"]], "Google Dataproc Metastore": [[13, "google-dataproc-metastore"]], "Google Datastore": [[13, "google-datastore"]], "Google Deployment Manager": [[13, "google-deployment-manager"]], "Google Kubernetes Engine": [[13, "google-kubernetes-engine"]], "Google Looker": [[13, "google-looker"]], "Google Machine Learning Engine": [[13, "google-machine-learning-engine"]], "Google Vertex AI": [[13, "google-vertex-ai"]], "Presto to Google Cloud Storage (GCS)": [[13, "presto-to-google-cloud-storage-gcs"], [17, "presto-to-google-cloud-storage-gcs"]], "Trino to Google Cloud Storage (GCS)": [[13, "trino-to-google-cloud-storage-gcs"], [17, "trino-to-google-cloud-storage-gcs"]], "Common SQL to Google Cloud Storage (GCS)": [[13, "common-sql-to-google-cloud-storage-gcs"], [17, "common-sql-to-google-cloud-storage-gcs"]], "Google Cloud Storage (GCS) to Google Drive": [[13, "google-cloud-storage-gcs-to-google-drive"]], "Google Drive to Google Cloud Storage (GCS)": [[13, "google-drive-to-google-cloud-storage-gcs"]], "Microsoft SQL Server (MSSQL) to Google Cloud Storage (GCS)": [[13, "microsoft-sql-server-mssql-to-google-cloud-storage-gcs"], [13, "id2"], [17, "microsoft-sql-server-mssql-to-google-cloud-storage-gcs"], [17, "id1"]], "Google Calendar to Google Cloud Storage (GCS)": [[13, "google-calendar-to-google-cloud-storage-gcs"]], "Google Spreadsheet to Google Cloud Storage (GCS)": [[13, "google-spreadsheet-to-google-cloud-storage-gcs"]], "Google Cloud Storage (GCS) to SSH File Transfer Protocol 
(SFTP)": [[13, "google-cloud-storage-gcs-to-ssh-file-transfer-protocol-sftp"], [15, "google-cloud-storage-gcs-to-ssh-file-transfer-protocol-sftp"]], "PostgreSQL to Google Cloud Storage (GCS)": [[13, "postgresql-to-google-cloud-storage-gcs"], [17, "postgresql-to-google-cloud-storage-gcs"]], "Google BigQuery to MySQL": [[13, "google-bigquery-to-mysql"], [17, "google-bigquery-to-mysql"]], "Google BigQuery to Microsoft SQL Server (MSSQL)": [[13, "google-bigquery-to-microsoft-sql-server-mssql"], [17, "google-bigquery-to-microsoft-sql-server-mssql"]], "Google Cloud Storage (GCS) to Google BigQuery": [[13, "google-cloud-storage-gcs-to-google-bigquery"]], "Google Cloud Storage (GCS) to Google Cloud Storage (GCS)": [[13, "google-cloud-storage-gcs-to-google-cloud-storage-gcs"]], "Facebook Ads to Google Cloud Storage (GCS)": [[13, "facebook-ads-to-google-cloud-storage-gcs"], [16, "facebook-ads-to-google-cloud-storage-gcs"]], "SSH File Transfer Protocol (SFTP) to Google Cloud Storage (GCS)": [[13, "ssh-file-transfer-protocol-sftp-to-google-cloud-storage-gcs"], [15, "ssh-file-transfer-protocol-sftp-to-google-cloud-storage-gcs"]], "Google BigQuery to Google BigQuery": [[13, "google-bigquery-to-google-bigquery"]], "MySQL to Google Cloud Storage (GCS)": [[13, "mysql-to-google-cloud-storage-gcs"], [17, "mysql-to-google-cloud-storage-gcs"]], "Oracle to Google Cloud Storage (GCS)": [[13, "oracle-to-google-cloud-storage-gcs"], [17, "oracle-to-google-cloud-storage-gcs"]], "Google Cloud Storage (GCS) to Google Spreadsheet": [[13, "google-cloud-storage-gcs-to-google-spreadsheet"]], "Local to Google Cloud Storage (GCS)": [[13, "local-to-google-cloud-storage-gcs"]], "Google BigQuery to Google Cloud Storage (GCS)": [[13, "google-bigquery-to-google-cloud-storage-gcs"]], "Google Cloud Storage (GCS) to Local": [[13, "google-cloud-storage-gcs-to-local"]], "Salesforce to Google Cloud Storage (GCS)": [[13, "salesforce-to-google-cloud-storage-gcs"], [16, "salesforce-to-google-cloud-storage-gcs"]], "Google Ads to Google Cloud Storage (GCS)": [[13, "google-ads-to-google-cloud-storage-gcs"]], "Google Cloud Storage (GCS) to Presto": [[13, "google-cloud-storage-gcs-to-presto"], [17, "google-cloud-storage-gcs-to-presto"]], "Google Cloud Storage (GCS) to Snowflake": [[13, "google-cloud-storage-gcs-to-snowflake"], [16, "google-cloud-storage-gcs-to-snowflake"]], "Google Cloud Storage (GCS) to Trino": [[13, "google-cloud-storage-gcs-to-trino"], [17, "google-cloud-storage-gcs-to-trino"]], "Google Marketing Platform": [[13, "google-marketing-platform"]], "Google Ads": [[13, "google-ads"]], "Google Analytics360": [[13, "google-analytics360"]], "Google Display&Video 360": [[13, "google-display-video-360"]], "Google Search Ads 360": [[13, "google-search-ads-360"]], "Other Google operators and hooks": [[13, "other-google-operators-and-hooks"]], "Google Calendar": [[13, "google-calendar"]], "Google Cloud Composer": [[13, "google-cloud-composer"]], "Google Cloud Dataform": [[13, "google-cloud-dataform"]], "Google Discovery API": [[13, "google-discovery-api"]], "Google Drive": [[13, "google-drive"]], "Google LevelDB": [[13, "google-leveldb"]], "Google Spreadsheet": [[13, "google-spreadsheet"]], "Operators and Hooks Reference": [[14, "operators-and-hooks-reference"]], "Protocol integrations": [[15, "protocol-integrations"]], "Protocols": [[15, "protocols"]], "Secure Shell (SSH)": [[15, "secure-shell-ssh"]], "SSH File Transfer Protocol (SFTP)": [[15, "ssh-file-transfer-protocol-sftp"]], "Windows PowerShell Remoting Protocol": [[15, 
"windows-powershell-remoting-protocol"]], "Windows Remote Management (WinRM)": [[15, "windows-remote-management-winrm"]], "Databricks Repos": [[16, "databricks-repos"]], "Databricks SQL": [[16, "databricks-sql"]], "Datadog": [[16, "datadog"]], "Facebook Ads": [[16, "facebook-ads"]], "Plexus": [[16, "plexus"]], "Telegram": [[16, "telegram"]], "Yandex.Cloud": [[16, "yandex-cloud"]], "Yandex.Cloud Dataproc": [[16, "yandex-cloud-dataproc"]], "Common SQL to Slack": [[16, "common-sql-to-slack"], [17, "common-sql-to-slack"]], "Snowflake to Slack": [[16, "snowflake-to-slack"]], "Software integrations": [[17, "software-integrations"]], "Common SQL": [[17, "common-sql"]], "Docker Swarm": [[17, "docker-swarm"]], "Hashicorp Vault": [[17, "hashicorp-vault"]], "OpenFaaS": [[17, "openfaas"]], "Papermill": [[17, "papermill"]], "Singularity": [[17, "singularity"]], "Common SQL to Google Spreadsheet": [[17, "common-sql-to-google-spreadsheet"]], "Vertica to MySQL": [[17, "vertica-to-mysql"]], "Presto to MySQL": [[17, "presto-to-mysql"]], "Trino to MySQL": [[17, "trino-to-mysql"]], "Oracle to Oracle": [[17, "oracle-to-oracle"]], "Providers packages reference": [[18, "providers-packages-reference"]], "apache-airflow-providers-airbyte": [[18, "apache-airflow-providers-airbyte"]], "apache-airflow-providers-alibaba": [[18, "apache-airflow-providers-alibaba"]], "apache-airflow-providers-amazon": [[18, "apache-airflow-providers-amazon"]], "apache-airflow-providers-apache-beam": [[18, "apache-airflow-providers-apache-beam"]], "apache-airflow-providers-apache-cassandra": [[18, "apache-airflow-providers-apache-cassandra"]], "apache-airflow-providers-apache-drill": [[18, "apache-airflow-providers-apache-drill"]], "apache-airflow-providers-apache-druid": [[18, "apache-airflow-providers-apache-druid"]], "apache-airflow-providers-apache-hdfs": [[18, "apache-airflow-providers-apache-hdfs"]], "apache-airflow-providers-apache-hive": [[18, "apache-airflow-providers-apache-hive"]], "apache-airflow-providers-apache-kylin": [[18, "apache-airflow-providers-apache-kylin"]], "apache-airflow-providers-apache-livy": [[18, "apache-airflow-providers-apache-livy"]], "apache-airflow-providers-apache-pig": [[18, "apache-airflow-providers-apache-pig"]], "apache-airflow-providers-apache-pinot": [[18, "apache-airflow-providers-apache-pinot"]], "apache-airflow-providers-apache-spark": [[18, "apache-airflow-providers-apache-spark"]], "apache-airflow-providers-apache-sqoop": [[18, "apache-airflow-providers-apache-sqoop"]], "apache-airflow-providers-arangodb": [[18, "apache-airflow-providers-arangodb"]], "apache-airflow-providers-asana": [[18, "apache-airflow-providers-asana"]], "apache-airflow-providers-atlassian-jira": [[18, "apache-airflow-providers-atlassian-jira"]], "apache-airflow-providers-celery": [[18, "apache-airflow-providers-celery"]], "apache-airflow-providers-cloudant": [[18, "apache-airflow-providers-cloudant"]], "apache-airflow-providers-cncf-kubernetes": [[18, "apache-airflow-providers-cncf-kubernetes"]], "apache-airflow-providers-common-sql": [[18, "apache-airflow-providers-common-sql"]], "apache-airflow-providers-databricks": [[18, "apache-airflow-providers-databricks"]], "apache-airflow-providers-datadog": [[18, "apache-airflow-providers-datadog"]], "apache-airflow-providers-dbt-cloud": [[18, "apache-airflow-providers-dbt-cloud"]], "apache-airflow-providers-dingding": [[18, "apache-airflow-providers-dingding"]], "apache-airflow-providers-discord": [[18, "apache-airflow-providers-discord"]], "apache-airflow-providers-docker": 
[[18, "apache-airflow-providers-docker"]], "apache-airflow-providers-elasticsearch": [[18, "apache-airflow-providers-elasticsearch"]], "apache-airflow-providers-exasol": [[18, "apache-airflow-providers-exasol"]], "apache-airflow-providers-facebook": [[18, "apache-airflow-providers-facebook"]], "apache-airflow-providers-ftp": [[18, "apache-airflow-providers-ftp"]], "apache-airflow-providers-github": [[18, "apache-airflow-providers-github"]], "apache-airflow-providers-google": [[18, "apache-airflow-providers-google"]], "apache-airflow-providers-grpc": [[18, "apache-airflow-providers-grpc"]], "apache-airflow-providers-hashicorp": [[18, "apache-airflow-providers-hashicorp"]], "apache-airflow-providers-http": [[18, "apache-airflow-providers-http"]], "apache-airflow-providers-imap": [[18, "apache-airflow-providers-imap"]], "apache-airflow-providers-influxdb": [[18, "apache-airflow-providers-influxdb"]], "apache-airflow-providers-jdbc": [[18, "apache-airflow-providers-jdbc"]], "apache-airflow-providers-jenkins": [[18, "apache-airflow-providers-jenkins"]], "apache-airflow-providers-microsoft-azure": [[18, "apache-airflow-providers-microsoft-azure"]], "apache-airflow-providers-microsoft-mssql": [[18, "apache-airflow-providers-microsoft-mssql"]], "apache-airflow-providers-microsoft-psrp": [[18, "apache-airflow-providers-microsoft-psrp"]], "apache-airflow-providers-microsoft-winrm": [[18, "apache-airflow-providers-microsoft-winrm"]], "apache-airflow-providers-mongo": [[18, "apache-airflow-providers-mongo"]], "apache-airflow-providers-mysql": [[18, "apache-airflow-providers-mysql"]], "apache-airflow-providers-neo4j": [[18, "apache-airflow-providers-neo4j"]], "apache-airflow-providers-odbc": [[18, "apache-airflow-providers-odbc"]], "apache-airflow-providers-openfaas": [[18, "apache-airflow-providers-openfaas"]], "apache-airflow-providers-opsgenie": [[18, "apache-airflow-providers-opsgenie"]], "apache-airflow-providers-oracle": [[18, "apache-airflow-providers-oracle"]], "apache-airflow-providers-pagerduty": [[18, "apache-airflow-providers-pagerduty"]], "apache-airflow-providers-papermill": [[18, "apache-airflow-providers-papermill"]], "apache-airflow-providers-plexus": [[18, "apache-airflow-providers-plexus"]], "apache-airflow-providers-postgres": [[18, "apache-airflow-providers-postgres"]], "apache-airflow-providers-presto": [[18, "apache-airflow-providers-presto"]], "apache-airflow-providers-qubole": [[18, "apache-airflow-providers-qubole"]], "apache-airflow-providers-redis": [[18, "apache-airflow-providers-redis"]], "apache-airflow-providers-salesforce": [[18, "apache-airflow-providers-salesforce"]], "apache-airflow-providers-samba": [[18, "apache-airflow-providers-samba"]], "apache-airflow-providers-segment": [[18, "apache-airflow-providers-segment"]], "apache-airflow-providers-sendgrid": [[18, "apache-airflow-providers-sendgrid"]], "apache-airflow-providers-sftp": [[18, "apache-airflow-providers-sftp"]], "apache-airflow-providers-singularity": [[18, "apache-airflow-providers-singularity"]], "apache-airflow-providers-slack": [[18, "apache-airflow-providers-slack"]], "apache-airflow-providers-snowflake": [[18, "apache-airflow-providers-snowflake"]], "apache-airflow-providers-sqlite": [[18, "apache-airflow-providers-sqlite"]], "apache-airflow-providers-ssh": [[18, "apache-airflow-providers-ssh"]], "apache-airflow-providers-tableau": [[18, "apache-airflow-providers-tableau"]], "apache-airflow-providers-tabular": [[18, "apache-airflow-providers-tabular"]], "apache-airflow-providers-telegram": [[18, 
"apache-airflow-providers-telegram"]], "apache-airflow-providers-trino": [[18, "apache-airflow-providers-trino"]], "apache-airflow-providers-vertica": [[18, "apache-airflow-providers-vertica"]], "apache-airflow-providers-yandex": [[18, "apache-airflow-providers-yandex"]], "apache-airflow-providers-zendesk": [[18, "apache-airflow-providers-zendesk"]]}, "indexentries": {}}) \ No newline at end of file +Search.setIndex({"docnames": ["core-extensions/auth-backends", "core-extensions/connections", "core-extensions/extra-links", "core-extensions/index", "core-extensions/logging", "core-extensions/secrets-backends", "howto/create-update-providers", "index", "installing-from-pypi", "installing-from-sources", "operators-and-hooks-ref/apache", "operators-and-hooks-ref/aws", "operators-and-hooks-ref/azure", "operators-and-hooks-ref/google", "operators-and-hooks-ref/index", "operators-and-hooks-ref/protocol", "operators-and-hooks-ref/services", "operators-and-hooks-ref/software", "packages-ref"], "filenames": ["core-extensions/auth-backends.rst", "core-extensions/connections.rst", "core-extensions/extra-links.rst", "core-extensions/index.rst", "core-extensions/logging.rst", "core-extensions/secrets-backends.rst", "howto/create-update-providers.rst", "index.rst", "installing-from-pypi.rst", "installing-from-sources.rst", "operators-and-hooks-ref/apache.rst", "operators-and-hooks-ref/aws.rst", "operators-and-hooks-ref/azure.rst", "operators-and-hooks-ref/google.rst", "operators-and-hooks-ref/index.rst", "operators-and-hooks-ref/protocol.rst", "operators-and-hooks-ref/services.rst", "operators-and-hooks-ref/software.rst", "packages-ref.rst"], "titles": ["Auth backends", "Connections", "Extra Links", "Core Extensions", "Writing logs", "Secret backends", "Community Providers", "Provider packages", "Installation from PyPI", "Installing Providers from Sources", "ASF: Apache Software Foundation", "AWS: Amazon Web Services", "Azure: Microsoft Azure", "Google", "Operators and Hooks Reference", "Protocol integrations", "Services", "Software integrations", "Providers packages reference"], "terms": {"thi": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18], "i": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18], "summari": [0, 1, 2, 4, 5, 7], "all": [0, 1, 2, 4, 5, 6, 7, 11, 13], "apach": [0, 2, 3, 4, 5, 6, 7, 8, 9, 12, 14, 16], "airflow": [0, 1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17], "commun": [0, 1, 2, 4, 5, 15], "provid": [0, 1, 2, 3, 4, 5, 8, 10, 11, 12, 13, 15, 16, 17], "implement": [0, 1, 2, 3, 4, 5, 6, 7], "authent": [0, 7, 12], "expos": [0, 1, 2, 4, 5, 7], "via": [0, 1, 2, 4, 5, 7, 8, 9, 18], "manag": [0, 1, 2, 4, 5, 7, 8, 9, 14, 18], "": [0, 5, 6, 7, 9, 12, 18], "web": [0, 7, 14, 18], "server": [0, 3, 7, 9, 14, 18], "api": [0, 7, 10, 11, 12, 14, 15, 16, 17, 18], "base": [0, 6, 11, 12, 13], "flask": 0, "applic": 0, "builder": 0, "capabl": [0, 5, 7, 18], "you": [0, 1, 2, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 15, 16, 17], "can": [0, 1, 2, 3, 4, 5, 6, 7, 9, 13], "read": [0, 5, 6, 7], "more": [0, 6, 7, 8], "about": [0, 6, 7, 9], "those": [0, 1, 4, 5, 6, 7, 8], "fab": 0, "secur": [0, 14, 18], "doc": [0, 6], "also": [0, 1, 2, 4, 5, 6, 7, 9, 13], "take": [0, 5], "look": [0, 5, 6, 7, 8], "avail": [0, 2, 4, 5, 6, 7, 14, 18], "core": [0, 4, 5, 14], "webserv": 0, "see": [0, 1, 2, 4, 5, 6, 7, 8, 13], "google_openid": 0, "devel": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18], "2": [0, 1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18], "6": [0, 
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18], "0": [0, 1, 2, 3, 4, 5, 6, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18], "dev0": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18], "an": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18], "experiment": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18], "featur": [0, 1, 2, 3, 4, 5, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18], "extend": [1, 2, 3], "custom": [1, 2, 5, 6], "each": [1, 2, 6, 7], "defin": [1, 2, 6, 7], "own": [1, 2, 5, 6], "paramet": [1, 6], "ui": [1, 6, 7], "field": [1, 7], "behaviour": [1, 7], "when": [1, 6, 7, 8], "type": [1, 6, 13], "us": [1, 3, 8, 9, 13, 15, 17], "automat": [1, 6, 7], "creat": [1, 10], "hook": [1, 6, 7, 10, 11, 12, 15, 16, 17], "specif": [1, 3, 7], "The": [1, 2, 6, 7, 9], "explain": [1, 2, 6, 7], "airbytehook": 1, "oss": 1, "osshook": 1, "aw": [1, 7, 10, 13, 14, 15, 16, 17, 18], "awsgenerichook": 1, "emr": [1, 14], "emrhook": 1, "redshift": [1, 14], "redshiftsqlhook": 1, "cassandrahook": 1, "drillhook": 1, "druiddbapihook": 1, "hdfshook": 1, "hive_cli": 1, "hiveclihook": 1, "hiveserver2": 1, "hiveserver2hook": 1, "hive_metastor": 1, "hivemetastorehook": 1, "livyhook": 1, "pig_cli": 1, "pigclihook": 1, "spark_jdbc": [1, 10], "sparkjdbchook": 1, "spark_sql": [1, 10], "sparksqlhook": 1, "sparksubmithook": 1, "sqoophook": 1, "arangodbhook": 1, "asanahook": 1, "jirahook": 1, "cloudanthook": 1, "kuberneteshook": 1, "databrickshook": 1, "dbt_cloud": 1, "dbtcloudhook": 1, "dingdinghook": 1, "discordwebhookhook": 1, "dockerhook": 1, "elasticsearchhook": 1, "exasolhook": 1, "facebook_soci": 1, "facebookadsreportinghook": 1, "ftphook": 1, "githubhook": 1, "google_cloud_platform": [1, 7], "googlebasehook": [1, 13], "dataprep": 1, "googledataprephook": 1, "gcpcloudsql": 1, "cloudsqlhook": 1, "gcpcloudsqldb": 1, "cloudsqldatabasehook": 1, "gcpbigqueri": 1, "bigqueryhook": 1, "gcpssh": 1, "computeenginesshhook": 1, "leveldb": [1, 14, 18], "leveldbhook": 1, "grpchook": 1, "vault": [1, 14, 18], "vaulthook": 1, "httphook": 1, "imaphook": 1, "influxdbhook": 1, "jdbchook": 1, "jenkinshook": 1, "azurebasehook": 1, "azure_data_explor": 1, "azuredataexplorerhook": 1, "azure_batch": 1, "azurebatchhook": 1, "azure_cosmo": 1, "azurecosmosdbhook": 1, "azure_data_lak": 1, "azuredatalakehook": 1, "azure_fileshar": 1, "azurefilesharehook": 1, "azure_container_volum": 1, "azurecontainervolumehook": 1, "azure_container_inst": 1, "azurecontainerinstancehook": 1, "wasb": [1, 12], "wasbhook": 1, "azure_data_factori": 1, "azuredatafactoryhook": 1, "azure_container_registri": 1, "azurecontainerregistryhook": 1, "azure_service_bu": 1, "baseazureservicebushook": 1, "azure_synaps": 1, "azuresynapsehook": 1, "mssqlhook": 1, "mongo": [1, 17], "mongohook": 1, "mysqlhook": 1, "neo4jhook": 1, "odbchook": 1, "opsgeniealerthook": 1, "oraclehook": 1, "pagerdutyhook": 1, "pagerduty_ev": [1, 16], "pagerdutyeventshook": 1, "postgr": [1, 17], "postgreshook": 1, "prestohook": 1, "qubolehook": 1, "redishook": 1, "salesforcehook": 1, "sambahook": 1, "segmenthook": 1, "sftphook": 1, "slackhook": 1, "slackwebhook": 1, "slackwebhookhook": 1, "snowflakehook": 1, "sqlitehook": 1, "sshhook": 1, "tableauhook": 1, "tabularhook": 1, "trinohook": 1, "verticahook": 1, "yandexcloud": 1, "yandexcloudbasehook": 1, "zendeskhook": 1, "oper": [2, 6, 7, 10, 11, 12, 15, 16, 17], "For": [2, 6, 7, 9], "its": [2, 5, 7], "redirect": 2, "user": [2, 6, 7, 9], "extern": [2, 6, 7], "system": [2, 7, 14, 18], "button": 2, 
"task": [2, 4, 7], "page": [2, 7, 8, 9], "ar": [2, 6, 7, 8, 9, 11, 12, 13, 14], "here": [2, 3, 4, 5, 7, 14, 18], "batchjobdefinitionlink": 2, "batchjobdetailslink": 2, "batchjobqueuelink": 2, "emrclusterlink": 2, "cloudwatcheventslink": 2, "databricksjobrunlink": 2, "dbtcloudrunjoboperatorlink": 2, "bigqueryconsolelink": 2, "bigqueryconsoleindexablelink": 2, "aiplatformconsolelink": 2, "dataformrepositorylink": 2, "dataformworkspacelink": 2, "dataformworkflowinvocationlink": 2, "datafusioninstancelink": 2, "datafusionpipelinelink": 2, "datafusionpipelineslink": 2, "cloudsqlinstancelink": 2, "cloudsqlinstancedatabaselink": 2, "dataplextasklink": 2, "dataplextaskslink": 2, "dataplexlakelink": 2, "bigquerydatasetlink": 2, "bigquerytablelink": 2, "bigquerydatatransferconfiglink": 2, "computeinstancedetailslink": 2, "computeinstancetemplatedetailslink": 2, "computeinstancegroupmanagerdetailslink": 2, "cloudtasksqueuelink": 2, "cloudtaskslink": 2, "datacatalogentrygrouplink": 2, "datacatalogentrylink": 2, "datacatalogtagtemplatelink": 2, "dataproclink": 2, "dataproclistlink": 2, "dataprocmetastoredetailedlink": 2, "dataprocmetastorelink": 2, "dataprepflowlink": 2, "dataprepjobgrouplink": 2, "vertexaimodellink": 2, "vertexaimodellistlink": 2, "vertexaimodelexportlink": 2, "vertexaitraininglink": 2, "vertexaitrainingpipelineslink": 2, "vertexaidatasetlink": 2, "vertexaidatasetlistlink": 2, "vertexaihyperparametertuningjoblistlink": 2, "vertexaibatchpredictionjoblink": 2, "vertexaibatchpredictionjoblistlink": 2, "vertexaiendpointlink": 2, "vertexaiendpointlistlink": 2, "workflowsworkflowdetailslink": 2, "workflowslistofworkflowslink": 2, "workflowsexecutionlink": 2, "cloudcomposerenvironmentlink": 2, "cloudcomposerenvironmentslink": 2, "dataflowjoblink": 2, "clouddatastoreimportexportlink": 2, "clouddatastoreentitieslink": 2, "bigtableinstancelink": 2, "bigtableclusterlink": 2, "bigtabletableslink": 2, "spannerdatabaselink": 2, "spannerinstancelink": 2, "stackdrivernotificationslink": 2, "stackdriverpolicieslink": 2, "kubernetesengineclusterlink": 2, "kubernetesenginepodlink": 2, "pubsubsubscriptionlink": 2, "pubsubtopiclink": 2, "memcachedinstancedetailslink": 2, "memcachedinstancelistlink": 2, "redisinstancedetailslink": 2, "redisinstancelistlink": 2, "cloudbuildlink": 2, "cloudbuildlistlink": 2, "cloudbuildtriggerslistlink": 2, "cloudbuildtriggerdetailslink": 2, "lifescienceslink": 2, "cloudfunctionsdetailslink": 2, "cloudfunctionslistlink": 2, "cloudstoragetransferlistlink": 2, "cloudstoragetransferjoblink": 2, "cloudstoragetransferdetailslink": 2, "clouddlpdeidentifytemplateslistlink": 2, "clouddlpdeidentifytemplatedetailslink": 2, "clouddlpjobtriggerslistlink": 2, "clouddlpjobtriggerdetailslink": 2, "clouddlpjobslistlink": 2, "clouddlpjobdetailslink": 2, "clouddlpinspecttemplateslistlink": 2, "clouddlpinspecttemplatedetailslink": 2, "clouddlpinfotypeslistlink": 2, "clouddlpinfotypedetailslink": 2, "clouddlppossibleinfotypeslistlink": 2, "mlenginemodellink": 2, "mlenginemodelslistlink": 2, "mlenginejobdetailslink": 2, "mlenginejobslistlink": 2, "mlenginemodelversiondetailslink": 2, "storagelink": 2, "filedetailslink": 2, "azuredatafactorypipelinerunlink": 2, "qdslink": 2, "list": [3, 6, 7, 14, 18], "function": [3, 14], "thei": [3, 6, 7, 8, 18], "certain": [3, 7], "auth": 3, "backend": 3, "googl": [3, 6, 7, 9, 14], "connect": [3, 5, 6, 9, 13, 14, 18], "airbyt": [3, 9, 14], "alibaba": [3, 9], "amazon": [3, 7, 9, 14], "cassandra": [3, 9, 14], "drill": [3, 9, 14], "druid": [3, 9, 14], "hdf": [3, 
9, 14], "hive": [3, 9, 14], "livi": [3, 9, 14], "pig": [3, 9, 14], "spark": [3, 9, 14], "sqoop": [3, 9, 14], "arangodb": [3, 9, 14], "asana": [3, 9, 14], "atlassian": [3, 9, 14], "jira": [3, 9, 14], "ibm": [3, 9, 14, 18], "cloudant": [3, 9, 14], "kubernet": [3, 9, 14], "databrick": [3, 9, 14], "dbt": [3, 9], "cloud": [3, 4, 7, 9, 14], "dingd": [3, 9, 14], "discord": [3, 9, 14], "docker": [3, 6, 9, 14], "elasticsearch": [3, 9, 14], "exasol": [3, 9, 14], "facebook": [3, 9, 14], "file": [3, 6, 7, 9, 14, 18], "transfer": [3, 6, 7, 9, 14, 18], "protocol": [3, 9, 14, 18], "ftp": [3, 9, 14], "github": [3, 6, 9, 14], "grpc": [3, 9, 14], "hashicorp": [3, 9, 14], "hypertext": [3, 9, 14, 18], "http": [3, 6, 7, 8, 9, 14], "internet": [3, 9, 14, 18], "messag": [3, 9, 14, 18], "access": [3, 6, 7, 9, 14, 18], "imap": [3, 9, 14], "influxdb": [3, 9, 14], "java": [3, 9, 14, 18], "databas": [3, 5, 7, 9, 14, 18], "jdbc": [3, 9, 14], "jenkin": [3, 9, 14], "microsoft": [3, 9, 14], "azur": [3, 9, 14], "sql": [3, 9, 14], "mssql": [3, 7, 9, 14], "mongodb": [3, 9, 14, 18], "mysql": [3, 9, 14], "neo4j": [3, 6, 9, 14], "odbc": [3, 9, 14], "opsgeni": [3, 9, 14], "oracl": [3, 9, 14], "pagerduti": [3, 9, 14], "postgresql": [3, 9, 14, 18], "presto": [3, 9, 14], "qubol": [3, 9, 14], "redi": [3, 9, 14], "salesforc": [3, 9, 14], "samba": [3, 9, 14], "segment": [3, 9, 14], "sftp": [3, 9, 14], "slack": [3, 9, 14], "snowflak": [3, 9, 14], "sqlite": [3, 9, 14], "ssh": [3, 9, 14], "tableau": [3, 9, 14], "tabular": [3, 9, 14], "trino": [3, 9, 14], "vertica": [3, 9, 14], "yandex": [3, 9, 14], "zendesk": [3, 9, 14], "extra": [3, 6], "link": [3, 6], "write": [3, 7], "log": [3, 6, 14], "secret": 3, "option": [4, 7], "osstaskhandl": 4, "s3taskhandl": 4, "cloudwatchtaskhandl": 4, "elasticsearchtaskhandl": 4, "platform": [4, 14, 18], "gcstaskhandl": 4, "stackdrivertaskhandl": 4, "blob": [4, 14], "storag": [4, 14], "wasbtaskhandl": 4, "ha": [5, 6, 7, 11, 12, 13], "variabl": [5, 7], "configur": [5, 6, 7, 13], "from": [5, 6, 7, 10, 11, 12, 13], "rather": [5, 6, 7], "than": [5, 6, 7], "while": [5, 6, 7, 8], "store": 5, "inform": [5, 6, 7], "possibl": [5, 6, 7], "mani": [5, 7], "enterpris": 5, "alreadi": [5, 6, 9], "have": [5, 7], "some": [5, 6, 7, 8, 12, 13], "tap": 5, "servic": [5, 6, 7, 14, 18], "integr": [5, 7, 10, 11, 12, 13, 14, 16, 18], "ones": [5, 6], "secretsmanagerbackend": 5, "systemsmanagerparameterstorebackend": 5, "cloudsecretmanagerbackend": 5, "vaultbackend": 5, "azurekeyvaultbackend": 5, "gather": 6, "necessari": 6, "step": [6, 7, 9, 14], "guidelin": 6, "exist": [6, 7], "should": [6, 7, 8, 9], "awar": 6, "mai": 6, "distinct": 6, "cover": [6, 7], "guid": [6, 9, 10, 11, 12, 13, 15, 16, 17], "sequenc": 6, "describ": [6, 7, 8], "wa": [6, 7], "design": [6, 7], "meet": 6, "most": [6, 7, 9], "linear": 6, "flow": 6, "order": [6, 7], "develop": [6, 7, 10], "anoth": 6, "recommend": [6, 9], "help": 6, "work": [6, 7], "similar": 6, "your": [6, 8], "That": [6, 7], "wai": [6, 7], "set": 6, "up": 6, "other": [6, 7, 8, 14], "depend": [6, 7], "first": [6, 7], "need": [6, 7, 8], "local": [6, 7, 14], "environ": [6, 7], "contribut": [6, 7], "quick": 6, "start": [6, 7], "did": 6, "yet": 6, "we": [6, 7], "breez": 6, "easili": [6, 7], "abl": [6, 7], "one": [6, 7, 9], "execut": [6, 18], "ci": 6, "workflow": [6, 8], "abov": [6, 7], "contain": [6, 7, 14], "These": [6, 10, 11, 12, 13, 15, 16, 17], "intern": [6, 7], "volum": 6, "In": [6, 7], "chang": [6, 7], "made": [6, 9], "id": [6, 7, 9], "appli": [6, 7], "insid": 6, "carri": 6, "out": 6, "quickli": 
6, "our": [6, 7], "exampl": [6, 7, 8, 9, 13], "name": [6, 7], "new_provid": 6, "placehold": 6, "must": 6, "like": [6, 7, 8], "version": [6, 7, 8, 9, 18], "now": 6, "project": [6, 7], "below": [6, 7, 8, 9], "structur": [6, 7], "understand": 6, "compon": [6, 7], "If": [6, 7, 8], "still": 6, "doubt": 6, "build": [6, 7], "open": 6, "issu": 6, "so": [6, 7], "__init__": 6, "py": [6, 7], "example_dag": 6, "example_": 6, "test_": 6, "_system": 6, "consid": [6, 7], "ll": 6, "run": [6, 7, 9, 18], "my": [6, 7], "root": 6, "fafd8d630e46": 6, "opt": 6, "python": [6, 7, 8, 9, 10, 11, 12, 13, 15, 16, 17, 18], "m": 6, "pytest": 6, "import": [6, 7, 9], "part": [6, 7], "occur": 6, "pre": 6, "commit": 6, "instal": [6, 18], "rst": [6, 7], "setup": [6, 7], "spelling_wordlist": 6, "txt": [6, 8], "packag": [6, 8], "ref": 6, "logo": 6, "png": 6, "index": [6, 7], "yaml": 6, "changelog": 6, "releas": [6, 7, 14], "There": [6, 7, 9], "chanc": 6, "common": [6, 9, 14], "english": 6, "word": 6, "case": [6, 7], "add": [6, 7], "begin": 6, "capit": 6, "lowercas": 6, "second": 6, "block": 6, "namespac": 6, "nextdoor": 6, "line": [6, 7], "none": 6, "notfound": 6, "nullabl": 6, "neq": 6, "networkuri": 6, "nginx": 6, "nobr": 6, "nodash": 6, "under": [6, 7], "kei": [6, 9], "doesn": 6, "t": [6, 7], "ani": [6, 7], "empti": 6, "It": [6, 7, 9], "addit": [6, 7], "_howto": 6, "newprovideroper": 6, "class": [6, 7], "do": [6, 7, 8, 9], "someth": [6, 7], "amaz": 6, "requir": [6, 7, 8], "connection_id": 6, "awesom": 6, "exampleinclud": 6, "languag": 6, "after": [6, 7], "howto_operator_": 6, "end": [6, 7], "befor": [6, 7], "purpos": [6, 7], "check": [6, 7, 9, 16], "complet": 6, "best": [6, 7], "descript": [6, 7], "io": 6, "__": 6, "1": [6, 9, 18], "url": [6, 7], "www": 6, "tag": 6, "modul": [6, 7], "newproviderhook": 6, "deprec": [6, 7], "onli": [6, 7, 8], "behavior": 6, "howev": [6, 7], "support": [6, 8, 10, 11, 12, 13], "target": [6, 7, 10, 11, 12, 13, 15, 16, 17], "arrai": [6, 7], "allow": [6, 7, 10, 11, 12, 13, 15, 16, 17], "optim": [6, 7], "individu": [6, 7], "handl": [6, 7], "both": [6, 7], "definit": [6, 7], "two": [6, 7], "command": [6, 7, 8], "serv": 6, "accomplish": 6, "ensur": 6, "main": [6, 7, 9], "involv": 6, "filter": 6, "3": [6, 7, 8, 18], "might": [6, 7], "which": [6, 7, 12], "librari": 6, "Such": 6, "typic": [6, 8], "result": 6, "importerror": 6, "error": 6, "silent": 6, "ignor": 6, "pollut": 6, "fals": 6, "warn": [6, 9], "veri": [6, 7], "bad": 6, "pattern": 6, "tend": 6, "turn": [6, 7], "blind": 6, "spot": 6, "avoid": 6, "encourag": 6, "until": 6, "had": 6, "mechan": [6, 7], "select": [6, 9], "known": 6, "come": [6, 7, 8], "actual": 6, "lead": 6, "even": 6, "without": [6, 7], "give": [6, 7], "clue": 6, "miss": 6, "except": 6, "optionalproviderfeatureexcept": 6, "been": [6, 7], "introduc": [6, 7], "signal": 6, "providersmanag": 6, "would": [6, 7], "remain": 6, "compat": [6, 7], "continu": 6, "throw": 6, "plyvel": 6, "condit": 6, "keep": [6, 7], "try": [6, 7], "db": [6, 14], "airflowexcept": 6, "basehook": 6, "e": [6, 7], "As": [6, 7], "airflowoptionalproviderfeatureexcept": 6, "backward": [6, 7], "cannot": 6, "origin": 6, "rais": 6, "remov": [6, 7], "ad": [6, 7, 14, 18], "assign": 6, "uniqu": [6, 7], "mean": [6, 7], "want": [6, 7], "retriev": [6, 7], "calcul": 6, "alwai": [6, 7], "ti_kei": 6, "pass": 6, "get_valu": 6, "earlier": [6, 7], "note": [6, 7], "def": [6, 7], "get_link": 6, "self": [6, 9], "baseoper": 6, "dttm": 6, "datetim": 6, "taskinstancekei": 6, "job_id": 6, "els": 6, "assert": 6, "get_on": 6, "dag_id": 6, 
"dag": [6, 7, 13], "task_id": 6, "execution_d": 6, "len": 6, "bigquery_job_details_link_fmt": 6, "format": [6, 7, 8], "achiev": 6, "instanc": [6, 14], "pokereturnvalu": 6, "object": [6, 7], "poke": 6, "method": 6, "sensorwithxcomvalu": 6, "basesensoroper": 6, "context": 6, "union": 6, "bool": 6, "is_don": 6, "true": 6, "stop": [6, 7], "xcom_valu": 6, "push": 6, "To": [6, 9, 13, 16, 17, 18], "explicitli": 6, "ti": 6, "xcom_push": 6, "xcom_kei": 6, "built": 7, "modular": 7, "schedul": 7, "basic": 7, "call": 7, "sensor": 7, "multitud": 7, "new": 7, "separ": 7, "interfac": [7, 15], "given": 7, "60": 7, "free": 7, "exactli": 7, "same": [7, 8], "written": 7, "share": [7, 8], "full": 7, "refer": 7, "solid": 7, "discov": 7, "onc": 7, "re": 7, "becom": 7, "extens": [7, 13], "public": 7, "privat": 7, "form": 7, "deliv": 7, "visibl": 7, "detail": 7, "view": 7, "By": [7, 9], "default": 7, "save": 7, "make": 7, "them": [7, 8], "where": 7, "remot": [7, 9, 14, 18], "logger": 7, "were": 7, "latest": 7, "particular": 7, "downgrad": 7, "previou": [7, 9], "problem": 7, "impact": 7, "increment": 7, "independ": [7, 14], "valid": [7, 9], "updat": 7, "follow": [7, 9], "usual": 7, "test": 7, "capac": 7, "matter": 7, "third": 7, "parti": 7, "chapter": 7, "point": 7, "multipl": 7, "semver": 7, "scheme": 7, "gener": 7, "approach": 7, "unless": 7, "good": [7, 9], "reason": 7, "recent": 7, "x": 7, "vari": 7, "per": 7, "limit": [7, 12], "constrain": 7, "includ": [7, 8, 18], "correspond": 7, "togeth": 7, "correct": [7, 9], "constraint": [7, 8], "appropri": [7, 8], "cross": 7, "well": [7, 9], "simpli": 7, "enabl": [7, 18], "often": 7, "between": 7, "differ": [7, 9], "again": 7, "kind": 7, "break": 7, "document": [7, 10, 11, 12, 13, 14, 15, 16, 17], "everi": 7, "could": 7, "back": 7, "port": 7, "last": 7, "done": 7, "march": 7, "17": 7, "2021": [7, 9], "longer": 7, "sinc": 7, "reach": 7, "Of": 7, "life": 7, "june": 7, "standard": [7, 15], "moreov": 7, "mention": 7, "just": 7, "right": [7, 8], "meta": 7, "data": [7, 10, 14, 15, 16], "entri": 7, "apache_airflow_provid": 7, "callabl": 7, "return": 7, "dictionari": 7, "discover": 7, "json": 7, "schema": 7, "displai": [7, 14], "cli": 7, "human": 7, "friendli": 7, "revers": 7, "chronolog": 7, "current": [7, 8], "taken": 7, "provider_info": 7, "replac": 7, "queri": 7, "verifi": [7, 9], "properli": 7, "recogn": 7, "whether": 7, "sub": 7, "convent": 7, "possibli": 7, "beyond": 7, "practic": 7, "time": 7, "autom": 7, "semi": 7, "verif": 7, "go": 7, "reli": 7, "manual": 7, "advis": 7, "stage": 7, "choic": 7, "either": 7, "round": 7, "probabl": 7, "safer": 7, "older": 7, "lightli": 7, "fine": 7, "am": 7, "scope": 7, "later": 7, "incompat": 7, "speak": 7, "major": 7, "modif": 7, "long": 7, "anyth": 7, "special": 7, "besid": 7, "anyon": 7, "who": 7, "what": [7, 18], "cfg": 7, "tell": 7, "get": [7, 9], "metadata": 7, "sure": 7, "pypi": [7, 9, 18], "compliant": 7, "runtim": 7, "sever": 7, "org": [7, 9], "draft": 7, "07": 7, "properti": 7, "repositori": [7, 18], "string": 7, "item": 7, "favour": 7, "perform": [7, 10, 11, 12, 13, 15, 16, 17], "deprecatedvers": 7, "map": 7, "handler": 7, "decor": 7, "taskflow": 7, "path": 7, "entry_point": 7, "get_provider_info": 7, "myproviderpackag": 7, "somemodul": 7, "sourc": [7, 10, 11, 12, 13, 15, 16, 17], "sourcehook": 7, "hood": 7, "least": 7, "three": 7, "itself": 7, "venv": 7, "pip": [7, 8, 18], "relat": [7, 11], "g": 7, "folder": 7, "normal": 7, "doe": 7, "loop": 7, "through": 7, "section": 7, "valu": 7, "statement": 7, "translat": 7, "being": 7, 
"get_ui_field_behaviour": 7, "get_connection_form_widget": 7, "attribut": 7, "conn_typ": 7, "hook_nam": 7, "quit": 7, "number": 7, "intent": 7, "pr": 7, "But": 7, "conflict": 7, "prefer": 7, "choos": [7, 9], "domain": 7, "question": 7, "glad": 7, "ask": 7, "_default": 7, "few": 7, "google_cloud_default": 7, "aws_default": 7, "succe": 7, "cours": 7, "better": 7, "mail": 7, "accept": 7, "invest": 7, "enough": 7, "fulli": 7, "peopl": 7, "posit": 7, "think": 7, "match": [7, 9], "expect": 7, "prerequisit": 7, "discuss": 7, "devlist": 7, "team": 7, "publish": [7, 8], "whatev": 7, "find": 7, "advertis": 7, "absolut": 7, "ecosystem": 7, "area": 7, "websit": 7, "non": 7, "feel": 7, "evalu": 7, "merg": 7, "charg": 7, "outsid": 7, "control": 7, "commerci": 7, "busi": 7, "around": 7, "softwar": [7, 14], "never": 7, "3rd": 7, "12": [7, 9], "stabl": 7, "becaus": 7, "don": 7, "anymor": 7, "know": [7, 9], "highest": 7, "offici": 8, "success": 8, "poetri": 8, "especi": 8, "v": 8, "wish": 8, "convert": 8, "celeri": [8, 9], "raw": 8, "githubusercont": 8, "com": [8, 9], "7": [8, 18], "how": [8, 13, 16, 17], "upgrad": 8, "beam": [9, 14], "kylin": [9, 14], "pinot": [9, 14], "datadog": [9, 14], "powershel": [9, 14, 18], "psrp": [9, 15], "window": [9, 14, 18], "winrm": [9, 14], "openfaa": [9, 14], "papermil": [9, 14], "plexu": [9, 14], "sendgrid": 9, "singular": [9, 14], "telegram": [9, 14], "drop": 9, "down": 9, "top": 9, "left": 9, "pgp": 9, "signatur": 9, "essenti": 9, "download": [9, 13], "sha": 9, "gpg": 9, "pleas": 9, "asc": 9, "relev": 9, "distribut": [9, 14, 18], "directori": 9, "mirror": 9, "pgpk": 9, "ka": 9, "binari": 9, "pgpv": 9, "tar": 9, "gz": 9, "sat": 9, "11": 9, "sep": 9, "49": 9, "54": 9, "bst": 9, "rsa": 9, "cde15c6e4d3a8ec4ecf4ba4b6674e08ad7de406f": 9, "issuer": 9, "kaxilnaik": 9, "kaxil": 9, "naik": 9, "unknown": 9, "aka": 9, "gmail": 9, "certifi": 9, "trust": 9, "indic": 9, "belong": 9, "owner": 9, "primari": 9, "fingerprint": 9, "cde1": 9, "5c6e": 9, "4d3a": 9, "8ec4": 9, "ecf4": 9, "ba4b": 9, "6674": 9, "e08a": 9, "d7de": 9, "406f": 9, "worri": 9, "certif": 9, "sign": 9, "why": 9, "sha512": 9, "sum": 9, "shasum": 9, "512": 9, "diff": 9, "variou": [10, 11, 12, 13, 15, 16, 17], "within": [10, 11, 12, 13, 15, 16], "product": [10, 11, 12, 13, 15, 16, 17], "druid_check": 10, "hive_stat": 10, "kylin_cub": 10, "spark_submit": 10, "spark_jdbc_script": 10, "copi": [10, 11, 12, 13, 15, 17], "hive_to_dynamodb": [10, 11], "hive_to_druid": 10, "vertica_to_h": [10, 17], "hive_to_mysql": [10, 17], "hive_to_samba": [10, 15], "s3_to_hiv": [10, 11], "mysql_to_h": [10, 17], "mssql_to_hiv": [10, 17], "cassandra_to_gc": [10, 13], "base_aw": 11, "cloud_form": 11, "comput": 11, "elasticache_replication_group": 11, "redshift_sql": 11, "redshift_clust": 11, "redshift_data": 11, "secrets_manag": 11, "batch_client": 11, "batch_wait": 11, "dm": 11, "glue_crawl": 11, "glue_catalog": 11, "aws_lambda": 11, "lambda_funct": 11, "step_funct": 11, "dynamodb_to_s3": 11, "gcs_to_s3": [11, 13], "glacier_to_gc": [11, 13], "google_api_to_s3": 11, "attach": [11, 15], "imap_attachment_to_s3": [11, 15], "mongo_to_s3": [11, 17], "redshift_to_s3": 11, "s3_to_redshift": 11, "s3_to_sftp": [11, 15], "sftp_to_s3": [11, 15], "s3_to_ftp": [11, 15], "exasol_to_s3": [11, 17], "ftp_to_s3": [11, 15], "salesforce_to_s3": [11, 16], "filesystem": [11, 12, 13], "local_to_s3": 11, "sql_to_s3": [11, 17], "s3_to_gc": [11, 13], "s3_to_mysql": [11, 17], "s3tosnowflakeoper": [11, 16], "s3_to_snowflak": [11, 16], "copy_into_snowflak": [11, 12, 13, 16], 
"base_azur": 12, "sdk": 12, "client": 12, "wasb_delete_blob": 12, "container_inst": 12, "container_volum": 12, "container_registri": 12, "adx": 12, "data_factori": 12, "adl": 12, "data_lak": 12, "datalak": 12, "asb": 12, "azure_fileshare_to_gc": [12, 13], "adls_to_gc": [12, 13], "upload": [12, 13], "local_to_adl": 12, "oracle_to_azure_data_lak": [12, 17], "local_to_wasb": 12, "azure_blob_to_gc": [12, 13], "sftp_to_wasb": [12, 15], "base_googl": 13, "discovery_api": 13, "analyz": 13, "code": 13, "bigquery_dt": 13, "marketing_platform": 13, "campaign_manag": 13, "cloud_build": 13, "firebas": [13, 18], "life_sci": 13, "cloud_memorystor": 13, "memcach": 13, "natural_languag": 13, "os_login": 13, "pubsub": 13, "secret_manag": 13, "speech_to_text": 13, "cloud_sql": 13, "cloud_storage_transfer_servic": 13, "text_to_speech": 13, "translate_speech": 13, "video_intellig": 13, "compute_ssh": 13, "datacatalog": 13, "datafus": 13, "dataproc_metastor": 13, "gdm": 13, "kubernetes_engin": 13, "mlengin": 13, "vertex_ai": 13, "dataset": 13, "custom_job": 13, "auto_ml": 13, "batch_prediction_job": 13, "endpoint_servic": 13, "hyperparameter_tuning_job": 13, "model_servic": 13, "vertexai": 13, "presto_to_gc": [13, 17], "trino_to_gc": [13, 17], "sql_to_gc": [13, 17], "suit": [13, 17, 18], "gcs_to_gdriv": 13, "gdrive_to_gc": 13, "mssql_to_gc": [13, 17], "calendar_to_gc": 13, "sheet": [13, 17], "sheets_to_gc": 13, "gcs_to_sftp": [13, 15], "postgres_to_gc": [13, 17], "bigquery_to_mysql": [13, 17], "bigquery_to_mssql": [13, 17], "gcs_to_bigqueri": 13, "gcs_to_gc": 13, "facebook_ads_to_gc": [13, 16], "sftp_to_gc": [13, 15], "bigquery_to_bigqueri": 13, "mysql_to_gc": [13, 17], "oracle_to_gc": [13, 17], "gcs_to_sheet": 13, "local_to_gc": 13, "bigquery_to_gc": 13, "gcs_to_loc": 13, "salesforce_to_gc": [13, 16], "ads_to_gc": 13, "gcs_to_presto": [13, 17], "gcs_to_trino": [13, 17], "analyt": 13, "display_video": 13, "search_ad": 13, "cloud_compos": 13, "A": 14, "asf": 14, "foundat": 14, "hadoop": [14, 18], "webhdf": [14, 18], "dynamodb": 14, "simpl": 14, "s3": 14, "gc": 14, "appflow": 14, "athena": 14, "cloudform": 14, "cloudwatch": 14, "datasync": 14, "ec2": 14, "ec": 14, "elast": 14, "ek": 14, "elasticach": 14, "glacier": 14, "kinesi": 14, "firehos": 14, "quicksight": 14, "rd": 14, "sagemak": 14, "secretsmanag": 14, "email": 14, "se": 14, "notif": 14, "sn": 14, "queue": 14, "sq": 14, "batch": 14, "migrat": 14, "glue": 14, "lambda": 14, "token": 14, "st": 14, "cosmo": 14, "explor": 14, "factori": 14, "lake": 14, "fileshar": 14, "bu": 14, "synaps": 14, "market": [14, 18], "analytics360": 14, "video": 14, "360": 14, "search": 14, "calendar": 14, "compos": 14, "dataform": 14, "discoveri": 14, "drive": 14, "spreadsheet": 14, "shell": [14, 18], "repo": 14, "dataproc": 14, "swarm": 14, "bigqueri": 14, "jdbcoper": 15, "airbytetriggersyncoper": 16, "databricks_bas": 16, "databrickssubmitrunoper": 16, "databricksrunnowoper": 16, "databricks_repo": 16, "databricksreposcreateoper": 16, "databricksreposupdateoper": 16, "databricksreposdeleteoper": 16, "databricks_sql": 16, "databrickssqloper": 16, "databrickscopyintooper": 16, "discord_webhook": 16, "opsgeniecreatealertoper": 16, "job": 16, "qubole_check": 16, "salesforce_apex_rest": 16, "bulk": 16, "salesforceapexrestoper": 16, "salesforcebulkoper": 16, "segment_track_ev": 16, "slack_webhook": 16, "snowflakeoper": 16, "tableauoper": 16, "telegramoper": 16, "yandexcloud_dataproc": 16, "proc": 16, "sqltoslackoper": [16, 17], "sql_to_slack": [16, 17], "snowflaketoslackoper": 16, 
"snowflake_to_slack": 16, "asana_task": 17, "asanacreatetaskoper": 17, "docker_swarm": 17, "jenkins_job_trigg": 17, "cncf": 17, "kubernetes_pod": 17, "spark_kubernet": 17, "kubernetespodoper": 17, "mssqloper": 17, "mysqloper": 17, "neo4joper": 17, "postgresoper": 17, "redis_publish": 17, "sqliteoper": 17, "trinooper": 17, "sql_to_sheet": 17, "vertica_to_mysql": 17, "presto_to_mysql": 17, "trino_to_mysql": 17, "oracle_to_oracl": 17, "4": 18, "5": 18, "gcp": 18, "workspac": 18, "formerli": 18, "8": 18, "9": 18}, "objects": {"": [[18, 0, 1, "std-provider-apache-airflow-providers-airbyte", "apache-airflow-providers-airbyte"], [18, 0, 1, "std-provider-apache-airflow-providers-alibaba", "apache-airflow-providers-alibaba"], [18, 0, 1, "std-provider-apache-airflow-providers-amazon", "apache-airflow-providers-amazon"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-beam", "apache-airflow-providers-apache-beam"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-cassandra", "apache-airflow-providers-apache-cassandra"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-drill", "apache-airflow-providers-apache-drill"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-druid", "apache-airflow-providers-apache-druid"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-hdfs", "apache-airflow-providers-apache-hdfs"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-hive", "apache-airflow-providers-apache-hive"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-kylin", "apache-airflow-providers-apache-kylin"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-livy", "apache-airflow-providers-apache-livy"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-pig", "apache-airflow-providers-apache-pig"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-pinot", "apache-airflow-providers-apache-pinot"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-spark", "apache-airflow-providers-apache-spark"], [18, 0, 1, "std-provider-apache-airflow-providers-apache-sqoop", "apache-airflow-providers-apache-sqoop"], [18, 0, 1, "std-provider-apache-airflow-providers-arangodb", "apache-airflow-providers-arangodb"], [18, 0, 1, "std-provider-apache-airflow-providers-asana", "apache-airflow-providers-asana"], [18, 0, 1, "std-provider-apache-airflow-providers-atlassian-jira", "apache-airflow-providers-atlassian-jira"], [18, 0, 1, "std-provider-apache-airflow-providers-celery", "apache-airflow-providers-celery"], [18, 0, 1, "std-provider-apache-airflow-providers-cloudant", "apache-airflow-providers-cloudant"], [18, 0, 1, "std-provider-apache-airflow-providers-cncf-kubernetes", "apache-airflow-providers-cncf-kubernetes"], [18, 0, 1, "std-provider-apache-airflow-providers-common-sql", "apache-airflow-providers-common-sql"], [18, 0, 1, "std-provider-apache-airflow-providers-databricks", "apache-airflow-providers-databricks"], [18, 0, 1, "std-provider-apache-airflow-providers-datadog", "apache-airflow-providers-datadog"], [18, 0, 1, "std-provider-apache-airflow-providers-dbt-cloud", "apache-airflow-providers-dbt-cloud"], [18, 0, 1, "std-provider-apache-airflow-providers-dingding", "apache-airflow-providers-dingding"], [18, 0, 1, "std-provider-apache-airflow-providers-discord", "apache-airflow-providers-discord"], [18, 0, 1, "std-provider-apache-airflow-providers-docker", "apache-airflow-providers-docker"], [18, 0, 1, "std-provider-apache-airflow-providers-elasticsearch", "apache-airflow-providers-elasticsearch"], [18, 0, 1, 
"std-provider-apache-airflow-providers-exasol", "apache-airflow-providers-exasol"], [18, 0, 1, "std-provider-apache-airflow-providers-facebook", "apache-airflow-providers-facebook"], [18, 0, 1, "std-provider-apache-airflow-providers-ftp", "apache-airflow-providers-ftp"], [18, 0, 1, "std-provider-apache-airflow-providers-github", "apache-airflow-providers-github"], [18, 0, 1, "std-provider-apache-airflow-providers-google", "apache-airflow-providers-google"], [18, 0, 1, "std-provider-apache-airflow-providers-grpc", "apache-airflow-providers-grpc"], [18, 0, 1, "std-provider-apache-airflow-providers-hashicorp", "apache-airflow-providers-hashicorp"], [18, 0, 1, "std-provider-apache-airflow-providers-http", "apache-airflow-providers-http"], [18, 0, 1, "std-provider-apache-airflow-providers-imap", "apache-airflow-providers-imap"], [18, 0, 1, "std-provider-apache-airflow-providers-influxdb", "apache-airflow-providers-influxdb"], [18, 0, 1, "std-provider-apache-airflow-providers-jdbc", "apache-airflow-providers-jdbc"], [18, 0, 1, "std-provider-apache-airflow-providers-jenkins", "apache-airflow-providers-jenkins"], [18, 0, 1, "std-provider-apache-airflow-providers-microsoft-azure", "apache-airflow-providers-microsoft-azure"], [18, 0, 1, "std-provider-apache-airflow-providers-microsoft-mssql", "apache-airflow-providers-microsoft-mssql"], [18, 0, 1, "std-provider-apache-airflow-providers-microsoft-psrp", "apache-airflow-providers-microsoft-psrp"], [18, 0, 1, "std-provider-apache-airflow-providers-microsoft-winrm", "apache-airflow-providers-microsoft-winrm"], [18, 0, 1, "std-provider-apache-airflow-providers-mongo", "apache-airflow-providers-mongo"], [18, 0, 1, "std-provider-apache-airflow-providers-mysql", "apache-airflow-providers-mysql"], [18, 0, 1, "std-provider-apache-airflow-providers-neo4j", "apache-airflow-providers-neo4j"], [18, 0, 1, "std-provider-apache-airflow-providers-odbc", "apache-airflow-providers-odbc"], [18, 0, 1, "std-provider-apache-airflow-providers-openfaas", "apache-airflow-providers-openfaas"], [18, 0, 1, "std-provider-apache-airflow-providers-opsgenie", "apache-airflow-providers-opsgenie"], [18, 0, 1, "std-provider-apache-airflow-providers-oracle", "apache-airflow-providers-oracle"], [18, 0, 1, "std-provider-apache-airflow-providers-pagerduty", "apache-airflow-providers-pagerduty"], [18, 0, 1, "std-provider-apache-airflow-providers-papermill", "apache-airflow-providers-papermill"], [18, 0, 1, "std-provider-apache-airflow-providers-plexus", "apache-airflow-providers-plexus"], [18, 0, 1, "std-provider-apache-airflow-providers-postgres", "apache-airflow-providers-postgres"], [18, 0, 1, "std-provider-apache-airflow-providers-presto", "apache-airflow-providers-presto"], [18, 0, 1, "std-provider-apache-airflow-providers-qubole", "apache-airflow-providers-qubole"], [18, 0, 1, "std-provider-apache-airflow-providers-redis", "apache-airflow-providers-redis"], [18, 0, 1, "std-provider-apache-airflow-providers-salesforce", "apache-airflow-providers-salesforce"], [18, 0, 1, "std-provider-apache-airflow-providers-samba", "apache-airflow-providers-samba"], [18, 0, 1, "std-provider-apache-airflow-providers-segment", "apache-airflow-providers-segment"], [18, 0, 1, "std-provider-apache-airflow-providers-sendgrid", "apache-airflow-providers-sendgrid"], [18, 0, 1, "std-provider-apache-airflow-providers-sftp", "apache-airflow-providers-sftp"], [18, 0, 1, "std-provider-apache-airflow-providers-singularity", "apache-airflow-providers-singularity"], [18, 0, 1, 
"std-provider-apache-airflow-providers-slack", "apache-airflow-providers-slack"], [18, 0, 1, "std-provider-apache-airflow-providers-snowflake", "apache-airflow-providers-snowflake"], [18, 0, 1, "std-provider-apache-airflow-providers-sqlite", "apache-airflow-providers-sqlite"], [18, 0, 1, "std-provider-apache-airflow-providers-ssh", "apache-airflow-providers-ssh"], [18, 0, 1, "std-provider-apache-airflow-providers-tableau", "apache-airflow-providers-tableau"], [18, 0, 1, "std-provider-apache-airflow-providers-tabular", "apache-airflow-providers-tabular"], [18, 0, 1, "std-provider-apache-airflow-providers-telegram", "apache-airflow-providers-telegram"], [18, 0, 1, "std-provider-apache-airflow-providers-trino", "apache-airflow-providers-trino"], [18, 0, 1, "std-provider-apache-airflow-providers-vertica", "apache-airflow-providers-vertica"], [18, 0, 1, "std-provider-apache-airflow-providers-yandex", "apache-airflow-providers-yandex"], [18, 0, 1, "std-provider-apache-airflow-providers-zendesk", "apache-airflow-providers-zendesk"]]}, "objtypes": {"0": "std:provider"}, "objnames": {"0": ["std", "provider", "provider"]}, "titleterms": {"auth": [0, 7], "backend": [0, 5, 7], "googl": [0, 1, 2, 4, 5, 10, 11, 12, 13, 15, 16, 17, 18], "connect": [1, 7, 15], "airbyt": [1, 16, 18], "alibaba": [1, 4, 18], "amazon": [1, 2, 4, 5, 10, 11, 13, 15, 16, 17, 18], "apach": [1, 10, 11, 13, 15, 17, 18], "cassandra": [1, 10, 13, 18], "drill": [1, 10, 18], "druid": [1, 10, 18], "hdf": [1, 10, 18], "hive": [1, 10, 11, 15, 17, 18], "livi": [1, 10, 18], "pig": [1, 10, 18], "spark": [1, 10, 18], "sqoop": [1, 10, 18], "arangodb": [1, 17, 18], "asana": [1, 17, 18], "atlassian": [1, 17, 18], "jira": [1, 17, 18], "ibm": [1, 16], "cloudant": [1, 16, 18], "kubernet": [1, 11, 13, 17, 18], "databrick": [1, 2, 16, 18], "dbt": [1, 2, 18], "cloud": [1, 2, 10, 11, 12, 13, 15, 16, 17, 18], "dingd": [1, 16, 18], "discord": [1, 16, 18], "docker": [1, 17, 18], "elasticsearch": [1, 4, 17, 18], "exasol": [1, 11, 17, 18], "facebook": [1, 13, 16, 18], "file": [1, 10, 11, 12, 13, 15], "transfer": [1, 10, 11, 12, 13, 15, 16, 17], "protocol": [1, 11, 12, 13, 15], "ftp": [1, 11, 15, 18], "github": [1, 17, 18], "grpc": [1, 15, 18], "hashicorp": [1, 5, 17, 18], "hypertext": [1, 15], "http": [1, 15, 18], "internet": [1, 11, 15], "messag": [1, 11, 15], "access": [1, 11, 15], "imap": [1, 11, 15, 18], "influxdb": [1, 17, 18], "java": [1, 15], "databas": [1, 11, 15], "jdbc": [1, 15, 18], "jenkin": [1, 17, 18], "microsoft": [1, 2, 4, 5, 10, 12, 13, 15, 16, 17, 18], "azur": [1, 2, 4, 5, 12, 13, 15, 16, 17, 18], "sql": [1, 10, 11, 13, 16, 17, 18], "server": [1, 10, 13, 17], "mssql": [1, 10, 13, 17, 18], "mongodb": [1, 11, 17], "mysql": [1, 10, 11, 13, 17, 18], "neo4j": [1, 17, 18], "odbc": [1, 15, 18], "opsgeni": [1, 16, 18], "oracl": [1, 12, 13, 17, 18], "pagerduti": [1, 16, 18], "postgresql": [1, 13, 17], "presto": [1, 13, 17, 18], "qubol": [1, 2, 16, 18], "redi": [1, 17, 18], "salesforc": [1, 11, 13, 16, 18], "samba": [1, 10, 15, 18], "segment": [1, 16, 18], "sftp": [1, 11, 12, 13, 15, 18], "slack": [1, 16, 17, 18], "snowflak": [1, 11, 12, 13, 16, 18], "sqlite": [1, 17, 18], "ssh": [1, 11, 12, 13, 15, 18], "tableau": [1, 16, 18], "tabular": [1, 17, 18], "trino": [1, 13, 17, 18], "vertica": [1, 10, 17, 18], "yandex": [1, 16, 18], "zendesk": [1, 17, 18], "extra": [2, 7], "link": [2, 7], "core": [3, 7], "extens": 3, "write": 4, "log": [4, 7, 11], "secret": [5, 7, 13], "commun": [6, 7], "provid": [6, 7, 9, 18], "how": [6, 7], "creat": [6, 7], "new": 6, 
"initi": 6, "code": 6, "unit": 6, "test": 6, "integr": [6, 9, 15, 17], "document": 6, "option": 6, "featur": 6, "us": [6, 7], "dynam": 6, "task": [6, 13], "map": 6, "have": 6, "sensor": 6, "return": 6, "xcom": 6, "valu": 6, "updat": 6, "packag": [7, 9, 18], "extend": 7, "airflow": [7, 18], "function": [7, 11, 13], "custom": 7, "instal": [7, 8, 9], "upgrad": 7, "type": 7, "maintain": 7, "your": 7, "own": 7, "faq": 7, "2": 7, "0": 7, "backport": 7, "1": 7, "10": 7, "from": [8, 9], "pypi": 8, "tool": 8, "sourc": 9, "releas": 9, "asf": 10, "softwar": [10, 17], "foundat": 10, "beam": [10, 18], "kylin": [10, 18], "pinot": [10, 18], "hadoop": 10, "distribut": 10, "system": 10, "webhdf": 10, "dynamodb": [10, 11], "simpl": [10, 11, 13, 15, 16, 17], "storag": [10, 11, 12, 13, 15, 16, 17], "servic": [10, 11, 12, 13, 15, 16, 17], "s3": [10, 11, 13, 15, 16, 17], "gc": [10, 11, 12, 13, 15, 16, 17], "aw": 11, "web": 11, "appflow": 11, "athena": 11, "cloudform": 11, "cloudwatch": 11, "datasync": 11, "ec2": 11, "ec": 11, "elast": 11, "ek": 11, "elasticach": 11, "emr": 11, "glacier": [11, 13], "kinesi": 11, "data": [11, 12, 13, 17], "firehos": 11, "quicksight": 11, "rd": 11, "redshift": 11, "sagemak": 11, "secretsmanag": 11, "email": 11, "se": 11, "notif": 11, "sn": 11, "queue": 11, "sq": 11, "batch": [11, 12], "migrat": 11, "glue": 11, "lambda": 11, "secur": [11, 15], "token": 11, "st": 11, "step": 11, "local": [11, 12, 13], "common": [11, 13, 16, 17, 18], "blob": [12, 13, 15, 16], "contain": 12, "instanc": 12, "cosmo": 12, "db": 12, "explor": 12, "factori": 12, "lake": [12, 13, 17], "fileshar": [12, 13], "bu": 12, "synaps": 12, "automl": 13, "bigqueri": [13, 17], "bigtabl": 13, "campaign": 13, "manag": [13, 15], "build": 13, "loss": 13, "prevent": 13, "dlp": 13, "firestor": 13, "kei": 13, "km": 13, "life": 13, "scienc": 13, "memorystor": 13, "natur": 13, "languag": 13, "o": 13, "login": 13, "pub": 13, "sub": 13, "spanner": 13, "speech": 13, "text": 13, "stackdriv": 13, "translat": 13, "video": 13, "intellig": 13, "vision": 13, "workflow": 13, "comput": 13, "engin": 13, "catalog": 13, "fusion": 13, "dataflow": 13, "dataplex": 13, "dataprep": 13, "dataproc": [13, 16], "metastor": 13, "datastor": 13, "deploy": 13, "looker": 13, "machin": 13, "learn": 13, "vertex": 13, "ai": 13, "drive": 13, "calendar": 13, "spreadsheet": [13, 17], "ad": [13, 16], "market": 13, "platform": 13, "analytics360": 13, "displai": 13, "360": 13, "search": 13, "other": 13, "oper": [13, 14], "hook": [13, 14], "compos": 13, "dataform": 13, "discoveri": 13, "api": 13, "leveldb": 13, "refer": [14, 18], "shell": 15, "window": 15, "powershel": 15, "remot": 15, "winrm": [15, 18], "repo": 16, "datadog": [16, 18], "plexu": [16, 18], "telegram": [16, 18], "swarm": 17, "vault": 17, "openfaa": [17, 18], "papermil": [17, 18], "singular": [17, 18], "celeri": 18, "cncf": 18, "psrp": 18, "mongo": 18, "postgr": 18, "sendgrid": 18}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 8, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.viewcode": 1, "sphinx.ext.intersphinx": 1, "sphinx": 57}, "alltitles": {"Auth backends": [[0, "auth-backends"], [7, "auth-backends"]], "Google": [[0, "google"], [1, "google"], [2, "google"], [4, "google"], [5, "google"], [13, "google"], [13, "id3"]], "Connections": [[1, "connections"]], "Airbyte": [[1, "airbyte"], [16, 
"airbyte"]], "Alibaba": [[1, "alibaba"], [4, "alibaba"]], "Amazon": [[1, "amazon"], [2, "amazon"], [4, "amazon"], [5, "amazon"]], "Apache Cassandra": [[1, "apache-cassandra"], [10, "apache-cassandra"]], "Apache Drill": [[1, "apache-drill"], [10, "apache-drill"]], "Apache Druid": [[1, "apache-druid"], [10, "apache-druid"]], "Apache HDFS": [[1, "apache-hdfs"]], "Apache Hive": [[1, "apache-hive"], [10, "apache-hive"]], "Apache Livy": [[1, "apache-livy"], [10, "apache-livy"]], "Apache Pig": [[1, "apache-pig"], [10, "apache-pig"]], "Apache Spark": [[1, "apache-spark"], [10, "apache-spark"]], "Apache Sqoop": [[1, "apache-sqoop"], [10, "apache-sqoop"]], "ArangoDB": [[1, "arangodb"], [17, "arangodb"]], "Asana": [[1, "asana"], [17, "asana"]], "Atlassian Jira": [[1, "atlassian-jira"], [17, "atlassian-jira"]], "IBM Cloudant": [[1, "ibm-cloudant"], [16, "ibm-cloudant"]], "Kubernetes": [[1, "kubernetes"], [17, "kubernetes"]], "Databricks": [[1, "databricks"], [2, "databricks"], [16, "databricks"]], "dbt Cloud": [[1, "dbt-cloud"], [2, "dbt-cloud"]], "Dingding": [[1, "dingding"], [16, "dingding"]], "Discord": [[1, "discord"], [16, "discord"]], "Docker": [[1, "docker"], [17, "docker"]], "Elasticsearch": [[1, "elasticsearch"], [4, "elasticsearch"], [17, "elasticsearch"]], "Exasol": [[1, "exasol"], [17, "exasol"]], "Facebook": [[1, "facebook"]], "File Transfer Protocol (FTP)": [[1, "file-transfer-protocol-ftp"], [15, "file-transfer-protocol-ftp"]], "Github": [[1, "github"], [17, "github"]], "gRPC": [[1, "grpc"], [15, "grpc"]], "Hashicorp": [[1, "hashicorp"], [5, "hashicorp"]], "Hypertext Transfer Protocol (HTTP)": [[1, "hypertext-transfer-protocol-http"], [15, "hypertext-transfer-protocol-http"]], "Internet Message Access Protocol (IMAP)": [[1, "internet-message-access-protocol-imap"], [15, "internet-message-access-protocol-imap"]], "Influxdb": [[1, "influxdb"], [17, "influxdb"]], "Java Database Connectivity (JDBC)": [[1, "java-database-connectivity-jdbc"], [15, "java-database-connectivity-jdbc"]], "Jenkins": [[1, "jenkins"], [17, "jenkins"]], "Microsoft Azure": [[1, "microsoft-azure"], [2, "microsoft-azure"], [4, "microsoft-azure"], [5, "microsoft-azure"], [12, "microsoft-azure"]], "Microsoft SQL Server (MSSQL)": [[1, "microsoft-sql-server-mssql"], [17, "microsoft-sql-server-mssql"]], "MongoDB": [[1, "mongodb"], [17, "mongodb"]], "MySQL": [[1, "mysql"], [17, "mysql"]], "Neo4j": [[1, "neo4j"], [17, "neo4j"]], "ODBC": [[1, "odbc"], [15, "odbc"]], "Opsgenie": [[1, "opsgenie"], [16, "opsgenie"]], "Oracle": [[1, "oracle"], [17, "oracle"]], "Pagerduty": [[1, "pagerduty"], [16, "pagerduty"]], "PostgreSQL": [[1, "postgresql"], [17, "postgresql"]], "Presto": [[1, "presto"], [17, "presto"]], "Qubole": [[1, "qubole"], [2, "qubole"], [16, "qubole"]], "Redis": [[1, "redis"], [17, "redis"]], "Salesforce": [[1, "salesforce"], [16, "salesforce"]], "Samba": [[1, "samba"], [15, "samba"]], "Segment": [[1, "segment"], [16, "segment"]], "SFTP": [[1, "sftp"]], "Slack": [[1, "slack"], [16, "slack"]], "Snowflake": [[1, "snowflake"], [16, "snowflake"]], "SQLite": [[1, "sqlite"], [17, "sqlite"]], "SSH": [[1, "ssh"]], "Tableau": [[1, "tableau"], [16, "tableau"]], "Tabular": [[1, "tabular"], [17, "tabular"]], "Trino": [[1, "trino"], [17, "trino"]], "Vertica": [[1, "vertica"], [17, "vertica"]], "Yandex": [[1, "yandex"]], "Zendesk": [[1, "zendesk"], [17, "zendesk"]], "Extra Links": [[2, "extra-links"]], "Core Extensions": [[3, "core-extensions"]], "Writing logs": [[4, "writing-logs"]], "Secret backends": [[5, "secret-backends"], [7, 
"secret-backends"]], "Community Providers": [[6, "community-providers"]], "How-to creating a new community provider": [[6, "how-to-creating-a-new-community-provider"]], "Initial Code and Unit Tests": [[6, "initial-code-and-unit-tests"]], "Integration tests": [[6, "integration-tests"]], "Documentation": [[6, "documentation"]], "Optional provider features": [[6, "optional-provider-features"]], "Using Providers with dynamic task mapping": [[6, "using-providers-with-dynamic-task-mapping"]], "Having sensors return XCOM values": [[6, "having-sensors-return-xcom-values"]], "How-to Update a community provider": [[6, "how-to-update-a-community-provider"]], "Provider packages": [[7, "provider-packages"]], "Extending Airflow core functionality": [[7, "extending-airflow-core-functionality"]], "Custom connections": [[7, "custom-connections"]], "Extra links": [[7, "extra-links"]], "Logging": [[7, "logging"]], "Installing and upgrading providers": [[7, "installing-and-upgrading-providers"]], "Types of providers": [[7, "types-of-providers"]], "Community maintained providers": [[7, "community-maintained-providers"]], "Custom provider packages": [[7, "custom-provider-packages"]], "How to create your own provider": [[7, "how-to-create-your-own-provider"]], "FAQ for Airflow and Providers": [[7, "faq-for-airflow-and-providers"]], "Upgrading Airflow 2.0 and Providers": [[7, "upgrading-airflow-2-0-and-providers"]], "Customizing Provider Packages": [[7, "customizing-provider-packages"]], "Creating your own providers": [[7, "creating-your-own-providers"]], "Using Backport Providers in Airflow 1.10": [[7, "using-backport-providers-in-airflow-1-10"]], "Installation from PyPI": [[8, "installation-from-pypi"]], "Installation tools": [[8, "installation-tools"]], "Installing Providers from Sources": [[9, "installing-providers-from-sources"]], "Released packages": [[9, "released-packages"]], "Release integrity": [[9, "release-integrity"]], "ASF: Apache Software Foundation": [[10, "asf-apache-software-foundation"]], "Software": [[10, "software"], [17, "software"]], "Apache Beam": [[10, "apache-beam"]], "Apache Kylin": [[10, "apache-kylin"]], "Apache Pinot": [[10, "apache-pinot"]], "Hadoop Distributed File System (HDFS)": [[10, "hadoop-distributed-file-system-hdfs"]], "WebHDFS": [[10, "webhdfs"]], "Transfers": [[10, "transfers"], [11, "transfers"], [12, "transfers"], [13, "transfers"], [15, "transfers"], [16, "transfers"], [17, "transfers"]], "Apache Hive to Amazon DynamoDB": [[10, "apache-hive-to-amazon-dynamodb"], [11, "apache-hive-to-amazon-dynamodb"]], "Apache Hive to Apache Druid": [[10, "apache-hive-to-apache-druid"]], "Vertica to Apache Hive": [[10, "vertica-to-apache-hive"], [17, "vertica-to-apache-hive"]], "Apache Hive to MySQL": [[10, "apache-hive-to-mysql"], [17, "apache-hive-to-mysql"]], "Apache Hive to Samba": [[10, "apache-hive-to-samba"], [15, "apache-hive-to-samba"]], "Amazon Simple Storage Service (S3) to Apache Hive": [[10, "amazon-simple-storage-service-s3-to-apache-hive"], [11, "amazon-simple-storage-service-s3-to-apache-hive"]], "MySQL to Apache Hive": [[10, "mysql-to-apache-hive"], [17, "mysql-to-apache-hive"]], "Microsoft SQL Server (MSSQL) to Apache Hive": [[10, "microsoft-sql-server-mssql-to-apache-hive"], [17, "microsoft-sql-server-mssql-to-apache-hive"]], "Apache Cassandra to Google Cloud Storage (GCS)": [[10, "apache-cassandra-to-google-cloud-storage-gcs"], [13, "apache-cassandra-to-google-cloud-storage-gcs"]], "AWS: Amazon Web Services": [[11, "aws-amazon-web-services"]], "Services": [[11, 
"services"], [12, "services"], [13, "services"], [16, "services"], [16, "id1"]], "Amazon Appflow": [[11, "amazon-appflow"]], "Amazon Athena": [[11, "amazon-athena"]], "Amazon CloudFormation": [[11, "amazon-cloudformation"]], "Amazon CloudWatch Logs": [[11, "amazon-cloudwatch-logs"]], "Amazon DataSync": [[11, "amazon-datasync"]], "Amazon DynamoDB": [[11, "amazon-dynamodb"]], "Amazon EC2": [[11, "amazon-ec2"]], "Amazon ECS": [[11, "amazon-ecs"]], "Amazon Elastic Kubernetes Service (EKS)": [[11, "amazon-elastic-kubernetes-service-eks"]], "Amazon ElastiCache": [[11, "amazon-elasticache"]], "Amazon EMR": [[11, "amazon-emr"]], "Amazon EMR on EKS": [[11, "amazon-emr-on-eks"]], "Amazon Glacier": [[11, "amazon-glacier"]], "Amazon Kinesis Data Firehose": [[11, "amazon-kinesis-data-firehose"]], "Amazon QuickSight": [[11, "amazon-quicksight"]], "Amazon RDS": [[11, "amazon-rds"]], "Amazon Redshift": [[11, "amazon-redshift"]], "Amazon SageMaker": [[11, "amazon-sagemaker"]], "Amazon SecretsManager": [[11, "amazon-secretsmanager"]], "Amazon Simple Email Service (SES)": [[11, "amazon-simple-email-service-ses"]], "Amazon Simple Notification Service (SNS)": [[11, "amazon-simple-notification-service-sns"]], "Amazon Simple Queue Service (SQS)": [[11, "amazon-simple-queue-service-sqs"]], "Amazon Simple Storage Service (S3)": [[11, "amazon-simple-storage-service-s3"]], "Amazon Web Services": [[11, "amazon-web-services"]], "AWS Batch": [[11, "aws-batch"]], "AWS Database Migration Service": [[11, "aws-database-migration-service"]], "AWS Glue": [[11, "aws-glue"]], "AWS Lambda": [[11, "aws-lambda"]], "AWS Security Token Service (STS)": [[11, "aws-security-token-service-sts"]], "AWS Step Functions": [[11, "aws-step-functions"]], "Amazon DynamoDB to Amazon Simple Storage Service (S3)": [[11, "amazon-dynamodb-to-amazon-simple-storage-service-s3"]], "Google Cloud Storage (GCS) to Amazon Simple Storage Service (S3)": [[11, "google-cloud-storage-gcs-to-amazon-simple-storage-service-s3"], [13, "google-cloud-storage-gcs-to-amazon-simple-storage-service-s3"]], "Amazon Glacier to Google Cloud Storage (GCS)": [[11, "amazon-glacier-to-google-cloud-storage-gcs"], [13, "amazon-glacier-to-google-cloud-storage-gcs"]], "Google to Amazon Simple Storage Service (S3)": [[11, "google-to-amazon-simple-storage-service-s3"]], "Internet Message Access Protocol (IMAP) to Amazon Simple Storage Service (S3)": [[11, "internet-message-access-protocol-imap-to-amazon-simple-storage-service-s3"], [15, "internet-message-access-protocol-imap-to-amazon-simple-storage-service-s3"]], "MongoDB to Amazon Simple Storage Service (S3)": [[11, "mongodb-to-amazon-simple-storage-service-s3"], [17, "mongodb-to-amazon-simple-storage-service-s3"]], "Amazon Redshift to Amazon Simple Storage Service (S3)": [[11, "amazon-redshift-to-amazon-simple-storage-service-s3"]], "Amazon Simple Storage Service (S3) to Amazon Redshift": [[11, "amazon-simple-storage-service-s3-to-amazon-redshift"]], "Amazon Simple Storage Service (S3) to SSH File Transfer Protocol (SFTP)": [[11, "amazon-simple-storage-service-s3-to-ssh-file-transfer-protocol-sftp"], [15, "amazon-simple-storage-service-s3-to-ssh-file-transfer-protocol-sftp"]], "SSH File Transfer Protocol (SFTP) to Amazon Simple Storage Service (S3)": [[11, "ssh-file-transfer-protocol-sftp-to-amazon-simple-storage-service-s3"], [15, "ssh-file-transfer-protocol-sftp-to-amazon-simple-storage-service-s3"]], "Amazon Simple Storage Service (S3) to File Transfer Protocol (FTP)": [[11, 
"amazon-simple-storage-service-s3-to-file-transfer-protocol-ftp"], [15, "amazon-simple-storage-service-s3-to-file-transfer-protocol-ftp"]], "Exasol to Amazon Simple Storage Service (S3)": [[11, "exasol-to-amazon-simple-storage-service-s3"], [17, "exasol-to-amazon-simple-storage-service-s3"]], "File Transfer Protocol (FTP) to Amazon Simple Storage Service (S3)": [[11, "file-transfer-protocol-ftp-to-amazon-simple-storage-service-s3"], [15, "file-transfer-protocol-ftp-to-amazon-simple-storage-service-s3"]], "Salesforce to Amazon Simple Storage Service (S3)": [[11, "salesforce-to-amazon-simple-storage-service-s3"], [16, "salesforce-to-amazon-simple-storage-service-s3"]], "Local to Amazon Simple Storage Service (S3)": [[11, "local-to-amazon-simple-storage-service-s3"]], "Common SQL to Amazon Simple Storage Service (S3)": [[11, "common-sql-to-amazon-simple-storage-service-s3"], [17, "common-sql-to-amazon-simple-storage-service-s3"]], "Amazon Simple Storage Service (S3) to Google Cloud Storage (GCS)": [[11, "amazon-simple-storage-service-s3-to-google-cloud-storage-gcs"], [13, "amazon-simple-storage-service-s3-to-google-cloud-storage-gcs"]], "Amazon Simple Storage Service (S3) to MySQL": [[11, "amazon-simple-storage-service-s3-to-mysql"], [17, "amazon-simple-storage-service-s3-to-mysql"]], "Amazon Simple Storage Service (S3) to Snowflake": [[11, "amazon-simple-storage-service-s3-to-snowflake"], [11, "id1"], [16, "amazon-simple-storage-service-s3-to-snowflake"], [16, "id2"]], "Azure: Microsoft Azure": [[12, "azure-microsoft-azure"]], "Microsoft Azure Batch": [[12, "microsoft-azure-batch"]], "Microsoft Azure Blob Storage": [[12, "microsoft-azure-blob-storage"]], "Microsoft Azure Container Instances": [[12, "microsoft-azure-container-instances"]], "Microsoft Azure Cosmos DB": [[12, "microsoft-azure-cosmos-db"]], "Microsoft Azure Data Explorer": [[12, "microsoft-azure-data-explorer"]], "Microsoft Azure Data Factory": [[12, "microsoft-azure-data-factory"]], "Microsoft Azure Data Lake Storage": [[12, "microsoft-azure-data-lake-storage"]], "Microsoft Azure FileShare": [[12, "microsoft-azure-fileshare"]], "Microsoft Azure Service Bus": [[12, "microsoft-azure-service-bus"]], "Microsoft Azure Synapse": [[12, "microsoft-azure-synapse"]], "Microsoft Azure FileShare to Google Cloud Storage (GCS)": [[12, "microsoft-azure-fileshare-to-google-cloud-storage-gcs"], [13, "microsoft-azure-fileshare-to-google-cloud-storage-gcs"]], "Microsoft Azure Data Lake Storage to Google Cloud Storage (GCS)": [[12, "microsoft-azure-data-lake-storage-to-google-cloud-storage-gcs"], [13, "microsoft-azure-data-lake-storage-to-google-cloud-storage-gcs"]], "Local to Microsoft Azure Data Lake Storage": [[12, "local-to-microsoft-azure-data-lake-storage"]], "Oracle to Microsoft Azure Data Lake Storage": [[12, "oracle-to-microsoft-azure-data-lake-storage"], [17, "oracle-to-microsoft-azure-data-lake-storage"]], "Local to Microsoft Azure Blob Storage": [[12, "local-to-microsoft-azure-blob-storage"]], "Microsoft Azure Blob Storage to Google Cloud Storage (GCS)": [[12, "microsoft-azure-blob-storage-to-google-cloud-storage-gcs"], [13, "microsoft-azure-blob-storage-to-google-cloud-storage-gcs"]], "SSH File Transfer Protocol (SFTP) to Microsoft Azure Blob Storage": [[12, "ssh-file-transfer-protocol-sftp-to-microsoft-azure-blob-storage"], [15, "ssh-file-transfer-protocol-sftp-to-microsoft-azure-blob-storage"]], "Microsoft Azure Blob Storage to Snowflake": [[12, "microsoft-azure-blob-storage-to-snowflake"], [16, 
"microsoft-azure-blob-storage-to-snowflake"]], "Google Cloud": [[13, "google-cloud"]], "Google AutoML": [[13, "google-automl"]], "Google BigQuery": [[13, "google-bigquery"]], "Google BigQuery Data Transfer Service": [[13, "google-bigquery-data-transfer-service"]], "Google Bigtable": [[13, "google-bigtable"]], "Google Campaign Manager": [[13, "google-campaign-manager"]], "Google Cloud Build": [[13, "google-cloud-build"]], "Google Cloud Data Loss Prevention (DLP)": [[13, "google-cloud-data-loss-prevention-dlp"]], "Google Cloud Firestore": [[13, "google-cloud-firestore"]], "Google Cloud Functions": [[13, "google-cloud-functions"]], "Google Cloud Key Management Service (KMS)": [[13, "google-cloud-key-management-service-kms"]], "Google Cloud Life Sciences": [[13, "google-cloud-life-sciences"]], "Google Cloud Memorystore": [[13, "google-cloud-memorystore"]], "Google Cloud Natural Language": [[13, "google-cloud-natural-language"]], "Google Cloud OS Login": [[13, "google-cloud-os-login"]], "Google Cloud Pub/Sub": [[13, "google-cloud-pub-sub"]], "Google Cloud Secret Manager": [[13, "google-cloud-secret-manager"]], "Google Cloud Spanner": [[13, "google-cloud-spanner"]], "Google Cloud Speech-to-Text": [[13, "google-cloud-speech-to-text"]], "Google Cloud SQL": [[13, "google-cloud-sql"]], "Google Cloud Stackdriver": [[13, "google-cloud-stackdriver"]], "Google Cloud Storage (GCS)": [[13, "google-cloud-storage-gcs"]], "Google Cloud Storage Transfer Service": [[13, "google-cloud-storage-transfer-service"]], "Google Cloud Tasks": [[13, "google-cloud-tasks"]], "Google Cloud Text-to-Speech": [[13, "google-cloud-text-to-speech"]], "Google Cloud Translation": [[13, "google-cloud-translation"]], "Google Cloud Video Intelligence": [[13, "google-cloud-video-intelligence"]], "Google Cloud Vision": [[13, "google-cloud-vision"]], "Google Cloud Workflows": [[13, "google-cloud-workflows"]], "Google Compute Engine": [[13, "google-compute-engine"]], "Google Data Catalog": [[13, "google-data-catalog"]], "Google Data Fusion": [[13, "google-data-fusion"]], "Google Dataflow": [[13, "google-dataflow"]], "Google Dataplex": [[13, "google-dataplex"]], "Google Dataprep": [[13, "google-dataprep"]], "Google Dataproc": [[13, "google-dataproc"]], "Google Dataproc Metastore": [[13, "google-dataproc-metastore"]], "Google Datastore": [[13, "google-datastore"]], "Google Deployment Manager": [[13, "google-deployment-manager"]], "Google Kubernetes Engine": [[13, "google-kubernetes-engine"]], "Google Looker": [[13, "google-looker"]], "Google Machine Learning Engine": [[13, "google-machine-learning-engine"]], "Google Vertex AI": [[13, "google-vertex-ai"]], "Presto to Google Cloud Storage (GCS)": [[13, "presto-to-google-cloud-storage-gcs"], [17, "presto-to-google-cloud-storage-gcs"]], "Trino to Google Cloud Storage (GCS)": [[13, "trino-to-google-cloud-storage-gcs"], [17, "trino-to-google-cloud-storage-gcs"]], "Common SQL to Google Cloud Storage (GCS)": [[13, "common-sql-to-google-cloud-storage-gcs"], [17, "common-sql-to-google-cloud-storage-gcs"]], "Google Cloud Storage (GCS) to Google Drive": [[13, "google-cloud-storage-gcs-to-google-drive"]], "Google Drive to Google Cloud Storage (GCS)": [[13, "google-drive-to-google-cloud-storage-gcs"]], "Microsoft SQL Server (MSSQL) to Google Cloud Storage (GCS)": [[13, "microsoft-sql-server-mssql-to-google-cloud-storage-gcs"], [13, "id2"], [17, "microsoft-sql-server-mssql-to-google-cloud-storage-gcs"], [17, "id1"]], "Google Calendar to Google Cloud Storage (GCS)": [[13, 
"google-calendar-to-google-cloud-storage-gcs"]], "Google Spreadsheet to Google Cloud Storage (GCS)": [[13, "google-spreadsheet-to-google-cloud-storage-gcs"]], "Google Cloud Storage (GCS) to SSH File Transfer Protocol (SFTP)": [[13, "google-cloud-storage-gcs-to-ssh-file-transfer-protocol-sftp"], [15, "google-cloud-storage-gcs-to-ssh-file-transfer-protocol-sftp"]], "PostgreSQL to Google Cloud Storage (GCS)": [[13, "postgresql-to-google-cloud-storage-gcs"], [17, "postgresql-to-google-cloud-storage-gcs"]], "Google BigQuery to MySQL": [[13, "google-bigquery-to-mysql"], [17, "google-bigquery-to-mysql"]], "Google BigQuery to Microsoft SQL Server (MSSQL)": [[13, "google-bigquery-to-microsoft-sql-server-mssql"], [17, "google-bigquery-to-microsoft-sql-server-mssql"]], "Google Cloud Storage (GCS) to Google BigQuery": [[13, "google-cloud-storage-gcs-to-google-bigquery"]], "Google Cloud Storage (GCS) to Google Cloud Storage (GCS)": [[13, "google-cloud-storage-gcs-to-google-cloud-storage-gcs"]], "Facebook Ads to Google Cloud Storage (GCS)": [[13, "facebook-ads-to-google-cloud-storage-gcs"], [16, "facebook-ads-to-google-cloud-storage-gcs"]], "SSH File Transfer Protocol (SFTP) to Google Cloud Storage (GCS)": [[13, "ssh-file-transfer-protocol-sftp-to-google-cloud-storage-gcs"], [15, "ssh-file-transfer-protocol-sftp-to-google-cloud-storage-gcs"]], "Google BigQuery to Google BigQuery": [[13, "google-bigquery-to-google-bigquery"]], "MySQL to Google Cloud Storage (GCS)": [[13, "mysql-to-google-cloud-storage-gcs"], [17, "mysql-to-google-cloud-storage-gcs"]], "Oracle to Google Cloud Storage (GCS)": [[13, "oracle-to-google-cloud-storage-gcs"], [17, "oracle-to-google-cloud-storage-gcs"]], "Google Cloud Storage (GCS) to Google Spreadsheet": [[13, "google-cloud-storage-gcs-to-google-spreadsheet"]], "Local to Google Cloud Storage (GCS)": [[13, "local-to-google-cloud-storage-gcs"]], "Google BigQuery to Google Cloud Storage (GCS)": [[13, "google-bigquery-to-google-cloud-storage-gcs"]], "Google Cloud Storage (GCS) to Local": [[13, "google-cloud-storage-gcs-to-local"]], "Salesforce to Google Cloud Storage (GCS)": [[13, "salesforce-to-google-cloud-storage-gcs"], [16, "salesforce-to-google-cloud-storage-gcs"]], "Google Ads to Google Cloud Storage (GCS)": [[13, "google-ads-to-google-cloud-storage-gcs"]], "Google Cloud Storage (GCS) to Presto": [[13, "google-cloud-storage-gcs-to-presto"], [17, "google-cloud-storage-gcs-to-presto"]], "Google Cloud Storage (GCS) to Snowflake": [[13, "google-cloud-storage-gcs-to-snowflake"], [16, "google-cloud-storage-gcs-to-snowflake"]], "Google Cloud Storage (GCS) to Trino": [[13, "google-cloud-storage-gcs-to-trino"], [17, "google-cloud-storage-gcs-to-trino"]], "Google Marketing Platform": [[13, "google-marketing-platform"]], "Google Ads": [[13, "google-ads"]], "Google Analytics360": [[13, "google-analytics360"]], "Google Display&Video 360": [[13, "google-display-video-360"]], "Google Search Ads 360": [[13, "google-search-ads-360"]], "Other Google operators and hooks": [[13, "other-google-operators-and-hooks"]], "Google Calendar": [[13, "google-calendar"]], "Google Cloud Composer": [[13, "google-cloud-composer"]], "Google Cloud Dataform": [[13, "google-cloud-dataform"]], "Google Discovery API": [[13, "google-discovery-api"]], "Google Drive": [[13, "google-drive"]], "Google LevelDB": [[13, "google-leveldb"]], "Google Spreadsheet": [[13, "google-spreadsheet"]], "Operators and Hooks Reference": [[14, "operators-and-hooks-reference"]], "Protocol integrations": [[15, "protocol-integrations"]], 
"Protocols": [[15, "protocols"]], "Secure Shell (SSH)": [[15, "secure-shell-ssh"]], "SSH File Transfer Protocol (SFTP)": [[15, "ssh-file-transfer-protocol-sftp"]], "Windows PowerShell Remoting Protocol": [[15, "windows-powershell-remoting-protocol"]], "Windows Remote Management (WinRM)": [[15, "windows-remote-management-winrm"]], "Databricks Repos": [[16, "databricks-repos"]], "Databricks SQL": [[16, "databricks-sql"]], "Datadog": [[16, "datadog"]], "Facebook Ads": [[16, "facebook-ads"]], "Plexus": [[16, "plexus"]], "Telegram": [[16, "telegram"]], "Yandex.Cloud": [[16, "yandex-cloud"]], "Yandex.Cloud Dataproc": [[16, "yandex-cloud-dataproc"]], "Common SQL to Slack": [[16, "common-sql-to-slack"], [17, "common-sql-to-slack"]], "Snowflake to Slack": [[16, "snowflake-to-slack"]], "Software integrations": [[17, "software-integrations"]], "Common SQL": [[17, "common-sql"]], "Docker Swarm": [[17, "docker-swarm"]], "Hashicorp Vault": [[17, "hashicorp-vault"]], "OpenFaaS": [[17, "openfaas"]], "Papermill": [[17, "papermill"]], "Singularity": [[17, "singularity"]], "Common SQL to Google Spreadsheet": [[17, "common-sql-to-google-spreadsheet"]], "Vertica to MySQL": [[17, "vertica-to-mysql"]], "Presto to MySQL": [[17, "presto-to-mysql"]], "Trino to MySQL": [[17, "trino-to-mysql"]], "Oracle to Oracle": [[17, "oracle-to-oracle"]], "Providers packages reference": [[18, "providers-packages-reference"]], "apache-airflow-providers-airbyte": [[18, "apache-airflow-providers-airbyte"]], "apache-airflow-providers-alibaba": [[18, "apache-airflow-providers-alibaba"]], "apache-airflow-providers-amazon": [[18, "apache-airflow-providers-amazon"]], "apache-airflow-providers-apache-beam": [[18, "apache-airflow-providers-apache-beam"]], "apache-airflow-providers-apache-cassandra": [[18, "apache-airflow-providers-apache-cassandra"]], "apache-airflow-providers-apache-drill": [[18, "apache-airflow-providers-apache-drill"]], "apache-airflow-providers-apache-druid": [[18, "apache-airflow-providers-apache-druid"]], "apache-airflow-providers-apache-hdfs": [[18, "apache-airflow-providers-apache-hdfs"]], "apache-airflow-providers-apache-hive": [[18, "apache-airflow-providers-apache-hive"]], "apache-airflow-providers-apache-kylin": [[18, "apache-airflow-providers-apache-kylin"]], "apache-airflow-providers-apache-livy": [[18, "apache-airflow-providers-apache-livy"]], "apache-airflow-providers-apache-pig": [[18, "apache-airflow-providers-apache-pig"]], "apache-airflow-providers-apache-pinot": [[18, "apache-airflow-providers-apache-pinot"]], "apache-airflow-providers-apache-spark": [[18, "apache-airflow-providers-apache-spark"]], "apache-airflow-providers-apache-sqoop": [[18, "apache-airflow-providers-apache-sqoop"]], "apache-airflow-providers-arangodb": [[18, "apache-airflow-providers-arangodb"]], "apache-airflow-providers-asana": [[18, "apache-airflow-providers-asana"]], "apache-airflow-providers-atlassian-jira": [[18, "apache-airflow-providers-atlassian-jira"]], "apache-airflow-providers-celery": [[18, "apache-airflow-providers-celery"]], "apache-airflow-providers-cloudant": [[18, "apache-airflow-providers-cloudant"]], "apache-airflow-providers-cncf-kubernetes": [[18, "apache-airflow-providers-cncf-kubernetes"]], "apache-airflow-providers-common-sql": [[18, "apache-airflow-providers-common-sql"]], "apache-airflow-providers-databricks": [[18, "apache-airflow-providers-databricks"]], "apache-airflow-providers-datadog": [[18, "apache-airflow-providers-datadog"]], "apache-airflow-providers-dbt-cloud": [[18, 
"apache-airflow-providers-dbt-cloud"]], "apache-airflow-providers-dingding": [[18, "apache-airflow-providers-dingding"]], "apache-airflow-providers-discord": [[18, "apache-airflow-providers-discord"]], "apache-airflow-providers-docker": [[18, "apache-airflow-providers-docker"]], "apache-airflow-providers-elasticsearch": [[18, "apache-airflow-providers-elasticsearch"]], "apache-airflow-providers-exasol": [[18, "apache-airflow-providers-exasol"]], "apache-airflow-providers-facebook": [[18, "apache-airflow-providers-facebook"]], "apache-airflow-providers-ftp": [[18, "apache-airflow-providers-ftp"]], "apache-airflow-providers-github": [[18, "apache-airflow-providers-github"]], "apache-airflow-providers-google": [[18, "apache-airflow-providers-google"]], "apache-airflow-providers-grpc": [[18, "apache-airflow-providers-grpc"]], "apache-airflow-providers-hashicorp": [[18, "apache-airflow-providers-hashicorp"]], "apache-airflow-providers-http": [[18, "apache-airflow-providers-http"]], "apache-airflow-providers-imap": [[18, "apache-airflow-providers-imap"]], "apache-airflow-providers-influxdb": [[18, "apache-airflow-providers-influxdb"]], "apache-airflow-providers-jdbc": [[18, "apache-airflow-providers-jdbc"]], "apache-airflow-providers-jenkins": [[18, "apache-airflow-providers-jenkins"]], "apache-airflow-providers-microsoft-azure": [[18, "apache-airflow-providers-microsoft-azure"]], "apache-airflow-providers-microsoft-mssql": [[18, "apache-airflow-providers-microsoft-mssql"]], "apache-airflow-providers-microsoft-psrp": [[18, "apache-airflow-providers-microsoft-psrp"]], "apache-airflow-providers-microsoft-winrm": [[18, "apache-airflow-providers-microsoft-winrm"]], "apache-airflow-providers-mongo": [[18, "apache-airflow-providers-mongo"]], "apache-airflow-providers-mysql": [[18, "apache-airflow-providers-mysql"]], "apache-airflow-providers-neo4j": [[18, "apache-airflow-providers-neo4j"]], "apache-airflow-providers-odbc": [[18, "apache-airflow-providers-odbc"]], "apache-airflow-providers-openfaas": [[18, "apache-airflow-providers-openfaas"]], "apache-airflow-providers-opsgenie": [[18, "apache-airflow-providers-opsgenie"]], "apache-airflow-providers-oracle": [[18, "apache-airflow-providers-oracle"]], "apache-airflow-providers-pagerduty": [[18, "apache-airflow-providers-pagerduty"]], "apache-airflow-providers-papermill": [[18, "apache-airflow-providers-papermill"]], "apache-airflow-providers-plexus": [[18, "apache-airflow-providers-plexus"]], "apache-airflow-providers-postgres": [[18, "apache-airflow-providers-postgres"]], "apache-airflow-providers-presto": [[18, "apache-airflow-providers-presto"]], "apache-airflow-providers-qubole": [[18, "apache-airflow-providers-qubole"]], "apache-airflow-providers-redis": [[18, "apache-airflow-providers-redis"]], "apache-airflow-providers-salesforce": [[18, "apache-airflow-providers-salesforce"]], "apache-airflow-providers-samba": [[18, "apache-airflow-providers-samba"]], "apache-airflow-providers-segment": [[18, "apache-airflow-providers-segment"]], "apache-airflow-providers-sendgrid": [[18, "apache-airflow-providers-sendgrid"]], "apache-airflow-providers-sftp": [[18, "apache-airflow-providers-sftp"]], "apache-airflow-providers-singularity": [[18, "apache-airflow-providers-singularity"]], "apache-airflow-providers-slack": [[18, "apache-airflow-providers-slack"]], "apache-airflow-providers-snowflake": [[18, "apache-airflow-providers-snowflake"]], "apache-airflow-providers-sqlite": [[18, "apache-airflow-providers-sqlite"]], "apache-airflow-providers-ssh": [[18, 
"apache-airflow-providers-ssh"]], "apache-airflow-providers-tableau": [[18, "apache-airflow-providers-tableau"]], "apache-airflow-providers-tabular": [[18, "apache-airflow-providers-tabular"]], "apache-airflow-providers-telegram": [[18, "apache-airflow-providers-telegram"]], "apache-airflow-providers-trino": [[18, "apache-airflow-providers-trino"]], "apache-airflow-providers-vertica": [[18, "apache-airflow-providers-vertica"]], "apache-airflow-providers-yandex": [[18, "apache-airflow-providers-yandex"]], "apache-airflow-providers-zendesk": [[18, "apache-airflow-providers-zendesk"]]}, "indexentries": {}}) \ No newline at end of file diff --git a/sphinx_airflow_theme/demo/docs.sh b/sphinx_airflow_theme/demo/docs.sh index 17212066f30..c1024f39afb 100755 --- a/sphinx_airflow_theme/demo/docs.sh +++ b/sphinx_airflow_theme/demo/docs.sh @@ -20,7 +20,7 @@ set -euox pipefail MY_DIR="$(cd "$(dirname "$0")" && pwd)" -pushd "${MY_DIR}" &>/dev/null || exit 1 +pushd "${MY_DIR}" >/dev/null || exit 1 SOURCE_DIR="." BUILD_DIR="${MY_DIR}/_build" @@ -89,6 +89,3 @@ else usage exit 0 fi - - -popd &>/dev/null || exit 1