[AIRFLOW-4446] Fix typos (#5217)
(cherry picked from commit 8562730)
andriisoldatenko authored and ashb committed Apr 30, 2019
1 parent 1b620ca commit 365f782
Showing 8 changed files with 19 additions and 19 deletions.
10 changes: 5 additions & 5 deletions CHANGELOG.txt
@@ -526,7 +526,7 @@ Bug fixes
- [AIRFLOW-3264] URL decoding when parsing URI for connection (#4109)
- [AIRFLOW-3365][AIRFLOW-3366] Allow celery_broker_transport_options to be set with environment variables (#4211)
- [AIRFLOW-2642] fix wrong value git-sync initcontainer env GIT_SYNC_ROOT (#3519)
-- [AIRFLOW-3353] Pin redis verison (#4195)
+- [AIRFLOW-3353] Pin redis version (#4195)
- [AIRFLOW-3251] KubernetesPodOperator now uses 'image_pull_secrets' argument when creating Pods (#4188)
- [AIRFLOW-2705] Move class-level moto decorator to method-level
- [AIRFLOW-3233] Fix deletion of DAGs in the UI (#4069)
@@ -625,7 +625,7 @@ Improvements
- [AIRFLOW-2622] Add "confirm=False" option to SFTPOperator
- [AIRFLOW-2662] support affinity & nodeSelector policies for kubernetes executor/operator
- [AIRFLOW-2709] Improve error handling in Databricks hook
-- [AIRFLOW-2723] Update lxml dependancy to >= 4.0.
+- [AIRFLOW-2723] Update lxml dependency to >= 4.0.
- [AIRFLOW-2763] No precheck mechanism in place during worker initialisation for the connection to metadata database
- [AIRFLOW-2789] Add ability to create single node cluster to DataprocClusterCreateOperator
- [AIRFLOW-2797] Add ability to create Google Dataproc cluster with custom image
@@ -776,7 +776,7 @@ Airflow 1.10.0, 2018-08-03
- [AIRFLOW-2534] Fix bug in HiveServer2Hook
- [AIRFLOW-2586] Stop getting AIRFLOW_HOME value from config file in bash operator
- [AIRFLOW-2605] Fix autocommit for MySqlHook
-- [AIRFLOW-2539][AIRFLOW-2359] Move remaing log config to configuration file
+- [AIRFLOW-2539][AIRFLOW-2359] Move remaining log config to configuration file
- [AIRFLOW-1656] Tree view dags query changed
- [AIRFLOW-2617] add imagePullPolicy config for kubernetes executor
- [AIRFLOW-2429] Fix security/task/sensors/ti_deps folders flake8 error
@@ -872,7 +872,7 @@ Airflow 1.10.0, 2018-08-03
- [AIRFLOW-2429] Make Airflow flake8 compliant
- [AIRFLOW-2491] Resolve flask version conflict
- [AIRFLOW-2484] Remove duplicate key in MySQL to GCS Op
-- [ARIFLOW-2458] Add cassandra-to-gcs operator
+- [AIRFLOW-2458] Add cassandra-to-gcs operator
- [AIRFLOW-2477] Improve time units for task duration and landing times charts for RBAC UI
- [AIRFLOW-2474] Only import snakebite if using py2
- [AIRFLOW-48] Parse connection uri querystring
@@ -2107,7 +2107,7 @@ Airflow 1.8.0, 2017-03-12
- [AIRFLOW-784] Pin funcsigs to 1.0.0
- [AIRFLOW-624] Fix setup.py to not import airflow.version as version
- [AIRFLOW-779] Task should fail with specific message when deleted
-- [AIRFLOW-778] Fix completey broken MetastorePartitionSensor
+- [AIRFLOW-778] Fix completely broken MetastorePartitionSensor
- [AIRFLOW-739] Set pickle_info log to debug
- [AIRFLOW-771] Make S3 logs append instead of clobber
- [AIRFLOW-773] Fix flaky datetime addition in api test
2 changes: 1 addition & 1 deletion airflow/contrib/hooks/gcp_vision_hook.py
@@ -340,7 +340,7 @@ def create_reference_image(
self.log.debug('ReferenceImage created:\n%s', response)

if not reference_image_id:
-# Refernece image id was generated by the API
+# Reference image id was generated by the API
reference_image_id = self._get_autogenerated_id(response)
self.log.info(
'Extracted autogenerated ReferenceImage ID from the response: %s', reference_image_id
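For context, the hunk above covers the fallback where the Vision API generates the reference image ID itself. A minimal sketch of that pattern, assuming the created ReferenceImage's resource name ends in ".../referenceImages/<id>"; the helper below is illustrative, not the hook's actual code:

def _get_autogenerated_id(response):
    # e.g. "projects/p/locations/l/products/x/referenceImages/abc123" -> "abc123"
    return response.name.rsplit('/', 1)[-1]

def resolve_reference_image_id(response, reference_image_id=None):
    # Fall back to the ID the API generated when the caller did not pass one.
    if not reference_image_id:
        # Reference image id was generated by the API
        reference_image_id = _get_autogenerated_id(response)
    return reference_image_id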
2 changes: 1 addition & 1 deletion airflow/contrib/hooks/grpc_hook.py
@@ -104,7 +104,7 @@ def run(self, stub_class, call_func, streaming=False, data={}):
yield single_response
except grpc.RpcError as ex:
self.log.exception(
"Error occured when calling the grpc service: {0}, method: {1} \
"Error occurred when calling the grpc service: {0}, method: {1} \
status code: {2}, error details: {3}"
.format(stub.__class__.__name__, call_func, ex.code(), ex.details()))
raise ex
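The hunk above only respells the log message inside the hook's RpcError handler. A rough sketch of that handler pattern, where stub and call_func stand in for the hook's real arguments (grpc.RpcError, .code() and .details() are real gRPC APIs):

import logging

import grpc

log = logging.getLogger(__name__)

def call_grpc_method(stub, call_func, data):
    # Invoke one stub method; log gRPC failures before re-raising,
    # mirroring the except block in the hunk above.
    try:
        return getattr(stub, call_func)(**data)
    except grpc.RpcError as ex:
        log.exception(
            "Error occurred when calling the grpc service: %s, method: %s, "
            "status code: %s, error details: %s",
            stub.__class__.__name__, call_func, ex.code(), ex.details())
        raise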
16 changes: 8 additions & 8 deletions airflow/contrib/operators/gcp_natural_language_operator.py
@@ -47,9 +47,9 @@ class CloudLanguageAnalyzeEntitiesOperator(BaseOperator):
:type gcp_conn_id: str
"""

-# [START natural_langauge_analyze_entities_template_fields]
+# [START natural_language_analyze_entities_template_fields]
template_fields = ("document", "gcp_conn_id")
-# [END natural_langauge_analyze_entities_template_fields]
+# [END natural_language_analyze_entities_template_fields]

def __init__(
self,
@@ -108,9 +108,9 @@ class CloudLanguageAnalyzeEntitySentimentOperator(BaseOperator):
:type gcp_conn_id: str
"""

-# [START natural_langauge_analyze_entity_sentiment_template_fields]
+# [START natural_language_analyze_entity_sentiment_template_fields]
template_fields = ("document", "gcp_conn_id")
-# [END natural_langauge_analyze_entity_sentiment_template_fields]
+# [END natural_language_analyze_entity_sentiment_template_fields]

def __init__(
self,
@@ -172,9 +172,9 @@ class CloudLanguageAnalyzeSentimentOperator(BaseOperator):
:type gcp_conn_id: str
"""

-# [START natural_langauge_analyze_sentiment_template_fields]
+# [START natural_language_analyze_sentiment_template_fields]
template_fields = ("document", "gcp_conn_id")
-# [END natural_langauge_analyze_sentiment_template_fields]
+# [END natural_language_analyze_sentiment_template_fields]

def __init__(
self,
@@ -229,9 +229,9 @@ class CloudLanguageClassifyTextOperator(BaseOperator):
:type gcp_conn_id: str
"""

-# [START natural_langauge_classify_text_template_fields]
+# [START natural_language_classify_text_template_fields]
template_fields = ("document", "gcp_conn_id")
-# [END natural_langauge_classify_text_template_fields]
+# [END natural_language_classify_text_template_fields]

def __init__(
self,
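All four hunks in this file rename the same misspelled natural_langauge_* anchor comments around template_fields. The [START ...]/[END ...] markers are documentation include anchors (the docs pull these snippets in verbatim), which is why only comments change. As a toy illustration of how template_fields itself works (a hypothetical operator, not one of the CloudLanguage* classes):

from airflow.models import BaseOperator

class AnalyzeDocumentOperator(BaseOperator):
    # Any attribute named in template_fields is rendered with Jinja
    # (e.g. a document path containing '{{ ds }}') before execute() runs.
    template_fields = ("document", "gcp_conn_id")

    def __init__(self, document, gcp_conn_id='google_cloud_default',
                 *args, **kwargs):
        super(AnalyzeDocumentOperator, self).__init__(*args, **kwargs)
        self.document = document
        self.gcp_conn_id = gcp_conn_id

    def execute(self, context):
        self.log.info('Analyzing rendered document: %s', self.document)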
2 changes: 1 addition & 1 deletion airflow/www/templates/airflow/dag.html
@@ -340,7 +340,7 @@ <h4 class="modal-title" id="dagModalLabel">
var id = '';
var dag_id = '{{ dag.dag_id }}';
var task_id = '';
-var exection_date = '';
+var execution_date = '';
var subdag_id = '';
function call_modal(t, d, try_numbers, sd) {
task_id = t;
2 changes: 1 addition & 1 deletion airflow/www/templates/airflow/gantt.html
@@ -54,7 +54,7 @@
$( document ).ready(function() {
var dag_id = '{{ dag.dag_id }}';
var task_id = '';
-var exection_date = '';
+var execution_date = '';
data = {{ data |tojson|safe }};
var gantt = d3.gantt()
.taskTypes(data.taskNames)
2 changes: 1 addition & 1 deletion tests/models/test_taskinstance.py
@@ -477,7 +477,7 @@ def run_ti_and_assert(run_date, expected_start_date, expected_end_date, expected

# Run with multiple reschedules.
# During reschedule the try number remains the same, but each reschedule is recorded.
-# The start date is expected to remain the inital date, hence the duration increases.
+# The start date is expected to remain the initial date, hence the duration increases.
# When finished the try number is incremented and there is no reschedule expected
# for this try.

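The corrected comment states the invariant the test checks: within one try, reschedules keep the try number and start date fixed, so the duration grows with each probe. A tiny worked example of that arithmetic, with made-up dates:

from datetime import datetime, timedelta

initial = datetime(2019, 4, 30, 12, 0, 0)   # start of the first try
probes = [initial + timedelta(minutes=5 * i) for i in range(3)]

for end_date in probes:
    start_date = initial                     # pinned to the initial date...
    duration = (end_date - start_date).total_seconds()
    print(duration)                          # ...hence it increases: 0.0, 300.0, 600.0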
2 changes: 1 addition & 1 deletion tests/operators/test_http_operator.py
@@ -54,7 +54,7 @@ def test_response_in_logs(self, m):
def test_response_in_logs_after_failed_check(self, m):
"""
Test that when using SimpleHttpOperator with log_response=True,
-the reponse is logged even if request_check fails
+the response is logged even if request_check fails
"""

def response_check(response):
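The docstring above describes running SimpleHttpOperator with log_response=True and a response_check that fails. A minimal sketch of that setup; the connection id and endpoint are placeholders:

from airflow.operators.http_operator import SimpleHttpOperator

def response_check(response):
    # Deliberately fail the check; with log_response=True the response
    # body is still logged before the operator raises.
    return False

task = SimpleHttpOperator(
    task_id='get_with_logging',
    http_conn_id='http_default',   # placeholder connection
    endpoint='/status',            # placeholder endpoint
    method='GET',
    log_response=True,
    response_check=response_check,
)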
