diff --git a/docs/_sources/index.rst.txt b/docs/_sources/index.rst.txt
index b0f658e..5f52b62 100644
--- a/docs/_sources/index.rst.txt
+++ b/docs/_sources/index.rst.txt
@@ -22,6 +22,7 @@ Control-M Python Client is built for data scientists and developers who prefer a
notebooks/jobproperties
notebooks/connectionprofiles
notebooks/run_ondemand
+ notebooks/get_jobs
beyond
.. toctree::
diff --git a/docs/_sources/notebooks/get_jobs.ipynb.txt b/docs/_sources/notebooks/get_jobs.ipynb.txt
new file mode 100644
index 0000000..e82a4b1
--- /dev/null
+++ b/docs/_sources/notebooks/get_jobs.ipynb.txt
@@ -0,0 +1,189 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "060c8aab",
+ "metadata": {},
+ "source": [
+ "# Get Jobs"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "06976a85",
+ "metadata": {},
+ "source": [
+ "[AutomationAPI Documentation](https://documents.bmc.com/supportu/API/Monthly/en-US/Documentation/API_Services_DeployService.htm#deploy3)\n",
+ "\n",
+ "\n",
+ "The get_jobs() method in the Control-M Python Client provides an easy way to retrieve job and folder definitions from the Control-M Server. It wraps the deploy jobs::get AAPI command and deserializes the resulting JSON into Python objects, allowing users to fetch and work with jobs and folders in a way that mirrors the state before deployment.\n",
+ "\n",
+ "This guide demonstrates how to use get_jobs() to fetch jobs from the Control-M server."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "id": "93e45a7e",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "{\n",
+ " \"Folder_Sanity_1\": {\n",
+ " \"Type\": \"Folder\",\n",
+ " \"ControlmServer\": \"workbench\",\n",
+ " \"LoadForecasts\": {\n",
+ " \"Type\": \"Job:Databricks\",\n",
+ " \"RunAs\": \"workbench\",\n",
+ " \"RunAsDummy\": true,\n",
+ " \"ConnectionProfile\": \"DATABRICKS\",\n",
+ " \"Databricks Job ID\": \"991955986358417\",\n",
+ " \"Parameters\": \"\\\"params\\\":{}\",\n",
+ " \"Idempotency Token\": \"tokeni_%%ORDERID\"\n",
+ " },\n",
+ " \"RunAs\": \"workbench\"\n",
+ " },\n",
+ " \"Folder_Sanity_2\": {\n",
+ " \"Type\": \"Folder\",\n",
+ " \"ControlmServer\": \"workbench\",\n",
+ " \"InventoryForecastModel\": {\n",
+ " \"Type\": \"Job:AWS SageMaker\",\n",
+ " \"RerunLimit\": {\n",
+ " \"Times\": \"5\"\n",
+ " },\n",
+ " \"RunAs\": \"workbench\",\n",
+ " \"RunAsDummy\": true,\n",
+ " \"ConnectionProfile\": \"SAGEMAKER\",\n",
+ " \"Pipeline Name\": \"InferencePipeline\",\n",
+ " \"Idempotency Token\": \"Token_ControlM_for_SageMaker%%ORDERID\",\n",
+ " \"Add Parameters\": \"checked\",\n",
+ " \"Parameters\": \"{\\\"Name\\\":\\\"input_file\\\", \\\"Value\\\": \\\"file\\\"}\",\n",
+ " \"Retry Pipeline Execution\": \"unchecked\"\n",
+ " },\n",
+ " \"RunAs\": \"workbench\"\n",
+ " }\n",
+ "}\n"
+ ]
+ }
+ ],
+ "source": [
+ "from ctm_python_client.core.workflow import Workflow, WorkflowDefaults\n",
+ "from ctm_python_client.core.comm import Environment\n",
+ "from aapi import *\n",
+ "\n",
+ "# Step 1: Define the environment (Control-M Workbench in this case)\n",
+ "env = Environment.create_workbench('workbench')\n",
+ "\n",
+ "# Step 2: Define your workflow with jobs and folders\n",
+ "workflow = Workflow(env, WorkflowDefaults(run_as='workbench'))\n",
+ "workflow.clear_all()\n",
+ "run_as_dummy = True\n",
+ "\n",
+ "# Define jobs\n",
+ "databricksLoad = JobDatabricks(\n",
+ " 'LoadForecasts',\n",
+ " connection_profile='DATABRICKS',\n",
+ " databricks_job_id='991955986358417',\n",
+ " parameters='\"params\":{}',\n",
+ " idempotency_token='tokeni_%%ORDERID',\n",
+ " run_as_dummy=run_as_dummy\n",
+ ")\n",
+ "\n",
+ "sagemaker_job = JobAwsSageMaker(\n",
+ " 'InventoryForecastModel',\n",
+ " connection_profile='SAGEMAKER',\n",
+ " pipeline_name='InferencePipeline',\n",
+ " idempotency_token='Token_ControlM_for_SageMaker%%ORDERID',\n",
+ " add_parameters='checked',\n",
+ " parameters='{\"Name\":\"input_file\", \"Value\": \"file\"}',\n",
+ " run_as_dummy=run_as_dummy,\n",
+ " retry_pipeline_execution='unchecked',\n",
+ " rerun_limit=Job.RerunLimit(times='5')\n",
+ ")\n",
+ "\n",
+ "# Define folders\n",
+ "folder1 = Folder('Folder_Sanity_1', controlm_server='workbench', job_list=[databricksLoad])\n",
+ "folder2 = Folder('Folder_Sanity_2', controlm_server='workbench', job_list=[sagemaker_job])\n",
+ "\n",
+ "workflow.add(folder1)\n",
+ "workflow.add(folder2)\n",
+ "\n",
+ "# Step 3: Deploy workflow\n",
+ "workflow.build()\n",
+ "workflow.deploy()\n",
+ "\n",
+ "# Step 4: Retrieve jobs after deployment\n",
+ "workflow_actual = Workflow.get_jobs(env, server=\"workbench\", folder=\"Folder_Sanity_*\")\n",
+ "\n",
+ "print(workflow.dumps_json(indent=2))\n"
+ ]
+ },
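+ {
+ "cell_type": "markdown",
+ "id": "b3d1f0aa",
+ "metadata": {},
+ "source": [
+ "The call above returns a new Workflow object built from the job and folder definitions found on the server. Below is a minimal sketch of inspecting that result, assuming the previous cell has run; it relies only on `dumps_json()` (already used above) and the standard `json` module."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "c4e2a1bb",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import json\n",
+ "\n",
+ "# Parse the retrieved definitions back into a plain dictionary\n",
+ "retrieved = json.loads(workflow_actual.dumps_json())\n",
+ "\n",
+ "# List the folders that matched the Folder_Sanity_* pattern\n",
+ "folder_names = [name for name, value in retrieved.items()\n",
+ "                if isinstance(value, dict) and value.get('Type') == 'Folder']\n",
+ "print(folder_names)\n"
+ ]
+ },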
+ {
+ "cell_type": "markdown",
+ "id": "7c0baf86",
+ "metadata": {},
+ "source": [
+ "## Arguments"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "feb81b89",
+ "metadata": {},
+ "source": [
+ "- **environment: Environment** :\n",
+ " The Control-M environment to connect to. Required. \n",
+ " This object defines the Control-M endpoint (Automation API endpoint, same as Control-M/EM), the authentication method, and the environment mode (on-prem or SaaS). \n",
+ " You can create an Environment instance using one of the following static methods: \n",
+ " - `create_workbench()` – for local development with Workbench. \n",
+ " - `create_onprem(host, username, password)` – for on-premises Control-M using username/password authentication. \n",
+ " - `create_saas(endpoint, api_key)` – for Helix Control-M (SaaS) using an API key. \n",
+ " The environment determines how to authenticate and which API variant to use, depending on whether the backend is Control-M or Helix Control-M.\n",
+ "\n",
+ "\n",
+ "- **server: str** :\n",
+ "The exact Control-M/Server name to query. Required. \n",
+ "No wildcards allowed.\n",
+ "\n",
+ "- **folder: str** :\n",
+ "The folder name or pattern to fetch. Required. \n",
+ "Supports wildcards (e.g., \"MyFolder_*\"). Filters jobs by folder name.\n",
+ "\n",
+ "- **job: str** (Not supported yet) \n",
+ " Currently not supported. The API ignores this parameter if provided."
+ ]
+ },
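+ {
+ "cell_type": "markdown",
+ "id": "d5f3b2cc",
+ "metadata": {},
+ "source": [
+ "The sketch below puts these arguments together in a standalone call, assuming the folders from the example above are already deployed. The commented-out `create_onprem` and `create_saas` lines use placeholder host, credential, and endpoint values; replace them with your own."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "e6a4c3dd",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from ctm_python_client.core.workflow import Workflow\n",
+ "from ctm_python_client.core.comm import Environment\n",
+ "\n",
+ "# environment: here the local Workbench used throughout this guide\n",
+ "env = Environment.create_workbench('workbench')\n",
+ "# env = Environment.create_onprem('controlm-host', username='user', password='password')\n",
+ "# env = Environment.create_saas('https://tenant.example/automation-api', api_key='<API_KEY>')\n",
+ "\n",
+ "# server: exact Control-M/Server name (no wildcards); folder: pattern, wildcards allowed\n",
+ "workflow_actual = Workflow.get_jobs(env, server='workbench', folder='Folder_Sanity_*')\n",
+ "print(workflow_actual.dumps_json(indent=2))\n"
+ ]
+ },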
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "59899dfc",
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "venv",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.12.6"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/docs/aapi.html b/docs/aapi.html
index fdc44e2..9e2d62e 100644
--- a/docs/aapi.html
+++ b/docs/aapi.html
@@ -217,6 +217,7 @@
Job and Folder Properties
Connection Profiles
Run On Demand
+Get Jobs
Going Forward
References:
[Remaining hunks: regenerated Sphinx index in docs/aapi.html, adding reference entries including ConnectionProfileFivetran, ConnectionProfileSAPBTP, JobAlibabaECS, JobFivetran, JobAzureDatabricks.show_tasks_output, JobGCPBigQuery.load_query_from_file, JobGCPBigQuery.query_type, JobGCPVM.tag_name, JobGCPVM.tag_value, and JobVeritasNetBackup.create_jobs_per_client.]
@@ -2732,7 +2872,7 @@ Submodules