Commit a0dbcf8

Merge pull request #6349 from EnterpriseDB/release/2024-12-12a
Release: 2024-12-12a
2 parents: 3f91fd4 + fb53637

24 files changed: +526 −168 lines
Lines changed: 57 additions & 0 deletions

```yaml
name: generate release notes
on:
  pull_request:
    types: [opened, synchronize]
    paths:
      - "**/src/*.yml"
      - "**/src/*.yaml"
jobs:
  release-notes:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.pull_request.head.ref }}
          path: content
          sparse-checkout: |
            advocacy_docs
            product_docs

      - name: Checkout relgen tool
        uses: actions/checkout@v4
        with:
          ref: develop
          path: tools
          sparse-checkout: |
            tools

      - name: setup node
        uses: actions/setup-node@v4

      - name: install dependencies
        run: npm --prefix ./tools/tools/automation/generators/relgen ci

      # TODO: limit this to paths that have actually *changed*
      - name: regenerate relnotes
        run: |
          shopt -s globstar
          for rnmetapath in ./content/**/src/meta.yml; do
            ./tools/tools/automation/generators/relgen/relgen.js -p ${rnmetapath%/src/meta.yml}
          done

      - name: check for modified files
        id: changes
        run: |
          cd ./content
          echo "files=`git ls-files --other --modified --exclude-standard | wc -l`" >> $GITHUB_OUTPUT

      - name: commit modified files
        if: steps.changes.outputs.files > 0
        run: |
          cd ./content
          git config user.name "github-actions[bot]"
          git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git add .
          git commit -m "update generated release notes"
          git push
```
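
Two bash details in the regenerate step are easy to miss: `shopt -s globstar` turns on the recursive `**` glob, and `${rnmetapath%/src/meta.yml}` strips the `/src/meta.yml` suffix so that relgen's `-p` flag receives the product directory. A minimal sketch of that expansion (the path below is a hypothetical example):

```bash
#!/usr/bin/env bash
# ${var%pattern} removes the shortest match of pattern from the end of var,
# turning each meta.yml path into the directory relgen operates on.
rnmetapath="./content/product_docs/docs/example/1/src/meta.yml"  # hypothetical path
echo "${rnmetapath%/src/meta.yml}"   # -> ./content/product_docs/docs/example/1
```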

.github/workflows/sync-and-process-files.yml

Lines changed: 57 additions & 13 deletions

```diff
@@ -2,55 +2,99 @@ name: sync and process files from another repo
 on:
   repository_dispatch:
     types: [sync-to-docs]
+  workflow_dispatch:
+    inputs:
+      repo:
+        description: Repository to source documentation from
+        required: true
+        type: string
+      ref:
+        description: Ref name in the source repo
+        required: true
+        type: string
+      sha:
+        description: SHA in the source repo, should correspond to ref
+        required: true
+        type: string
+
 jobs:
   sync-and-process-files:
+    permissions:
+      contents: write
+      pull-requests: write
     env:
+      SOURCE_REPO: ${{ github.event.client_payload.repo || inputs.repo }}
+      SOURCE_REF: ${{ github.event.client_payload.ref || inputs.ref }}
+      SOURCE_SHA: ${{ github.event.client_payload.sha || inputs.sha }}
+
       # The body text of the PR requests that will be created
-      BODY: "Automated changes to pull in and process updates from repo: ${{ github.event.client_payload.repo }} ref: ${{ github.event.client_payload.ref }}"
+      BODY: |
+        Automated changes to pull in and process updates from repo: ${{ github.event.client_payload.repo || inputs.repo }} ref: ${{ github.event.client_payload.ref || inputs.ref }}
 
-      # The name of the branch that will be created
-      BRANCH_NAME: automatic_docs_update/repo_${{ github.event.client_payload.repo }}/ref_${{ github.event.client_payload.ref }}
+        ## Reviewing
+        - Look for formatting that may not work as intended
+        - Watch out for local changes (factual corrections, copy edits, link fixes) that may be overwritten
+        - You may need to resolve conflicts before merging - check the upstream repo for context when this isn't obvious
 
-      # The users that should be assigned to the PR as a comma separated list of github usernames.
-      REVIEWERS:
+      # The name of the branch that will be created
+      BRANCH_NAME: automatic_docs_update/repo_${{ github.event.client_payload.repo || inputs.repo }}/ref_${{ github.event.client_payload.ref || inputs.ref }}
 
       # The title of the PR request that will be created
-      TITLE: "Process changes to docs from: repo: ${{ github.event.client_payload.repo }} ref: ${{ github.event.client_payload.ref }}"
+      TITLE: "Process changes to docs from: repo: ${{ github.event.client_payload.repo || inputs.repo }} ref: ${{ github.event.client_payload.ref || inputs.ref }}"
 
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
     steps:
+      - name: Check inputs
+        if: ${{ !env.SOURCE_REPO || !env.SOURCE_REF || !env.SOURCE_SHA }}
+        run: |
+          echo "::error title=missing inputs::must provide source repo, source ref and source SHA"
+          exit 1
+
       - name: Checkout destination
         uses: actions/checkout@v4
         with:
           path: destination
+          lfs: true
 
       - name: Checkout source repo
        uses: actions/checkout@v4
         with:
-          ref: ${{ github.event.client_payload.sha }}
-          repository: ${{ github.event.client_payload.repo }}
+          ref: ${{ env.SOURCE_SHA }}
+          repository: ${{ env.SOURCE_REPO }}
           token: ${{ secrets.SYNC_FILES_TOKEN }}
           path: source
 
       - name: setup node
         uses: actions/setup-node@v4
         with:
-          node-version: "14"
+          node-version: "18"
 
       - name: update npm
-        run: npm install -g npm@7
+        run: npm install -g npm@10
 
       - name: Process changes
-        run: ${{ github.workspace }}/destination/scripts/source/dispatch_product.py ${{ github.event.client_payload.repo }} ${{ github.workspace }}
+        id: changes
+        run: ${{ github.workspace }}/destination/scripts/source/dispatch_product.py ${{ env.SOURCE_REPO }} ${{ github.workspace }}
         working-directory: source
 
+      - name: Update PR body
+        if: ${{ steps.changes.outputs.new-tag }}
+        run: |
+          echo 'BODY<<EOF' >> $GITHUB_ENV
+          echo "$BODY" >> $GITHUB_ENV
+          echo '## After merging' >> $GITHUB_ENV
+          echo 'Create a tag named `${{ steps.changes.outputs.new-tag }}` that points to the merge commit' >> $GITHUB_ENV
+          echo 'EOF' >> $GITHUB_ENV
+
       - name: Create pull request
         if: ${{ !env.ACT }}
         uses: peter-evans/create-pull-request@v6
         with:
           body: ${{ env.BODY }}
           branch: ${{ env.BRANCH_NAME }}
+          base: develop
           path: destination/
-          reviewers: ${{ env.REVIEWERS }}
           title: ${{ env.TITLE }}
           token: ${{ secrets.GITHUB_TOKEN }}
+          commit-message: "Sync ${{ env.SOURCE_REPO }} ${{ steps.changes.outputs.new-tag }}"
```
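
With the new `workflow_dispatch` trigger, this workflow can now be started two ways: automatically, by a `repository_dispatch` event of type `sync-to-docs`, or manually, with the `repo`/`ref`/`sha` inputs (the Check inputs step fails fast if any of the three is missing). A hedged sketch of both triggers using the GitHub CLI, assuming the workflow lives in EnterpriseDB/docs; the source repo, ref, and SHA values are hypothetical placeholders:

```bash
# Manual run via the new workflow_dispatch inputs:
gh workflow run sync-and-process-files.yml \
  --repo EnterpriseDB/docs \
  -f repo=example-org/source-repo \
  -f ref=main \
  -f sha=0123456789abcdef0123456789abcdef01234567

# Automated trigger from the source repo's CI, via a repository_dispatch
# event whose type matches the workflow's `types: [sync-to-docs]`:
gh api repos/EnterpriseDB/docs/dispatches \
  -f event_type=sync-to-docs \
  -f 'client_payload[repo]=example-org/source-repo' \
  -f 'client_payload[ref]=main' \
  -f 'client_payload[sha]=0123456789abcdef0123456789abcdef01234567'
```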
Lines changed: 104 additions & 0 deletions
---
title: "EDB Postgres AI Q4 2024 release highlights"
navTitle: Q4 2024 release highlights
description: The latest features released and updated in EDB Postgres AI.
date: 2024-12-10
---

Date: **December 10, 2024**

This [release roundup](https://www.enterprisedb.com/blog/solving-enterprise-generative-ai-and-analytics-challenges-zooming-our-q4-2024-release) originally appeared on the EDB blog.

## Introducing the EDB Postgres AI Software Deployment: cloud agility, on your terms

### **Enable cloud agility and AI sovereignty for critical data infrastructure – anywhere, in any environment**

Earlier today, we [announced](https://www.enterprisedb.com/news/edb-brings-cloud-agility-and-observability-hybrid-environments-sovereign-control) major updates to the [EDB Postgres AI](https://www.enterprisedb.com/products/edb-postgres-ai) sovereign data and AI platform. As data and AI become increasingly important to business innovation, our customers have asked us for more flexible solutions that offer both agility and control.

In response, we've launched a number of new generally available and preview capabilities to accelerate deployment of EDB Postgres AI in [sovereign](https://www.enterprisedb.com/use-case/sovereign-ai), hybrid environments as an [omni-data platform](https://www.enterprisedb.com/use-case/omni-data-platform) – one that works across your enterprise's data corpus to drive faster time to market for data-driven applications. With the new [EDB Postgres AI Software Deployment](https://www.enterprisedb.com/products/software-deployment), you can deploy, manage, scale, and observe mission-critical data infrastructure in any self-managed, hybrid, or public cloud environment.

A single container-driven software installation consolidates structured and unstructured data in one multi-model data platform to accelerate transactional, analytical, and AI workloads. The Software Deployment unlocks a number of new capabilities:

1. **Hybrid Control Plane**, enabling a hybrid database-as-a-service (DBaaS) with Kubernetes-driven automation and advanced observability across 200+ metrics, delivering a cloud-like experience – even in your private data center.
2. **Analytics Accelerator**, which unlocks rapid analytics across unified business data in Postgres, powering 30x faster query performance and improving cost efficiency.
3. **AI Accelerator**, the fastest way to test and launch enterprise generative AI (GenAI) applications like chatbots and recommendation engines, so you can build cutting-edge GenAI functionality with just 5 lines of familiar SQL code (rather than 130+ using standard approaches).

To continue supporting our customers' requirements as they evolve with their growing transactional workloads, we've also released enhancements to our **transactional database server software** and tooling, including **EDB Postgres 17** to meet the demands of modern workloads and the **EDB Software Bill of Materials**, offering visibility into your secure open source supply chain.

Today, these **transactional database enhancements** are **generally available**, along with the **AI Accelerator**. The **Hybrid Control Plane** and **Analytics Accelerator** are **now available for preview** through a [concierge demo experience](https://www.enterprisedb.com/engage).
## What's in preview? Unlock cloud scale and rapid analytics in hybrid environments

### **Hybrid Control Plane**

_Automation, single-pane-of-glass management, and observability across hybrid data estates._

Modern enterprises manage data across multiple clouds and on-premises deployments. The undifferentiated heavy lifting of database administration often distracts operators and engineers from more valuable work, like improving app scalability and accelerating time to market for data initiatives. While public cloud Database-as-a-Service (DBaaS) offerings automate administrative tasks, they require tradeoffs on control, data sovereignty, and deployment flexibility.

The **Hybrid Control Plane** is a **centralized management and automation solution** for the EDB Postgres AI Software Deployment, providing cloud automation and agility in a self-hosted environment. It boosts productivity up to 30% by automating time-consuming and expensive administrative functions like backups, provisioning, and point-in-time recovery – enabling a [hybrid DBaaS experience](https://www.enterprisedb.com/use-case/hybrid-dbaas). Monitor, observe, and respond to issues in real time with visibility into 200+ metrics, keeping databases secure and enabling up to 99.999% availability. Plus, with built-in query diagnostics, you can identify problems and bottlenecks up to 5x faster and accelerate application performance up to 8x.

See a demo of the Hybrid Control Plane in action!

<div style="padding:56.25% 0 0 0;position:relative;"><iframe src="https://player.vimeo.com/video/1018060043?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479" frameborder="0" allow="autoplay; fullscreen; picture-in-picture; clipboard-write" style="position:absolute;top:0;left:0;width:100%;height:100%;" title="Hybrid DBaaS: Cluster Upgrade, Workload Testing, and Failover Simulation with EDB"></iframe></div><script src="https://player.vimeo.com/api/player.js"></script>
### **Analytics Accelerator**

_Unify transactional and analytical workloads in Postgres with lower cost, faster performance, and simpler operations._

Scaling analytics workloads is crucial for modern enterprises that deal with high volumes of data and demand rapid insights. Running analytics queries directly on transactional data requires teams to spend significant time on data management, and it can degrade operational performance and slow time to insight.

EDB's [**Analytics Accelerator**](https://www.enterprisedb.com/products/analytics-accelerator) leverages lakehouse ecosystem integration and a Vectorized Query Engine so you can use SQL to query columnar data in external object storage. This allows you to run complex analytical queries across core business data – 30x faster than standard Postgres – with no lag on existing transactional workloads.

It also supports Tiered Tables functionality, ensuring optimal performance by automatically offloading cold data to columnar tables in object storage. This reduces overall storage costs (up to 18x more cost efficient) and simplifies the management of analytics over multiple data tiers.

Watch a demo to see how to add an analytics node, sync data, and integrate with Databricks, improving insights without sacrificing performance.

<div style="padding:56.25% 0 0 0;position:relative;"><iframe src="https://player.vimeo.com/video/1018059957?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479" frameborder="0" allow="autoplay; fullscreen; picture-in-picture; clipboard-write" style="position:absolute;top:0;left:0;width:100%;height:100%;" title="Improve Analytics Insight Without Sacrificing Performance"></iframe></div><script src="https://player.vimeo.com/api/player.js"></script>
### **EDB Data Migration Service (DMS) and Data Sync**

_Accelerate seamless migrations to break free from legacy constraints and innovate faster._

Today, organizations want to break free from legacy systems to tackle next-gen application development, which requires diverse data models and open standards that integrate with modern data stacks.

[Modernizing](https://www.enterprisedb.com/use-case/modernize-legacy-applications) from legacy systems to EDB Postgres AI unlocks rapid innovation and growth for enterprises by enabling seamless migrations to enterprise-grade PostgreSQL. The [**EDB Data Migration Service (DMS)**](https://www.enterprisedb.com/docs/edb-postgres-ai/migration-etl/data-migration-service/) **and Data Sync** provide a secure and fault-tolerant way to migrate Oracle and Postgres data from on-premises and cloud environments into EDB Postgres AI, so organizations with strict security compliance and data privacy needs can use EDB's migration capabilities in their own environments. **EDB's Oracle Estate Migration Assessments** also make it easier to quickly understand the complexity and level of effort required to migrate your Oracle databases to Postgres.

Learn more about [Oracle compatibility](https://www.enterprisedb.com/products/edb-postgres-advanced-server) enhancements and how EDB Postgres AI unlocks rapid innovation and growth for enterprises modernizing their legacy data infrastructure.
## Generally available today – enhanced AI and transactional workloads

### **AI Accelerator**

_The fastest way to test and launch enterprise generative AI (GenAI) applications_

Postgres users can already use the open source pgvector extension for foundational vector data support. This is powerful on its own, but it still requires developers to do a lot of manual work to create data pipelines, select embedding models, and keep embeddings up to date to avoid data staleness.
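For context, here is a minimal sketch of that manual baseline using plain pgvector from psql. This is not the EDB Pipelines API; the table, vector dimension, and literal values are hypothetical, and in practice the embeddings would be produced and refreshed by application code calling a model:

```bash
# Assumes a local Postgres with the pgvector extension installed.
psql <<'SQL'
-- Everything below must be wired up and kept in sync by hand —
-- the work that a managed pipeline with automatic embedding
-- generation is meant to remove.
CREATE EXTENSION IF NOT EXISTS vector;
CREATE TABLE IF NOT EXISTS docs (id bigserial PRIMARY KEY, body text, embedding vector(3));
INSERT INTO docs (body, embedding) VALUES ('hello world', '[0.1, 0.2, 0.3]');
SELECT body FROM docs ORDER BY embedding <-> '[0.1, 0.2, 0.25]' LIMIT 5;  -- L2 nearest neighbors
SQL
```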
The [**AI Accelerator**](https://www.enterprisedb.com/products/ai-accelerator) provides the fastest way to test and launch multi-modal enterprise GenAI applications with the powerful EDB Pipelines extension, which comes preloaded with pgvector and advanced AI workflow functionality like managed pipelines and automatic embedding generation. Customers can get GenAI apps to market faster with out-of-the-box vector data capabilities, less custom code, lower maintenance, and less application integration effort. Developers can now build complex GenAI functionality using SQL commands in the familiar Postgres environment – with just 5 lines of code instead of 130+.

You can also transform your Postgres database into a powerful GenAI semantic search engine that's [4.22x faster](https://www.confident-ai.com/blog/why-we-replaced-pinecone-with-pgvector) than other purpose-built vector databases. Want to see this in real time? Check out this demo of a GenAI application that provides quick, accurate recommendations based on text or image searches. The AI Accelerator is generally available today – [get started here](https://enterprisedb.com/docs/purl/aidb/gettingstarted).

<div style="padding:56.25% 0 0 0;position:relative;"><iframe src="https://player.vimeo.com/video/1018059901?badge=0&amp;autopause=0&amp;player_id=0&amp;app_id=58479" frameborder="0" allow="autoplay; fullscreen; picture-in-picture; clipboard-write" style="position:absolute;top:0;left:0;width:100%;height:100%;" title="Bring AI Models to Your Postgres Data"></iframe></div><script src="https://player.vimeo.com/api/player.js"></script>
### **EDB Postgres 17**

_Use PostgreSQL to meet the demands of modern workloads_

The recent [PostgreSQL 17 release](https://www.enterprisedb.com/news/edb-contributions-postgresqlr-17-help-enterprises-unlock-greater-performance-complex-workloads) equipped users with backup and recovery enhancements, JSON improvements, and performance gains to support modern database operations. EDB was a key contributor to these Postgres enhancements, and we're excited to make these community features generally available across the EDB Postgres AI transactional databases, tools, and extensions, including [EDB Postgres Advanced Server](https://www.enterprisedb.com/products/edb-postgres-advanced-server) (EPAS) and [EDB Postgres Extended](https://www.enterprisedb.com/products/edb-postgres-extended) (PGE).

These releases are generally available and ready for download. Visit EDB docs for the detailed [EPAS](https://www.enterprisedb.com/docs/epas/latest/) and [PGE](https://www.enterprisedb.com/docs/pge/latest/) release notes, or [check out this blog](https://www.enterprisedb.com/blog/edb-postgresr-17-transactional-database-highlights) for a recap of what's new in EDB Postgres 17.
### **EDB Software Bill of Materials**

_Build with open source confidently and ensure security and Postgres compliance readiness_

Enterprises today must ensure that customer data is protected and access to databases is controlled. While open source software (OSS) deployments can provide cost benefits, allow flexibility, and enable rapid innovation, they also make it harder to identify and mitigate potential security vulnerabilities. Today, the **EDB Software Bill of Materials (SBOM)** is available for **EDB Postgres Advanced Server** and **EDB Postgres Distributed** software packages through the [**EDB Trust Center**](https://trust.enterprisedb.com/?itemName=continuous_monitoring&source=click). It offers visibility into your open source supply chain with a detailed inventory of the components and dependencies that comprise the software, including up-to-date license reporting.

By making potential security vulnerabilities easy to identify, the SBOM helps you ensure [secure open source software](https://www.enterprisedb.com/use-case/secure-oss), mitigate risk, and reduce your attack surface as you invest in open source. [Learn more about securing your open source software](https://www.enterprisedb.com/blog/edb-announces-secure-open-software-solution-edb-postgres-air-enterprise-and-government).
### **That's a wrap!**

To learn more about the EDB Postgres AI Software Deployment, register for the preview experience [here](https://www.enterprisedb.com/preview). You can also zoom in further on the AI and analytics launches with our [dedicated post](https://www.enterprisedb.com/blog/solving-enterprise-generative-ai-and-analytics-challenges-zooming-our-q4-2024-release) about them.
