
Commit 91ce96c

migrate to pixi for dependency management (#185)
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent: 0920d5b · commit: 91ce96c

11 files changed: +4511 −106 lines

.gitattributes

Lines changed: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
+# SCM syntax highlighting & preventing 3-way merges
+pixi.lock merge=binary linguist-language=YAML linguist-generated=true

.github/workflows/main.yaml

Lines changed: 7 additions & 14 deletions
@@ -62,27 +62,20 @@ jobs:
           role-session-name: offsets-db-ci-role-session
           aws-region: ${{ env.AWS_DEFAULT_REGION }}

-      - name: set up conda environment
-        uses: mamba-org/setup-micromamba@v2
+      - name: Set up pixi
+        uses: prefix-dev/setup-pixi@v0.9.1
         with:
-          environment-file: environment-dev.yml
-          init-shell: >-
-            bash
-          cache-environment: true
-          cache-downloads: true
-          post-cleanup: 'all'
-
-      - name: Install package
-        run: |
-          python -m pip install -e .
+          pixi-version: v0.59.0
+          cache: true
+          locked: true

       - name: Database migration
         run: |
-          python -m alembic upgrade head
+          pixi run migrate

       - name: Run tests
         run: |
-          python -m pytest
+          pixi run test

       - name: Upload code coverage to Codecov
         uses: codecov/codecov-action@v5.5.1

.github/workflows/update-db.yaml

Lines changed: 7 additions & 8 deletions
@@ -45,22 +45,21 @@ jobs:
         id: time
         run: echo "date=$(date -u +'%Y-%m-%d %H:%M')" >> $GITHUB_OUTPUT

-      - uses: actions/setup-python@v6
+      - name: Set up pixi
+        uses: prefix-dev/setup-pixi@v0.9.1
         with:
-          python-version: '3.10'
-      - name: Install dependencies
-        run: |
-          python -m pip install --upgrade pip
-          python -m pip install fsspec requests s3fs pandas
+          pixi-version: v0.59.0
+          cache: true
+          locked: true

       - name: Seed Staging Database
         run: |
-          python update_database.py staging --url https://offsets-db-staging.fly.dev/files/
+          pixi run update-db-staging

       - name: Seed Production Database
         if: github.event_name == 'workflow_dispatch' || github.event_name == 'schedule' || github.event_name == 'repository_dispatch'
         run: |
-          python update_database.py production --url https://offsets-db.fly.dev/files/
+          pixi run update-db-production

       - name: Notify Slack on Failure
         if: failure() && (github.event_name == 'workflow_dispatch' || github.event_name == 'schedule')
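
As in the CI workflow, the `update-db-staging` and `update-db-production` tasks live in pixi.toml rather than in the workflow file. A hedged sketch, assuming each task wraps the update_database.py invocation it replaces:

```toml
# Hypothetical pixi.toml excerpt — commands copied from the deleted workflow steps.
[tasks]
update-db-staging = "python update_database.py staging --url https://offsets-db-staging.fly.dev/files/"
update-db-production = "python update_database.py production --url https://offsets-db.fly.dev/files/"
```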

.gitignore

Lines changed: 3 additions & 0 deletions
@@ -135,3 +135,6 @@ offsets_db_api/_version.py
 *.gz
 cache-watch-dog/
 staging-files.json
+# pixi environments
+.pixi/*
+!.pixi/config.toml

.pre-commit-config.yaml

Lines changed: 1 addition & 0 deletions
@@ -1,5 +1,6 @@
 ci:
   autoupdate_schedule: monthly
+exclude: ^pixi\.lock$
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v6.0.0

Dockerfile

Lines changed: 38 additions & 16 deletions
@@ -1,30 +1,52 @@
-# Start with an official Python image
-FROM python:3.12-slim
+# Build stage using pixi
+FROM ghcr.io/prefix-dev/pixi:0.59.0 AS build

 # Set environment variables
 ENV PYTHONDONTWRITEBYTECODE=1
 ENV PYTHONUNBUFFERED=1

-# Create and set the working directory
+# Install git and ca-certificates (needed for git+https:// dependencies in pyproject.toml)
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends git ca-certificates && \
+    rm -rf /var/lib/apt/lists/*
+
+# Copy source code, pixi.toml and pixi.lock to the container
 WORKDIR /app
+COPY . .
+
+# Install dependencies using pixi (uses the default environment)
+RUN pixi install --locked
+
+# Create the shell-hook bash script to activate the environment
+RUN pixi shell-hook > /shell-hook.sh
+
+# Extend the shell-hook script to run the command passed to the container
+RUN echo 'exec "$@"' >> /shell-hook.sh
+
+# Production stage
+FROM ubuntu:24.04 AS production

-# Install build dependencies
+# Set environment variables
+ENV PYTHONDONTWRITEBYTECODE=1
+ENV PYTHONUNBUFFERED=1
+
+# Install runtime dependencies (libpq for PostgreSQL)
 RUN apt-get update && \
-    apt-get install -y --no-install-recommends \
-    git \
-    libpq-dev \
-    build-essential && \
+    apt-get install -y --no-install-recommends git libpq5 && \
     rm -rf /var/lib/apt/lists/*
-# Install Python dependencies
-COPY requirements.txt .
-RUN pip install --upgrade pip && pip install -r requirements.txt

-# Copy the rest of the application code
-COPY . .
+# Copy the pixi environment from the build stage
+# Note: the prefix path must stay the same as in the build container
+COPY --from=build /app/.pixi/envs/default /app/.pixi/envs/default
+COPY --from=build /shell-hook.sh /shell-hook.sh
+COPY --from=build /app /app
+WORKDIR /app

 # Expose the port
 EXPOSE 8000

-COPY entrypoint.sh /entrypoint.sh
-RUN chmod +x /entrypoint.sh
-CMD ["/entrypoint.sh"]
+# Set the entrypoint to the shell-hook script (activates the environment and runs the command)
+ENTRYPOINT ["/bin/bash", "/shell-hook.sh"]
+
+# Run the application using gunicorn (supports OFFSETS_DB_WEB_CONCURRENCY env var)
+CMD ["sh", "-c", "gunicorn -w ${OFFSETS_DB_WEB_CONCURRENCY:-2} -t 600 -k uvicorn.workers.UvicornWorker offsets_db_api.main:app --config gunicorn_config.py --access-logfile - --error-logfile -"]

README.md

Lines changed: 7 additions & 12 deletions
@@ -30,26 +30,21 @@ OffsetsDB-API, is a fastAPI application, designed to integrate and harmonize dat

 ## installation

-To install the package, you can use pip:
-
-```console
-python -m pip install git+https://github.com/carbonplan/offsets-db-api
-```
-
-You can also install the package locally by cloning the repository and running:
+This project uses [pixi](https://pixi.sh) for dependency management. To get started:

 ```console
 git clone https://github.com/carbonplan/offsets-db-api
 cd offsets-db-api
-python -m pip install -e .
+pixi install
 ```

-## run locally
-
-To run the API locally, you can use the following command:
+## usage

 ```console
-uvicorn offsets_db_api.main:app --reload
+pixi run serve       # Start development server with hot reload
+pixi run test        # Run tests
+pixi run migrate     # Run database migrations
+pixi run serve-prod  # Start production server
 ```

 ## license
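
The `serve` and `serve-prod` tasks referenced in the README's usage block are likewise defined in pixi.toml, which is not shown in this section. A sketch of plausible definitions, inferred from the uvicorn command this README previously documented and the gunicorn command in the Dockerfile (exact flags are assumptions):

```toml
# Hypothetical pixi.toml excerpt — inferred from commands replaced elsewhere in this commit.
[tasks]
serve = "uvicorn offsets_db_api.main:app --reload"
serve-prod = "gunicorn -k uvicorn.workers.UvicornWorker offsets_db_api.main:app --config gunicorn_config.py"
```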

environment-dev.yml

Lines changed: 0 additions & 32 deletions
This file was deleted.
