Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feature/docker standalone #4

Draft
wants to merge 2 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 23 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# Image for the Celery task-queue services (worker and flower) defined in
# docker-compose.yml.
FROM python:3.11.10

# APP_ROOT holds the application code, CONFIG_ROOT the pip requirement files.
ENV APP_ROOT=/src
ENV CONFIG_ROOT=/config
# No .pyc files; unbuffered stdout/stderr so container logs appear immediately.
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1

RUN mkdir ${CONFIG_ROOT}
COPY requirements/ ${CONFIG_ROOT}/

# One layer instead of three: upgrade pip, install the deployment
# requirements, plus flower and redis which the compose services need.
# (Previously flower and redis were installed in separate RUN layers, and a
# leftover debug layer `RUN echo POSTGRES` was removed.)
RUN pip install --upgrade pip \
    && pip install --no-cache-dir -r ${CONFIG_ROOT}/deploy.txt flower redis

WORKDIR ${APP_ROOT}

# COPY is preferred over ADD for plain local directories (ADD adds
# archive-extraction and URL semantics that are not wanted here).
COPY task_queue ${APP_ROOT}

# Run as an unprivileged system user.
RUN adduser appuser --system --no-create-home --shell /bin/sh \
    && chown -R appuser ${APP_ROOT}
USER appuser
61 changes: 61 additions & 0 deletions docker-compose.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
# Compose stack for the demand task queue:
#   redis          -> Celery broker / result backend (internal only)
#   worker_default -> Celery worker consuming default_queue
#   flower         -> Celery monitoring UI (internal only)
#   nginx          -> reverse proxy publishing flower on host port 5555
services:
  redis:
    image: redis
    container_name: 'demand-redis'
    restart: always
    # expose (not ports): reachable only from the compose network.
    expose:
      - 6379
    networks:
      - demand_network
  worker_default:
    container_name: 'demand-celery-worker'
    build: .
    restart: always
    command: celery -A celery_worker.worker worker --loglevel=INFO --pool prefork --concurrency=16 --queues=default_queue
    environment:
      - CELERY_BROKER_URL=redis://redis:6379/0
      - CELERY_RESULT_BACKEND=redis://redis:6379/0
      - FLOWER_PORT=5555
      # PW points at the secret file mounted from the `pw` secret below.
      - PW=/run/secrets/pw
    depends_on:
      - redis
    secrets:
      - pw
    networks:
      - demand_network
  flower:
    container_name: 'demand-flower'
    build: .
    restart: always
    command: celery -A celery_worker.worker flower --port=5555
    environment:
      - CELERY_BROKER_URL=redis://redis:6379/0
      - CELERY_RESULT_BACKEND=redis://redis:6379/0
      - FLOWER_PORT=5555
      - PW=/run/secrets/pw
      # NOTE(review): no mysql service is defined in this file -- confirm
      # DB_HOST=mysql points at an external host or is leftover config.
      - DB_HOST=mysql
    expose:
      - 5555
    depends_on:
      - redis
      - worker_default
    secrets:
      - pw
    networks:
      - demand_network
  nginx:
    # NOTE(review): consider adding `depends_on: [flower]` so nginx starts
    # after its upstream service exists.
    image: nginx:1.27-alpine
    container_name: demand_ngx
    restart: always
    ports:
      # host 5555 -> nginx :80 -> flower:5555 (see nginx/server.conf).
      - "5555:80"
    volumes:
      - ./nginx:/etc/nginx/conf.d
    networks:
      - demand_network
secrets:
  pw:
    file: ./secrets/secret.txt

networks:
  demand_network:
29 changes: 29 additions & 0 deletions nginx/server.conf
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
# Reverse proxy in front of the flower monitoring UI.
upstream django {
    # The "flower" service and its port 5555 are defined in docker-compose.yml.
    server flower:5555;
}

server {

    # Everything that is not a static file is handed to the upstream.
    location / {
        try_files $uri @proxy_to_app;
    }

    location /static/ {
        alias /static/;
    }

    location @proxy_to_app {
        proxy_pass http://django;

        # HTTP/1.1 plus Upgrade/Connection headers enable WebSocket
        # pass-through (flower's live updates).
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";

        # Preserve the original client/host information for the upstream.
        proxy_redirect off;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Host $server_name;
    }
}
30 changes: 30 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"

[project]
name = "wefe_demand"
authors = [
    { name="Giovanni", email="a@b.com" },
    { name="Pierre-François Duc", email="pierre-francois.duc@rl-institut.de" },
]
description = "An interface between KOBO toolbox surveys and RAMP to generate demand timeseries for WEFE simulations"
requires-python = ">=3.9"
classifiers = [
    "Programming Language :: Python :: 3",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
]
# Version and dependencies are resolved dynamically below.
dynamic = ["version", "dependencies"] #, "readme"]

[tool.setuptools.dynamic]
#readme = {file = ["README.rst"]}
# Reads wefe_demand.version (re-exported in src/wefe_demand/__init__.py).
version = {attr = "wefe_demand.version"}
# NOTE(review): requirements/build_requirements.txt is not visible in this
# change set -- confirm it exists, otherwise source builds will fail.
dependencies = {file = ["requirements/build_requirements.txt"]}

# FIX: the previous [project.scripts] entry was the setuptools-docs
# placeholder `cli-name = "mypkg.mymodule:some_func"`; that module does not
# exist in this package, so installation created a broken `cli-name`
# executable. Re-add a console script once a real entry point exists, e.g.:
# [project.scripts]
# wefe-demand = "wefe_demand.cli:main"

[project.urls]
Homepage = "https://github.com/rl-institut/WEFEDemand"
Issues = "https://github.com/rl-institut/WEFEDemand/issues"
2 changes: 2 additions & 0 deletions requirements/deploy.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
# Deployment requirements for the task-queue image (installed in Dockerfile).
celery
# Also pull in the default runtime requirements.
-r default.txt
12 changes: 12 additions & 0 deletions src/wefe_demand.egg-info/PKG-INFO
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
Metadata-Version: 2.1
Name: wefe_demand
Version: 0.0.1
Summary: An interface between KOBO toolbox surveys and RAMP to generate demand timeseries for WEFE simulations
Author-email: Giovanni <a@b.com>, Pierre-François Duc <pierre-francois.duc@rl-institut.de>
Project-URL: Homepage, https://github.com/rl-institut/WEFEDemand
Project-URL: Issues, https://github.com/rl-institut/WEFEDemand/issues
Classifier: Programming Language :: Python :: 3
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Requires-Python: >=3.9
License-File: LICENSE
28 changes: 28 additions & 0 deletions src/wefe_demand.egg-info/SOURCES.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
LICENSE
README.md
pyproject.toml
src/wefe_demand/__init__.py
src/wefe_demand/_version.py
src/wefe_demand.egg-info/PKG-INFO
src/wefe_demand.egg-info/SOURCES.txt
src/wefe_demand.egg-info/dependency_links.txt
src/wefe_demand.egg-info/entry_points.txt
src/wefe_demand.egg-info/top_level.txt
src/wefe_demand/helpers/exceptions.py
src/wefe_demand/helpers/extend_vs_preallocation_benchmark.py
src/wefe_demand/helpers/plotting.py
src/wefe_demand/input/admin_input.py
src/wefe_demand/input/agro_processing_demand.py
src/wefe_demand/input/complete_input.py
src/wefe_demand/input/cooking_demand.py
src/wefe_demand/input/defaults.py
src/wefe_demand/input/drinking_water_demand.py
src/wefe_demand/input/household_elec_demand.py
src/wefe_demand/input/service_water_demand.py
src/wefe_demand/preprocessing/constants.py
src/wefe_demand/preprocessing/formparser.py
src/wefe_demand/preprocessing/surveyparser.py
src/wefe_demand/preprocessing/utils.py
src/wefe_demand/ramp_model/ramp_control.py
src/wefe_demand/ramp_model/ramp_model_main.py
tests/test_something.py
1 change: 1 addition & 0 deletions src/wefe_demand.egg-info/dependency_links.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@

2 changes: 2 additions & 0 deletions src/wefe_demand.egg-info/entry_points.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
[console_scripts]
cli-name = mypkg.mymodule:some_func
1 change: 1 addition & 0 deletions src/wefe_demand.egg-info/top_level.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
wefe_demand
1 change: 1 addition & 0 deletions src/wefe_demand/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
# Re-export the package version as `version` so setuptools' dynamic lookup
# (pyproject.toml: version = {attr = "wefe_demand.version"}) can resolve it.
from ._version import __version__ as version
2 changes: 2 additions & 0 deletions src/wefe_demand/_version.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
# Single source of truth for the package version; re-exported by
# src/wefe_demand/__init__.py and read via pyproject.toml's dynamic version.
__version__ = "0.0.1"
# Date of this version (ISO 8601).
__date__ = "2024-10-16"
File renamed without changes.
File renamed without changes.
File renamed without changes.
Empty file.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
Preprocessing code for extracting info from forms of a Kobo survey.
Preprocessing code for extracting info from forms of a Kobo survey.
The code is based on two main classes:
- FormParser
- SurveyParser
Expand All @@ -9,4 +9,4 @@ The second class handles an entire survey, counting how many forms there are of a specific type

`constants.py` module contains useful variables to perform conversion and also prefix and suffix in order to read the online forms.

`utils.py` module contains functions shared across several modules.
`utils.py` module contains functions shared across several modules.
Empty file.
File renamed without changes.
File renamed without changes.
Empty file.
File renamed without changes.
Empty file added task_queue/__init__.py
Empty file.
35 changes: 35 additions & 0 deletions task_queue/celery_tasks.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
import asyncio


from celery_worker import worker


@worker.task(
    name="worker.task_demand",
    force=True,
    track_started=True,
    autoretry_for=(Exception,),
    retry_kwargs={"max_retries": 1, "countdown": 10},
)
def task_demand(user_id, project_id):
    """Placeholder for the demand-computation task.

    Registered as ``worker.task_demand``; retries once after a 10 s
    countdown on any exception, and reports the STARTED state.
    ``user_id`` and ``project_id`` are currently unused.
    """
    # NOTE(review): `force=True` is not a standard Celery task option --
    # confirm it is intentional.
    # TODO provide here some argument of the task
    return 42


# BUG FIX: the task was declared with bind=True but the function accepted no
# parameter for the bound task instance, so every worker invocation raised
# TypeError. The body does not use the task instance, so bind=True is dropped.
@worker.task(name="refresh")
def refresh():
    """Trivial liveness task; always returns a constant greeting."""
    return "Hello world"


def get_status_of_task(task_id):
    """Return the lower-cased Celery state of the task with *task_id*."""
    async_result = worker.AsyncResult(task_id)
    return async_result.status.lower()


def task_is_finished(task_id):
    """Return True when the task has reached a terminal Celery state.

    Terminal states are success, failure and revoked; any other state
    (pending, started, retry, ...) yields False.
    """
    # Direct membership test replaces the redundant if/else returning
    # True/False; a set is the idiomatic container for membership checks.
    return get_status_of_task(task_id) in {"success", "failure", "revoked"}
37 changes: 37 additions & 0 deletions task_queue/celery_worker.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
import os

from celery import Celery

"""
This module sets up a Celery worker for asynchronous task processing in the FastAPI application. It configures the
Celery worker with a broker and result backend, both typically using Redis. The worker includes tasks defined in the
"fastapi_app.main" module.

The configuration defines two task queues: default_queue and milp_queue, with their respective exchanges and routing
keys. This setup allows for categorizing tasks based on their nature or processing requirements.

Specific tasks are routed to these queues: task_grid_opt and task_remove_anonymous_users are directed to the
default_queue, while task_supply_opt is routed to the milp_queue. This segregation helps in managing task execution
based on priority or resource requirements, making the system efficient and scalable.
"""

CELERY_BROKER_URL = (os.environ.get("CELERY_BROKER_URL", "redis://redis:6379/0"),)
CELERY_RESULT_BACKEND = os.environ.get("CELERY_RESULT_BACKEND", "redis://redis:6379/0")

worker = Celery(
"worker",
broker=CELERY_BROKER_URL,
results_backend=CELERY_RESULT_BACKEND,
include=["celery_tasks"],
)

worker.conf.task_queues = {
"default_queue": {
"exchange": "default_exchange",
"routing_key": "default",
},
}

worker.conf.task_routes = {
"task_demand": {"queue": "default_queue"},
}