
Fix unit tests and update queue information
kshitijrajsharma committed Feb 19, 2024
1 parent 46635f5 commit fc574f0
Showing 5 changed files with 31 additions and 9 deletions.
API/custom_exports.py (3 changes: 2 additions & 1 deletion)
@@ -2,6 +2,7 @@
from fastapi.responses import JSONResponse
from fastapi_versioning import version

+from src.config import DEFAULT_QUEUE_NAME
from src.config import LIMITER as limiter
from src.config import RATE_LIMIT_PER_MIN
from src.validation.models import DynamicCategoriesModel
@@ -798,7 +799,7 @@ async def process_custom_requests(
dict: Result message.
"""
queue_name = params.queue
-if params.queue != "raw_daemon" and user.role != UserRole.ADMIN.value:
+if params.queue != DEFAULT_QUEUE_NAME and user.role != UserRole.ADMIN.value:
raise HTTPException(
status_code=403,
detail=[{"msg": "Insufficient Permission to choose queue"}],
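
The permission guard above now compares the requested queue against the configurable DEFAULT_QUEUE_NAME instead of the hard-coded "raw_daemon" string, so only admins can direct a job to a non-default queue. A minimal, standalone sketch of that guard; ensure_queue_allowed and the is_admin flag are illustrative stand-ins for the real request model and UserRole check:

from fastapi import HTTPException

DEFAULT_QUEUE_NAME = "raw_ondemand"  # normally resolved in src/config.py

def ensure_queue_allowed(requested_queue: str, is_admin: bool) -> str:
    """Only admins may route a job to anything other than the default queue."""
    if requested_queue != DEFAULT_QUEUE_NAME and not is_admin:
        raise HTTPException(
            status_code=403,
            detail=[{"msg": "Insufficient Permission to choose queue"}],
        )
    return requested_queue

# ensure_queue_allowed("raw_daemon", is_admin=False)   -> raises HTTP 403
# ensure_queue_allowed("raw_ondemand", is_admin=False) -> "raw_ondemand"
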
API/raw_data.py (9 changes: 7 additions & 2 deletions)
@@ -28,7 +28,12 @@
from fastapi_versioning import version

from src.app import RawData
-from src.config import ALLOW_BIND_ZIP_FILTER, CELERY_BROKER_URL, EXPORT_MAX_AREA_SQKM
+from src.config import (
+    ALLOW_BIND_ZIP_FILTER,
+    CELERY_BROKER_URL,
+    DEFAULT_QUEUE_NAME,
+    EXPORT_MAX_AREA_SQKM,
+)
from src.config import LIMITER as limiter
from src.config import RATE_LIMIT_PER_MIN as export_rate_limit
from src.validation.models import (
@@ -441,7 +446,7 @@ def get_osm_current_snapshot_as_file(
)

# queue_name = "raw_daemon" if not params.uuid else "raw_ondemand"
-queue_name = "raw_ondemand" # Everything directs to default now
+queue_name = DEFAULT_QUEUE_NAME # Everything directs to default now
task = process_raw_data.apply_async(
args=(params.model_dump(),),
queue=queue_name,
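
get_osm_current_snapshot_as_file now sends every request to DEFAULT_QUEUE_NAME rather than a literal queue name. The snippet below is a rough, self-contained sketch of that Celery routing pattern; the broker URL, task body, and params payload are placeholders rather than the project's real values:

from celery import Celery

DEFAULT_QUEUE_NAME = "raw_ondemand"  # would come from src.config in the API
celery = Celery(broker="redis://localhost:6379/0", backend="redis://localhost:6379/0")

@celery.task(name="process_raw_data")
def process_raw_data(params: dict) -> dict:
    # placeholder for the real snapshot/export work
    return {"status": "done", "params": params}

params = {"geometry": {"type": "Polygon", "coordinates": []}}  # illustrative payload
task = process_raw_data.apply_async(args=(params,), queue=DEFAULT_QUEUE_NAME)
print(task.id)  # the id a client would poll for task status
# A worker started with `celery ... worker -Q raw_ondemand` would pick this job up.
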
API/tasks.py (4 changes: 2 additions & 2 deletions)
@@ -7,7 +7,7 @@
from fastapi.responses import JSONResponse
from fastapi_versioning import version

-from src.config import CELERY_BROKER_URL
+from src.config import CELERY_BROKER_URL, DAEMON_QUEUE_NAME, DEFAULT_QUEUE_NAME
from src.validation.models import SnapshotTaskResponse

from .api_worker import celery
@@ -156,7 +156,7 @@ def discard_all_waiting_tasks(user: AuthUser = Depends(admin_required)):
return JSONResponse({"tasks_discarded": purged})


-queues = ["raw_ondemand", "raw_daemon"]
+queues = [DEFAULT_QUEUE_NAME, DAEMON_QUEUE_NAME]


@router.get("/queue/")
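
The /queue/ endpoint now reports on both configured queues. This diff does not show how queue sizes are read; the sketch below is one plausible approach for a Redis broker, where Celery keeps each queue's pending messages in a Redis list. queue_lengths is an illustrative helper, not the repository's implementation:

from redis import Redis

# Stand-ins for the values imported from src.config
CELERY_BROKER_URL = "redis://localhost:6379/0"
DEFAULT_QUEUE_NAME, DAEMON_QUEUE_NAME = "raw_ondemand", "raw_daemon"

queues = [DEFAULT_QUEUE_NAME, DAEMON_QUEUE_NAME]

def queue_lengths() -> dict:
    """Approximate count of waiting tasks per queue (Redis broker only)."""
    client = Redis.from_url(CELERY_BROKER_URL)
    return {name: client.llen(name) for name in queues}

# e.g. {"raw_ondemand": 3, "raw_daemon": 0}
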
docs/src/installation/configurations.md (4 changes: 4 additions & 0 deletions)
@@ -56,6 +56,8 @@ The following are the different configuration options that are accepted.
| `USE_CONNECTION_POOLING` | `USE_CONNECTION_POOLING` | `[API_CONFIG]` | `false` | Enable psycopg2 connection pooling | OPTIONAL |
| `ALLOW_BIND_ZIP_FILTER` | `ALLOW_BIND_ZIP_FILTER` | `[API_CONFIG]` | `true` | Enable zip compression for exports | OPTIONAL |
| `ENABLE_TILES` | `ENABLE_TILES` | `[API_CONFIG]` | `false` | Enable Tile Output (Pmtiles and Mbtiles) | OPTIONAL |
+| `DEFAULT_QUEUE_NAME` | `DEFAULT_QUEUE_NAME` | `[API_CONFIG]` | `raw_ondemand` | Option to define the default queue name | OPTIONAL |
+| `DAEMON_QUEUE_NAME` | `DAEMON_QUEUE_NAME` | `[API_CONFIG]` | `raw_daemon` | Option to define the daemon queue name for scheduled and long-running exports | OPTIONAL |
| `ENABLE_POLYGON_STATISTICS_ENDPOINTS` | `ENABLE_POLYGON_STATISTICS_ENDPOINTS` | `[API_CONFIG]` | `False` | Option to enable endpoints related the polygon statistics about the approx buildings,road length in passed polygon| OPTIONAL |
| `ENABLE_CUSTOM_EXPORTS` | `ENABLE_CUSTOM_EXPORTS` | `[API_CONFIG]` | False | Enables custom exports endpoint and imports | OPTIONAL |
| `POLYGON_STATISTICS_API_URL` | `POLYGON_STATISTICS_API_URL` | `[API_CONFIG]` | `None` | API URL for the polygon statistics to fetch the metadata , Currently tested with graphql query endpoint of Kontour , Only required if it is enabled from ENABLE_POLYGON_STATISTICS_ENDPOINTS | OPTIONAL |
@@ -115,6 +117,8 @@ API Tokens have expiry date, It is `important to update API Tokens manually each
| `ENABLE_TILES` | `[API_CONFIG]` | Yes | Yes |
| `ALLOW_BIND_ZIP_FILTER` | `[API_CONFIG]` | Yes | Yes |
| `INDEX_THRESHOLD` | `[API_CONFIG]` | No | Yes |
+| `DEFAULT_QUEUE_NAME` | `[API_CONFIG]` | Yes | No |
+| `DAEMON_QUEUE_NAME` | `[API_CONFIG]` | Yes | No |
| `ENABLE_POLYGON_STATISTICS_ENDPOINTS` | `[API_CONFIG]` | Yes | Yes |
| `POLYGON_STATISTICS_API_URL` | `[API_CONFIG]` | Yes | Yes |
| `POLYGON_STATISTICS_API_RATE_LIMIT` | `[API_CONFIG]` | Yes | No |
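
Both new options follow the project's usual pattern: set them as environment variables or under the [API_CONFIG] section of the config file, with the documented defaults used when neither is present (a set environment variable takes precedence). An illustrative config fragment, assuming the conventional ConfigParser-style file the deployment reads; the values shown are just the defaults:

[API_CONFIG]
DEFAULT_QUEUE_NAME=raw_ondemand
DAEMON_QUEUE_NAME=raw_daemon
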
src/config.py (20 changes: 16 additions & 4 deletions)
@@ -177,6 +177,14 @@ def not_raises(func, *args, **kwargs):
config.getboolean("API_CONFIG", "USE_CONNECTION_POOLING", fallback=False),
)

+## Queue
+
+DEFAULT_QUEUE_NAME = os.environ.get("DEFAULT_QUEUE_NAME") or config.get(
+    "API_CONFIG", "DEFAULT_QUEUE_NAME", fallback="raw_ondemand"
+)
+DAEMON_QUEUE_NAME = os.environ.get("DAEMON_QUEUE_NAME") or config.get(
+    "API_CONFIG", "DAEMON_QUEUE_NAME", fallback="raw_daemon"
+)

### Polygon statistics which will deliver the stats of approx buildings/ roads in the area

@@ -194,13 +202,17 @@ def not_raises(func, *args, **kwargs):
"POLYGON_STATISTICS_API_RATE_LIMIT"
) or config.get("API_CONFIG", "POLYGON_STATISTICS_API_RATE_LIMIT", fallback=5)

+## task limit

DEFAULT_SOFT_TASK_LIMIT = os.environ.get("DEFAULT_SOFT_TASK_LIMIT") or config.get(
"API_CONFIG", "DEFAULT_SOFT_TASK_LIMIT", fallback=2 * 60 * 60
)
DEFAULT_HARD_TASK_LIMIT = os.environ.get("DEFAULT_HARD_TASK_LIMIT") or config.get(
"API_CONFIG", "DEFAULT_HARD_TASK_LIMIT", fallback=3 * 60 * 60
)

+## duckdb

USE_DUCK_DB_FOR_CUSTOM_EXPORTS = get_bool_env_var(
"USE_DUCK_DB_FOR_CUSTOM_EXPORTS",
config.getboolean("API_CONFIG", "USE_DUCK_DB_FOR_CUSTOM_EXPORTS", fallback=False),
@@ -217,6 +229,8 @@ def not_raises(func, *args, **kwargs):
DUCK_DB_THREAD_LIMIT = os.environ.get("DUCK_DB_THREAD_LIMIT") or config.get(
"API_CONFIG", "DUCK_DB_THREAD_LIMIT", fallback=None
)

+## hdx and custom exports
ENABLE_CUSTOM_EXPORTS = get_bool_env_var(
"ENABLE_CUSTOM_EXPORTS",
config.getboolean("API_CONFIG", "ENABLE_CUSTOM_EXPORTS", fallback=False),
@@ -276,10 +290,8 @@ def not_raises(func, *args, **kwargs):
from hdx.data.dataset import Dataset
from hdx.data.vocabulary import Vocabulary

-parse_list = (
-    lambda value, delimiter=",": value.split(delimiter)
-    if isinstance(value, str)
-    else value or []
+parse_list = lambda value, delimiter=",": (
+    value.split(delimiter) if isinstance(value, str) else value or []
)

ALLOWED_HDX_TAGS = parse_list(
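
Two details worth restating from the config changes above: each setting resolves as environment variable first, then the [API_CONFIG] entry, then the hard-coded fallback (the `or` short-circuits on a non-empty environment value), and the reformatted parse_list lambda keeps its original behaviour. A quick standalone illustration of both, with a hypothetical override value:

import os

# Resolution order: a set environment variable wins over any file value or fallback.
os.environ["DEFAULT_QUEUE_NAME"] = "priority_exports"  # hypothetical override
resolved = os.environ.get("DEFAULT_QUEUE_NAME") or "raw_ondemand"
print(resolved)  # priority_exports

# parse_list: strings are split on the delimiter, lists pass through, None becomes [].
parse_list = lambda value, delimiter=",": (
    value.split(delimiter) if isinstance(value, str) else value or []
)
print(parse_list("geodata,hdx,openstreetmap"))  # ['geodata', 'hdx', 'openstreetmap']
print(parse_list(["already", "a", "list"]))     # ['already', 'a', 'list']
print(parse_list(None))                         # []
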

