
adding mssql support #40

Draft. Wants to merge 3 commits into base: master. Changes shown from all commits.
2 changes: 1 addition & 1 deletion Dockerfile
@@ -14,4 +14,4 @@ RUN \
 
 ADD src /srv/qwc_service/
 
-ENV SERVICE_MOUNTPOINT=/api/v1/data
\ No newline at end of file
+ENV SERVICE_MOUNTPOINT=/api/v1/data
22 changes: 22 additions & 0 deletions Dockerfile.mssql
@@ -0,0 +1,22 @@
+FROM sourcepole/qwc-uwsgi-base:alpine-v2025.01.24
+
+WORKDIR /srv/qwc_service
+ADD pyproject.toml uv.lock ./
+
+# git: Required for pip with git repos
+# postgresql-dev g++ python3-dev: Required for psycopg2
+# unixodbc-dev: Required for pyodbc (SQL Server support)
+RUN \
+    apk add --no-cache --update --virtual runtime-deps postgresql-libs unixodbc && \
+    apk add --no-cache --update --virtual build-deps git postgresql-dev g++ python3-dev unixodbc-dev curl && \
+    # Install Microsoft ODBC Driver for SQL Server
+    curl -O https://download.microsoft.com/download/e/4/e/e4e67866-dffd-428c-aac7-8d28ddafb39b/msodbcsql17_17.10.2.1-1_amd64.apk && \
+    apk add --allow-untrusted msodbcsql17_17.10.2.1-1_amd64.apk && \
+    rm msodbcsql17_17.10.2.1-1_amd64.apk && \
+    uv sync --frozen && \
+    uv cache clean && \
+    apk del build-deps
+
+ADD src /srv/qwc_service/
+
+ENV SERVICE_MOUNTPOINT=/api/v1/data
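
A quick smoke test for the built image is to ask pyodbc which ODBC drivers unixODBC knows about; the msodbcsql17 package installed above registers itself in odbcinst.ini. A minimal check, assuming it runs inside the image built from Dockerfile.mssql:

```python
import pyodbc

# If the apk install above succeeded, the list should include
# "ODBC Driver 17 for SQL Server".
print(pyodbc.drivers())
```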
9 changes: 7 additions & 2 deletions pyproject.toml
@@ -13,10 +13,15 @@ dependencies = [
     "requests~=2.32.0",
     "SQLAlchemy~=2.0.29",
     "clamd~=1.0.2",
-    "qwc-services-core~=1.4.0"
+    "qwc-services-core~=1.4.0",
 ]
 
+[project.optional-dependencies]
+mssql = [
+    "pyodbc>=4.0.30",
+]
+
 [dependency-groups]
 dev = [
     "python-dotenv>=1.0.1",
-]
\ No newline at end of file
+]
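
Since `pyodbc` is only pulled in via the optional `mssql` extra (for example `uv sync --extra mssql` or `pip install .[mssql]`), code paths that touch SQL Server support have to tolerate its absence. A minimal sketch of the usual guard pattern, assuming nothing about how this repo actually structures it:

```python
# Sketch only: guard an optional dependency installed via the "mssql" extra.
try:
    import pyodbc  # available only when installed with the mssql extra
    HAS_MSSQL = True
except ImportError:
    pyodbc = None
    HAS_MSSQL = False

def require_mssql():
    """Fail with a clear hint instead of a bare ImportError."""
    if not HAS_MSSQL:
        raise RuntimeError(
            "SQL Server support requires the 'mssql' extra: pip install .[mssql]"
        )
```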
54 changes: 50 additions & 4 deletions src/data_service.py
@@ -3,13 +3,15 @@
 from collections import OrderedDict
 
 from sqlalchemy.exc import (DataError, IntegrityError,
-                            InternalError, ProgrammingError)
+                            InternalError, ProgrammingError)
 from sqlalchemy import text as sql_text
 
 from qwc_services_core.auth import get_username
 from qwc_services_core.database import DatabaseEngine
 from qwc_services_core.permissions_reader import PermissionsReader
 from dataset_features_provider import DatasetFeaturesProvider
+from dataset_features_provider_factory import create_dataset_features_provider
 from attachments_service import AttachmentsService
+from spatial_adapter import SpatialAdapter

[Review comment, Member] on `from spatial_adapter import SpatialAdapter`: This seems unused?

 
 ERROR_DETAILS_LOG_ONLY = os.environ.get(
     'ERROR_DETAILS_LOG_ONLY', 'False').lower() == 'true'
@@ -34,6 +36,10 @@ def __init__(self, tenant, logger, config):
         self.permissions_handler = PermissionsReader(tenant, logger)
         self.attachments_service = AttachmentsService(tenant, logger)
         self.db_engine = DatabaseEngine()
+
+        # Add detection for database dialect
+        self.default_db_dialect = config.get('default_db_dialect', 'postgresql')
+        self.dialect_engines = {}
 
     def index(self, identity, translator, dataset, bbox, crs, filterexpr, filter_geom):
         """Find dataset features inside bounding box.
@@ -455,7 +461,7 @@ def write_relation_values(self, identity, fk, relationValues, uploadfiles, trans
                 ret[rel_table]["features"].append(result['feature'])
 
         return ret
-
+
     def dataset_features_provider(self, identity, translator, dataset, write):
         """Return DatasetFeaturesProvider if available and permitted.
 
@@ -471,7 +477,7 @@ def dataset_features_provider(self, identity, translator, dataset, write):
         )
         if permissions:
             self.logger.debug(f"Have permissions for identity {identity} dataset {dataset} with write={write}")
-            dataset_features_provider = DatasetFeaturesProvider(
+            dataset_features_provider = create_dataset_features_provider(
                 permissions, self.db_engine, self.logger, translator
             )
         else:
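
The modified method now delegates construction to `create_dataset_features_provider` from `dataset_features_provider_factory`, whose source is not part of this page. A plausible sketch of what such a factory would do, with the mssql provider class and module names invented for illustration:

```python
# Hypothetical sketch -- the real factory in dataset_features_provider_factory.py
# is not shown in this PR page.
from dataset_features_provider import DatasetFeaturesProvider

def create_dataset_features_provider(permissions, db_engine, logger, translator):
    """Pick a provider implementation based on the detected dialect."""
    if permissions.get("dialect") == "mssql":
        # Assumed names, for illustration only
        from mssql_dataset_features_provider import MssqlDatasetFeaturesProvider
        return MssqlDatasetFeaturesProvider(permissions, db_engine, logger, translator)
    return DatasetFeaturesProvider(permissions, db_engine, logger, translator)
```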
@@ -761,3 +767,43 @@ def parse_crs(self, crs):
             # conversion failed
             pass
         return srid
+
+    def dataset_features_provider(self, identity, translator, dataset, write):
+        """Return DatasetFeaturesProvider if available and permitted.
+
+        :param str|obj identity: User identity
+        :param object translator: Translator
+        :param str dataset: Dataset ID
+        :param bool write: Whether to include permissions relevant for writing to the dataset (create/update)
+        """
+        dataset_features_provider = None
+
+        permissions = self.dataset_edit_permissions(
+            dataset, identity, translator, write
+        )
+        if permissions:
+            self.logger.debug(f"Have permissions for dataset {dataset} with write={write}")
+
+            # Detect and cache database dialect
+            if permissions["database_read"] not in self.dialect_engines:
+                engine = self.db_engine.db_engine(permissions["database_read"])
+                try:
+                    with engine.connect() as conn:
+                        # Detect actual dialect from connection
+                        self.dialect_engines[permissions["database_read"]] = conn.dialect.name
+                except Exception as e:
+                    self.logger.warning(f"Could not detect database dialect: {str(e)}")
+                    # Fall back to default dialect if connection fails
+                    self.dialect_engines[permissions["database_read"]] = self.default_db_dialect
+
+            # Pass the detected dialect to the provider via permissions
+            permissions["dialect"] = self.dialect_engines.get(permissions["database_read"], self.default_db_dialect)
+
+            # Create the dataset features provider with permissions
+            dataset_features_provider = DatasetFeaturesProvider(
+                permissions, self.db_engine, self.logger, translator
+            )
+        else:
+            self.logger.debug(f"NO permissions for dataset {dataset} with write={write}")
+
+        return dataset_features_provider
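
Where the `dialect` entry in `permissions` ultimately matters is SQL generation, since T-SQL differs from PostgreSQL in details such as result limiting. A toy illustration of the kind of branch a dialect-aware provider needs (function and table are invented, not taken from this repo):

```python
def limit_query(dialect, table, count):
    # SQL Server has no LIMIT clause; TOP (or OFFSET ... FETCH) is used instead.
    if dialect == "mssql":
        return f"SELECT TOP {count} * FROM {table}"
    return f"SELECT * FROM {table} LIMIT {count}"
```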