diff --git a/server/.test.env b/server/.test.env index bdaa7bfa..63294a3f 100644 --- a/server/.test.env +++ b/server/.test.env @@ -24,3 +24,4 @@ SECURITY_BEARER_SALT='bearer' SECURITY_EMAIL_SALT='email' SECURITY_PASSWORD_SALT='password' DIAGNOSTIC_LOGS_DIR=/tmp/diagnostic_logs +GEVENT_WORKER=0 \ No newline at end of file diff --git a/server/mergin/auth/forms.py b/server/mergin/auth/forms.py index b45638eb..f7403bb8 100644 --- a/server/mergin/auth/forms.py +++ b/server/mergin/auth/forms.py @@ -1,6 +1,7 @@ # Copyright (C) Lutra Consulting Limited # # SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-MerginMaps-Commercial + import re import safe from flask_wtf import FlaskForm @@ -48,18 +49,22 @@ class ExtendedEmail(Email): 1. spaces, 2. special characters ,:;()<>[]\" 3, multiple @ symbols, - 4, leading, trailing, or consecutive dots in the local part - 5, invalid domain part - missing top level domain (user@example), consecutive dots - Custom check for additional invalid characters disallows |'— because they make our email sending service to fail + 4, leading, trailing, or consecutive dots in the local part, + 5, invalid domain part - missing top level domain (user@example), consecutive dots, + The extended validation checks email addresses using the regex provided by Brevo, + so that we stay consistent with their validation rules and avoid API failures. """ def __call__(self, form, field): super().__call__(form, field) - if re.search(r"[|'—]", field.data): - raise ValidationError( - f"Email address '{field.data}' contains an invalid character." - ) + email = field.data.strip() + + pattern = r"^[\x60#&*\/=?^{!}~'+\w-]+(\.[\x60#&*\/=?^{!}~'+\w-]+)*\.?@([_a-zA-Z0-9-]+(\.[_a-zA-Z0-9-]+)*\.)[a-zA-Z0-9-]*[a-zA-Z0-9]{2,}$" + email_regexp = re.compile(pattern, re.IGNORECASE) + + if not email_regexp.match(email): + raise ValidationError(f"Email address '{email}' is invalid.") class PasswordValidator: diff --git a/server/mergin/sync/files.py b/server/mergin/sync/files.py index fd77c597..a85bb5e6 100644 --- a/server/mergin/sync/files.py +++ b/server/mergin/sync/files.py @@ -13,10 +13,10 @@ from .utils import ( is_file_name_blacklisted, - is_qgis, is_supported_extension, is_valid_path, is_versioned_file, + has_trailing_space, ) from ..app import DateTimeWithZ, ma @@ -212,14 +212,21 @@ def validate(self, data, **kwargs): if not is_valid_path(file_path): raise ValidationError( - f"Unsupported file name detected: {file_path}. Please remove the invalid characters." + f"Unsupported file name detected: '{file_path}'. Please remove the invalid characters." ) if not is_supported_extension(file_path): raise ValidationError( - f"Unsupported file type detected: {file_path}. " + f"Unsupported file type detected: '{file_path}'. " f"Please remove the file or try compressing it into a ZIP file before uploading.", ) + # new checks must restrict only new files not to block existing projects + for file in data["added"]: + file_path = file["path"] + if has_trailing_space(file_path): + raise ValidationError( + f"Folder name contains a trailing space. Please remove the space in: '{file_path}'." 
+ ) class ProjectFileSchema(FileSchema): @@ -230,5 +237,5 @@ class ProjectFileSchema(FileSchema): def patch_field(self, data, **kwargs): # drop 'diff' key entirely if empty or None as clients would expect if not data.get("diff"): - data.pop("diff") + data.pop("diff", None) return data diff --git a/server/mergin/sync/permissions.py b/server/mergin/sync/permissions.py index 4305a15f..7dd042d5 100644 --- a/server/mergin/sync/permissions.py +++ b/server/mergin/sync/permissions.py @@ -209,7 +209,21 @@ def require_project(ws, project_name, permission) -> Project: return project -def require_project_by_uuid(uuid: str, permission: ProjectPermissions, scheduled=False): +def require_project_by_uuid( + uuid: str, permission: ProjectPermissions, scheduled=False, expose=True +) -> Project: + """ + Retrieves a project by UUID after validating existence, workspace status, and permissions. + + Args: + uuid (str): The unique identifier of the project. + permission (ProjectPermissions): The permission level required to access the project. + scheduled (bool, optional): If ``True``, bypasses the check for projects marked for deletion. + expose (bool, optional): Controls security disclosure behavior on permission failure. + - If `True`: Returns 403 Forbidden (reveals project exists but access is denied). + - If `False`: Returns 404 Not Found (hides project existence for security). + Standard is that reading results in 404, while writing results in 403 + """ if not is_valid_uuid(uuid): abort(404) @@ -219,6 +233,10 @@ def require_project_by_uuid(uuid: str, permission: ProjectPermissions, scheduled if not scheduled: project = project.filter(Project.removed_at.is_(None)) project = project.first_or_404() + if not expose and current_user.is_anonymous and not project.public: + # we don't want to tell anonymous user if a private project exists + abort(404) + workspace = project.workspace if not workspace: abort(404) @@ -226,6 +244,7 @@ def require_project_by_uuid(uuid: str, permission: ProjectPermissions, scheduled abort(404, "Workspace doesn't exist") if not permission.check(project, current_user): abort(403, "You do not have permissions for this project") + return project diff --git a/server/mergin/sync/public_api_controller.py b/server/mergin/sync/public_api_controller.py index 6aa8f63e..0b487874 100644 --- a/server/mergin/sync/public_api_controller.py +++ b/server/mergin/sync/public_api_controller.py @@ -55,9 +55,7 @@ from .files import ( ProjectFileChange, ChangesSchema, - UploadFileSchema, ProjectFileSchema, - FileSchema, files_changes_from_upload, mergin_secure_filename, ) @@ -83,17 +81,11 @@ generate_checksum, Toucher, get_x_accel_uri, - is_file_name_blacklisted, get_ip, get_user_agent, generate_location, is_valid_uuid, - is_versioned_file, - get_project_path, get_device_id, - is_valid_path, - is_supported_type, - is_supported_extension, get_mimetype, wkb2wkt, ) @@ -980,7 +972,7 @@ def push_finish(transaction_id): if len(unsupported_files): abort( 400, - f"Unsupported file type detected: {unsupported_files[0]}. " + f"Unsupported file type detected: '{unsupported_files[0]}'. 
" f"Please remove the file or try compressing it into a ZIP file before uploading.", ) @@ -1036,14 +1028,6 @@ def push_finish(transaction_id): # let's move uploaded files where they are expected to be os.renames(files_dir, version_dir) - # remove used chunks - for file in upload.changes["added"] + upload.changes["updated"]: - file_chunks = file.get("chunks", []) - for chunk_id in file_chunks: - chunk_file = os.path.join(upload.upload_dir, "chunks", chunk_id) - if os.path.exists(chunk_file): - move_to_tmp(chunk_file) - logging.info( f"Push finished for project: {project.id}, project version: {v_next_version}, transaction id: {transaction_id}." ) diff --git a/server/mergin/sync/public_api_v2.yaml b/server/mergin/sync/public_api_v2.yaml index 9ed062d5..c1c74f68 100644 --- a/server/mergin/sync/public_api_v2.yaml +++ b/server/mergin/sync/public_api_v2.yaml @@ -76,6 +76,34 @@ paths: "409": $ref: "#/components/responses/Conflict" x-openapi-router-controller: mergin.sync.public_api_v2_controller + get: + tags: + - project + summary: Get project info + operationId: get_project + parameters: + - name: files_at_version + in: query + description: Include list of files at specific version + required: false + schema: + $ref: "#/components/schemas/VersionName" + responses: + "200": + description: Success + content: + application/json: + schema: + $ref: "#/components/schemas/ProjectDetail" + "400": + $ref: "#/components/responses/BadRequest" + "401": + $ref: "#/components/responses/Unauthorized" + "403": + $ref: "#/components/responses/Forbidden" + "404": + $ref: "#/components/responses/NotFound" + x-openapi-router-controller: mergin.sync.public_api_v2_controller /projects/{id}/scheduleDelete: post: tags: @@ -276,9 +304,7 @@ paths: default: false example: true version: - type: string - pattern: '^$|^v\d+$' - example: v2 + $ref: "#/components/schemas/VersionName" changes: type: object required: @@ -502,6 +528,72 @@ components: $ref: "#/components/schemas/ProjectRole" role: $ref: "#/components/schemas/Role" + ProjectDetail: + type: object + required: + - id + - name + - workspace + - role + - version + - created_at + - updated_at + - public + - size + properties: + id: + type: string + description: project uuid + example: c1ae6439-0056-42df-a06d-79cc430dd7df + name: + type: string + example: survey + workspace: + type: object + properties: + id: + type: integer + example: 123 + name: + type: string + example: mergin + role: + $ref: "#/components/schemas/ProjectRole" + version: + type: string + description: latest project version + example: v2 + created_at: + type: string + format: date-time + description: project creation timestamp + example: 2025-10-24T08:27:56Z + updated_at: + type: string + format: date-time + description: last project update timestamp + example: 2025-10-24T08:28:00.279699Z + public: + type: boolean + description: whether the project is public + example: false + size: + type: integer + description: project size in bytes for this version + example: 17092380 + files: + type: array + description: List of files in the project + items: + allOf: + - $ref: '#/components/schemas/File' + - type: object + properties: + mtime: + type: string + format: date-time + description: File modification timestamp + example: 2024-11-19T13:50:00Z File: type: object description: Project file metadata @@ -754,3 +846,7 @@ components: - editor - writer - owner + VersionName: + type: string + pattern: '^$|^v\d+$' + example: v2 diff --git a/server/mergin/sync/public_api_v2_controller.py 
b/server/mergin/sync/public_api_v2_controller.py index 6717a083..1bfd8738 100644 --- a/server/mergin/sync/public_api_v2_controller.py +++ b/server/mergin/sync/public_api_v2_controller.py @@ -14,6 +14,9 @@ from marshmallow import ValidationError from sqlalchemy.exc import IntegrityError +from mergin.sync.tasks import remove_transaction_chunks + +from .schemas_v2 import ProjectSchema as ProjectSchemaV2 from ..app import db from ..auth import auth_required from ..auth.models import User @@ -26,7 +29,7 @@ StorageLimitHit, UploadError, ) -from .files import ChangesSchema +from .files import ChangesSchema, ProjectFileSchema from .forms import project_name_validation from .models import ( Project, @@ -41,7 +44,6 @@ from .public_api_controller import catch_sync_failure from .schemas import ( ProjectMemberSchema, - ProjectVersionSchema, UploadChunkSchema, ProjectSchema, ) @@ -162,6 +164,22 @@ def remove_project_collaborator(id, user_id): return NoContent, 204 +def get_project(id, files_at_version=None): + """Get project info. Include list of files at specific version if requested.""" + project = require_project_by_uuid(id, ProjectPermissions.Read, expose=False) + data = ProjectSchemaV2().dump(project) + if files_at_version: + pv = ProjectVersion.query.filter_by( + project_id=project.id, name=ProjectVersion.from_v_name(files_at_version) + ).first() + if pv: + data["files"] = ProjectFileSchema( + only=("path", "mtime", "size", "checksum"), many=True + ).dump(pv.files) + + return data, 200 + + @auth_required @catch_sync_failure def create_project_version(id): @@ -302,12 +320,12 @@ def create_project_version(id): os.renames(temp_files_dir, version_dir) # remove used chunks + # get chunks from added and updated files + chunks_ids = [] for file in to_be_added_files + to_be_updated_files: file_chunks = file.get("chunks", []) - for chunk_id in file_chunks: - chunk_file = get_chunk_location(chunk_id) - if os.path.exists(chunk_file): - move_to_tmp(chunk_file) + chunks_ids.extend(file_chunks) + remove_transaction_chunks.delay(chunks_ids) logging.info( f"Push finished for project: {project.id}, project version: {v_next_version}, upload id: {upload.id}." 
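Note on the hunk above: after a successful push the controller no longer deletes used chunks inline; it only collects the chunk ids and defers the on-disk cleanup to the remove_transaction_chunks Celery task (added in tasks.py further down in this diff). A minimal sketch of the resulting pattern, using only names that appear in this diff — the surrounding handler code is elided and the snippet is illustrative, not a drop-in excerpt:

    # gather chunk ids from every added/updated file in the upload payload
    chunk_ids = []
    for file in to_be_added_files + to_be_updated_files:
        chunk_ids.extend(file.get("chunks", []))

    # defer the file deletion to a Celery worker so the push response is not blocked
    remove_transaction_chunks.delay(chunk_ids)

    # tests (see test_public_api_v2.py below) patch .delay and then call the task
    # synchronously to verify the chunk files are actually removed:
    remove_transaction_chunks(chunk_ids)
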
@@ -360,7 +378,6 @@ def upload_chunk(id: str): # we could have used request.data here, but it could eventually cause OOM issue save_to_file(request.stream, dest_file, current_app.config["MAX_CHUNK_SIZE"]) except IOError: - move_to_tmp(dest_file, chunk_id) return BigChunkError().response(413) except Exception as e: return UploadError(error="Error saving chunk").response(400) diff --git a/server/mergin/sync/schemas_v2.py b/server/mergin/sync/schemas_v2.py new file mode 100644 index 00000000..d6b781ee --- /dev/null +++ b/server/mergin/sync/schemas_v2.py @@ -0,0 +1,48 @@ +# Copyright (C) Lutra Consulting Limited +# +# SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-MerginMaps-Commercial + +from marshmallow import fields +from flask_login import current_user + +from ..app import DateTimeWithZ, ma +from .permissions import ProjectPermissions +from .models import ( + Project, + ProjectVersion, +) + + +class ProjectSchema(ma.SQLAlchemyAutoSchema): + id = fields.UUID() + name = fields.String() + version = fields.Function(lambda obj: ProjectVersion.to_v_name(obj.latest_version)) + public = fields.Boolean() + size = fields.Integer(attribute="disk_usage") + + created_at = DateTimeWithZ(attribute="created") + updated_at = DateTimeWithZ(attribute="updated") + + workspace = fields.Function( + lambda obj: {"id": obj.workspace.id, "name": obj.workspace.name} + ) + role = fields.Method("_role") + + def _role(self, obj): + role = ProjectPermissions.get_user_project_role(obj, current_user) + return role.value if role else None + + class Meta: + model = Project + load_instance = True + fields = ( + "id", + "name", + "version", + "public", + "size", + "created_at", + "updated_at", + "workspace", + "role", + ) diff --git a/server/mergin/sync/tasks.py b/server/mergin/sync/tasks.py index 7688a3ee..9392997c 100644 --- a/server/mergin/sync/tasks.py +++ b/server/mergin/sync/tasks.py @@ -7,13 +7,14 @@ import os import time from datetime import datetime, timedelta, timezone +from typing import List, Optional from zipfile import ZIP_DEFLATED, ZipFile from flask import current_app from .models import Project, ProjectVersion, FileHistory from .storages.disk import move_to_tmp from .config import Configuration -from .utils import remove_outdated_files +from .utils import get_chunk_location, remove_outdated_files from ..celery import celery from ..app import db @@ -169,3 +170,14 @@ def remove_unused_chunks(): if not os.path.isdir(dir): continue remove_outdated_files(dir, time_delta) + + +@celery.task +def remove_transaction_chunks(chunks: Optional[List[str]] = None): + """Remove chunks related to a specific sync transaction""" + if not chunks: + return + for chunk in chunks: + chunk_path = get_chunk_location(chunk) + if os.path.exists(chunk_path): + os.remove(chunk_path) diff --git a/server/mergin/sync/utils.py b/server/mergin/sync/utils.py index b73da860..de0fbe94 100644 --- a/server/mergin/sync/utils.py +++ b/server/mergin/sync/utils.py @@ -25,6 +25,7 @@ ) import magic from flask import current_app +from pathlib import Path def generate_checksum(file, chunk_size=4096): @@ -100,11 +101,6 @@ def is_qgis(path: str) -> bool: return ext.lower() in [".qgs", ".qgz"] -def int_version(version): - """Convert v format of version to integer representation.""" - return int(version.lstrip("v")) if re.match(r"v\d", version) else None - - def is_versioned_file(file): """Check if file is compatible with geodiff lib and hence suitable for versioning.""" diff_extensions = [".gpkg", ".sqlite"] @@ -338,7 +334,7 @@ def files_size(): def 
is_valid_path(filepath: str) -> bool: """Check filepath and filename for invalid characters, absolute path or path traversal""" return ( - not len(re.split(r"\.[/\\]", filepath)) > 1 # ./ or .\ + not re.search(r"\.[/\\]", filepath) # ./ or .\ and is_valid_filepath(filepath) # invalid characters in filepath, absolute path and is_valid_filename( os.path.basename(filepath) @@ -346,6 +342,11 @@ def is_valid_path(filepath: str) -> bool: ) +def has_trailing_space(filepath: str) -> bool: + """Check filepath for trailing spaces that makes the project impossible to download on Windows""" + return any(part != part.rstrip() for part in Path(filepath).parts) + + def is_supported_extension(filepath) -> bool: """Check whether file's extension is supported.""" ext = os.path.splitext(filepath)[1].lower() diff --git a/server/mergin/tests/test_auth.py b/server/mergin/tests/test_auth.py index d53b01bc..69ba37c6 100644 --- a/server/mergin/tests/test_auth.py +++ b/server/mergin/tests/test_auth.py @@ -125,9 +125,14 @@ def test_logout(client): 400, ), # tests with upper case, but email already exists (" mergin@mergin.com ", "#pwd123", 400), # invalid password - ("verylonglonglonglonglonglonglongemail@example.com", "#pwd1234", 201), + ( + "verylonglonglonglonglonglonglongemail@lutra-consulting.co.uk", + "#pwd1234", + 201, + ), # long local part, second-level domain, dash in domain ("us.er@mergin.com", "#pwd1234", 201), # dot is allowed ("us er@mergin.com", "#pwd1234", 400), # space is disallowed + ("test@gmaiñ.com", "#pwd1234", 400), # non-ASCII character in the domain ] @@ -936,15 +941,16 @@ def test_server_usage(client): ("日人日本人", True), # non-ascii character ("usér", True), # non-ascii character ("user\\", False), # disallowed character - ("user\260", True), # non-ascii character (°) + ("user\260", False), # not letter character (°) ("user|", False), # vertical bar ("us er", False), # space in the middle ("us,er", False), # comma ("us—er", False), # dash - ("us'er", False), # apostrophe + ("us´er", False), # acute accent (" user", True), # starting with space (will be stripped) ("us.er", True), # dot in the middle (".user", False), # starting with dot + ("us-er", True), # hyphen ] diff --git a/server/mergin/tests/test_middleware.py b/server/mergin/tests/test_middleware.py index 82b9cf26..2f5cbe4f 100644 --- a/server/mergin/tests/test_middleware.py +++ b/server/mergin/tests/test_middleware.py @@ -6,6 +6,7 @@ import psycogreen.gevent import pytest import sqlalchemy +from unittest.mock import patch from ..app import create_simple_app, GeventTimeoutMiddleware, db from ..config import Configuration @@ -14,58 +15,74 @@ @pytest.mark.parametrize("use_middleware", [True, False]) def test_use_middleware(use_middleware): """Test using middleware""" - Configuration.GEVENT_WORKER = use_middleware - Configuration.GEVENT_REQUEST_TIMEOUT = 1 - application = create_simple_app() + with patch.object( + Configuration, + "GEVENT_WORKER", + use_middleware, + ), patch.object( + Configuration, + "GEVENT_REQUEST_TIMEOUT", + 1, + ): + application = create_simple_app() - def ping(): - gevent.sleep(Configuration.GEVENT_REQUEST_TIMEOUT + 1) - return "pong" + def ping(): + gevent.sleep(Configuration.GEVENT_REQUEST_TIMEOUT + 1) + return "pong" - application.add_url_rule("/test", "ping", ping) - app_context = application.app_context() - app_context.push() + application.add_url_rule("/test", "ping", ping) + app_context = application.app_context() + app_context.push() - assert isinstance(application.wsgi_app, GeventTimeoutMiddleware) == 
use_middleware - # in case of gevent, dummy endpoint it set to time out - assert application.test_client().get("/test").status_code == ( - 504 if use_middleware else 200 - ) + assert ( + isinstance(application.wsgi_app, GeventTimeoutMiddleware) == use_middleware + ) + # in case of gevent, dummy endpoint it set to time out + assert application.test_client().get("/test").status_code == ( + 504 if use_middleware else 200 + ) def test_catch_timeout(): """Test proper handling of gevent timeout with db.session.rollback""" psycogreen.gevent.patch_psycopg() - Configuration.GEVENT_WORKER = True - Configuration.GEVENT_REQUEST_TIMEOUT = 1 - application = create_simple_app() + with patch.object( + Configuration, + "GEVENT_WORKER", + True, + ), patch.object( + Configuration, + "GEVENT_REQUEST_TIMEOUT", + 1, + ): + application = create_simple_app() - def unhandled(): - try: - db.session.execute("SELECT pg_sleep(1.1);") - finally: - db.session.execute("SELECT 1;") - return "" + def unhandled(): + try: + db.session.execute("SELECT pg_sleep(1.1);") + finally: + db.session.execute("SELECT 1;") + return "" - def timeout(): - try: - db.session.execute("SELECT pg_sleep(1.1);") - except gevent.timeout.Timeout: - db.session.rollback() - raise - finally: - db.session.execute("SELECT 1;") - return "" + def timeout(): + try: + db.session.execute("SELECT pg_sleep(1.1);") + except gevent.timeout.Timeout: + db.session.rollback() + raise + finally: + db.session.execute("SELECT 1;") + return "" - application.add_url_rule("/unhandled", "unhandled", unhandled) - application.add_url_rule("/timeout", "timeout", timeout) - app_context = application.app_context() - app_context.push() + application.add_url_rule("/unhandled", "unhandled", unhandled) + application.add_url_rule("/timeout", "timeout", timeout) + app_context = application.app_context() + app_context.push() - assert application.test_client().get("/timeout").status_code == 504 + assert application.test_client().get("/timeout").status_code == 504 - # in case of missing rollback sqlalchemy would raise error - with pytest.raises(sqlalchemy.exc.PendingRollbackError): - application.test_client().get("/unhandled") + # in case of missing rollback sqlalchemy would raise error + with pytest.raises(sqlalchemy.exc.PendingRollbackError): + application.test_client().get("/unhandled") - db.session.rollback() + db.session.rollback() diff --git a/server/mergin/tests/test_project_controller.py b/server/mergin/tests/test_project_controller.py index 1cba91cc..c7a0550e 100644 --- a/server/mergin/tests/test_project_controller.py +++ b/server/mergin/tests/test_project_controller.py @@ -2495,7 +2495,7 @@ def test_filepath_manipulation(client): assert resp.status_code == 400 assert ( resp.json["detail"] - == f"Unsupported file name detected: {manipulated_path}. Please remove the invalid characters." + == f"Unsupported file name detected: '{manipulated_path}'. Please remove the invalid characters." ) @@ -2528,7 +2528,7 @@ def test_supported_file_upload(client): assert resp.status_code == 400 assert ( resp.json["detail"] - == f"Unsupported file type detected: {script_filename}. Please remove the file or try compressing it into a ZIP file before uploading." + == f"Unsupported file type detected: '{script_filename}'. Please remove the file or try compressing it into a ZIP file before uploading." 
) # Extension spoofing to trick the validator spoof_name = "script.gpkg" @@ -2567,7 +2567,7 @@ def test_supported_file_upload(client): assert resp.status_code == 400 assert ( resp.json["detail"] - == f"Unsupported file type detected: {spoof_name}. Please remove the file or try compressing it into a ZIP file before uploading." + == f"Unsupported file type detected: '{spoof_name}'. Please remove the file or try compressing it into a ZIP file before uploading." ) diff --git a/server/mergin/tests/test_public_api_v2.py b/server/mergin/tests/test_public_api_v2.py index 85177190..34a5a2a1 100644 --- a/server/mergin/tests/test_public_api_v2.py +++ b/server/mergin/tests/test_public_api_v2.py @@ -1,6 +1,21 @@ # Copyright (C) Lutra Consulting Limited # # SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-MerginMaps-Commercial + +from mergin.sync.tasks import remove_transaction_chunks, remove_unused_chunks +from . import DEFAULT_USER +from .utils import ( + add_user, + logout, + login_as_admin, + create_workspace, + create_project, + upload_file_to_project, + login, + file_info, +) + +from ..auth.models import User import os import shutil from unittest.mock import patch @@ -10,6 +25,7 @@ from mergin.app import db from mergin.config import Configuration +from mergin.sync.config import Configuration as SyncConfiguration from mergin.sync.errors import ( BigChunkError, ProjectLocked, @@ -35,7 +51,6 @@ _get_changes_with_diff_0_size, _get_changes_without_added, ) -from .utils import add_user, file_info def test_schedule_delete_project(client): @@ -154,6 +169,82 @@ def test_project_members(client): # access provided by workspace role cannot be removed directly response = client.delete(url + f"/{user.id}") assert response.status_code == 404 + Configuration.GLOBAL_READ = 0 + + +def test_get_project(client): + """Test get project info endpoint""" + admin = User.query.filter_by(username=DEFAULT_USER[0]).first() + test_workspace = create_workspace() + project = create_project("new_project", test_workspace, admin) + logout(client) + # anonymous user cannot access the private resource + response = client.get(f"v2/projects/{project.id}") + assert response.status_code == 404 + # lack of permissions + user = add_user("tests", "tests") + login(client, user.username, "tests") + response = client.get(f"v2/projects/{project.id}") + assert response.status_code == 403 + # access public project + project.public = True + db.session.commit() + response = client.get(f"v2/projects/{project.id}") + assert response.status_code == 200 + assert response.json["public"] is True + # project scheduled for deletion + login_as_admin(client) + project.public = False + project.removed_at = datetime.utcnow() + db.session.commit() + response = client.get(f"v2/projects/{project.id}") + assert response.status_code == 404 + # success + project.removed_at = None + db.session.commit() + response = client.get(f"v2/projects/{project.id}") + assert response.status_code == 200 + expected_keys = { + "id", + "name", + "workspace", + "role", + "version", + "created_at", + "updated_at", + "public", + "size", + } + assert expected_keys == response.json.keys() + # create new versions + files = ["test.txt", "test3.txt", "test.qgs"] + for file in files: + upload_file_to_project(project, file, client) + # project version does not exist + response = client.get( + f"v2/projects/{project.id}?files_at_version=v{project.latest_version+1}" + ) + assert response.status_code == 200 + assert response.json["id"] == str(project.id) + assert "files" not in 
response.json.keys() + # files + response = client.get( + f"v2/projects/{project.id}?files_at_version=v{project.latest_version-2}" + ) + assert response.status_code == 200 + assert len(response.json["files"]) == 1 + assert any(resp_files["path"] == files[0] for resp_files in response.json["files"]) + assert not any( + resp_files["path"] == files[1] for resp_files in response.json["files"] + ) + response = client.get( + f"v2/projects/{project.id}?files_at_version=v{project.latest_version}" + ) + assert len(response.json["files"]) == 3 + assert {f["path"] for f in response.json["files"]} == set(files) + # invalid version format parameter + response = client.get(f"v2/projects/{project.id}?files_at_version=3") + assert response.status_code == 400 push_data = [ @@ -277,6 +368,7 @@ def test_create_version(client, data, expected, err_code): assert project.latest_version == 1 chunks = [] + chunk_ids = [] if expected == 201: # mimic chunks were uploaded for f in data["changes"]["added"] + data["changes"]["updated"]: @@ -293,12 +385,21 @@ def test_create_version(client, data, expected, err_code): out_file.write(in_file.read(CHUNK_SIZE)) chunks.append(chunk_location) + chunk_ids.append(chunk) - response = client.post(f"v2/projects/{project.id}/versions", json=data) + with patch( + "mergin.sync.public_api_v2_controller.remove_transaction_chunks.delay" + ) as mock_remove: + response = client.post(f"v2/projects/{project.id}/versions", json=data) assert response.status_code == expected if expected == 201: assert response.json["version"] == "v2" assert project.latest_version == 2 + # chunks exists after upload, cleanup job did not remove them + assert all(os.path.exists(chunk) for chunk in chunks) + if chunk_ids: + assert mock_remove.called_once_with(chunk_ids) + remove_transaction_chunks(chunk_ids) assert all(not os.path.exists(chunk) for chunk in chunks) else: assert project.latest_version == 1 diff --git a/server/mergin/tests/test_utils.py b/server/mergin/tests/test_utils.py index ed66cdd0..bf5f4666 100644 --- a/server/mergin/tests/test_utils.py +++ b/server/mergin/tests/test_utils.py @@ -5,7 +5,6 @@ import base64 from datetime import datetime import json -import os import pytest from flask import url_for, current_app from sqlalchemy import desc @@ -13,6 +12,7 @@ from unittest.mock import patch from pathvalidate import sanitize_filename from pygeodiff import GeoDiff +from pathlib import PureWindowsPath from ..utils import save_diagnostic_log_file @@ -24,6 +24,7 @@ is_valid_path, get_x_accel_uri, wkb2wkt, + has_trailing_space, ) from ..auth.models import LoginHistory, User from . 
import json_headers @@ -228,6 +229,24 @@ def test_is_valid_path(client, filepath, allow): assert is_valid_path(filepath) == allow +trailing_spaces_paths = [ + ("photos /lutraHQ.jpg", "posix", True), + ("photo s/ lutraHQ.jpg", "posix", False), + ("assets\photos \lutraHQ.jpg", "windows", True), + ("assets\ photos\lutraHQ.jpg", "windows", False), +] + + +@pytest.mark.parametrize("path,path_platform,result", trailing_spaces_paths) +def test_has_trailing_space(path, path_platform, result): + if path_platform == "windows": + # we must mock Path to instantiate as Windows path + with patch("mergin.sync.utils.Path", PureWindowsPath): + assert has_trailing_space(path) is result + else: + assert has_trailing_space(path) is result + + def test_get_x_accell_uri(client): """Test get_x_accell_uri""" client.application.config["LOCAL_PROJECTS"] = "/data/" diff --git a/server/mergin/tests/utils.py b/server/mergin/tests/utils.py index 6dcfd157..dab9c02c 100644 --- a/server/mergin/tests/utils.py +++ b/server/mergin/tests/utils.py @@ -379,3 +379,9 @@ def modify_file_times(path, time: datetime, accessed=True, modified=True): mtime = epoch_time if modified else file_stat.st_mtime os.utime(path, (atime, mtime)) + + +def logout(client): + """Test helper to log out the client""" + resp = client.get(url_for("/.mergin_auth_controller_logout")) + assert resp.status_code == 200 diff --git a/server/mergin/version.py b/server/mergin/version.py index cdacf710..10d74e1e 100644 --- a/server/mergin/version.py +++ b/server/mergin/version.py @@ -4,4 +4,4 @@ def get_version(): - return "2025.7.3" + return "2025.8.2" diff --git a/server/setup.py b/server/setup.py index 33f41ea7..7a3d1939 100644 --- a/server/setup.py +++ b/server/setup.py @@ -6,7 +6,7 @@ setup( name="mergin", - version="2025.7.3", + version="2025.8.2", url="https://github.com/MerginMaps/mergin", license="AGPL-3.0-only", author="Lutra Consulting Limited", diff --git a/web-app/packages/lib/src/common/number_utils.ts b/web-app/packages/lib/src/common/number_utils.ts index 7d7cd834..a1128973 100644 --- a/web-app/packages/lib/src/common/number_utils.ts +++ b/web-app/packages/lib/src/common/number_utils.ts @@ -39,9 +39,10 @@ export function formatFileSize( export function formatToCurrency( value: number, currency: string, - digits = 2 + digits = 2, + locale = 'en-UK' ): string { - return value.toLocaleString('en-UK', { + return value.toLocaleString(locale, { style: 'currency', currency, currencySign: 'accounting',