|
14 | 14 | from marshmallow import ValidationError |
15 | 15 | from sqlalchemy.exc import IntegrityError |
16 | 16 |
|
| 17 | +from mergin.sync.tasks import remove_transaction_chunks |
| 18 | + |
| 19 | +from .schemas_v2 import ProjectSchema as ProjectSchemaV2 |
17 | 20 | from ..app import db |
18 | 21 | from ..auth import auth_required |
19 | 22 | from ..auth.models import User |
|
26 | 29 | StorageLimitHit, |
27 | 30 | UploadError, |
28 | 31 | ) |
29 | | -from .files import ChangesSchema |
| 32 | +from .files import ChangesSchema, ProjectFileSchema |
30 | 33 | from .forms import project_name_validation |
31 | 34 | from .models import ( |
32 | 35 | Project, |
|
41 | 44 | from .public_api_controller import catch_sync_failure |
42 | 45 | from .schemas import ( |
43 | 46 | ProjectMemberSchema, |
44 | | - ProjectVersionSchema, |
45 | 47 | UploadChunkSchema, |
46 | 48 | ProjectSchema, |
47 | 49 | ) |
@@ -162,6 +164,22 @@ def remove_project_collaborator(id, user_id): |
162 | 164 | return NoContent, 204 |
163 | 165 |
|
164 | 166 |
|
| 167 | +def get_project(id, files_at_version=None): |
| 168 | + """Get project info. Include list of files at specific version if requested.""" |
| 169 | + project = require_project_by_uuid(id, ProjectPermissions.Read, expose=False) |
| 170 | + data = ProjectSchemaV2().dump(project) |
| 171 | + if files_at_version: |
| 172 | + pv = ProjectVersion.query.filter_by( |
| 173 | + project_id=project.id, name=ProjectVersion.from_v_name(files_at_version) |
| 174 | + ).first() |
| 175 | + if pv: |
| 176 | + data["files"] = ProjectFileSchema( |
| 177 | + only=("path", "mtime", "size", "checksum"), many=True |
| 178 | + ).dump(pv.files) |
| 179 | + |
| 180 | + return data, 200 |
| 181 | + |
| 182 | + |
165 | 183 | @auth_required |
166 | 184 | @catch_sync_failure |
167 | 185 | def create_project_version(id): |
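
The new `get_project` handler above returns the v2 project payload and, on request, the file listing at a given version. A minimal client-side sketch of calling it, assuming the handler is exposed as `GET /v2/projects/{id}` with a `files_at_version` query parameter and bearer-token auth (the path, parameter wiring and auth header are assumptions; only the handler signature is shown in this diff):

```python
# Hypothetical client call for the get_project handler above. Endpoint path,
# query parameter name and auth scheme are assumptions, not taken from the diff.
import requests

SERVER = "https://example.merginmaps.com"            # assumed server URL
PROJECT_ID = "00000000-0000-0000-0000-000000000000"  # assumed project UUID

resp = requests.get(
    f"{SERVER}/v2/projects/{PROJECT_ID}",
    params={"files_at_version": "v3"},               # request files at version v3
    headers={"Authorization": "Bearer <token>"},
)
resp.raise_for_status()
project = resp.json()
# When the requested version exists, the payload carries a "files" list holding
# path, mtime, size and checksum for each file at that version.
for f in project.get("files", []):
    print(f["path"], f["size"], f["checksum"])
```
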
@@ -302,12 +320,12 @@ def create_project_version(id): |
302 | 320 | os.renames(temp_files_dir, version_dir) |
303 | 321 |
|
304 | 322 | # remove used chunks |
| 323 | + # get chunks from added and updated files |
| 324 | + chunks_ids = [] |
305 | 325 | for file in to_be_added_files + to_be_updated_files: |
306 | 326 | file_chunks = file.get("chunks", []) |
307 | | - for chunk_id in file_chunks: |
308 | | - chunk_file = get_chunk_location(chunk_id) |
309 | | - if os.path.exists(chunk_file): |
310 | | - move_to_tmp(chunk_file) |
| 327 | + chunks_ids.extend(file_chunks) |
| 328 | + remove_transaction_chunks.delay(chunks_ids) |
311 | 329 |
|
312 | 330 | logging.info( |
313 | 331 | f"Push finished for project: {project.id}, project version: {v_next_version}, upload id: {upload.id}." |
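
The inline per-chunk cleanup is replaced by a single `remove_transaction_chunks.delay(...)` call, so chunk files are now removed by a background worker instead of inside the push request. A rough sketch of what such a Celery task could look like, reusing the same helpers the removed inline code called (`get_chunk_location`, `move_to_tmp`); the real task lives in `mergin/sync/tasks.py` and its body is not part of this diff, and the import paths below are assumptions:

```python
# Illustrative sketch only: the actual remove_transaction_chunks task is defined
# in mergin/sync/tasks.py and is not shown in this diff. Celery app and helper
# import paths are assumptions.
import os

from ..celery import celery                  # assumed Celery app instance
from .storages.disk import move_to_tmp       # helper used by the removed inline code
from .utils import get_chunk_location        # helper used by the removed inline code


@celery.task
def remove_transaction_chunks(chunks_ids):
    """Move uploaded chunk files to tmp after a finished push, outside the request."""
    for chunk_id in chunks_ids:
        chunk_file = get_chunk_location(chunk_id)
        if os.path.exists(chunk_file):
            move_to_tmp(chunk_file)
```
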
@@ -360,7 +378,6 @@ def upload_chunk(id: str): |
360 | 378 | # we could have used request.data here, but it could eventually cause OOM issue |
361 | 379 | save_to_file(request.stream, dest_file, current_app.config["MAX_CHUNK_SIZE"]) |
362 | 380 | except IOError: |
363 | | - move_to_tmp(dest_file, chunk_id) |
364 | 381 | return BigChunkError().response(413) |
365 | 382 | except Exception as e: |
366 | 383 | return UploadError(error="Error saving chunk").response(400) |
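
The context line above notes that `request.data` is avoided so the whole chunk never has to sit in memory; instead `save_to_file` streams the request body to disk and enforces `MAX_CHUNK_SIZE`, and the handler maps an `IOError` to `BigChunkError` (HTTP 413). A rough sketch of that kind of size-limited streaming copy, under the assumption that exceeding the limit raises `IOError`; this is not the project's actual helper:

```python
# Rough sketch of a size-limited streaming save with the same call shape as
# save_to_file(request.stream, dest_file, max_size) used above. Assumes that
# exceeding max_size should raise IOError, which the caller turns into HTTP 413.
def save_to_file(stream, dest_file, max_size, block_size=64 * 1024):
    written = 0
    with open(dest_file, "wb") as out:
        while True:
            block = stream.read(block_size)
            if not block:
                break
            written += len(block)
            if written > max_size:
                raise IOError("Chunk exceeds maximum allowed size")
            out.write(block)
```
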
|