Coverage for gws-app / gws / plugin / qfieldcloud / action.py: 0%

421 statements  

« prev     ^ index     » next       coverage.py v7.13.4, created at 2026-03-03 10:12 +0100

1from typing import Optional, cast 

2 

3import re 

4import os 

5import hashlib 

6 

7import gws 

8import gws.base.job 

9import gws.base.shape 

10import gws.base.action 

11import gws.lib.mime 

12import gws.lib.jsonx 

13import gws.lib.datetimex as dtx 

14import gws.lib.osx as osx 

15 

16from . import core, packager, patcher, api, caps 

17 

# Register this module as the 'qfieldcloud' action type with the GWS extension system.
gws.ext.new.action('qfieldcloud')

19 

20 

class Config(gws.ConfigWithAccess):
    """Configuration for the QField Cloud action."""

    projects: list[core.ProjectConfig]
    """QField Cloud projects."""

24 

25 

class Props(gws.base.action.Props):
    """Client-side properties for the QField Cloud action (none beyond the base)."""
    pass

28 

29 

class Request(gws.Data):
    """Per-request context passed to every API route handler.

    Created in ``Object.raw_request`` with the routing fields; the auth
    fields (``user``, ``sess``, ``token``) and ``post`` are filled in by
    ``Object._handle_route``, and ``qfcProject`` by ``set_qfc_project``.
    """

    req: gws.WebRequester
    """The original web request."""
    route: str
    """Request method and path."""
    parts: dict
    """Variables from the path."""
    qs: dict
    """Query string parameters."""
    post: dict
    """POST payload."""
    project: gws.Project
    """GWS Project context."""
    qfcProject: core.QfcProject
    """QField Cloud Project context."""
    user: gws.User
    """Authenticated user."""
    sess: gws.AuthSession
    """Authentication session."""
    token: str
    """Authentication token."""

51 

52 

class WorkerPayload(gws.Data):
    """Payload serialized into a background job (see ``Object.create_package_job``).

    Note: when stored via ``gws.u.to_dict``, the payload dict keys are these
    attribute names (camelCase).
    """

    actionUid: str  # uid of the action object that scheduled the job
    jobType: str  # job kind, currently always 'package'
    qfcProjectUid: str  # uid of the QField Cloud project to package
    projectUid: str  # uid of the enclosing GWS project

58 

59 

def route(pattern: str):
    """Attach a URL route pattern to an API handler.

    The pattern is stored on the function as ``_route_pattern`` and later
    matched against ``"<METHOD> <path>"`` strings by the request dispatcher.
    """

    def decorator(fn):
        setattr(fn, '_route_pattern', pattern)
        return fn

    return decorator

66 

67 

class AuthMethod(gws.AuthMethod):
    """Dummy auth method for API sessions."""

    def configure(self):
        # Fixed uid so sessions created by this plugin are identifiable.
        self.uid = 'gws.plugin.qfieldcloud.auth'

73 

74 

class Object(gws.base.action.Object):
    """QField Cloud API action.

    Emulates the subset of the QField Cloud REST API that the QField mobile
    client uses, backed by GWS projects. Requests enter via ``raw_request``,
    are matched against ``@route`` patterns and dispatched to the matching
    ``on_*`` handler. Packages, deltas and caches are stored on the file
    system under ``VAR_DIR/qfieldcloud/projects/<uid>``.
    """

    qfcProjects: list[core.QfcProject]
    # Per-process cache of parsed QGIS capabilities, keyed by QfcProject uid.
    # Deliberately excluded from pickling (see __getstate__).
    capsCache: dict[str, caps.Caps]
    # Dummy auth method used to create/validate API sessions.
    method: gws.AuthMethod

    def configure(self):
        self.method = cast(gws.AuthMethod, self.create_child(AuthMethod))
        self.qfcProjects = []
        for p in self.cfg('projects') or []:
            qp = self.create_child(core.QfcProject, p)
            if qp:
                self.qfcProjects.append(cast(core.QfcProject, qp))

    def __getstate__(self):
        # capsCache holds live parsed objects; drop it when pickling and
        # rebuild lazily in get_caps().
        return gws.u.omit(vars(self), 'capsCache')

    @gws.ext.command.raw('qfieldcloudApi')
    def raw_request(self, req: gws.WebRequester, p: gws.Request) -> gws.ContentResponse:
        """Entry point for all QField Cloud API requests.

        Resolves the GWS project context — either from the closest project
        object or from an explicit ``projectUid/<uid>/`` path prefix — then
        matches ``"<METHOD> <path>"`` against the ``@route`` patterns of this
        object's methods and dispatches to the first match.

        Raises:
            gws.NotFoundError: If no path, no project, or no matching route.
        """
        path = req.path().strip('/')
        if not path:
            raise gws.NotFoundError('API path not specified')

        path_parts = path.split('/')
        project = cast(gws.Project, self.find_closest(gws.ext.object.project))

        if path_parts[0] == 'projectUid':
            # Path form: projectUid/<gws-project-uid>/<api-path...>
            try:
                path_parts.pop(0)
                uid = path_parts.pop(0)
                path = '/'.join(path_parts)
            except IndexError:
                raise gws.NotFoundError('gws project UID not specified')
            if not project:
                project = req.user.require_project(uid)
            elif uid != project.uid:
                raise gws.NotFoundError(f'gws project UID mismatch: {uid=} != {project.uid=}')

        if not project:
            raise gws.NotFoundError('gws project not found')

        path = path.strip('/')
        route = f'{req.method} {path}'

        # Scan methods carrying a @route pattern and dispatch the first match.
        for name in dir(self):
            fn = getattr(self, name)
            if callable(fn) and hasattr(fn, '_route_pattern'):
                m = re.match(f'^{fn._route_pattern}$', route)
                if m:
                    rx = Request(
                        req=req,
                        project=project,
                        route=route,
                        parts=m.groupdict(),
                        post={},
                        qs=req.query_params(),
                    )
                    return self._handle_route(fn, rx)

        raise gws.NotFoundError(f'API {route=} not found')

    # Routes that can be called without an Authorization token.
    _public_routes = [
        'GET api/v1/auth/providers',
        'POST api/v1/auth/token',
    ]

    def _handle_route(self, fn, rx: Request) -> gws.ContentResponse:
        """Populate the POST payload, authenticate, and invoke a handler.

        Handler return values are normalized: falsy -> empty response,
        ContentResponse -> passed through, list/dict/Data -> JSON.

        Raises:
            gws.Error: If the handler returns an unsupported type.
        """
        if rx.req.isApi:
            rx.post = rx.req.struct()
        elif rx.req.isForm:
            rx.post = dict(rx.req.form())
        elif rx.req.isPost:
            rx.post = dict(raw=rx.req.data())

        # NOTE(review): rx includes POST data; for 'auth/token' this debug line
        # can log credentials — consider redacting.
        gws.log.debug(f'API_REQUEST {rx.route=} -> {fn.__name__} {rx=}')

        if rx.route not in self._public_routes:
            self.authorize_from_token(rx)

        res = fn(rx)

        if not res:
            return gws.ContentResponse(content='')

        if isinstance(res, gws.ContentResponse):
            return res

        if isinstance(res, (list, dict, gws.Data)):
            return gws.ContentResponse(
                content=gws.lib.jsonx.to_string(res),
                mime=gws.lib.mime.JSON,
            )

        raise gws.Error(f'API {rx.route=} invalid response type: {type(res)}')

    ##

    @route('POST api/v1/auth/logout')
    def on_post_auth_logout(self, rx: Request):
        """Delete the current session (token becomes invalid)."""
        am = self.root.app.authMgr
        am.sessionMgr.delete(rx.sess)
        gws.log.debug(f'{self=} {rx=}')

    @route('GET api/v1/auth/providers')
    def on_get_auth_providers(self, rx: Request) -> list[api.AuthProvider]:
        """Advertise available login providers (only username/password)."""
        return [
            api.AuthProvider(type='credentials', id='credentials', name='Username / Password'),
        ]

    @route('POST api/v1/auth/token')
    def on_post_auth_token(self, rx: Request) -> api.AuthToken:
        """Authenticate with username/password and issue a session token."""
        self.authorize_from_credentials(
            gws.Data(
                username=rx.post.get('username', ''),
                password=rx.post.get('password', ''),
            ),
            rx,
        )
        am = self.root.app.authMgr
        return api.AuthToken(
            token=rx.token,
            expires_at=dtx.to_iso_string(dtx.add(seconds=am.sessionMgr.lifeTime)),
            username=rx.user.loginName,
            type=api.UserType.person,
            full_name=rx.user.displayName,
            avatar_url='',
            email='',
            first_name='',
            last_name='',
        )

    @route('GET api/v1/auth/user')
    def on_get_auth_user(self, rx: Request) -> api.CompleteUser:
        """Return the authenticated user's profile."""
        return api.CompleteUser(
            username=rx.user.loginName,
            type=api.UserType.person,
            full_name=rx.user.displayName,
            avatar_url='',
            email='',
            first_name='',
            last_name='',
        )

    @route('GET api/v1/projects')
    def on_get_projects(self, rx: Request) -> list[api.Project]:
        """List QField Cloud projects visible to the user, with limit/offset paging."""
        limit = int(rx.qs.get('limit', 100))
        offset = int(rx.qs.get('offset', 0))
        qps = self.get_qfc_projects(rx.project, rx.user)
        return [_format_project(qp, rx) for qp in qps[offset : offset + limit]]

    @route('GET api/v1/projects/(?P<project_id>[^/]+)')
    def on_get_projects_id(self, rx: Request) -> api.Project:
        """Return a single QField Cloud project by uid."""
        self.set_qfc_project_from_parts(rx)
        return _format_project(rx.qfcProject, rx)

    @route('POST api/v1/jobs')
    def on_post_jobs(self, rx: Request) -> api.Job:
        """Create a background job; only 'package' jobs are supported."""
        project_id = rx.post.get('project_id', '')
        type = rx.post.get('type', '')
        self.set_qfc_project(project_id, rx)
        if type != api.TypeEnum.package:
            raise gws.Error(f'Unsupported job type: {type!r}')

        job = self.create_package_job(rx)
        return _format_job(job, rx)

    @route('GET api/v1/jobs/(?P<job_id>[^/]+)')
    def on_get_jobs_id(self, rx: Request) -> api.Job:
        """Return the status of a background job."""
        job_id = rx.parts.get('job_id', '')
        job = self.get_job(job_id, rx.project, rx.user)
        if not job:
            raise gws.NotFoundError(f'Job {job_id!r} not found')
        return _format_job(job, rx)

    @route('GET api/v1/packages/(?P<project_id>[^/]+)/(?P<package_version>[^/]+)')
    def on_get_package(self, rx: Request) -> api.Package:
        """Describe the latest package (file list) for a project."""
        self.set_qfc_project_from_parts(rx)

        # @TODO do we need versions?
        # @TODO do we need layers?
        # package_version = rx.parts.get('package_version', '')

        path_map = self.get_latest_package_path_map(rx)
        return api.Package(
            files=_format_files(path_map),
            layers=[],
            status=api.JobStatusEnum.finished,
            package_id=rx.qfcProject.uid,
            packaged_at=dtx.to_iso_string(),
            data_last_updated_at=dtx.to_iso_string(),
        )

    @route('GET api/v1/packages/(?P<project_id>[^/]+)/(?P<package_version>[^/]+)/files/(?P<file_name>.+)')
    def on_get_package_file(self, rx: Request) -> gws.ContentResponse:
        """Download a single file from the latest package."""
        self.set_qfc_project_from_parts(rx)

        file_name = rx.parts.get('file_name', '')
        path_map = self.get_latest_package_path_map(rx)
        for fname, p in path_map.items():
            if file_name == fname:
                return gws.ContentResponse(contentPath=p)
        raise gws.NotFoundError(f'file {file_name!r} not found')

    @route('GET api/v1/files/(?P<project_id>[^/]+)')
    def on_get_files(self, rx: Request) -> list[api.PackageFile]:
        """List files of the latest package for a project."""
        self.set_qfc_project_from_parts(rx)
        path_map = self.get_latest_package_path_map(rx)
        return _format_files(path_map)

    @route('POST api/v1/deltas/(?P<project_id>[^/]+)')
    def on_post_deltas(self, rx: Request):
        """Accept an uploaded delta file, store it, and apply the changes."""
        self.set_qfc_project_from_parts(rx)

        # deltas come as a multipart file upload
        try:
            js = gws.lib.jsonx.from_string(rx.post['file'].stream.read().decode('utf-8'))
            payload = api.DeltasPayload(
                deltas=js['deltas'],
                files=js.get('files', []),
                id=js['id'],
                project=js['project'],
                version=js['version'],
            )
        except Exception as exc:
            raise gws.BadRequestError(f'invalid delta file content: {exc}')

        self.store_delta_payload(payload, rx)

        # Translate QField deltas into patcher Change records.
        changes = []
        for d in payload.deltas:
            new = d.get('new', {})
            old = d.get('old', {})
            chg = patcher.Change(
                uid=d['uuid'],
                type=d['method'],
                layerUid=d['localLayerId'],
                newAtts=new['attributes'] if new else {},
                oldAtts=old['attributes'] if old else {},
                wkt=new.get('geometry', ''),
            )
            changes.append(chg)

        args = patcher.Args(
            qfcProject=rx.qfcProject,
            caps=self.get_caps(rx.qfcProject),
            project=rx.project,
            user=rx.user,
            baseDir='',
            changes=changes,
        )
        self.get_patcher().apply_changes(self.root, args)

        # Mark the stored payload as applied so the client stops polling.
        self.set_delta_payload_applied(payload.id, rx)

    @route('GET api/v1/deltas/(?P<project_id>[^/]+)/(?P<payload_id>.+)')
    def on_get_deltas(self, rx: Request) -> list[api.StoredDelta]:
        """Return the stored deltas of a previously uploaded payload."""
        self.set_qfc_project_from_parts(rx)

        # the content of the delta does not seem to matter much, only the ID and status=applied
        # see QField/src/core/qfieldcloud/qfieldcloudproject.cpp : getDeltaStatus()

        payload_id = rx.parts.get('payload_id', '')
        sds = self.get_delta_payload(payload_id, rx)
        if not sds:
            raise gws.NotFoundError(f'delta {payload_id=} not found')
        return sds

    @route('POST api/v1/files/(?P<project_id>[^/]+)/(?P<path>.+)')
    def on_post_file(self, rx: Request):
        """Accept a single uploaded file (e.g. an attachment) and apply it via the patcher."""
        self.set_qfc_project_from_parts(rx)

        path = rx.parts.get('path', '')
        try:
            fc = rx.post['file'].stream.read()
        except Exception as exc:
            raise gws.BadRequestError(f'invalid file upload: {exc}')

        args = patcher.Args(
            qfcProject=rx.qfcProject,
            caps=self.get_caps(rx.qfcProject),
            project=rx.project,
            user=rx.user,
            baseDir='',
            filePath=path,
            fileContent=fc,
        )
        self.get_patcher().apply_upload(self.root, args)

    ##

    def get_packager(self) -> packager.Object:
        """Return a fresh packager instance."""
        return packager.Object()

    def get_patcher(self) -> patcher.Object:
        """Return a fresh patcher instance."""
        return patcher.Object()

    ##

    def get_caps(self, qfc_project: core.QfcProject) -> caps.Caps:
        """Return parsed capabilities for a project, using the in-memory cache.

        On a cache miss, parses the QGIS project, stores the result in
        ``capsCache`` and persists a pickle in the project cache dir.
        """
        # capsCache is dropped by __getstate__, so it may be missing here.
        if not hasattr(self, 'capsCache'):
            self.capsCache = {}
        cs = self.get_cached_caps(qfc_project)
        if cs:
            return cs

        pa = caps.Parser(qfc_project)
        pa.parse()
        # Persisted snapshot of the parsed caps; not read back in this module —
        # presumably for external consumers/debugging (TODO confirm).
        gws.u.serialize_to_path(pa.caps, f'{self.fs_project_cache_dir(qfc_project)}/caps.pickle')
        pa.create_models()
        pa.assign_path_props()

        self.capsCache[qfc_project.uid] = pa.caps
        gws.log.debug(f'get_caps: {qfc_project.uid=}: created')
        return pa.caps

    def get_cached_caps(self, qfc_project: core.QfcProject) -> Optional[caps.Caps]:
        """Return cached caps, invalidating the entry if the QGIS source changed."""
        cs = self.capsCache.get(qfc_project.uid)
        if not cs:
            gws.log.debug(f'get_caps: {qfc_project.uid=}: not found')
            return

        qp = qfc_project.qgisProvider.qgis_project()
        if qp.sourceHash != cs.sourceHash:
            gws.log.debug(f'get_caps: {qfc_project.uid=}: hash changed: {cs.sourceHash=} != {qp.sourceHash=}')
            self.capsCache.pop(qfc_project.uid, None)
            return

        gws.log.debug(f'get_caps: {qfc_project.uid=}: CACHED!')
        return cs

    def authorize_from_credentials(self, credentials: gws.Data, rx: Request):
        """Authenticate username/password, create a session and fill rx auth fields.

        Raises:
            gws.ForbiddenError: On invalid credentials.
        """
        am = self.root.app.authMgr
        user = am.authenticate(self.method, credentials)
        if not user:
            raise gws.ForbiddenError('invalid username or password')
        rx.sess = am.sessionMgr.create(self.method, user)
        rx.user = user
        # The session uid doubles as the API token.
        rx.token = rx.sess.uid

    def authorize_from_token(self, rx: Request):
        """Validate the ``Authorization: Token <t>`` header and fill rx auth fields.

        Raises:
            gws.ForbiddenError: On missing header or invalid/expired token.
        """
        h = rx.req.header('Authorization', '')
        m = re.match(r'^Token (.+)$', h)
        if not m:
            raise gws.ForbiddenError('token_auth: missing or invalid Authorization header')
        token = m.group(1)
        am = self.root.app.authMgr
        sess = am.sessionMgr.get_valid(token)
        if not sess:
            raise gws.ForbiddenError(f'token_auth: invalid or expired {token=}')
        rx.sess = sess
        rx.user = sess.user
        rx.token = sess.uid
        # Touch the session to extend its lifetime.
        am.sessionMgr.save(sess)
        gws.log.debug(f'token_auth: ok: {rx.token=} {rx.user.uid=} {rx.user.loginName=}')

    ##

    def set_qfc_project(self, uid: str, rx: Request):
        """Resolve a QfcProject by uid into ``rx.qfcProject``.

        Raises:
            gws.NotFoundError: If the project does not exist or is not accessible.
        """
        qp = self.get_qfc_project(uid, rx.project, rx.user)
        if not qp:
            raise gws.NotFoundError(f'project {uid!r} not found')
        rx.qfcProject = qp

    def set_qfc_project_from_parts(self, rx: Request):
        """Resolve ``rx.qfcProject`` from the ``project_id`` path variable."""
        uid = rx.parts.get('project_id', '')
        self.set_qfc_project(uid, rx)

    ##

    def get_qfc_projects(self, project: gws.Project, user: gws.User) -> list[core.QfcProject]:
        """Return the configured QfcProjects the user is allowed to use."""
        return [p for p in self.qfcProjects if user.can_use(p)]

    def get_qfc_project(self, qfc_project_uid: str, project: gws.Project, user: gws.User) -> Optional[core.QfcProject]:
        """Return an accessible QfcProject by uid, or None."""
        for qp in self.get_qfc_projects(project, user):
            if qp.uid == qfc_project_uid:
                return qp

    ##

    def create_package_job(self, rx: Request) -> gws.Job:
        """Create and schedule a background packaging job for the current project."""
        mgr = self.root.app.jobMgr
        p = WorkerPayload(
            actionUid=self.uid,
            jobType='package',
            qfcProjectUid=rx.qfcProject.uid,
            projectUid=rx.project.uid,
        )
        job = mgr.create_job(
            PackageWorker,
            rx.user,
            payload=gws.u.to_dict(p),
        )
        return mgr.schedule_job(job)

    def create_package_from_worker(self, worker: 'PackageWorker', pa: WorkerPayload):
        """Build a package on behalf of a background worker (see PackageWorker.work)."""
        project = worker.user.require_project(pa.projectUid)
        qfc_project = gws.u.require(self.get_qfc_project(pa.qfcProjectUid, project, worker.user))

        self.fs_cleanup_old_packages(qfc_project)

        # Millisecond timestamp doubles as the package version/dir suffix.
        uid = dtx.to_basic_string(with_ms=True)
        pkg_dir = self.fs_new_package_dir(qfc_project, uid)
        args = packager.Args(
            uid=uid,
            qfcProject=qfc_project,
            caps=self.get_caps(qfc_project),
            project=project,
            user=worker.user,
            packageDir=pkg_dir,
            mapCacheDir=self.fs_project_cache_dir(qfc_project),
            withBaseMap=True,
            withData=True,
            withMedia=True,
            withQgis=True,
        )
        self.get_packager().create_package(self.root, args)

    def create_package_from_cli(self, qfc_project_uid: str, target_dir: str, project: gws.Project, user: gws.User):
        """Build a package synchronously into ``target_dir`` (CLI entry point).

        Raises:
            gws.NotFoundError: If the QfcProject is unknown or inaccessible.
        """
        qfc_project = self.get_qfc_project(qfc_project_uid, project, user)
        if not qfc_project:
            raise gws.NotFoundError(f'project {qfc_project_uid!r} not found')

        args = packager.Args(
            uid='cli',
            qfcProject=qfc_project,
            caps=self.get_caps(qfc_project),
            project=project,
            user=user,
            packageDir=target_dir,
            mapCacheDir=self.fs_project_cache_dir(qfc_project),
            withBaseMap=True,
            withData=True,
            withMedia=True,
            withQgis=True,
        )
        self.get_packager().create_package(self.root, args)

    def get_job(self, job_id: str, project: gws.Project, user: gws.User) -> Optional[gws.Job]:
        """Return the user's job by id, or None."""
        return self.root.app.jobMgr.get_job(job_id, user=user)

    ##

    def store_delta_payload(self, payload: api.DeltasPayload, rx: Request):
        """Persist an uploaded delta payload as a list of StoredDelta JSON records."""
        self.fs_cleanup_old_deltas(rx.qfcProject)
        sds = [
            api.StoredDelta(
                id=delta['uuid'],
                deltafile_id=payload.id,
                created_by=rx.user.loginName,
                created_at=dtx.to_iso_string(),
                updated_at=dtx.to_iso_string(),
                status='STATUS_PENDING',
                client_id=delta['clientId'],
                output=None,
                last_status='pending',
                last_feedback=None,
                content=delta,
            )
            for delta in payload.deltas
        ]
        gws.lib.jsonx.to_path(
            self.fs_delta_payload_path(rx.qfcProject, payload.id),
            sds,
        )

    def set_delta_payload_applied(self, payload_id: str, rx: Request):
        """Mark all stored deltas of a payload as applied and rewrite the file."""
        sds = self.get_delta_payload(payload_id, rx)
        if not sds:
            return
        for sd in sds:
            sd.status = 'STATUS_APPLIED'
            sd.last_status = 'applied'
            sd.updated_at = dtx.to_iso_string()
        gws.lib.jsonx.to_path(
            self.fs_delta_payload_path(rx.qfcProject, payload_id),
            sds,
        )

    def get_delta_payload(self, payload_id: str, rx: Request) -> Optional[list[api.StoredDelta]]:
        """Load the stored deltas of a payload; None if missing, unreadable,
        or created by a different user (ownership check)."""
        path = self.fs_delta_payload_path(rx.qfcProject, payload_id)
        if not os.path.exists(path):
            gws.log.warning(f'stored delta {payload_id=}: not found: {path=}')
            return
        try:
            sds = [api.StoredDelta(d) for d in gws.lib.jsonx.from_path(path)]
        except Exception as exc:
            gws.log.warning(f'stored delta {payload_id=}: failed to load {path=}: {exc}')
            return
        for sd in sds:
            if sd.created_by != rx.user.loginName:
                gws.log.warning(f'stored delta {payload_id=}: user mismatch: {sd.created_by=} != {rx.user.loginName=}')
                return
        return sds

    ##

    def fs_project_base_dir(self, qfc_project: core.QfcProject) -> str:
        """Return (and create) the per-project storage directory."""
        return gws.u.ensure_dir(f'{gws.c.VAR_DIR}/qfieldcloud/projects/{qfc_project.uid}')

    def fs_latest_package_dir(self, qfc_project: core.QfcProject) -> Optional[str]:
        """Return the newest *complete* package directory, or None.

        Directory names are ``package_<timestamp>``, so reverse lexical sort
        yields newest-first; a package counts only if its COMPLETE marker exists.
        """
        base_dir = self.fs_project_base_dir(qfc_project)
        for pkg in sorted(osx.find_directories(base_dir, deep=False), reverse=True):
            m = re.search(r'package_(\d+)', pkg)
            if m and gws.u.is_file(f'{pkg}/{packager.COMPLETE_FILE}'):
                return pkg

    def fs_new_package_dir(self, qfc_project: core.QfcProject, uid: str) -> str:
        """Create and return a new package directory for the given uid."""
        base_dir = self.fs_project_base_dir(qfc_project)
        pkg_dir = gws.u.ensure_dir(f'{base_dir}/package_{uid}')
        return pkg_dir

    def fs_cleanup_old_packages(self, qfc_project: core.QfcProject, keep_seconds: int = 3600):
        """Remove package directories whose mtime is older than ``keep_seconds``."""
        base_dir = self.fs_project_base_dir(qfc_project)
        now = dtx.now().timestamp()
        for pkg in osx.find_directories(base_dir, deep=False):
            m = re.search(r'package_(\d+)', pkg)
            if not m:
                continue
            t = osx.file_mtime(pkg)
            if now - t > keep_seconds:
                gws.log.info(f'fs_cleanup_old_packages: removing old package: {pkg=}')
                osx.rmdir(pkg)

    def fs_project_cache_dir(self, qfc_project: core.QfcProject) -> str:
        """Return (and create) the per-project cache directory."""
        base_dir = self.fs_project_base_dir(qfc_project)
        return gws.u.ensure_dir(f'{base_dir}/cache')

    def fs_project_deltas_dir(self, qfc_project: core.QfcProject) -> str:
        """Return (and create) the per-project deltas directory."""
        base_dir = self.fs_project_base_dir(qfc_project)
        return gws.u.ensure_dir(f'{base_dir}/deltas')

    def fs_delta_payload_path(self, qfc_project: core.QfcProject, payload_id: str) -> str:
        """Return the JSON file path for a delta payload (id sanitized to a uid)."""
        d = self.fs_project_deltas_dir(qfc_project)
        u = gws.u.to_uid(payload_id)
        return f'{d}/{u}.json'

    def fs_cleanup_old_deltas(self, qfc_project: core.QfcProject, keep_seconds: int = 3600):
        """Remove delta files whose mtime is older than ``keep_seconds``."""
        d = self.fs_project_deltas_dir(qfc_project)
        now = dtx.now().timestamp()
        for f in osx.find_files(d, deep=False):
            t = osx.file_mtime(f)
            if now - t > keep_seconds:
                gws.log.info(f'fs_cleanup_old_deltas: removing old delta: {f=}')
                osx.unlink(f)

    def get_latest_package_path_map(self, rx: Request) -> dict[str, str]:
        """Load the {file name: absolute path} map of the latest package; {} on any failure
        (including: no complete package exists yet)."""
        d = self.fs_latest_package_dir(rx.qfcProject)
        try:
            return gws.lib.jsonx.from_path(f'{d}/{packager.PATH_MAP_FILE}')
        except Exception:
            return {}

627 

628 

629## 

630 

631 

class PackageWorker(gws.base.job.worker.Object):
    """Background job worker that creates a QField package for a project."""

    @classmethod
    def run(cls, root: gws.Root, job: gws.Job):
        # Entry point invoked by the job manager.
        w = cls(root, job.user, job)
        w.work()

    def work(self):
        self.update_job(state=gws.JobState.running)
        # The payload dict was produced by gws.u.to_dict(WorkerPayload(...))
        # in Object.create_package_job, so keys are the camelCase attribute names.
        pa = WorkerPayload(gws.u.require(self.get_job()).payload)
        action = cast(Object, self.root.get(pa.actionUid))
        action.create_package_from_worker(self, pa)
        self.update_job(state=gws.JobState.complete)

644 

645 

646## 

647 

648 

# Fixed "created" timestamp reported for every project (the real creation
# time is not tracked; the client only displays this value).
_DATE_CREATED = '2025-10-10T14:00:00'

650 

651 

def _format_project(qp: core.QfcProject, rx: Request) -> api.Project:
    """Build the API representation of a QField Cloud project.

    The requesting user is reported as the owner/admin; most flags are fixed
    since GWS does not track per-project sharing metadata.
    """
    now_iso = dtx.to_iso_string()
    props = dict(
        id=qp.uid,
        name=qp.title,
        owner=rx.user.loginName,
        description='',
        private=True,
        is_public=False,
        created_at=_DATE_CREATED,
        updated_at=now_iso,
        data_last_packaged_at=None,
        data_last_updated_at=now_iso,
        can_repackage=True,
        needs_repackaging=True,
        status='ok',
        user_role='admin',
        user_role_origin='project_owner',
        shared_datasets_project_id=None,
        is_shared_datasets_project=False,
        is_featured=False,
        is_attachment_download_on_demand=False,
    )
    return api.Project(**props)

674 

675 

def _format_files(path_map: dict[str, str]):
    """Convert a package path map ({file name: absolute path}) into API file records."""

    def one_file(fname, p):
        return api.PackageFile(
            name=fname,
            size=osx.file_size(p),
            uploaded_at=_get_time_iso(p),
            is_attachment=False,
            md5sum=_get_md5sum(p),
            last_modified=_get_time_iso(p),
            sha256=_get_sha256(p),
        )

    return [one_file(fname, p) for fname, p in path_map.items()]

689 

690 

def _format_job(job: gws.Job, rx: Request) -> api.Job:
    """Convert a GWS job into its QField Cloud API representation.

    The job payload is the dict produced by ``gws.u.to_dict(WorkerPayload(...))``
    in ``Object.create_package_job``, so its keys are the camelCase attribute
    names of ``WorkerPayload``.
    """
    status_map = {
        gws.JobState.open: api.JobStatusEnum.pending,
        gws.JobState.running: api.JobStatusEnum.started,
        gws.JobState.complete: api.JobStatusEnum.finished,
        gws.JobState.error: api.JobStatusEnum.failed,
    }

    return api.Job(
        id=job.uid,
        # bugfix: the payload keys are WorkerPayload attribute names
        # ('jobType', 'qfcProjectUid'), not snake_case 'job_type'/'project_uid',
        # which always yielded empty strings here.
        type=job.payload.get('jobType', ''),
        created_at=dtx.to_iso_string(job.timeCreated),
        created_by=1,
        project_id=job.payload.get('qfcProjectUid', ''),
        status=status_map.get(job.state, api.JobStatusEnum.pending),
        updated_at=dtx.to_iso_string(job.timeUpdated),
        started_at=dtx.to_iso_string(job.timeUpdated) if job.state == gws.JobState.running else None,
        finished_at=dtx.to_iso_string(job.timeUpdated) if job.state == gws.JobState.complete else None,
    )

710 

711 

712def _get_sha256(path: str) -> str: 

713 with open(path, 'rb') as f: 

714 return hashlib.file_digest(f, 'sha256').hexdigest() 

715 

716 

def _get_time_iso(path: str) -> str:
    """Return the file's modification time as an ISO-8601 string."""
    return dtx.to_iso_string(dtx.from_timestamp(osx.file_mtime(path)))

720 

721 

def _get_md5sum(path: str) -> str:
    """Return the QField-style MD5/ETag checksum of the file at *path*.

    Delegates to ``_get_md5sum_file`` on the opened binary stream.
    """
    with open(path, 'rb') as fh:
        return _get_md5sum_file(fh)

725 

726 

727def _get_md5sum_file(fp, part_size: int = 8 * 1024 * 1024) -> str: 

728 """Compute file hash matching QField's fileEtag implementation. 

729 Returns simple MD5 for files <= part_size, or S3-style multipart ETag for larger files. 

730 """ 

731 fp.seek(0, 2) 

732 file_size = fp.tell() 

733 fp.seek(0) 

734 

735 if file_size <= part_size: 

736 hash = hashlib.md5() 

737 hash.update(fp.read()) 

738 return hash.hexdigest() 

739 

740 md5_sums = b'' 

741 read_size = 0 

742 

743 while read_size < file_size: 

744 hash = hashlib.md5() 

745 hash.update(fp.read(part_size)) 

746 md5_sums += hash.digest() 

747 read_size += part_size 

748 

749 hash = hashlib.md5() 

750 hash.update(md5_sums) 

751 return f'{hash.hexdigest()}-{read_size // part_size}'