@@ -1,7 +1,10 @@
 from __future__ import annotations
 
 from collections.abc import Callable
+from glob import glob
 import json
+import logging
+from multiprocessing import Manager, Process
 import os
 import os.path
 from pathlib import Path
@@ -21,6 +24,7 @@
 from ..consts import DRAFT, dandiset_metadata_file
 from ..dandiarchive import DandisetURL
 from ..download import (
+    DownloadDirectory,
     Downloader,
     DownloadExisting,
     DownloadFormat,
@@ -1038,3 +1042,81 @@ def test_pyouthelper_time_remaining_1339():
             assert len(done) == 2
         else:
             assert done[-1] == f"ETA: {10 - i} seconds<"
+
+
+def test_DownloadDirectory_basic(tmp_path: Path) -> None:
+    with DownloadDirectory(tmp_path, digests={}) as dl:
+        assert dl.dirpath.exists()
+        assert dl.writefile.exists()
+        assert dl.writefile.stat().st_size == 0
+        assert dl.offset == 0
+
+        dl.append(b"123")
+        assert dl.fp is not None
+        dl.fp.flush()  # appends are not flushed automatically
+        assert dl.writefile.stat().st_size == 3
+        assert dl.offset == 0  # doesn't change
+
+        dl.append(b"456")
+    # but after we are done - should be a full file!
+    assert tmp_path.stat().st_size == 6
+    assert tmp_path.read_bytes() == b"123456"
+
+    # no problem with overwriting with new content
+    with DownloadDirectory(tmp_path, digests={}) as dl:
+        dl.append(b"789")
+    assert tmp_path.read_bytes() == b"789"
+
+    # even if path is a directory which we "overwrite"
+    tmp_path.unlink()
+    tmp_path.mkdir()
+    (tmp_path / "somedata.dat").write_text("content")
+    with DownloadDirectory(tmp_path, digests={}) as dl:
+        assert set(glob(f"{tmp_path}*")) == {str(tmp_path), str(dl.dirpath)}
+        dl.append(b"123")
+    assert tmp_path.read_bytes() == b"123"
+
+    # no temp .dandidownload folder is left behind
+    assert set(glob(f"{tmp_path}*")) == {str(tmp_path)}
+
+    # test locking
+    def subproc(path, results):
+        try:
+            with DownloadDirectory(path, digests={}):
+                results.append("re-entered fine")
+        except Exception as exc:
+            results.append(str(exc))
+
+    with Manager() as manager:
+        results = manager.list()
+        with DownloadDirectory(tmp_path, digests={}) as dl:
+            dl.append(b"123")
+            p1 = Process(target=subproc, args=(tmp_path, results))
+            p1.start()
+            p1.join()
+        assert len(results) == 1
+        assert results[0] == f"Could not acquire download lock for {tmp_path}"
+    assert tmp_path.read_bytes() == b"123"
+
+
+def test_DownloadDirectory_exc(
+    tmp_path: Path, caplog: pytest.LogCaptureFixture
+) -> None:
+    caplog.set_level(logging.DEBUG, logger="dandi")
+    # and now let's exit with exception
+    with pytest.raises(RuntimeError):
+        with DownloadDirectory(tmp_path, digests={}) as dl:
+            dl.append(b"456")
+            raise RuntimeError("Boom")
+    assert (
+        "dandi",
+        10,
+        f"{dl.dirpath} - entered __exit__ with position 3 with exception: "
+        "<class 'RuntimeError'>, Boom",
+    ) == caplog.record_tuples[-1]
+    # and we left without cleanup but closed things up after ourselves
+    assert tmp_path.exists()
+    assert tmp_path.is_dir()
+    assert dl.dirpath.exists()
+    assert dl.fp is None
+    assert dl.writefile.read_bytes() == b"456"
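
Read together, these tests pin down the DownloadDirectory contract: appended bytes are staged in a writefile inside a temporary .dandidownload directory next to the target, a lock prevents a second process from entering the same target concurrently, a clean exit moves the staged file into place (even over a path that was previously a directory) and removes the temporary directory, while an exceptional exit closes the file but leaves the staging directory behind. A minimal usage sketch inferred from the behavior above (the target path and chunk contents are illustrative, not taken from the diff):

    from pathlib import Path

    from dandi.download import DownloadDirectory

    target = Path("asset.dat")  # illustrative target path
    with DownloadDirectory(target, digests={}) as dl:
        # bytes accumulate in dl.writefile inside the temporary dl.dirpath
        dl.append(b"first chunk")
        dl.append(b", second chunk")
    # on a clean exit the staged file replaces the target
    assert target.read_bytes() == b"first chunk, second chunk"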