Initial commit

This commit is contained in:
René Mathieu
2026-01-17 13:49:51 +01:00
commit 0fef8d96c5
1897 changed files with 396119 additions and 0 deletions

View File

@@ -0,0 +1,20 @@
# Copyright 2008-2018 pydicom authors. See LICENSE file for details.
"""pydicom data manager"""
from .data_manager import (
get_charset_files,
get_testdata_file,
get_testdata_files,
get_palette_files,
DATA_ROOT,
external_data_sources,
fetch_data_files,
)
# Names re-exported by ``from pydicom.data import *``.
# NOTE(review): ``DATA_ROOT`` and ``external_data_sources`` are imported
# above but deliberately not listed here — confirm that is intended.
__all__ = [
    "fetch_data_files",
    "get_charset_files",
    "get_palette_files",
    "get_testdata_files",
    "get_testdata_file",
]

View File

@@ -0,0 +1,23 @@
Filename Character Sets "Patient's Name"
-------- -------------- '--------------'
chrArab.dcm ISO_IR 127 '\xe2\xc8\xc7\xe6\xea^\xe4\xe6\xd2\xc7\xd1'
chrFren.dcm ISO_IR 100 'Buc^J\xe9r\xf4me'
chrFrenMulti.dcm ISO_IR 100 'Buc^J\xe9r\xf4me'
chrGerm.dcm ISO_IR 100 '\xc4neas^R\xfcdiger'
chrGreek.dcm ISO_IR 126 '\xc4\xe9\xef\xed\xf5\xf3\xe9\xef\xf2'
chrH31.dcm ['', 'ISO 2022 IR 87'] 'Yamada^Tarou=\x1b$B;3ED\x1b(B^\x1b$BB@O:\x1b(B=\x1b$B$d$^$@\x1b(B^\x1b$B$?$m$&\x1b(B'
chrH32.dcm ['ISO 2022 IR 13', 'ISO 2022 IR 87'] '\xd4\xcf\xc0\xde^\xc0\xdb\xb3=\x1b$B;3ED\x1b(J^\x1b$BB@O:\x1b(J=\x1b$B$d$^$@\x1b(J^\x1b$B$?$m$&\x1b(J'
chrHbrw.dcm ISO_IR 138 '\xf9\xf8\xe5\xef^\xe3\xe1\xe5\xf8\xe4'
chrI2.dcm ['', 'ISO 2022 IR 149'] 'Hong^Gildong=\x1b$)C\xfb\xf3^\x1b$)C\xd1\xce\xd4\xd7=\x1b$)C\xc8\xab^\x1b$)C\xb1\xe6\xb5\xbf'
chrRuss.dcm ISO_IR 144 '\xbb\xee\xdace\xdc\xd1yp\xd3'
chrX1.dcm ISO_IR 192 'Wang^XiaoDong=\xe7\x8e\x8b^\xe5\xb0\x8f\xe6\x9d\xb1='
chrX2.dcm GB18030 'Wang^XiaoDong=\xcd\xf5^\xd0\xa1\xb6\xab='
Other
=====
chrFrenMulti.dcm is a modified version of chrFren.dcm with multi-valued PN and LO for testing decoding
chrSQEncoding.dcm is a minimal constructed dataset with a sequence that has
a different encoding (['ISO 2022 IR 13', 'ISO 2022 IR 87']) than the dataset (ISO_IR 192)
chrSQEncoding1.dcm is the same dataset with the encoding ['ISO 2022 IR 13', 'ISO 2022 IR 87']
defined in the dataset, but not in the sequence

View File

@@ -0,0 +1,436 @@
# Copyright 2008-2020 pydicom authors. See LICENSE file for details.
"""Management of pydicom's data files.
External Data Sources
---------------------
*pydicom* can also search third-party data sources for matching data. To do so
your project should register its entry points in its `setup.py` file. For
example, a project named "mydata" with the interface class ``MyInterface``
should register:
.. code-block:: python
from setuptools import setup
setup(
...,
entry_points={
"pydicom.data.external_sources": "mydata = mydata:MyInterface",
},
)
The interface class should have, at a minimum, the following two methods:
* ``get_path(self, name: str, dtype: int) -> str`` - returns the absolute path
to the first file with a filename `name` or raises a ``ValueError`` if no
matching file found.
* ``get_paths(self, pattern: str, dtype: int) -> List[str]`` - returns a list
of absolute paths to filenames matching `pattern`.
Where `name` is the name of the filename to search for, `dtype` is an int
that indicates the type of data to search for and should be one of the
following:
* ``0`` - DICOM dataset
* ``1`` - Character set file
* ``2`` - Palette file
* ``3`` - DICOMDIR file
* ``4`` - JPEG file
And lastly, `pattern` is a str used to filter files against when searching.
For a real-life example of an external data source you can look at the
`pydicom-data <https://github.com/pydicom/pydicom-data>`_ repository.
"""
from enum import IntEnum
import fnmatch
import os
from pathlib import Path
from typing import TYPE_CHECKING
from pydicom.data.download import (
data_path_with_download,
calculate_file_hash,
get_cached_filehash,
get_url_map,
get_data_dir,
)
from pydicom.misc import warn_and_log
if TYPE_CHECKING: # pragma: no cover
from pydicom import Dataset
DATA_ROOT = os.fspath(Path(__file__).parent.resolve())
"""The absolute path to the pydicom/data directory."""
class DataTypes(IntEnum):
    """Constants for data types."""

    # Values match the ``dtype`` codes passed to external data source
    # interfaces (see the module docstring above).
    DATASET = 0  # DICOM dataset
    CHARSET = 1  # Character set file
    PALETTE = 2  # Palette file
    DICOMDIR = 3  # DICOMDIR file
    JPEG = 4  # JPEG file
def _check_data_hash(fpath: str) -> bool:
    """Return ``True`` if the SHA256 checksum of the file at ``fpath`` is OK.

    Parameters
    ----------
    fpath : str
        The absolute path to the file to perform the checksum for.

    Returns
    -------
    bool
        ``True`` if the checksum matches those in ``hashes.json``, ``False``
        otherwise.

    Raises
    ------
    pydicom.data.download.NoHashFound
        If the file is missing from ``hashes.json``.
    """
    path = Path(fpath)
    # Compare the file's actual checksum against the recorded reference value
    return calculate_file_hash(path) == get_cached_filehash(path.name)
def get_external_sources() -> dict:
"""Return a :class:`dict` of external data source interfaces.
Returns
-------
dict
A dict of ``{'source name': <interface class instance>}``.
"""
from importlib.metadata import entry_points
# Prefer pydicom-data as the source
sources = {
vv.name: vv.load()()
for vv in entry_points(group="pydicom.data.external_sources")
}
out = {}
if "pydicom-data" in sources:
out["pydicom-data"] = sources["pydicom-data"]
out.update(sources)
return out
# Lazily-populated cache for the discovered external data sources
_EXTERNAL_DATA_SOURCES: dict | None = None


def external_data_sources() -> dict:
    """Return the available external data sources - loaded once."""
    global _EXTERNAL_DATA_SOURCES

    if _EXTERNAL_DATA_SOURCES is not None:
        return _EXTERNAL_DATA_SOURCES

    _EXTERNAL_DATA_SOURCES = get_external_sources()
    return _EXTERNAL_DATA_SOURCES
def online_test_file_dummy_paths() -> dict[str, str]:
    """Return a :class:`dict` of dummy paths to the downloadable test files.

    Returns
    -------
    dict
        A dict of dummy paths to the test files available via download.
    """
    # Present downloadable files as if they lived under DATA_ROOT/test_files
    root = os.path.join(DATA_ROOT, "test_files")
    return {os.path.join(root, name): name for name in get_url_map()}
def fetch_data_files() -> None:
    """Download missing test files to the local cache."""
    # ``get_data_dir()`` also creates the cache directory if required
    get_data_dir()

    failures = []
    for fname in get_url_map():
        # Download missing files or files that don't match the hash
        try:
            data_path_with_download(fname)
        except Exception:
            failures.append(fname)

    if failures:
        raise RuntimeError(
            f"An error occurred downloading the following files: {', '.join(failures)}"
        )
def get_files(
    base: str | os.PathLike, pattern: str = "**/*", dtype: int = DataTypes.DATASET
) -> list[str]:
    """Return all matching file paths from the available data sources.

    First searches the local *pydicom* data store, then any locally available
    external sources, and finally the files available in the
    pydicom/pydicom-data repository.

    .. versionchanged:: 2.1

        Added the `dtype` keyword parameter, modified to search locally
        available external data sources and the pydicom/pydicom-data repository

    Parameters
    ----------
    base : str or os.PathLike
        Base directory to recursively search.
    pattern : str, optional
        The pattern to pass to :meth:`~pathlib.Path.glob`, default
        (``'**/*'``).
    dtype : int, optional
        The type of data to search for when using an external source, one of:

        * ``0`` - DICOM dataset
        * ``1`` - Character set file
        * ``2`` - Palette file
        * ``3`` - DICOMDIR file
        * ``4`` - JPEG file

    Returns
    -------
    list of str
        A list of absolute paths to matching files.
    """
    base = Path(base)

    # Search locally
    files = [os.fspath(m) for m in base.glob(pattern)]

    # Search external sources
    for lib, source in external_data_sources().items():
        fpaths = source.get_paths(pattern, dtype)
        if lib == "pydicom-data":
            # For pydicom-data, check the hash against hashes.json
            fpaths = [p for p in fpaths if _check_data_hash(p)]

        files.extend(fpaths)

    # Search https://github.com/pydicom/pydicom-data or local cache
    # To preserve backwards compatibility filter the downloaded files
    # as if they are stored within DATA_ROOT/test_files/*.dcm
    dummy_online_file_path_map = online_test_file_dummy_paths()
    dummy_online_file_path_filtered = fnmatch.filter(
        dummy_online_file_path_map.keys(), os.path.join(base, pattern)
    )
    download_names = [
        os.fspath(dummy_online_file_path_map[dummy_path])
        for dummy_path in dummy_online_file_path_filtered
    ]

    real_online_file_paths = []
    download_error = False
    for filename in download_names:
        try:
            real_online_file_paths.append(os.fspath(data_path_with_download(filename)))
        except Exception:
            download_error = True

    files += real_online_file_paths

    if download_error:
        # Best-effort: warn about incomplete results rather than fail outright
        warn_and_log(
            "One or more download failures occurred, the list of matching "
            "file paths may be incomplete"
        )

    return files
def get_palette_files(pattern: str = "**/*") -> list[str]:
    """Return a list of absolute paths to palettes with filenames matching
    `pattern`.

    Parameters
    ----------
    pattern : str, optional
        The pattern to pass to :meth:`~pathlib.Path.glob`, default
        (``'**/*'``).

    Returns
    -------
    list of str
        A list of absolute paths to matching files.
    """
    palette_dir = Path(DATA_ROOT) / "palettes"
    matches = get_files(base=palette_dir, pattern=pattern, dtype=DataTypes.PALETTE)
    # Exclude any Python sources stored alongside the palette data
    return [fname for fname in matches if not fname.endswith(".py")]
def get_testdata_file(
    name: str,
    read: bool = False,
    download: bool = True,
) -> "str | Dataset | None":
    """Return an absolute path to the first matching dataset with filename
    `name` that is found in a local or external pydicom datastore.

    First searches the local *pydicom* data store, then any locally available
    external sources, and finally the files available in the
    pydicom/pydicom-data repository.

    .. versionchanged:: 2.1

        Modified to search locally available external data sources and the
        pydicom/pydicom-data repository

    .. versionchanged:: 2.2

        Added the `read` keyword parameter.

    .. versionchanged:: 2.3

        Added the `download` keyword parameter.

    Parameters
    ----------
    name : str
        The full file name (without path)
    read : bool, optional
        If ``True`` then use :func:`~pydicom.filereader.dcmread` to read the
        file and return the corresponding
        :class:`~pydicom.dataset.FileDataset`. Default ``False``.
    download : bool, optional
        If ``True`` (default) download the file if missed locally.

    Returns
    -------
    str, pydicom.dataset.Dataset or None
        The absolute path of the file if found, the dataset itself if `read` is
        ``True``, or ``None`` if the file is not found.

    Raises
    ------
    ValueError
        If `name` is an absolute path.
    """
    if os.path.isabs(name):
        raise ValueError(
            f"'get_testdata_file' does not support absolute paths, as it only works"
            f" with internal pydicom test data - did you mean 'dcmread(\"{name}\")'?"
        )

    path = _get_testdata_file(name=name, download=download)
    if read and path is not None:
        # Deferred import (presumably to avoid a circular import — confirm)
        from pydicom.filereader import dcmread

        return dcmread(path, force=True)

    return path
def _get_testdata_file(name: str, download: bool = True) -> str | None:
    """Return the path of the first available dataset named `name`, searching
    the local data store, external sources, then online, else ``None``."""
    # Check pydicom local
    local = next((Path(DATA_ROOT) / "test_files").rglob(name), None)
    if local is not None:
        return os.fspath(local)

    # Check external data sources
    for lib, source in external_data_sources().items():
        try:
            fpath: str | None = source.get_path(name, dtype=DataTypes.DATASET)
        except ValueError:
            fpath = None

        if not fpath:
            continue

        # For pydicom-data, check the hash against hashes.json
        if lib != "pydicom-data" or _check_data_hash(fpath):
            return fpath

    # Try online
    if download and name in get_url_map():
        try:
            return os.fspath(data_path_with_download(name))
        except Exception:
            warn_and_log(
                f"A download failure occurred while attempting to "
                f"retrieve {name}"
            )

    return None
def get_testdata_files(pattern: str = "**/*") -> list[str]:
    """Return a list of absolute paths to datasets with filenames matching
    `pattern`.

    Parameters
    ----------
    pattern : str, optional
        The pattern to pass to :meth:`~pathlib.Path.glob`, default
        (``'**/*'``).

    Returns
    -------
    list of str
        A list of absolute paths to matching files.

    Raises
    ------
    ValueError
        If `pattern` matches an absolute path.
    """
    if os.path.isabs(pattern):
        raise ValueError(
            "'get_testdata_files' does not support absolute paths, as it only works"
            " with internal pydicom test data."
        )

    data_path = Path(DATA_ROOT) / "test_files"
    files = get_files(base=data_path, pattern=pattern, dtype=DataTypes.DATASET)
    # Exclude any Python sources stored alongside the data files
    files = [filename for filename in files if not filename.endswith(".py")]

    return files
def get_charset_files(pattern: str = "**/*") -> list[str]:
    """Return a list of absolute paths to charsets with filenames matching
    `pattern`.

    Parameters
    ----------
    pattern : str, optional
        The pattern to pass to :meth:`~pathlib.Path.glob`, default
        (``'**/*'``).

    Returns
    -------
    list of str
        A list of absolute paths to matching files.
    """
    data_path = Path(DATA_ROOT) / "charset_files"
    files = get_files(base=data_path, pattern=pattern, dtype=DataTypes.CHARSET)
    # Exclude any Python sources stored alongside the charset data
    files = [filename for filename in files if not filename.endswith(".py")]

    return files

View File

@@ -0,0 +1,295 @@
# Copyright 2020 pydicom authors. See LICENSE file for details.
# Copyright 2018-2019 Cancer Care Associates.
# Relicensed under pydicom LICENSE by Simon Biggs.
import functools
import hashlib
import json
import os
import pathlib
from typing import cast
import urllib.request
import urllib.error
# Optional third-party dependencies: ``requests`` (HTTP downloads) and
# ``tqdm`` (console progress bars) are used when installed; stdlib
# ``urllib`` fallbacks are used otherwise.
try:
    import requests

    HAVE_REQUESTS = True
except ImportError:
    HAVE_REQUESTS = False

try:
    import tqdm

    if HAVE_REQUESTS is False:

        class DownloadProgressBar(tqdm.tqdm):
            """A tqdm progress bar driven by ``urllib.request.urlretrieve``."""

            def update_to(
                self, b: int = 1, bsize: int = 1, tsize: int | None = None
            ) -> None:
                # ``urlretrieve`` reporthook signature: `b` blocks of `bsize`
                # bytes transferred so far, `tsize` total size (if known)
                if tsize is not None:
                    self.total = tsize
                self.update(b * bsize - self.n)

    USE_PROGRESS_BAR = True
except ImportError:
    USE_PROGRESS_BAR = False

from . import retry
from pydicom.misc import warn_and_log

# Directory containing this module (and the urls.json/hashes.json records)
HERE = pathlib.Path(__file__).resolve().parent
_SIMULATE_NETWORK_OUTAGE = False  # For testing network outages
def calculate_file_hash(fpath: pathlib.Path) -> str:
    """Return the SHA256 checksum for the file at `fpath`.

    Parameters
    ----------
    fpath : pathlib.Path
        The absolute path to the file that is to be checksummed.

    Returns
    -------
    str
        The SHA256 checksum of the file.
    """
    chunk_size = 65536
    sha = hashlib.sha256()
    with open(fpath, "rb") as fobj:
        # Stream the file in fixed-size chunks to keep memory use bounded
        while chunk := fobj.read(chunk_size):
            sha.update(chunk)

    return sha.hexdigest()
def get_config_dir() -> pathlib.Path:
    """Return the path to the pydicom config directory, creating it if required

    The config directory will be named ``.pydicom`` and will be created in the
    local user's home directory.
    """
    path = pathlib.Path.home() / ".pydicom"
    path.mkdir(exist_ok=True)
    return path
@retry.retry(
    (urllib.error.HTTPError, urllib.error.URLError),
    exc_msg=("Installing the `requests` package may help"),
)
def download_with_progress(url: str, fpath: pathlib.Path) -> None:
    """Download the file at `url` to `fpath` with a progress bar.

    Uses ``requests`` and ``tqdm`` when available (see the
    ``HAVE_REQUESTS``/``USE_PROGRESS_BAR`` flags above), otherwise falls
    back to :func:`urllib.request.urlretrieve`.  Retried on HTTP/URL errors
    by the :func:`retry.retry` decorator.

    Parameters
    ----------
    url : str
        The URL to download the file from.
    fpath : pathlib.Path
        The absolute path where the file will be written to.
    """
    filename = os.fspath(fpath)

    if HAVE_REQUESTS:
        if USE_PROGRESS_BAR:
            # NOTE(review): no request timeout is set — a stalled connection
            # could block indefinitely; confirm whether a timeout is wanted
            r = requests.get(url, stream=True)
            total_size_in_bytes = int(r.headers.get("content-length", 0))
            with open(fpath, "wb") as file:
                for data in tqdm.tqdm(
                    r.iter_content(chunk_size=4096),
                    total=total_size_in_bytes,
                    unit="B",
                    unit_scale=True,
                    miniters=1,
                    desc=url.split("/")[-1],
                ):
                    file.write(data)
        else:
            # No progress bar available: buffer the whole response in memory
            r = requests.get(url)
            with open(filename, "wb") as f:
                f.write(r.content)
    else:
        if USE_PROGRESS_BAR:
            # DownloadProgressBar is only defined when requests is missing
            with DownloadProgressBar(
                unit="B", unit_scale=True, miniters=1, desc=url.split("/")[-1]
            ) as t:
                urllib.request.urlretrieve(url, filename, reporthook=t.update_to)
        else:
            urllib.request.urlretrieve(url, filename)
def get_data_dir() -> pathlib.Path:
    """Return the path to the cache directory, creating it if required."""
    # The download cache lives inside the per-user config directory
    path = get_config_dir() / "data"
    path.mkdir(exist_ok=True)
    return path
@functools.lru_cache
def get_url_map() -> dict[str, str]:
    """Return a dict containing the URL mappings from ``urls.json``."""
    # Cached: the mapping file is static for the lifetime of the process
    text = (HERE / "urls.json").read_text()
    return cast(dict[str, str], json.loads(text))
def get_url(filename: str) -> str:
    """Return the download URL corresponding to `filename`.

    The filename:URL mappings are located in the ``urls.json`` file.

    Parameters
    ----------
    filename : str
        The filename of the file to get the corresponding URL for.

    Returns
    -------
    str
        The download URL corresponding to `filename`.

    Raises
    ------
    ValueError
        If `filename` is not in the ``urls.json`` record.
    """
    # Convert filename to lowercase because windows filenames are
    # case-insensitive
    lowered = {key.lower(): url for key, url in get_url_map().items()}
    url = lowered.get(filename.lower())
    if url is None:
        raise ValueError("The file provided isn't within pydicom's urls.json record.")

    return url
def data_path_with_download(
    filename: str,
    check_hash: bool = True,
    redownload_on_hash_mismatch: bool = True,
    url: str | None = None,
    quiet: bool = True,
) -> pathlib.Path:
    """Return the absolute path to the cached file with `filename`.

    If the file isn't available in the cache then it will be downloaded.

    Parameters
    ----------
    filename : str
        The filename of the file to return the path to.
    check_hash : bool, optional
        ``True`` to perform a SHA256 checksum on the file, ``False`` otherwise.
    redownload_on_hash_mismatch : bool, optional
        ``True`` to redownload the file on checksum failure, ``False``
        otherwise.
    url : str, optional
        The file's corresponding download URL
    quiet : bool, optional
        Currently unused by this function. Default ``True``.

    Returns
    -------
    pathlib.Path
        The absolute path to the file.

    Raises
    ------
    ValueError
        If the file's checksum doesn't match the recorded hash and
        `redownload_on_hash_mismatch` is ``False``.
    """
    if _SIMULATE_NETWORK_OUTAGE:
        raise RuntimeError("No network!")

    filepath = get_data_dir().joinpath(filename)

    # A cached file with no recorded reference hash is treated as stale
    if check_hash and filepath.exists():
        try:
            get_cached_filehash(filename)
        except NoHashFound:
            filepath.unlink()  # Force a redownload

    if not filepath.exists():
        if url is None:
            url = get_url(filename)

        download_with_progress(url, filepath)

    if check_hash:
        try:
            hash_agrees = data_file_hash_check(filename)
        except NoHashFound:
            # No reference hash to compare against: accept the file as-is
            return filepath.resolve()

        if not hash_agrees:
            if redownload_on_hash_mismatch:
                filepath.unlink()
                # Retry once; the recursive call will not redownload again
                return data_path_with_download(
                    filename, redownload_on_hash_mismatch=False
                )

            raise ValueError("The file on disk does not match the recorded hash.")

    return filepath.resolve()
class NoHashFound(KeyError):
    """Raised when a filename has no corresponding entry in ``hashes.json``."""

    pass
def get_cached_filehash(filename: str) -> str:
    """Return the SHA256 checksum of a cached file.

    Parameters
    ----------
    filename : str
        The filename of the cached file to calculate the checksum for.

    Returns
    -------
    str
        The SHA256 checksum of the cached file.

    Raises
    ------
    NoHashFound
        If `filename` has no entry in ``hashes.json``.
    """
    with open(HERE / "hashes.json") as fobj:
        records = cast(dict[str, str], json.load(fobj))

    # Convert filenames to lowercase because windows filenames are
    # case-insensitive
    lowered = {name.lower(): sha for name, sha in records.items()}
    sha = lowered.get(filename.lower())
    if sha is None:
        raise NoHashFound

    return sha
def data_file_hash_check(filename: str) -> bool:
    """Return ``True`` if the SHA256 checksum of the cached file is correct.

    Parameters
    ----------
    filename : str
        The filename of the cached file to check.

    Returns
    -------
    bool
        ``True`` if the cached file has the correct checksum, ``False``
        otherwise.

    Raises
    ------
    NoHashFound
        If `filename` has no entry in ``hashes.json``; the newly calculated
        checksum is recorded in ``hashes.json`` before re-raising.
    """
    filename = os.fspath(filename)
    actual = calculate_file_hash(get_data_dir().joinpath(filename))

    try:
        expected = get_cached_filehash(filename)
    except NoHashFound:
        warn_and_log("Hash not found in hashes.json. File will be updated.")
        # Record the newly calculated hash so future checks can succeed,
        # then re-raise for the caller
        hashes_path = HERE / "hashes.json"
        with open(hashes_path) as fobj:
            records = json.load(fobj)

        records[filename] = actual
        with open(hashes_path, "w") as fobj:
            json.dump(records, fobj, indent=2, sort_keys=True)

        raise

    return expected == actual

View File

@@ -0,0 +1,81 @@
{
"693_J2KR.dcm": "c392d8bd1f952ed2d9387d5143d34c5a29ac9d74566688169731a50ac6a82aa2",
"693_UNCI.dcm": "42d6c33d6666bf569a53951211be6fca2ab04956db43c3f75a9720d976ab128c",
"693_UNCR.dcm": "cc4cdd599231922ecf63de2ddacf03d51c4588805c9154c2eef1ff49c23b32be",
"JPEG-LL.dcm": "c9d000c75d92b143ce1c0421471a7e9a69c8996d98b2589e533e311615a10079",
"JPEG2000_UNC.dcm": "645ff302c7f7ee6c402d74c7c9e3cb5efdb861a828959cc2adc8775a8260688d",
"JPGLosslessP14SV1_1s_1f_8b.dcm": "1978d4f058e52d3239fae33f261b3dc74605fdd9f89031fffd57bea6218d0dbf",
"MR-SIEMENS-DICOM-WithOverlays.dcm": "094faf56c63bff84c30567e29de0c67d7c5a8ae05cf880ac12175491b6b645d2",
"MR2_J2KI.dcm": "8319846e6ad6dc70dbbaf61748b1987a6807fd02db3da24e7989fd5a5ce19e4e",
"MR2_J2KR.dcm": "707f0a1b648b79f17b61e241af31fb9edea7fe596681a4bcab6cce890300a9a5",
"MR2_UNCI.dcm": "7f79ac33e1ab32e1a8ca10ce62f18e5a2372e78c8a6684af17302b1a0171fc46",
"MR2_UNCR.dcm": "c14c7f0c6e25bd4dfbb822fe264e540fc7142bf1c9d15d4c652ec8f5f97fa9e8",
"OBXXXX1A.dcm": "164a460bebdc15fbe391ad4bfe4c84672eb2bad57adfe7dad372fd7367b0f63e",
"OBXXXX1A_2frame.dcm": "6627f6e46dbf8c16292fb1eaff8807439bcd233dc68099c07f0b83c4093256b1",
"OBXXXX1A_expb.dcm": "0cf6b4b04a1f239755fe9aef2b093b3004a290c262ec2567f555212d6c679c83",
"OBXXXX1A_expb_2frame.dcm": "2ccd16a61d680e85ad82fe1d60c4c50496a91309aa2fd27157fc9660291d6ba5",
"OBXXXX1A_rle.dcm": "aaf57785817dbe35503c6175d677d2efa811f90e931fc5017611ba9ff4c7f92a",
"OBXXXX1A_rle_2frame.dcm": "65bee869c507f535edea93a446a26e941fb9cbc3819e4d73395f11eef56d4687",
"OT-PAL-8-face.dcm": "d5560470077f77ef6a0a52d22f9f61e803436d2b468a9550a4d12c5675ee0a97",
"RG1_J2KI.dcm": "744d01372fdda4e21b507bb7f97329065de961f4f263342079b369b430064d65",
"RG1_J2KR.dcm": "7fbfd29360af806770102fd7c4ffcb2a133075bd00920a4bb63460d516f67ac4",
"RG1_UNCI.dcm": "3561020824868615a93a51078671b3ff73bb2578c966f76def99b4d982897e75",
"RG1_UNCR.dcm": "946f28f48b9fbf360196a9b835c8fce83b0c654bf85a5107663c8a61df02e498",
"RG3_J2KI.dcm": "c90c915c0c373eb6d244151f9476b05e50623c303ac20334ca9ce4aab0dddf19",
"RG3_J2KR.dcm": "ffde92ba154a7d5ed2ab70b7cd37892772f8bef63fb26f9080327c6a089c205b",
"RG3_UNCI.dcm": "9ef0260919de89774da90336ad16c03a5be899a8bb663bbaea52b6d0769bec78",
"RG3_UNCR.dcm": "6babfc42dd404213e1758d6dbb93648c248783cc23f593103fff4295c3374dfb",
"SC_rgb.dcm": "b0f868d6a689a0ff96c39b459caf1b628eacd74134114ce84549573321231138",
"SC_rgb_16bit.dcm": "3dc969768431d1cb2695dcd3f190588b02413798dab8420418d2fbb9cb4d4075",
"SC_rgb_16bit_2frame.dcm": "f251a296e1aa5dde37423a9aacba7f31b0b4869328caa6e42bf6b110f007c401",
"SC_rgb_2frame.dcm": "9b5c0306679675c688c2044d97878a6a14ce9976ecdf022309e5f6e9ceaffd9c",
"SC_rgb_32bit.dcm": "c3dac5c807ab27227c0d36b7cd34bb776103bb08d230ff74e62259eeeef0769a",
"SC_rgb_32bit_2frame.dcm": "33f78c27519f23e0410e9c5d24f55380a431255f00ff95e12a26fd45765a7920",
"SC_rgb_dcmtk_ebcr_dcmd.dcm": "e183a37c833c78da6c516aed9920527d80d7f1bbaf805a92530024e1aa2e74ff",
"SC_rgb_dcmtk_ebcyn1_dcmd.dcm": "a963683216b270b788682dc132a65965406a3100722c2d0c2fd2219a0ea53c66",
"SC_rgb_dcmtk_ebcyn2_dcmd.dcm": "2692a16f99b879c742398f3a5b4b9508165d4fe6b056eaa85642ff6bed80ff62",
"SC_rgb_dcmtk_ebcynp_dcmd.dcm": "6324aa7eb90e57299087a70ff6875b10f4d17b8e359ee2f20f1eaaf3d0876993",
"SC_rgb_dcmtk_ebcys2_dcmd.dcm": "f6334492b38d4494b0e8929c4f6b34e9decba9b2dae4e01749263bf254a8c096",
"SC_rgb_dcmtk_ebcys4_dcmd.dcm": "9fb6b7e5dd1f1097ecb23fcd2afafeee9c5233f75680b0922b723f2f1b7b09ab",
"SC_rgb_expb.dcm": "e92997e0cf83407693478ca6f2ce44f42f50f73751f11c355ce555ef86dc8e84",
"SC_rgb_expb_16bit.dcm": "5e8e2340ba9698deba857f76e0ee007c1acb88de84841519425afe76b5b25c11",
"SC_rgb_expb_16bit_2frame.dcm": "fb88f409d21ca9c08672f32f756d0ba0d57de91f8240cf807971085a600e866b",
"SC_rgb_expb_2frame.dcm": "b8b9adb32b2c3ce33c3136620a9b00c2440e047574305e76d176e28ad374134f",
"SC_rgb_expb_32bit.dcm": "5153bb5df191a2b1ec40f592d433a097523a2979ee2ec22ae47ad2bf823bebd3",
"SC_rgb_expb_32bit_2frame.dcm": "cb4e18465d10d4c60afcf8e591b44687ffac8cfd63ab9ca3b6ad45ec25dc2175",
"SC_rgb_gdcm2k_uncompressed.dcm": "abf72c420b8bb97a29b93cb5d63a633271b65038d8323e28d71334bc56ef1a2b",
"SC_ybr_full_uncompressed.dcm": "3c9f4b2b82a3f88ce5340cb07ce14782dcbb09840938e4489e8c21eac1f02dd6",
"US1_J2KI.dcm": "22340375674ff253196ce8a147acf0458bea3f105ff2c6af81f0eb119729605b",
"US1_J2KR.dcm": "2427fdc82d90cd4ce8a69b5157eecb37549902dce138ac15c6456a7eae70b83d",
"US1_UNCI.dcm": "b7556a5414d5ed6bd0359b8222eda10efcce81762428848d9a3ac6be5b55cb6c",
"US1_UNCR.dcm": "af5a66e40cd49d15dfbf7b78c850eba0662bdc7339339c3fa13f123a57e812cb",
"bad_sequence.dcm": "0677915e5c3e8c98498eb3d1b726ccf38ba0d8ada657c8ca7fe1b8b9b5890f4f",
"color-pl.dcm": "16bfc3134e59d789985efddfc70d924420b16e1c6d1f21c960bb4544c9e9dbf9",
"color-px.dcm": "bf10a89f277743ea337b7c4741efa0709a086f0161e1ff2b94cff01e428047e4",
"color3d_jpeg_baseline.dcm": "c8798b8abf8ae0a18e8c9952e7c7f75f3cc8465234b1b63f9e3ba3bebb9d5625",
"eCT_Supplemental.dcm": "0a4c3aa02d1b0b4826daa5ffe85ef13be83c1433842a9a98b901e075136dd86f",
"emri_small.dcm": "151233ec63f64ebb63b979df51aa827cd612a53422c073f6ef341770c7bc9a56",
"emri_small_RLE.dcm": "93c19bca3fb6b7202dcd067de8d16cb6b3f7c6e9a0632e474aab81175ee45266",
"emri_small_big_endian.dcm": "8e18ed3542bc4df70dc6acda87eab5095b19e2b4c1b7fb72ba457e7c217b1ab7",
"emri_small_jpeg_2k_lossless.dcm": "b2b4063359a08ed3b0afa9f4e4f72f84af79e5116515b446d9a30da9dc7f1888",
"emri_small_jpeg_2k_lossless_too_short.dcm": "8742a49b7d02dedb11e7926d30900a415c42efeff02a64a0aa0f0873cf6da582",
"emri_small_jpeg_ls_lossless.dcm": "24de03c9c0f8b5aa75d7fbcc894f94e612b66702175b4936589a0849ec9f87b4",
"explicit_VR-UN.dcm": "28c4a61022d7dbebec97e2f1bbdad0ed097bee2c62727c26a3f3720248c9c6e7",
"gdcm-US-ALOKA-16.dcm": "f1a2d5f7c4ffe87dc589b12738084099fe44a436f6980f5d7e96a026ad356d65",
"gdcm-US-ALOKA-16_big.dcm": "2a801cbd7bd04ed28b9c14c7a8edb04b43384e38f00574e27c0fab8f4aa62db4",
"liver.dcm": "4f8fb316b6df067bdf2ef7bc2385fd571ad5be67e171aed3ed902a71293d9d5c",
"liver_expb.dcm": "fe3323f3f4a2166e4c5305a2380a035a66504197f3f01a6e2b50bbd9814721d5",
"mlut_18.dcm": "9c65b39df55dc46a4670f76e0ec1093d097206ed46c2d7e23b8051c87ef0228b",
"vlut_04.dcm": "64f54c0f490ce3fa2faac0a90a7ca0166caa025f8fdcfbe181906387a7867c27",
"HTJ2KLossless_08_RGB.dcm": "38f8e8adf46b928a12f1905df1405bc8a32c10286733c47562c75be84ceae00e",
"HTJ2K_08_RGB.dcm": "9a7ae1960f18315c4d58876c2a8333a704e89ca3697edd5b69f600773220eb90",
"JLSL_RGB_ILV0.dcm": "f8836a650728f4f1b014a52905e321490f3eefcc0f71ac27fd2c1bd7fc5bbcc4",
"JLSL_RGB_ILV1.dcm": "281610d528d8e22bd52e79d17261ef0c238ef8cfc50696a2d9875a933108864e",
"JLSL_RGB_ILV2.dcm": "f8d670e9988cbca207d3367d916aff3cb508c076495211a0d132692266e9546d",
"JLSN_RGB_ILV0.dcm": "a377750d24bd3413d21faa343662dfff997db9acf65c0b095c5d8a95beb866fa",
"JLSL_08_07_0_1F.dcm": "308fb028c8fbdd1e9a93e731978ea4da6b15cb55b40451cf6f21e7c9ba35dd8a",
"JLSL_16_15_1_1F.dcm": "61f38f250a7dc82c44529c0face2eeab3ffd02ca8b9dfc756dd818eb252104b6",
"parametric_map_float.dcm": "957f34397c26d82f7a90cad7a653ce0f7238f4be6aa9dfa9a33bae5dc2ce7e23",
"parametric_map_double_float.dcm": "a41e0b78b05e543a2448e22435858f9ca8d5f94807d7b391b93b4bca80e23a22",
"liver_nonbyte_aligned.dcm": "530c6af2a2a0caa6033d99ad407fe1f6e3942c64a8fcfc5649d4d06c26473862"
}

View File

@@ -0,0 +1,29 @@
DICOM Well-known Color Palettes
http://dicom.nema.org/medical/dicom/current/output/chtml/part06/chapter_B.html
+----------------------------+---------------------+------------------+
| (0070,0080) Content Label | SOP Instance UID | Filename |
+============================+=====================+==================+
| HOT_IRON | 1.2.840.10008.1.5.1 | hotiron.dcm |
+----------------------------+---------------------+------------------+
| PET | 1.2.840.10008.1.5.2 | pet.dcm |
+----------------------------+---------------------+------------------+
| HOT_METAL_BLUE | 1.2.840.10008.1.5.3 | hotmetalblue.dcm |
+----------------------------+---------------------+------------------+
| PET_20_STEP | 1.2.840.10008.1.5.4 | pet20step.dcm |
+----------------------------+---------------------+------------------+
| SPRING | 1.2.840.10008.1.5.5 | spring.dcm |
+----------------------------+---------------------+------------------+
| SUMMER | 1.2.840.10008.1.5.6 | summer.dcm |
+----------------------------+---------------------+------------------+
| FALL | 1.2.840.10008.1.5.7 | fall.dcm |
+----------------------------+---------------------+------------------+
| WINTER | 1.2.840.10008.1.5.8 | winter.dcm |
+----------------------------+---------------------+------------------+
* All color palettes have 256 LUT entries, a first mapping of 0, and 8-bit
entries
* HOT_IRON, PET, HOT_METAL_BLUE, and PET_20_STEP use normal color palette LUT
data.
* SPRING, SUMMER, FALL and WINTER use segmented color palette LUT data.

View File

@@ -0,0 +1,62 @@
from functools import wraps
import logging
import time
from typing import Any
from collections.abc import Callable
def retry(
exc: type[Exception] | tuple[type[Exception], ...],
exc_msg: str | None = None,
tries: int = 4,
delay: int = 3,
backoff: int = 2,
logger: logging.Logger | None = None,
) -> Callable[[Callable], Any]:
"""Retry calling the decorated function using an exponential backoff.
https://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
original from: https://wiki.python.org/moin/PythonDecoratorLibrary#Retry
Parameters
----------
exc : Exception or Tuple[Exception, ...]
The exception to check. may be a tuple of exceptions to check.
exc_msg : str, optional
The message to be shown if an exception occurs.
tries : int, optional
The number of times to try (not retry) before giving up, default ``4``.
delay : int, optional
The initial delay between retries in seconds, default ``3``.
backoff : int, optional
The backoff multiplier e.g. value of 2 will double the delay each
retry, default ``2``.
logger : logging.Logger, optional
The logger to use. If ``None`` (default), print to stdout.
"""
def deco_retry(f: Callable) -> Any:
@wraps(f)
def f_retry(*args: Any, **kwargs: Any) -> Any:
mtries, mdelay = tries, delay
while mtries > 1:
try:
return f(*args, **kwargs)
except exc as e:
msg = f"{e}: retrying in {mdelay} seconds..."
if exc_msg:
msg += f" {exc_msg}"
if logger:
logger.warning(msg)
else:
print(msg)
time.sleep(mdelay)
mtries -= 1
mdelay *= backoff
return f(*args, **kwargs)
return f_retry # true decorator
return deco_retry

View File

@@ -0,0 +1,397 @@
Test Files used for testing pydicom
2020-06 Many files were moved to an external data store,
and are downloaded as needed.
-----------------------------------
I obtained images to test the pydicom code, and revised them as follows:
* images were often downsized to keep the total file size quite small (typically <50K-ish). I wanted unittests for the code where I could run a number of tests quickly, and with files I could include in the source (and binary) distributions without bloating them too much
* In some cases, the original files have been binary edited to replace anything that looks like a real patient name
I believe there is no restriction on using any of these files in this manner.
First, which transfer syntax the files are:
ExplVR_BigEnd.dcm : Expl VR Big Endian
ExplVR_BigEndNoMeta.dcm : Expl VR Big Endian
MR_small_bigendian.dcm : Expl VR Big Endian
color-pl.dcm : Expl VR Little Endian
color-px.dcm : Expl VR Little Endian
CT_small.dcm : Expl VR Little Endian
ExplVR_LitEndNoMeta.dcm : Expl VR Little Endian
image_dfl.dcm : Expl VR Little Endian
JPEG-LL.dcm : Expl VR Little Endian
JPEG-lossy.dcm : Expl VR Little Endian
JPEG2000.dcm : Expl VR Little Endian
liver.dcm : Expl VR Little Endian
MR_small.dcm : Expl VR Little Endian
OBXXXX1A.dcm : Expl VR Little Endian
reportsi.dcm : Expl VR Little Endian
test-SR.dcm : Expl VR Little Endian
explicit_VR-UN.dcm : Expl VR Little Endian
UN_sequence.dcm : Expl VR Little Endian
MR_small_implicit.dcm : Impl VR Little Endian
nested_priv_SQ.dcm : Impl VR Little Endian
no_meta_group_length.dcm: Impl VR Little Endian
OT-PAL-8-face.dcm : Impl VR Little Endian
priv_SQ.dcm : Impl VR Little Endian
rtdose.dcm : Impl VR Little Endian
rtplan.dcm : Impl VR Little Endian
rtplan_truncated.dcm : Impl VR Little Endian
rtstruct.dcm : Impl VR Little Endian
693_*.dcm
* Regression datasets for issue #693
* JPEG2000, JPEG2000Lossless and uncompressed versions
* Mismatch between BitsStored and sample bit depth
bad_sequence.dcm
* Anonymized test dataset for issue #1067, provided by @sylvainKritter
* JPEGLossless:Non-hierarchical-1stOrderPrediction
* contains invalid sequence (encoded as Implicit Little Endian) with VR
"UN"
CT_small.dcm
* CT image, Explicit VR, LittleEndian
* Downsized to 128x128 from 'CT1_UNC', ftp://medical.nema.org/MEDICAL/Dicom/DataSets/WG04/
eCT_Supplemental.dcm
* Original filename CT0012
* Taken from ftp://medical.nema.org/medical/dicom/Multiframe/CT/nemamfct.images.tar.bz2
* 2 frames, 16 bits allocated/stored, MONOCHROME2
* Enhanced CT with supplemental (at IV 1024) 16-bit palette colour LUT data
GDCMJ2K_TextGBR.dcm (from GDCM)
* JPEG 2000 Lossless transfer syntax
* Contains non-conformant Pixel Data with a JP2 header
* unsigned 8-bit, 3 samples/px, YBR_RCT Photometric Interpretation
J2K_pixelrep_mismatch.dcm
* Dataset from issue 1149
* J2K data is unsigned, Pixel Representation 1
* Bits Stored is 13
MR_small.dcm
* MR image, Explicit VR, LittleEndian
* Downsized to 64x64 from 'MR1_UNC', ftp://medical.nema.org/MEDICAL/Dicom/DataSets/WG04/
* Explicit VR big endian version created using DCMTK's dcmconv for PR #714
MR_small_implicit.dcm
* The same dataset as MR_small, saved with Implicit VR using dcmodify
MR_small_bigendian.dcm
* The same dataset as MR_small, saved as Big Endian using dcmodify
MR2_*.dcm
* JPEG2000, JPEG2000Lossless and uncompressed versions
* unsigned 16-bit/12-bit with rescale and windowing
* From ftp://medical.nema.org/MEDICAL/Dicom/DataSets/WG04
JPGExtended.dcm
* 1.2.840.10008.1.2.4.51 - JPEG Extended
* Fixed version of JPEG-lossy.dcm
JPGLosslessP14SV1_1s_1f_8b.dcm
* 1.2.840.10008.1.2.4.70 - JPEG Lossless, Process 14, Selection Value 1
* 1 sample/px, 1 frame, 8-bits stored, monochrome2
JPEG2000.dcm and JPEG2000_UNC.dcm (uncompressed version)
* JPEG 2000 small image
* to test JPEG transfer syntax, eventually JPEG decompression
* Edited 'NM1_J2KI' from ftp://medical.nema.org/MEDICAL/Dicom/DataSets/WG04
image_dfl.dcm
* Compressed (using "deflate" zlib compression) after FileMeta
* 'image_dfl' from http://www.dclunie.com/images/compressed/
gdcm-US-ALOKA-16.dcm (from GDCM)
* Little endian implicit
* Segmented 16-bit Palette Color LUT Data
* Modified to remove original patient name and ID from Pixel Data
gdcm-US-ALOKA-16_big.dcm (from GDCM)
* Big endian implicit version of gdcm-US-ALOKA-16.dcm
* Converted to big endian using DCMTK's dcmodify +tb
ExplVR_BigEnd.dcm
* Big Endian test image
* Also is Samples Per Pixel of 3 (RGB)
* Downsized to 60x80 from 'US-RGB-8-epicard' at http://www.barre.nom.fr/medical/samples/
JPEG-LL.dcm
* NM1_JPLL from ftp://medical.nema.org/MEDICAL/Dicom/DataSets/WG04/
* Transfer Syntax 1.2.840.10008.1.2.4.70: JPEG Lossless Default Process 14 [Selection Value 1]
JPEG-lossy.dcm
* NM1_JPLY from ftp://medical.nema.org/MEDICAL/Dicom/DataSets/WG04/
* 1.2.840.10008.1.2.4.51 Default Transfer Syntax for Lossy JPEG 12-bit
* GDCM prints when reading this file: "Unsupported JPEG data precision 12" and "Invalid SOS parameters for sequential JPEG", although it does appear to be read properly
JPEG2000-embedded-sequence-delimiter.dcm
* A copy of JPEG2000.dcm, with 4 of the encoded pixel data bytes replaced with the Sequence Delimiter
* Almost certainly not a valid JPEG anymore, but the DICOM structure is valid
* Used to reproduce #1140.
liver.dcm
* The DICOM SEG example was generated using the dcmqi library: https://github.com/qiicr/dcmqi
* Provided by Andrey Fedorov (@fedorov)
* Explicit VR big endian versions created using DCMTK's dcmconv and a script
used to fix the pixel data for PR #714
* Single frame versions created using a script for PR #714
mlut_18.dcm
* Modality LUT Sequence
* One of the IHE (https://wiki.ihe.net/index.php/Main_Page) MESA display test
images
no_meta.dcm
* Same as CT_small.dcm with no File Meta Information header
UN_sequence.dcm
* Contains only one private sequence with VR UN
* Provided by @naterichman to reproduce issue #1312
Created by a commercial radiotherapy treatment planning system and modified:
rtplan.dcm Implicit VR, Little Endian
rtdose.dcm Implicit VR, Little Endian
* Explicit VR big endian version created using DCMTK's dcmconv and the
pixel data corrected using script for PR #714
* Single frame version created using a script for PR #714
* RLE encoded versions created using GDCM's gdcmconv for PR #708
chr*.dcm
* Character set files for testing (0008,0005) Specific Character Set
* from http://www.dclunie.com/images/charset/SCS*
* downsized to 32x32 since pixel data is irrelevant for these (test pattern only)
empty_charset_LEI.dcm
* Dataset with empty Specific Character Set, regression dataset for #1038
* provided by @micjuel
test-SR.dcm
* from ftp://ftp.dcmtk.org/pub/dicom/offis/software/dscope/dscope360/support/srdoc103.zip, file "test.dcm"
* Structured Reporting example, many levels of nesting
priv_SQ.dcm
* a file with an undefined length SQ item in a private tag.
* minimal data elements kept from example files in issues 91, 97, 98
OBXXXX1A.dcm
* a file with a Photometric Interpretation of PALETTE COLOR
* used to check that the pixel_array is interpreted correctly for such a case
* taken from https://github.com/pydicom/pydicom/issues/205#issuecomment-103329677
* supposedly from a Philips machine
* Explicit VR big endian version created using DCMTK's dcmconv
* 2 frame version created using a script for PR #714
* RLE encoded versions created using GDCM's gdcmconv for PR #708
OT-PAL-8-face.dcm
* a file with a Photometric Interpretation of PALETTE COLOR
* used to check that the pixel_array is interpreted correctly for such a case
* taken from http://www.barre.nom.fr/medical/samples/
RG1_*.dcm
* JPEG2000, JPEG2000Lossless and uncompressed versions
* unsigned 16-bit/15-bit with windowing
* From ftp://medical.nema.org/MEDICAL/Dicom/DataSets/WG04
RG3_*.dcm
* JPEG2000, JPEG2000Lossless and uncompressed versions
* unsigned 16-bit/10-bit with windowing
* From ftp://medical.nema.org/MEDICAL/Dicom/DataSets/WG04
SC_rgb*.dcm
* 16 and 32 bit versions created using a script for PR #714
* Explicit VR big endian version created using DCMTK's dcmconv and the
pixel data corrected using script for PR #714
* 2 frame versions created using a script for PR #714
* RLE encoded versions created using GDCM's gdcmconv for PR #708
SC_rgb_small_odd*.dcm
* 3x3 pixel version added for PR #601
* Big Endian version added for PR #1687
SC_jpeg_no_color_transform.dcm
* 8-bit baseline JPEG compressed in RGB color space without transformation
into YCbCr color space
* Individual tile of a TCGA whole slide image in Aperio SVS format obtained
from TCIA
* Created for PR #878 using DCMTK's img2cdm script with the value of the
Photometric Interpretation element patched
SC_jpeg_no_color_transform_2.dcm
* 8-bit baseline JPEG compressed in RGB color space without transformation
into YCbCr color space
* Individual tile of a TCGA whole slide image in Aperio SVS format obtained
from TCIA with APP14 marker segment included in JPEG header
* Created for PR #878 using DCMTK's img2cdm script with the value of the
Photometric Interpretation element patched
SC_ybr_full_uncompressed.dcm
* Uncompressed version of SC_rgb_dcmtk_+eb+cy+n2.dcm using gdcmconv
* PhotometricInterpretation is YBR_FULL
SC_ybr_full_422_uncompressed.dcm
* Uncompressed version of SC_rgb_dcmtk_+eb+cy+n2.dcm using gdcmconv
* Converted to YBR_FULL_422 using a script by @scaramallion
* PhotometricInterpretation is YBR_FULL_422
US1_*.dcm
* JPEG2000, JPEG2000Lossless and uncompressed versions
* unsigned 3 channel 8-bit/8-bit
* From ftp://medical.nema.org/MEDICAL/Dicom/DataSets/WG04
vlut_04.dcm
* VOI LUT Sequence
* One of the IHE (https://wiki.ihe.net/index.php/Main_Page) MESA display test
images
zipMR.gz
* a gzipped version of MR_small.dcm
* used for checking that deferred read reopens as zip again (issue 103)
explicit_VR-UN.dcm
* taken from test data in issue #968
* original image taken from https://www.cancerimagingarchive.net,
(freely available de-identified datasets)
* image was compressed using "gdcmconv --j2k <original.dcm>"
* almost all tags have VR "UN" due to gdcmconv issue
== Examples Datasets ==
* examples_jpeg2k.dcm: identical to US1_J2KR.dcm
* examples_overlay.dcm: MR-SIEMENS-DICOM-WithOverlays.dcm with cropped Pixel Data and Overlay Data
* examples_palette.dcm: OBXXXX1A.dcm with cropped Pixel Data
* examples_rgb_color.dcm: US1_UNCR.dcm with cropped Pixel Data
* examples_ybr_color.dcm: color3d_jpeg_baseline.dcm with reduced frames and rescaled Pixel Data
== DICOMDIR tests ==
dicomdirtests files were from https://www.pcir.org, freely available image sets.
They were downsized to 16x16 images to keep them very small so they
could be added to the source distribution without bloating it. For the
same reason, many were removed, leaving only samples of the studies,
series, and images.
For the subdirectories ending in "N" (e.g. CT2N, CT5N), the name indicates
the number of images inside the folder, i.e. CT2N has two images,
CT5N has five. This was a memory-aid for use in unit tests.
Below is the hierarchy of Patient, Study, Series, Images that comes from a
straight read of the dicomdirtests DICOMDIR file. The DICOMDIR file itself
was created using the dcmtk program dcmgpdir. It complained about different
Specific Character Set in some of the files, so some with 2022 IR6 were set
to ISO_IR 100.
Patient: 77654033: Doe^Archibald
Study 2: 20010101: XR C Spine Comp Min 4 Views
Series 1: CR: (1 image)
['./77654033/CR1/6154']
Series 2: CR: (1 image)
['./77654033/CR2/6247']
Series 3: CR: (1 image)
['./77654033/CR3/6278']
Study 2: 19950903: CT, HEAD/BRAIN WO CONTRAST
Series 2: CT: (4 images)
['./77654033/CT2/17106',
'./77654033/CT2/17136',
'./77654033/CT2/17166',
'./77654033/CT2/17196']
Patient: 98890234: Doe^Peter
Study 2: 20010101:
Series 4: CT: (2 images)
['./98892001/CT2N/6293',
'./98892001/CT2N/6924']
Series 5: CT: (5 images)
['./98892001/CT5N/2062',
'./98892001/CT5N/2392',
'./98892001/CT5N/2693',
'./98892001/CT5N/3023',
'./98892001/CT5N/3353']
Study 428: 20030505: Carotids
Series 1: MR: (1 image)
['./98892003/MR1/15820']
Series 2: MR: (1 image)
['./98892003/MR2/15970']
Study 134: 20030505: Brain
Series 1: MR: (1 image)
['./98892003/MR1/4919']
Series 2: MR: (3 images)
['./98892003/MR2/4950',
'./98892003/MR2/5011',
'./98892003/MR2/4981']
Study 2: 20030505: Brain-MRA
Series 1: MR: (1 image)
['./98892003/MR1/5641']
Series 2: MR: (3 images)
['./98892003/MR2/6935',
'./98892003/MR2/6605',
'./98892003/MR2/6273']
Series 700: MR: (7 images)
['./98892003/MR700/4558',
'./98892003/MR700/4528',
'./98892003/MR700/4588',
'./98892003/MR700/4467',
'./98892003/MR700/4618',
'./98892003/MR700/4678',
'./98892003/MR700/4648']
== Overlay Data ==
MR-SIEMENS-DICOM-WithOverlays.dcm (from GDCM)
* Little Endian Explicit VR
* Single frame, single channel Pixel Data
* Single frame Overlay Data in group 0x6000
* Icon Image Sequence
* 8-bit Palette Color LUT
== ICC Profiles ==
* crayons.icc
ICC profile from https://github.com/mm2/Little-CMS (MIT License)
== Licenses ==
The datasets from GDCM (github.com/malaterre/GDCM) are used under the following
license:
Program: GDCM (Grassroots DICOM). A DICOM library
Copyright (c) 2006-2016 Mathieu Malaterre
Copyright (c) 1993-2005 CREATIS
(CREATIS = Centre de Recherche et d'Applications en Traitement de l'Image)
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither name of Mathieu Malaterre, or CREATIS, nor the names of any
contributors (CNRS, INSERM, UCB, Universite Lyon I), may be used to
endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS''
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Some files were not shown because too many files have changed in this diff Show More