Initial commit

This commit is contained in:
René Mathieu
2026-01-17 13:49:51 +01:00
commit 0fef8d96c5
1897 changed files with 396119 additions and 0 deletions

View File

@@ -0,0 +1,60 @@
# Copyright 2008-2018 pydicom authors. See LICENSE file for details.
"""pydicom package -- easily handle DICOM files.
See Quick Start below.
-----------
Quick Start
-----------
1. A simple program to read a dicom file, modify a value, and write to a new
file::
from pydicom.filereader import dcmread
dataset = dcmread("file1.dcm")
dataset.PatientName = 'anonymous'
dataset.save_as("file2.dcm")
2. See the files in the examples directory that came with this package for more
examples, including some interactive sessions.
3. Learn the methods of the Dataset class; that is the one you will work with
most directly.
4. Questions and comments can be directed to the pydicom google group:
https://groups.google.com/g/pydicom
5. Bugs and other issues can be reported in the issue tracker:
https://www.github.com/pydicom/pydicom
"""
# Re-export the most commonly used names at package level so users can write
# e.g. ``from pydicom import dcmread`` instead of importing submodules.
# NOTE: import order is kept as-is; these imports run at package import time.
from pydicom.dataelem import DataElement
from pydicom.dataset import Dataset, FileDataset, FileMetaDataset
# Imported for its side effect of making ``pydicom.examples`` reachable as a
# package attribute; intentionally not listed in ``__all__`` below.
import pydicom.examples
from pydicom.filereader import dcmread
from pydicom.filewriter import dcmwrite
from pydicom.pixels.utils import pixel_array, iter_pixels
from pydicom.sequence import Sequence
# Version metadata maintained in the sibling ``_version`` module.
from ._version import (
__version__,
__version_info__,
__dicom_version__,
__concepts_version__,
)
# Explicit public API of the top-level ``pydicom`` namespace (controls
# ``from pydicom import *`` and documents the supported entry points).
__all__ = [
"DataElement",
"Dataset",
"FileDataset",
"FileMetaDataset",
"Sequence",
"dcmread",
"dcmwrite",
"pixel_array",
"iter_pixels",
"__version__",
"__version_info__",
"__dicom_version__",
"__concepts_version__",
]

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,486 @@
"""DICOM UID dictionary auto-generated by generate_uid_dict.py"""
# Each dict entry is UID: (Name, Type, Info, Retired, Keyword)
UID_dictionary = {
'1.2.840.10008.1.1': ('Verification SOP Class', 'SOP Class', '', '', 'Verification'),
'1.2.840.10008.1.2': ('Implicit VR Little Endian', 'Transfer Syntax', 'Default Transfer Syntax for DICOM', '', 'ImplicitVRLittleEndian'),
'1.2.840.10008.1.2.1': ('Explicit VR Little Endian', 'Transfer Syntax', '', '', 'ExplicitVRLittleEndian'),
'1.2.840.10008.1.2.1.98': ('Encapsulated Uncompressed Explicit VR Little Endian', 'Transfer Syntax', '', '', 'EncapsulatedUncompressedExplicitVRLittleEndian'),
'1.2.840.10008.1.2.1.99': ('Deflated Explicit VR Little Endian', 'Transfer Syntax', '', '', 'DeflatedExplicitVRLittleEndian'),
'1.2.840.10008.1.2.2': ('Explicit VR Big Endian', 'Transfer Syntax', '', 'Retired', 'ExplicitVRBigEndian'),
'1.2.840.10008.1.2.4.50': ('JPEG Baseline (Process 1)', 'Transfer Syntax', 'Default Transfer Syntax for Lossy JPEG 8 Bit Image Compression', '', 'JPEGBaseline8Bit'),
'1.2.840.10008.1.2.4.51': ('JPEG Extended (Process 2 and 4)', 'Transfer Syntax', 'Default Transfer Syntax for Lossy JPEG 12 Bit Image Compression (Process 4 only)', '', 'JPEGExtended12Bit'),
'1.2.840.10008.1.2.4.52': ('JPEG Extended (Process 3 and 5)', 'Transfer Syntax', '', 'Retired', 'JPEGExtended35'),
'1.2.840.10008.1.2.4.53': ('JPEG Spectral Selection, Non-Hierarchical (Process 6 and 8)', 'Transfer Syntax', '', 'Retired', 'JPEGSpectralSelectionNonHierarchical68'),
'1.2.840.10008.1.2.4.54': ('JPEG Spectral Selection, Non-Hierarchical (Process 7 and 9)', 'Transfer Syntax', '', 'Retired', 'JPEGSpectralSelectionNonHierarchical79'),
'1.2.840.10008.1.2.4.55': ('JPEG Full Progression, Non-Hierarchical (Process 10 and 12)', 'Transfer Syntax', '', 'Retired', 'JPEGFullProgressionNonHierarchical1012'),
'1.2.840.10008.1.2.4.56': ('JPEG Full Progression, Non-Hierarchical (Process 11 and 13)', 'Transfer Syntax', '', 'Retired', 'JPEGFullProgressionNonHierarchical1113'),
'1.2.840.10008.1.2.4.57': ('JPEG Lossless, Non-Hierarchical (Process 14)', 'Transfer Syntax', '', '', 'JPEGLossless'),
'1.2.840.10008.1.2.4.58': ('JPEG Lossless, Non-Hierarchical (Process 15)', 'Transfer Syntax', '', 'Retired', 'JPEGLosslessNonHierarchical15'),
'1.2.840.10008.1.2.4.59': ('JPEG Extended, Hierarchical (Process 16 and 18)', 'Transfer Syntax', '', 'Retired', 'JPEGExtendedHierarchical1618'),
'1.2.840.10008.1.2.4.60': ('JPEG Extended, Hierarchical (Process 17 and 19)', 'Transfer Syntax', '', 'Retired', 'JPEGExtendedHierarchical1719'),
'1.2.840.10008.1.2.4.61': ('JPEG Spectral Selection, Hierarchical (Process 20 and 22)', 'Transfer Syntax', '', 'Retired', 'JPEGSpectralSelectionHierarchical2022'),
'1.2.840.10008.1.2.4.62': ('JPEG Spectral Selection, Hierarchical (Process 21 and 23)', 'Transfer Syntax', '', 'Retired', 'JPEGSpectralSelectionHierarchical2123'),
'1.2.840.10008.1.2.4.63': ('JPEG Full Progression, Hierarchical (Process 24 and 26)', 'Transfer Syntax', '', 'Retired', 'JPEGFullProgressionHierarchical2426'),
'1.2.840.10008.1.2.4.64': ('JPEG Full Progression, Hierarchical (Process 25 and 27)', 'Transfer Syntax', '', 'Retired', 'JPEGFullProgressionHierarchical2527'),
'1.2.840.10008.1.2.4.65': ('JPEG Lossless, Hierarchical (Process 28)', 'Transfer Syntax', '', 'Retired', 'JPEGLosslessHierarchical28'),
'1.2.840.10008.1.2.4.66': ('JPEG Lossless, Hierarchical (Process 29)', 'Transfer Syntax', '', 'Retired', 'JPEGLosslessHierarchical29'),
'1.2.840.10008.1.2.4.70': ('JPEG Lossless, Non-Hierarchical, First-Order Prediction (Process 14 [Selection Value 1])', 'Transfer Syntax', 'Default Transfer Syntax for Lossless JPEG Image Compression', '', 'JPEGLosslessSV1'),
'1.2.840.10008.1.2.4.80': ('JPEG-LS Lossless Image Compression', 'Transfer Syntax', '', '', 'JPEGLSLossless'),
'1.2.840.10008.1.2.4.81': ('JPEG-LS Lossy (Near-Lossless) Image Compression', 'Transfer Syntax', '', '', 'JPEGLSNearLossless'),
'1.2.840.10008.1.2.4.90': ('JPEG 2000 Image Compression (Lossless Only)', 'Transfer Syntax', '', '', 'JPEG2000Lossless'),
'1.2.840.10008.1.2.4.91': ('JPEG 2000 Image Compression', 'Transfer Syntax', '', '', 'JPEG2000'),
'1.2.840.10008.1.2.4.92': ('JPEG 2000 Part 2 Multi-component Image Compression (Lossless Only)', 'Transfer Syntax', '', '', 'JPEG2000MCLossless'),
'1.2.840.10008.1.2.4.93': ('JPEG 2000 Part 2 Multi-component Image Compression', 'Transfer Syntax', '', '', 'JPEG2000MC'),
'1.2.840.10008.1.2.4.94': ('JPIP Referenced', 'Transfer Syntax', '', '', 'JPIPReferenced'),
'1.2.840.10008.1.2.4.95': ('JPIP Referenced Deflate', 'Transfer Syntax', '', '', 'JPIPReferencedDeflate'),
'1.2.840.10008.1.2.4.100': ('MPEG2 Main Profile / Main Level', 'Transfer Syntax', '', '', 'MPEG2MPML'),
'1.2.840.10008.1.2.4.100.1': ('Fragmentable MPEG2 Main Profile / Main Level', 'Transfer Syntax', '', '', 'MPEG2MPMLF'),
'1.2.840.10008.1.2.4.101': ('MPEG2 Main Profile / High Level', 'Transfer Syntax', '', '', 'MPEG2MPHL'),
'1.2.840.10008.1.2.4.101.1': ('Fragmentable MPEG2 Main Profile / High Level', 'Transfer Syntax', '', '', 'MPEG2MPHLF'),
'1.2.840.10008.1.2.4.102': ('MPEG-4 AVC/H.264 High Profile / Level 4.1', 'Transfer Syntax', '', '', 'MPEG4HP41'),
'1.2.840.10008.1.2.4.102.1': ('Fragmentable MPEG-4 AVC/H.264 High Profile / Level 4.1', 'Transfer Syntax', '', '', 'MPEG4HP41F'),
'1.2.840.10008.1.2.4.103': ('MPEG-4 AVC/H.264 BD-compatible High Profile / Level 4.1', 'Transfer Syntax', '', '', 'MPEG4HP41BD'),
'1.2.840.10008.1.2.4.103.1': ('Fragmentable MPEG-4 AVC/H.264 BD-compatible High Profile / Level 4.1', 'Transfer Syntax', '', '', 'MPEG4HP41BDF'),
'1.2.840.10008.1.2.4.104': ('MPEG-4 AVC/H.264 High Profile / Level 4.2 For 2D Video', 'Transfer Syntax', '', '', 'MPEG4HP422D'),
'1.2.840.10008.1.2.4.104.1': ('Fragmentable MPEG-4 AVC/H.264 High Profile / Level 4.2 For 2D Video', 'Transfer Syntax', '', '', 'MPEG4HP422DF'),
'1.2.840.10008.1.2.4.105': ('MPEG-4 AVC/H.264 High Profile / Level 4.2 For 3D Video', 'Transfer Syntax', '', '', 'MPEG4HP423D'),
'1.2.840.10008.1.2.4.105.1': ('Fragmentable MPEG-4 AVC/H.264 High Profile / Level 4.2 For 3D Video', 'Transfer Syntax', '', '', 'MPEG4HP423DF'),
'1.2.840.10008.1.2.4.106': ('MPEG-4 AVC/H.264 Stereo High Profile / Level 4.2', 'Transfer Syntax', '', '', 'MPEG4HP42STEREO'),
'1.2.840.10008.1.2.4.106.1': ('Fragmentable MPEG-4 AVC/H.264 Stereo High Profile / Level 4.2', 'Transfer Syntax', '', '', 'MPEG4HP42STEREOF'),
'1.2.840.10008.1.2.4.107': ('HEVC/H.265 Main Profile / Level 5.1', 'Transfer Syntax', '', '', 'HEVCMP51'),
'1.2.840.10008.1.2.4.108': ('HEVC/H.265 Main 10 Profile / Level 5.1', 'Transfer Syntax', '', '', 'HEVCM10P51'),
'1.2.840.10008.1.2.4.201': ('High-Throughput JPEG 2000 Image Compression (Lossless Only)', 'Transfer Syntax', '', '', 'HTJ2KLossless'),
'1.2.840.10008.1.2.4.202': ('High-Throughput JPEG 2000 with RPCL Options Image Compression (Lossless Only)', 'Transfer Syntax', '', '', 'HTJ2KLosslessRPCL'),
'1.2.840.10008.1.2.4.203': ('High-Throughput JPEG 2000 Image Compression', 'Transfer Syntax', '', '', 'HTJ2K'),
'1.2.840.10008.1.2.4.204': ('JPIP HTJ2K Referenced', 'Transfer Syntax', '', '', 'JPIPHTJ2KReferenced'),
'1.2.840.10008.1.2.4.205': ('JPIP HTJ2K Referenced Deflate', 'Transfer Syntax', '', '', 'JPIPHTJ2KReferencedDeflate'),
'1.2.840.10008.1.2.5': ('RLE Lossless', 'Transfer Syntax', '', '', 'RLELossless'),
'1.2.840.10008.1.2.6.1': ('RFC 2557 MIME encapsulation', 'Transfer Syntax', '', 'Retired', 'RFC2557MIMEEncapsulation'),
'1.2.840.10008.1.2.6.2': ('XML Encoding', 'Transfer Syntax', '', 'Retired', 'XMLEncoding'),
'1.2.840.10008.1.2.7.1': ('SMPTE ST 2110-20 Uncompressed Progressive Active Video', 'Transfer Syntax', '', '', 'SMPTEST211020UncompressedProgressiveActiveVideo'),
'1.2.840.10008.1.2.7.2': ('SMPTE ST 2110-20 Uncompressed Interlaced Active Video', 'Transfer Syntax', '', '', 'SMPTEST211020UncompressedInterlacedActiveVideo'),
'1.2.840.10008.1.2.7.3': ('SMPTE ST 2110-30 PCM Digital Audio', 'Transfer Syntax', '', '', 'SMPTEST211030PCMDigitalAudio'),
'1.2.840.10008.1.3.10': ('Media Storage Directory Storage', 'SOP Class', '', '', 'MediaStorageDirectoryStorage'),
'1.2.840.10008.1.5.1': ('Hot Iron Color Palette SOP Instance', 'Well-known SOP Instance', '', '', 'HotIronPalette'),
'1.2.840.10008.1.5.2': ('PET Color Palette SOP Instance', 'Well-known SOP Instance', '', '', 'PETPalette'),
'1.2.840.10008.1.5.3': ('Hot Metal Blue Color Palette SOP Instance', 'Well-known SOP Instance', '', '', 'HotMetalBluePalette'),
'1.2.840.10008.1.5.4': ('PET 20 Step Color Palette SOP Instance', 'Well-known SOP Instance', '', '', 'PET20StepPalette'),
'1.2.840.10008.1.5.5': ('Spring Color Palette SOP Instance', 'Well-known SOP Instance', '', '', 'SpringPalette'),
'1.2.840.10008.1.5.6': ('Summer Color Palette SOP Instance', 'Well-known SOP Instance', '', '', 'SummerPalette'),
'1.2.840.10008.1.5.7': ('Fall Color Palette SOP Instance', 'Well-known SOP Instance', '', '', 'FallPalette'),
'1.2.840.10008.1.5.8': ('Winter Color Palette SOP Instance', 'Well-known SOP Instance', '', '', 'WinterPalette'),
'1.2.840.10008.1.9': ('Basic Study Content Notification SOP Class', 'SOP Class', '', 'Retired', 'BasicStudyContentNotification'),
'1.2.840.10008.1.20': ('Papyrus 3 Implicit VR Little Endian', 'Transfer Syntax', '(2015c)', 'Retired', 'Papyrus3ImplicitVRLittleEndian'),
'1.2.840.10008.1.20.1': ('Storage Commitment Push Model SOP Class', 'SOP Class', '', '', 'StorageCommitmentPushModel'),
'1.2.840.10008.1.20.1.1': ('Storage Commitment Push Model SOP Instance', 'Well-known SOP Instance', '', '', 'StorageCommitmentPushModelInstance'),
'1.2.840.10008.1.20.2': ('Storage Commitment Pull Model SOP Class', 'SOP Class', '', 'Retired', 'StorageCommitmentPullModel'),
'1.2.840.10008.1.20.2.1': ('Storage Commitment Pull Model SOP Instance', 'Well-known SOP Instance', '', 'Retired', 'StorageCommitmentPullModelInstance'),
'1.2.840.10008.1.40': ('Procedural Event Logging SOP Class', 'SOP Class', '', '', 'ProceduralEventLogging'),
'1.2.840.10008.1.40.1': ('Procedural Event Logging SOP Instance', 'Well-known SOP Instance', '', '', 'ProceduralEventLoggingInstance'),
'1.2.840.10008.1.42': ('Substance Administration Logging SOP Class', 'SOP Class', '', '', 'SubstanceAdministrationLogging'),
'1.2.840.10008.1.42.1': ('Substance Administration Logging SOP Instance', 'Well-known SOP Instance', '', '', 'SubstanceAdministrationLoggingInstance'),
'1.2.840.10008.2.6.1': ('DICOM UID Registry', 'DICOM UIDs as a Coding Scheme', '', '', 'DCMUID'),
'1.2.840.10008.2.16.4': ('DICOM Controlled Terminology', 'Coding Scheme', '', '', 'DCM'),
'1.2.840.10008.2.16.5': ('Adult Mouse Anatomy Ontology', 'Coding Scheme', '', '', 'MA'),
'1.2.840.10008.2.16.6': ('Uberon Ontology', 'Coding Scheme', '', '', 'UBERON'),
'1.2.840.10008.2.16.7': ('Integrated Taxonomic Information System (ITIS) Taxonomic Serial Number (TSN)', 'Coding Scheme', '', '', 'ITIS_TSN'),
'1.2.840.10008.2.16.8': ('Mouse Genome Initiative (MGI)', 'Coding Scheme', '', '', 'MGI'),
'1.2.840.10008.2.16.9': ('PubChem Compound CID', 'Coding Scheme', '', '', 'PUBCHEM_CID'),
'1.2.840.10008.2.16.10': ('Dublin Core', 'Coding Scheme', '', '', 'DC'),
'1.2.840.10008.2.16.11': ('New York University Melanoma Clinical Cooperative Group', 'Coding Scheme', '', '', 'NYUMCCG'),
'1.2.840.10008.2.16.12': ('Mayo Clinic Non-radiological Images Specific Body Structure Anatomical Surface Region Guide', 'Coding Scheme', '', '', 'MAYONRISBSASRG'),
'1.2.840.10008.2.16.13': ('Image Biomarker Standardisation Initiative', 'Coding Scheme', '', '', 'IBSI'),
'1.2.840.10008.2.16.14': ('Radiomics Ontology', 'Coding Scheme', '', '', 'RO'),
'1.2.840.10008.2.16.15': ('RadElement', 'Coding Scheme', '', '', 'RADELEMENT'),
'1.2.840.10008.2.16.16': ('ICD-11', 'Coding Scheme', '', '', 'I11'),
'1.2.840.10008.2.16.17': ('Unified numbering system (UNS) for metals and alloys', 'Coding Scheme', '', '', 'UNS'),
'1.2.840.10008.2.16.18': ('Research Resource Identification', 'Coding Scheme', '', '', 'RRID'),
'1.2.840.10008.3.1.1.1': ('DICOM Application Context Name', 'Application Context Name', '', '', 'DICOMApplicationContext'),
'1.2.840.10008.3.1.2.1.1': ('Detached Patient Management SOP Class', 'SOP Class', '', 'Retired', 'DetachedPatientManagement'),
'1.2.840.10008.3.1.2.1.4': ('Detached Patient Management Meta SOP Class', 'Meta SOP Class', '', 'Retired', 'DetachedPatientManagementMeta'),
'1.2.840.10008.3.1.2.2.1': ('Detached Visit Management SOP Class', 'SOP Class', '', 'Retired', 'DetachedVisitManagement'),
'1.2.840.10008.3.1.2.3.1': ('Detached Study Management SOP Class', 'SOP Class', '', 'Retired', 'DetachedStudyManagement'),
'1.2.840.10008.3.1.2.3.2': ('Study Component Management SOP Class', 'SOP Class', '', 'Retired', 'StudyComponentManagement'),
'1.2.840.10008.3.1.2.3.3': ('Modality Performed Procedure Step SOP Class', 'SOP Class', '', '', 'ModalityPerformedProcedureStep'),
'1.2.840.10008.3.1.2.3.4': ('Modality Performed Procedure Step Retrieve SOP Class', 'SOP Class', '', '', 'ModalityPerformedProcedureStepRetrieve'),
'1.2.840.10008.3.1.2.3.5': ('Modality Performed Procedure Step Notification SOP Class', 'SOP Class', '', '', 'ModalityPerformedProcedureStepNotification'),
'1.2.840.10008.3.1.2.5.1': ('Detached Results Management SOP Class', 'SOP Class', '', 'Retired', 'DetachedResultsManagement'),
'1.2.840.10008.3.1.2.5.4': ('Detached Results Management Meta SOP Class', 'Meta SOP Class', '', 'Retired', 'DetachedResultsManagementMeta'),
'1.2.840.10008.3.1.2.5.5': ('Detached Study Management Meta SOP Class', 'Meta SOP Class', '', 'Retired', 'DetachedStudyManagementMeta'),
'1.2.840.10008.3.1.2.6.1': ('Detached Interpretation Management SOP Class', 'SOP Class', '', 'Retired', 'DetachedInterpretationManagement'),
'1.2.840.10008.4.2': ('Storage Service Class', 'Service Class', '', '', 'Storage'),
'1.2.840.10008.5.1.1.1': ('Basic Film Session SOP Class', 'SOP Class', '', '', 'BasicFilmSession'),
'1.2.840.10008.5.1.1.2': ('Basic Film Box SOP Class', 'SOP Class', '', '', 'BasicFilmBox'),
'1.2.840.10008.5.1.1.4': ('Basic Grayscale Image Box SOP Class', 'SOP Class', '', '', 'BasicGrayscaleImageBox'),
'1.2.840.10008.5.1.1.4.1': ('Basic Color Image Box SOP Class', 'SOP Class', '', '', 'BasicColorImageBox'),
'1.2.840.10008.5.1.1.4.2': ('Referenced Image Box SOP Class', 'SOP Class', '', 'Retired', 'ReferencedImageBox'),
'1.2.840.10008.5.1.1.9': ('Basic Grayscale Print Management Meta SOP Class', 'Meta SOP Class', '', '', 'BasicGrayscalePrintManagementMeta'),
'1.2.840.10008.5.1.1.9.1': ('Referenced Grayscale Print Management Meta SOP Class', 'Meta SOP Class', '', 'Retired', 'ReferencedGrayscalePrintManagementMeta'),
'1.2.840.10008.5.1.1.14': ('Print Job SOP Class', 'SOP Class', '', '', 'PrintJob'),
'1.2.840.10008.5.1.1.15': ('Basic Annotation Box SOP Class', 'SOP Class', '', '', 'BasicAnnotationBox'),
'1.2.840.10008.5.1.1.16': ('Printer SOP Class', 'SOP Class', '', '', 'Printer'),
'1.2.840.10008.5.1.1.16.376': ('Printer Configuration Retrieval SOP Class', 'SOP Class', '', '', 'PrinterConfigurationRetrieval'),
'1.2.840.10008.5.1.1.17': ('Printer SOP Instance', 'Well-known SOP Instance', '', '', 'PrinterInstance'),
'1.2.840.10008.5.1.1.17.376': ('Printer Configuration Retrieval SOP Instance', 'Well-known SOP Instance', '', '', 'PrinterConfigurationRetrievalInstance'),
'1.2.840.10008.5.1.1.18': ('Basic Color Print Management Meta SOP Class', 'Meta SOP Class', '', '', 'BasicColorPrintManagementMeta'),
'1.2.840.10008.5.1.1.18.1': ('Referenced Color Print Management Meta SOP Class', 'Meta SOP Class', '', 'Retired', 'ReferencedColorPrintManagementMeta'),
'1.2.840.10008.5.1.1.22': ('VOI LUT Box SOP Class', 'SOP Class', '', '', 'VOILUTBox'),
'1.2.840.10008.5.1.1.23': ('Presentation LUT SOP Class', 'SOP Class', '', '', 'PresentationLUT'),
'1.2.840.10008.5.1.1.24': ('Image Overlay Box SOP Class', 'SOP Class', '', 'Retired', 'ImageOverlayBox'),
'1.2.840.10008.5.1.1.24.1': ('Basic Print Image Overlay Box SOP Class', 'SOP Class', '', 'Retired', 'BasicPrintImageOverlayBox'),
'1.2.840.10008.5.1.1.25': ('Print Queue SOP Instance', 'Well-known SOP Instance', '', 'Retired', 'PrintQueueInstance'),
'1.2.840.10008.5.1.1.26': ('Print Queue Management SOP Class', 'SOP Class', '', 'Retired', 'PrintQueueManagement'),
'1.2.840.10008.5.1.1.27': ('Stored Print Storage SOP Class', 'SOP Class', '', 'Retired', 'StoredPrintStorage'),
'1.2.840.10008.5.1.1.29': ('Hardcopy Grayscale Image Storage SOP Class', 'SOP Class', '', 'Retired', 'HardcopyGrayscaleImageStorage'),
'1.2.840.10008.5.1.1.30': ('Hardcopy Color Image Storage SOP Class', 'SOP Class', '', 'Retired', 'HardcopyColorImageStorage'),
'1.2.840.10008.5.1.1.31': ('Pull Print Request SOP Class', 'SOP Class', '', 'Retired', 'PullPrintRequest'),
'1.2.840.10008.5.1.1.32': ('Pull Stored Print Management Meta SOP Class', 'Meta SOP Class', '', 'Retired', 'PullStoredPrintManagementMeta'),
'1.2.840.10008.5.1.1.33': ('Media Creation Management SOP Class UID', 'SOP Class', '', '', 'MediaCreationManagement'),
'1.2.840.10008.5.1.1.40': ('Display System SOP Class', 'SOP Class', '', '', 'DisplaySystem'),
'1.2.840.10008.5.1.1.40.1': ('Display System SOP Instance', 'Well-known SOP Instance', '', '', 'DisplaySystemInstance'),
'1.2.840.10008.5.1.4.1.1.1': ('Computed Radiography Image Storage', 'SOP Class', '', '', 'ComputedRadiographyImageStorage'),
'1.2.840.10008.5.1.4.1.1.1.1': ('Digital X-Ray Image Storage - For Presentation', 'SOP Class', '', '', 'DigitalXRayImageStorageForPresentation'),
'1.2.840.10008.5.1.4.1.1.1.1.1': ('Digital X-Ray Image Storage - For Processing', 'SOP Class', '', '', 'DigitalXRayImageStorageForProcessing'),
'1.2.840.10008.5.1.4.1.1.1.2': ('Digital Mammography X-Ray Image Storage - For Presentation', 'SOP Class', '', '', 'DigitalMammographyXRayImageStorageForPresentation'),
'1.2.840.10008.5.1.4.1.1.1.2.1': ('Digital Mammography X-Ray Image Storage - For Processing', 'SOP Class', '', '', 'DigitalMammographyXRayImageStorageForProcessing'),
'1.2.840.10008.5.1.4.1.1.1.3': ('Digital Intra-Oral X-Ray Image Storage - For Presentation', 'SOP Class', '', '', 'DigitalIntraOralXRayImageStorageForPresentation'),
'1.2.840.10008.5.1.4.1.1.1.3.1': ('Digital Intra-Oral X-Ray Image Storage - For Processing', 'SOP Class', '', '', 'DigitalIntraOralXRayImageStorageForProcessing'),
'1.2.840.10008.5.1.4.1.1.2': ('CT Image Storage', 'SOP Class', '', '', 'CTImageStorage'),
'1.2.840.10008.5.1.4.1.1.2.1': ('Enhanced CT Image Storage', 'SOP Class', '', '', 'EnhancedCTImageStorage'),
'1.2.840.10008.5.1.4.1.1.2.2': ('Legacy Converted Enhanced CT Image Storage', 'SOP Class', '', '', 'LegacyConvertedEnhancedCTImageStorage'),
'1.2.840.10008.5.1.4.1.1.3': ('Ultrasound Multi-frame Image Storage', 'SOP Class', '', 'Retired', 'UltrasoundMultiFrameImageStorageRetired'),
'1.2.840.10008.5.1.4.1.1.3.1': ('Ultrasound Multi-frame Image Storage', 'SOP Class', '', '', 'UltrasoundMultiFrameImageStorage'),
'1.2.840.10008.5.1.4.1.1.4': ('MR Image Storage', 'SOP Class', '', '', 'MRImageStorage'),
'1.2.840.10008.5.1.4.1.1.4.1': ('Enhanced MR Image Storage', 'SOP Class', '', '', 'EnhancedMRImageStorage'),
'1.2.840.10008.5.1.4.1.1.4.2': ('MR Spectroscopy Storage', 'SOP Class', '', '', 'MRSpectroscopyStorage'),
'1.2.840.10008.5.1.4.1.1.4.3': ('Enhanced MR Color Image Storage', 'SOP Class', '', '', 'EnhancedMRColorImageStorage'),
'1.2.840.10008.5.1.4.1.1.4.4': ('Legacy Converted Enhanced MR Image Storage', 'SOP Class', '', '', 'LegacyConvertedEnhancedMRImageStorage'),
'1.2.840.10008.5.1.4.1.1.5': ('Nuclear Medicine Image Storage', 'SOP Class', '', 'Retired', 'NuclearMedicineImageStorageRetired'),
'1.2.840.10008.5.1.4.1.1.6': ('Ultrasound Image Storage', 'SOP Class', '', 'Retired', 'UltrasoundImageStorageRetired'),
'1.2.840.10008.5.1.4.1.1.6.1': ('Ultrasound Image Storage', 'SOP Class', '', '', 'UltrasoundImageStorage'),
'1.2.840.10008.5.1.4.1.1.6.2': ('Enhanced US Volume Storage', 'SOP Class', '', '', 'EnhancedUSVolumeStorage'),
'1.2.840.10008.5.1.4.1.1.6.3': ('Photoacoustic Image Storage', 'SOP Class', '', '', 'PhotoacousticImageStorage'),
'1.2.840.10008.5.1.4.1.1.7': ('Secondary Capture Image Storage', 'SOP Class', '', '', 'SecondaryCaptureImageStorage'),
'1.2.840.10008.5.1.4.1.1.7.1': ('Multi-frame Single Bit Secondary Capture Image Storage', 'SOP Class', '', '', 'MultiFrameSingleBitSecondaryCaptureImageStorage'),
'1.2.840.10008.5.1.4.1.1.7.2': ('Multi-frame Grayscale Byte Secondary Capture Image Storage', 'SOP Class', '', '', 'MultiFrameGrayscaleByteSecondaryCaptureImageStorage'),
'1.2.840.10008.5.1.4.1.1.7.3': ('Multi-frame Grayscale Word Secondary Capture Image Storage', 'SOP Class', '', '', 'MultiFrameGrayscaleWordSecondaryCaptureImageStorage'),
'1.2.840.10008.5.1.4.1.1.7.4': ('Multi-frame True Color Secondary Capture Image Storage', 'SOP Class', '', '', 'MultiFrameTrueColorSecondaryCaptureImageStorage'),
'1.2.840.10008.5.1.4.1.1.8': ('Standalone Overlay Storage', 'SOP Class', '', 'Retired', 'StandaloneOverlayStorage'),
'1.2.840.10008.5.1.4.1.1.9': ('Standalone Curve Storage', 'SOP Class', '', 'Retired', 'StandaloneCurveStorage'),
'1.2.840.10008.5.1.4.1.1.9.1': ('Waveform Storage - Trial', 'SOP Class', '', 'Retired', 'WaveformStorageTrial'),
'1.2.840.10008.5.1.4.1.1.9.1.1': ('12-lead ECG Waveform Storage', 'SOP Class', '', '', 'TwelveLeadECGWaveformStorage'),
'1.2.840.10008.5.1.4.1.1.9.1.2': ('General ECG Waveform Storage', 'SOP Class', '', '', 'GeneralECGWaveformStorage'),
'1.2.840.10008.5.1.4.1.1.9.1.3': ('Ambulatory ECG Waveform Storage', 'SOP Class', '', '', 'AmbulatoryECGWaveformStorage'),
'1.2.840.10008.5.1.4.1.1.9.1.4': ('General 32-bit ECG Waveform Storage', 'SOP Class', '', '', 'General32bitECGWaveformStorage'),
'1.2.840.10008.5.1.4.1.1.9.2.1': ('Hemodynamic Waveform Storage', 'SOP Class', '', '', 'HemodynamicWaveformStorage'),
'1.2.840.10008.5.1.4.1.1.9.3.1': ('Cardiac Electrophysiology Waveform Storage', 'SOP Class', '', '', 'CardiacElectrophysiologyWaveformStorage'),
'1.2.840.10008.5.1.4.1.1.9.4.1': ('Basic Voice Audio Waveform Storage', 'SOP Class', '', '', 'BasicVoiceAudioWaveformStorage'),
'1.2.840.10008.5.1.4.1.1.9.4.2': ('General Audio Waveform Storage', 'SOP Class', '', '', 'GeneralAudioWaveformStorage'),
'1.2.840.10008.5.1.4.1.1.9.5.1': ('Arterial Pulse Waveform Storage', 'SOP Class', '', '', 'ArterialPulseWaveformStorage'),
'1.2.840.10008.5.1.4.1.1.9.6.1': ('Respiratory Waveform Storage', 'SOP Class', '', '', 'RespiratoryWaveformStorage'),
'1.2.840.10008.5.1.4.1.1.9.6.2': ('Multi-channel Respiratory Waveform Storage', 'SOP Class', '', '', 'MultichannelRespiratoryWaveformStorage'),
'1.2.840.10008.5.1.4.1.1.9.7.1': ('Routine Scalp Electroencephalogram Waveform Storage', 'SOP Class', '', '', 'RoutineScalpElectroencephalogramWaveformStorage'),
'1.2.840.10008.5.1.4.1.1.9.7.2': ('Electromyogram Waveform Storage', 'SOP Class', '', '', 'ElectromyogramWaveformStorage'),
'1.2.840.10008.5.1.4.1.1.9.7.3': ('Electrooculogram Waveform Storage', 'SOP Class', '', '', 'ElectrooculogramWaveformStorage'),
'1.2.840.10008.5.1.4.1.1.9.7.4': ('Sleep Electroencephalogram Waveform Storage', 'SOP Class', '', '', 'SleepElectroencephalogramWaveformStorage'),
'1.2.840.10008.5.1.4.1.1.9.8.1': ('Body Position Waveform Storage', 'SOP Class', '', '', 'BodyPositionWaveformStorage'),
'1.2.840.10008.5.1.4.1.1.10': ('Standalone Modality LUT Storage', 'SOP Class', '', 'Retired', 'StandaloneModalityLUTStorage'),
'1.2.840.10008.5.1.4.1.1.11': ('Standalone VOI LUT Storage', 'SOP Class', '', 'Retired', 'StandaloneVOILUTStorage'),
'1.2.840.10008.5.1.4.1.1.11.1': ('Grayscale Softcopy Presentation State Storage', 'SOP Class', '', '', 'GrayscaleSoftcopyPresentationStateStorage'),
'1.2.840.10008.5.1.4.1.1.11.2': ('Color Softcopy Presentation State Storage', 'SOP Class', '', '', 'ColorSoftcopyPresentationStateStorage'),
'1.2.840.10008.5.1.4.1.1.11.3': ('Pseudo-Color Softcopy Presentation State Storage', 'SOP Class', '', '', 'PseudoColorSoftcopyPresentationStateStorage'),
'1.2.840.10008.5.1.4.1.1.11.4': ('Blending Softcopy Presentation State Storage', 'SOP Class', '', '', 'BlendingSoftcopyPresentationStateStorage'),
'1.2.840.10008.5.1.4.1.1.11.5': ('XA/XRF Grayscale Softcopy Presentation State Storage', 'SOP Class', '', '', 'XAXRFGrayscaleSoftcopyPresentationStateStorage'),
'1.2.840.10008.5.1.4.1.1.11.6': ('Grayscale Planar MPR Volumetric Presentation State Storage', 'SOP Class', '', '', 'GrayscalePlanarMPRVolumetricPresentationStateStorage'),
'1.2.840.10008.5.1.4.1.1.11.7': ('Compositing Planar MPR Volumetric Presentation State Storage', 'SOP Class', '', '', 'CompositingPlanarMPRVolumetricPresentationStateStorage'),
'1.2.840.10008.5.1.4.1.1.11.8': ('Advanced Blending Presentation State Storage', 'SOP Class', '', '', 'AdvancedBlendingPresentationStateStorage'),
'1.2.840.10008.5.1.4.1.1.11.9': ('Volume Rendering Volumetric Presentation State Storage', 'SOP Class', '', '', 'VolumeRenderingVolumetricPresentationStateStorage'),
'1.2.840.10008.5.1.4.1.1.11.10': ('Segmented Volume Rendering Volumetric Presentation State Storage', 'SOP Class', '', '', 'SegmentedVolumeRenderingVolumetricPresentationStateStorage'),
'1.2.840.10008.5.1.4.1.1.11.11': ('Multiple Volume Rendering Volumetric Presentation State Storage', 'SOP Class', '', '', 'MultipleVolumeRenderingVolumetricPresentationStateStorage'),
'1.2.840.10008.5.1.4.1.1.11.12': ('Variable Modality LUT Softcopy Presentation State Storage', 'SOP Class', '', '', 'VariableModalityLUTSoftcopyPresentationStateStorage'),
'1.2.840.10008.5.1.4.1.1.12.1': ('X-Ray Angiographic Image Storage', 'SOP Class', '', '', 'XRayAngiographicImageStorage'),
'1.2.840.10008.5.1.4.1.1.12.1.1': ('Enhanced XA Image Storage', 'SOP Class', '', '', 'EnhancedXAImageStorage'),
'1.2.840.10008.5.1.4.1.1.12.2': ('X-Ray Radiofluoroscopic Image Storage', 'SOP Class', '', '', 'XRayRadiofluoroscopicImageStorage'),
'1.2.840.10008.5.1.4.1.1.12.2.1': ('Enhanced XRF Image Storage', 'SOP Class', '', '', 'EnhancedXRFImageStorage'),
'1.2.840.10008.5.1.4.1.1.12.3': ('X-Ray Angiographic Bi-Plane Image Storage', 'SOP Class', '', 'Retired', 'XRayAngiographicBiPlaneImageStorage'),
'1.2.840.10008.5.1.4.1.1.12.77': ('', 'SOP Class', '(2015c)', 'Retired', ''),
'1.2.840.10008.5.1.4.1.1.13.1.1': ('X-Ray 3D Angiographic Image Storage', 'SOP Class', '', '', 'XRay3DAngiographicImageStorage'),
'1.2.840.10008.5.1.4.1.1.13.1.2': ('X-Ray 3D Craniofacial Image Storage', 'SOP Class', '', '', 'XRay3DCraniofacialImageStorage'),
'1.2.840.10008.5.1.4.1.1.13.1.3': ('Breast Tomosynthesis Image Storage', 'SOP Class', '', '', 'BreastTomosynthesisImageStorage'),
'1.2.840.10008.5.1.4.1.1.13.1.4': ('Breast Projection X-Ray Image Storage - For Presentation', 'SOP Class', '', '', 'BreastProjectionXRayImageStorageForPresentation'),
'1.2.840.10008.5.1.4.1.1.13.1.5': ('Breast Projection X-Ray Image Storage - For Processing', 'SOP Class', '', '', 'BreastProjectionXRayImageStorageForProcessing'),
'1.2.840.10008.5.1.4.1.1.14.1': ('Intravascular Optical Coherence Tomography Image Storage - For Presentation', 'SOP Class', '', '', 'IntravascularOpticalCoherenceTomographyImageStorageForPresentation'),
'1.2.840.10008.5.1.4.1.1.14.2': ('Intravascular Optical Coherence Tomography Image Storage - For Processing', 'SOP Class', '', '', 'IntravascularOpticalCoherenceTomographyImageStorageForProcessing'),
'1.2.840.10008.5.1.4.1.1.20': ('Nuclear Medicine Image Storage', 'SOP Class', '', '', 'NuclearMedicineImageStorage'),
'1.2.840.10008.5.1.4.1.1.30': ('Parametric Map Storage', 'SOP Class', '', '', 'ParametricMapStorage'),
'1.2.840.10008.5.1.4.1.1.40': ('', 'SOP Class', '(2015c)', 'Retired', ''),
'1.2.840.10008.5.1.4.1.1.66': ('Raw Data Storage', 'SOP Class', '', '', 'RawDataStorage'),
'1.2.840.10008.5.1.4.1.1.66.1': ('Spatial Registration Storage', 'SOP Class', '', '', 'SpatialRegistrationStorage'),
'1.2.840.10008.5.1.4.1.1.66.2': ('Spatial Fiducials Storage', 'SOP Class', '', '', 'SpatialFiducialsStorage'),
'1.2.840.10008.5.1.4.1.1.66.3': ('Deformable Spatial Registration Storage', 'SOP Class', '', '', 'DeformableSpatialRegistrationStorage'),
'1.2.840.10008.5.1.4.1.1.66.4': ('Segmentation Storage', 'SOP Class', '', '', 'SegmentationStorage'),
'1.2.840.10008.5.1.4.1.1.66.5': ('Surface Segmentation Storage', 'SOP Class', '', '', 'SurfaceSegmentationStorage'),
'1.2.840.10008.5.1.4.1.1.66.6': ('Tractography Results Storage', 'SOP Class', '', '', 'TractographyResultsStorage'),
'1.2.840.10008.5.1.4.1.1.67': ('Real World Value Mapping Storage', 'SOP Class', '', '', 'RealWorldValueMappingStorage'),
'1.2.840.10008.5.1.4.1.1.68.1': ('Surface Scan Mesh Storage', 'SOP Class', '', '', 'SurfaceScanMeshStorage'),
'1.2.840.10008.5.1.4.1.1.68.2': ('Surface Scan Point Cloud Storage', 'SOP Class', '', '', 'SurfaceScanPointCloudStorage'),
'1.2.840.10008.5.1.4.1.1.77.1': ('VL Image Storage - Trial', 'SOP Class', '', 'Retired', 'VLImageStorageTrial'),
'1.2.840.10008.5.1.4.1.1.77.2': ('VL Multi-frame Image Storage - Trial', 'SOP Class', '', 'Retired', 'VLMultiFrameImageStorageTrial'),
'1.2.840.10008.5.1.4.1.1.77.1.1': ('VL Endoscopic Image Storage', 'SOP Class', '', '', 'VLEndoscopicImageStorage'),
'1.2.840.10008.5.1.4.1.1.77.1.1.1': ('Video Endoscopic Image Storage', 'SOP Class', '', '', 'VideoEndoscopicImageStorage'),
'1.2.840.10008.5.1.4.1.1.77.1.2': ('VL Microscopic Image Storage', 'SOP Class', '', '', 'VLMicroscopicImageStorage'),
'1.2.840.10008.5.1.4.1.1.77.1.2.1': ('Video Microscopic Image Storage', 'SOP Class', '', '', 'VideoMicroscopicImageStorage'),
'1.2.840.10008.5.1.4.1.1.77.1.3': ('VL Slide-Coordinates Microscopic Image Storage', 'SOP Class', '', '', 'VLSlideCoordinatesMicroscopicImageStorage'),
'1.2.840.10008.5.1.4.1.1.77.1.4': ('VL Photographic Image Storage', 'SOP Class', '', '', 'VLPhotographicImageStorage'),
'1.2.840.10008.5.1.4.1.1.77.1.4.1': ('Video Photographic Image Storage', 'SOP Class', '', '', 'VideoPhotographicImageStorage'),
'1.2.840.10008.5.1.4.1.1.77.1.5.1': ('Ophthalmic Photography 8 Bit Image Storage', 'SOP Class', '', '', 'OphthalmicPhotography8BitImageStorage'),
'1.2.840.10008.5.1.4.1.1.77.1.5.2': ('Ophthalmic Photography 16 Bit Image Storage', 'SOP Class', '', '', 'OphthalmicPhotography16BitImageStorage'),
'1.2.840.10008.5.1.4.1.1.77.1.5.3': ('Stereometric Relationship Storage', 'SOP Class', '', '', 'StereometricRelationshipStorage'),
'1.2.840.10008.5.1.4.1.1.77.1.5.4': ('Ophthalmic Tomography Image Storage', 'SOP Class', '', '', 'OphthalmicTomographyImageStorage'),
'1.2.840.10008.5.1.4.1.1.77.1.5.5': ('Wide Field Ophthalmic Photography Stereographic Projection Image Storage', 'SOP Class', '', '', 'WideFieldOphthalmicPhotographyStereographicProjectionImageStorage'),
'1.2.840.10008.5.1.4.1.1.77.1.5.6': ('Wide Field Ophthalmic Photography 3D Coordinates Image Storage', 'SOP Class', '', '', 'WideFieldOphthalmicPhotography3DCoordinatesImageStorage'),
'1.2.840.10008.5.1.4.1.1.77.1.5.7': ('Ophthalmic Optical Coherence Tomography En Face Image Storage', 'SOP Class', '', '', 'OphthalmicOpticalCoherenceTomographyEnFaceImageStorage'),
'1.2.840.10008.5.1.4.1.1.77.1.5.8': ('Ophthalmic Optical Coherence Tomography B-scan Volume Analysis Storage', 'SOP Class', '', '', 'OphthalmicOpticalCoherenceTomographyBscanVolumeAnalysisStorage'),
'1.2.840.10008.5.1.4.1.1.77.1.6': ('VL Whole Slide Microscopy Image Storage', 'SOP Class', '', '', 'VLWholeSlideMicroscopyImageStorage'),
'1.2.840.10008.5.1.4.1.1.77.1.7': ('Dermoscopic Photography Image Storage', 'SOP Class', '', '', 'DermoscopicPhotographyImageStorage'),
'1.2.840.10008.5.1.4.1.1.77.1.8': ('Confocal Microscopy Image Storage', 'SOP Class', '', '', 'ConfocalMicroscopyImageStorage'),
'1.2.840.10008.5.1.4.1.1.77.1.9': ('Confocal Microscopy Tiled Pyramidal Image Storage', 'SOP Class', '', '', 'ConfocalMicroscopyTiledPyramidalImageStorage'),
'1.2.840.10008.5.1.4.1.1.78.1': ('Lensometry Measurements Storage', 'SOP Class', '', '', 'LensometryMeasurementsStorage'),
'1.2.840.10008.5.1.4.1.1.78.2': ('Autorefraction Measurements Storage', 'SOP Class', '', '', 'AutorefractionMeasurementsStorage'),
'1.2.840.10008.5.1.4.1.1.78.3': ('Keratometry Measurements Storage', 'SOP Class', '', '', 'KeratometryMeasurementsStorage'),
'1.2.840.10008.5.1.4.1.1.78.4': ('Subjective Refraction Measurements Storage', 'SOP Class', '', '', 'SubjectiveRefractionMeasurementsStorage'),
'1.2.840.10008.5.1.4.1.1.78.5': ('Visual Acuity Measurements Storage', 'SOP Class', '', '', 'VisualAcuityMeasurementsStorage'),
'1.2.840.10008.5.1.4.1.1.78.6': ('Spectacle Prescription Report Storage', 'SOP Class', '', '', 'SpectaclePrescriptionReportStorage'),
'1.2.840.10008.5.1.4.1.1.78.7': ('Ophthalmic Axial Measurements Storage', 'SOP Class', '', '', 'OphthalmicAxialMeasurementsStorage'),
'1.2.840.10008.5.1.4.1.1.78.8': ('Intraocular Lens Calculations Storage', 'SOP Class', '', '', 'IntraocularLensCalculationsStorage'),
'1.2.840.10008.5.1.4.1.1.79.1': ('Macular Grid Thickness and Volume Report Storage', 'SOP Class', '', '', 'MacularGridThicknessAndVolumeReportStorage'),
'1.2.840.10008.5.1.4.1.1.80.1': ('Ophthalmic Visual Field Static Perimetry Measurements Storage', 'SOP Class', '', '', 'OphthalmicVisualFieldStaticPerimetryMeasurementsStorage'),
'1.2.840.10008.5.1.4.1.1.81.1': ('Ophthalmic Thickness Map Storage', 'SOP Class', '', '', 'OphthalmicThicknessMapStorage'),
'1.2.840.10008.5.1.4.1.1.82.1': ('Corneal Topography Map Storage', 'SOP Class', '', '', 'CornealTopographyMapStorage'),
'1.2.840.10008.5.1.4.1.1.88.1': ('Text SR Storage - Trial', 'SOP Class', '', 'Retired', 'TextSRStorageTrial'),
'1.2.840.10008.5.1.4.1.1.88.2': ('Audio SR Storage - Trial', 'SOP Class', '', 'Retired', 'AudioSRStorageTrial'),
'1.2.840.10008.5.1.4.1.1.88.3': ('Detail SR Storage - Trial', 'SOP Class', '', 'Retired', 'DetailSRStorageTrial'),
'1.2.840.10008.5.1.4.1.1.88.4': ('Comprehensive SR Storage - Trial', 'SOP Class', '', 'Retired', 'ComprehensiveSRStorageTrial'),
'1.2.840.10008.5.1.4.1.1.88.11': ('Basic Text SR Storage', 'SOP Class', '', '', 'BasicTextSRStorage'),
'1.2.840.10008.5.1.4.1.1.88.22': ('Enhanced SR Storage', 'SOP Class', '', '', 'EnhancedSRStorage'),
'1.2.840.10008.5.1.4.1.1.88.33': ('Comprehensive SR Storage', 'SOP Class', '', '', 'ComprehensiveSRStorage'),
'1.2.840.10008.5.1.4.1.1.88.34': ('Comprehensive 3D SR Storage', 'SOP Class', '', '', 'Comprehensive3DSRStorage'),
'1.2.840.10008.5.1.4.1.1.88.35': ('Extensible SR Storage', 'SOP Class', '', '', 'ExtensibleSRStorage'),
'1.2.840.10008.5.1.4.1.1.88.40': ('Procedure Log Storage', 'SOP Class', '', '', 'ProcedureLogStorage'),
'1.2.840.10008.5.1.4.1.1.88.50': ('Mammography CAD SR Storage', 'SOP Class', '', '', 'MammographyCADSRStorage'),
'1.2.840.10008.5.1.4.1.1.88.59': ('Key Object Selection Document Storage', 'SOP Class', '', '', 'KeyObjectSelectionDocumentStorage'),
'1.2.840.10008.5.1.4.1.1.88.65': ('Chest CAD SR Storage', 'SOP Class', '', '', 'ChestCADSRStorage'),
'1.2.840.10008.5.1.4.1.1.88.67': ('X-Ray Radiation Dose SR Storage', 'SOP Class', '', '', 'XRayRadiationDoseSRStorage'),
'1.2.840.10008.5.1.4.1.1.88.68': ('Radiopharmaceutical Radiation Dose SR Storage', 'SOP Class', '', '', 'RadiopharmaceuticalRadiationDoseSRStorage'),
'1.2.840.10008.5.1.4.1.1.88.69': ('Colon CAD SR Storage', 'SOP Class', '', '', 'ColonCADSRStorage'),
'1.2.840.10008.5.1.4.1.1.88.70': ('Implantation Plan SR Storage', 'SOP Class', '', '', 'ImplantationPlanSRStorage'),
'1.2.840.10008.5.1.4.1.1.88.71': ('Acquisition Context SR Storage', 'SOP Class', '', '', 'AcquisitionContextSRStorage'),
'1.2.840.10008.5.1.4.1.1.88.72': ('Simplified Adult Echo SR Storage', 'SOP Class', '', '', 'SimplifiedAdultEchoSRStorage'),
'1.2.840.10008.5.1.4.1.1.88.73': ('Patient Radiation Dose SR Storage', 'SOP Class', '', '', 'PatientRadiationDoseSRStorage'),
'1.2.840.10008.5.1.4.1.1.88.74': ('Planned Imaging Agent Administration SR Storage', 'SOP Class', '', '', 'PlannedImagingAgentAdministrationSRStorage'),
'1.2.840.10008.5.1.4.1.1.88.75': ('Performed Imaging Agent Administration SR Storage', 'SOP Class', '', '', 'PerformedImagingAgentAdministrationSRStorage'),
'1.2.840.10008.5.1.4.1.1.88.76': ('Enhanced X-Ray Radiation Dose SR Storage', 'SOP Class', '', '', 'EnhancedXRayRadiationDoseSRStorage'),
'1.2.840.10008.5.1.4.1.1.88.77': ('Waveform Annotation SR Storage', 'SOP Class', '', '', 'WaveformAnnotationSRStorage'),
'1.2.840.10008.5.1.4.1.1.90.1': ('Content Assessment Results Storage', 'SOP Class', '', '', 'ContentAssessmentResultsStorage'),
'1.2.840.10008.5.1.4.1.1.91.1': ('Microscopy Bulk Simple Annotations Storage', 'SOP Class', '', '', 'MicroscopyBulkSimpleAnnotationsStorage'),
'1.2.840.10008.5.1.4.1.1.104.1': ('Encapsulated PDF Storage', 'SOP Class', '', '', 'EncapsulatedPDFStorage'),
'1.2.840.10008.5.1.4.1.1.104.2': ('Encapsulated CDA Storage', 'SOP Class', '', '', 'EncapsulatedCDAStorage'),
'1.2.840.10008.5.1.4.1.1.104.3': ('Encapsulated STL Storage', 'SOP Class', '', '', 'EncapsulatedSTLStorage'),
'1.2.840.10008.5.1.4.1.1.104.4': ('Encapsulated OBJ Storage', 'SOP Class', '', '', 'EncapsulatedOBJStorage'),
'1.2.840.10008.5.1.4.1.1.104.5': ('Encapsulated MTL Storage', 'SOP Class', '', '', 'EncapsulatedMTLStorage'),
'1.2.840.10008.5.1.4.1.1.128': ('Positron Emission Tomography Image Storage', 'SOP Class', '', '', 'PositronEmissionTomographyImageStorage'),
'1.2.840.10008.5.1.4.1.1.128.1': ('Legacy Converted Enhanced PET Image Storage', 'SOP Class', '', '', 'LegacyConvertedEnhancedPETImageStorage'),
'1.2.840.10008.5.1.4.1.1.129': ('Standalone PET Curve Storage', 'SOP Class', '', 'Retired', 'StandalonePETCurveStorage'),
'1.2.840.10008.5.1.4.1.1.130': ('Enhanced PET Image Storage', 'SOP Class', '', '', 'EnhancedPETImageStorage'),
'1.2.840.10008.5.1.4.1.1.131': ('Basic Structured Display Storage', 'SOP Class', '', '', 'BasicStructuredDisplayStorage'),
'1.2.840.10008.5.1.4.1.1.200.1': ('CT Defined Procedure Protocol Storage', 'SOP Class', '', '', 'CTDefinedProcedureProtocolStorage'),
'1.2.840.10008.5.1.4.1.1.200.2': ('CT Performed Procedure Protocol Storage', 'SOP Class', '', '', 'CTPerformedProcedureProtocolStorage'),
'1.2.840.10008.5.1.4.1.1.200.3': ('Protocol Approval Storage', 'SOP Class', '', '', 'ProtocolApprovalStorage'),
'1.2.840.10008.5.1.4.1.1.200.4': ('Protocol Approval Information Model - FIND', 'SOP Class', '', '', 'ProtocolApprovalInformationModelFind'),
'1.2.840.10008.5.1.4.1.1.200.5': ('Protocol Approval Information Model - MOVE', 'SOP Class', '', '', 'ProtocolApprovalInformationModelMove'),
'1.2.840.10008.5.1.4.1.1.200.6': ('Protocol Approval Information Model - GET', 'SOP Class', '', '', 'ProtocolApprovalInformationModelGet'),
'1.2.840.10008.5.1.4.1.1.200.7': ('XA Defined Procedure Protocol Storage', 'SOP Class', '', '', 'XADefinedProcedureProtocolStorage'),
'1.2.840.10008.5.1.4.1.1.200.8': ('XA Performed Procedure Protocol Storage', 'SOP Class', '', '', 'XAPerformedProcedureProtocolStorage'),
'1.2.840.10008.5.1.4.1.1.201.1': ('Inventory Storage', 'SOP Class', '', '', 'InventoryStorage'),
'1.2.840.10008.5.1.4.1.1.201.2': ('Inventory - FIND', 'SOP Class', '', '', 'InventoryFind'),
'1.2.840.10008.5.1.4.1.1.201.3': ('Inventory - MOVE', 'SOP Class', '', '', 'InventoryMove'),
'1.2.840.10008.5.1.4.1.1.201.4': ('Inventory - GET', 'SOP Class', '', '', 'InventoryGet'),
'1.2.840.10008.5.1.4.1.1.201.5': ('Inventory Creation', 'SOP Class', '', '', 'InventoryCreation'),
'1.2.840.10008.5.1.4.1.1.201.6': ('Repository Query', 'SOP Class', '', '', 'RepositoryQuery'),
'1.2.840.10008.5.1.4.1.1.201.1.1': ('Storage Management SOP Instance', 'Well-known SOP Instance', '', '', 'StorageManagementInstance'),
'1.2.840.10008.5.1.4.1.1.481.1': ('RT Image Storage', 'SOP Class', '', '', 'RTImageStorage'),
'1.2.840.10008.5.1.4.1.1.481.2': ('RT Dose Storage', 'SOP Class', '', '', 'RTDoseStorage'),
'1.2.840.10008.5.1.4.1.1.481.3': ('RT Structure Set Storage', 'SOP Class', '', '', 'RTStructureSetStorage'),
'1.2.840.10008.5.1.4.1.1.481.4': ('RT Beams Treatment Record Storage', 'SOP Class', '', '', 'RTBeamsTreatmentRecordStorage'),
'1.2.840.10008.5.1.4.1.1.481.5': ('RT Plan Storage', 'SOP Class', '', '', 'RTPlanStorage'),
'1.2.840.10008.5.1.4.1.1.481.6': ('RT Brachy Treatment Record Storage', 'SOP Class', '', '', 'RTBrachyTreatmentRecordStorage'),
'1.2.840.10008.5.1.4.1.1.481.7': ('RT Treatment Summary Record Storage', 'SOP Class', '', '', 'RTTreatmentSummaryRecordStorage'),
'1.2.840.10008.5.1.4.1.1.481.8': ('RT Ion Plan Storage', 'SOP Class', '', '', 'RTIonPlanStorage'),
'1.2.840.10008.5.1.4.1.1.481.9': ('RT Ion Beams Treatment Record Storage', 'SOP Class', '', '', 'RTIonBeamsTreatmentRecordStorage'),
'1.2.840.10008.5.1.4.1.1.481.10': ('RT Physician Intent Storage', 'SOP Class', '', '', 'RTPhysicianIntentStorage'),
'1.2.840.10008.5.1.4.1.1.481.11': ('RT Segment Annotation Storage', 'SOP Class', '', '', 'RTSegmentAnnotationStorage'),
'1.2.840.10008.5.1.4.1.1.481.12': ('RT Radiation Set Storage', 'SOP Class', '', '', 'RTRadiationSetStorage'),
'1.2.840.10008.5.1.4.1.1.481.13': ('C-Arm Photon-Electron Radiation Storage', 'SOP Class', '', '', 'CArmPhotonElectronRadiationStorage'),
'1.2.840.10008.5.1.4.1.1.481.14': ('Tomotherapeutic Radiation Storage', 'SOP Class', '', '', 'TomotherapeuticRadiationStorage'),
'1.2.840.10008.5.1.4.1.1.481.15': ('Robotic-Arm Radiation Storage', 'SOP Class', '', '', 'RoboticArmRadiationStorage'),
'1.2.840.10008.5.1.4.1.1.481.16': ('RT Radiation Record Set Storage', 'SOP Class', '', '', 'RTRadiationRecordSetStorage'),
'1.2.840.10008.5.1.4.1.1.481.17': ('RT Radiation Salvage Record Storage', 'SOP Class', '', '', 'RTRadiationSalvageRecordStorage'),
'1.2.840.10008.5.1.4.1.1.481.18': ('Tomotherapeutic Radiation Record Storage', 'SOP Class', '', '', 'TomotherapeuticRadiationRecordStorage'),
'1.2.840.10008.5.1.4.1.1.481.19': ('C-Arm Photon-Electron Radiation Record Storage', 'SOP Class', '', '', 'CArmPhotonElectronRadiationRecordStorage'),
'1.2.840.10008.5.1.4.1.1.481.20': ('Robotic Radiation Record Storage', 'SOP Class', '', '', 'RoboticRadiationRecordStorage'),
'1.2.840.10008.5.1.4.1.1.481.21': ('RT Radiation Set Delivery Instruction Storage', 'SOP Class', '', '', 'RTRadiationSetDeliveryInstructionStorage'),
'1.2.840.10008.5.1.4.1.1.481.22': ('RT Treatment Preparation Storage', 'SOP Class', '', '', 'RTTreatmentPreparationStorage'),
'1.2.840.10008.5.1.4.1.1.481.23': ('Enhanced RT Image Storage', 'SOP Class', '', '', 'EnhancedRTImageStorage'),
'1.2.840.10008.5.1.4.1.1.481.24': ('Enhanced Continuous RT Image Storage', 'SOP Class', '', '', 'EnhancedContinuousRTImageStorage'),
'1.2.840.10008.5.1.4.1.1.481.25': ('RT Patient Position Acquisition Instruction Storage', 'SOP Class', '', '', 'RTPatientPositionAcquisitionInstructionStorage'),
'1.2.840.10008.5.1.4.1.1.501.1': ('DICOS CT Image Storage', 'SOP Class', 'DICOS', '', 'DICOSCTImageStorage'),
'1.2.840.10008.5.1.4.1.1.501.2.1': ('DICOS Digital X-Ray Image Storage - For Presentation', 'SOP Class', 'DICOS', '', 'DICOSDigitalXRayImageStorageForPresentation'),
'1.2.840.10008.5.1.4.1.1.501.2.2': ('DICOS Digital X-Ray Image Storage - For Processing', 'SOP Class', 'DICOS', '', 'DICOSDigitalXRayImageStorageForProcessing'),
'1.2.840.10008.5.1.4.1.1.501.3': ('DICOS Threat Detection Report Storage', 'SOP Class', 'DICOS', '', 'DICOSThreatDetectionReportStorage'),
'1.2.840.10008.5.1.4.1.1.501.4': ('DICOS 2D AIT Storage', 'SOP Class', 'DICOS', '', 'DICOS2DAITStorage'),
'1.2.840.10008.5.1.4.1.1.501.5': ('DICOS 3D AIT Storage', 'SOP Class', 'DICOS', '', 'DICOS3DAITStorage'),
'1.2.840.10008.5.1.4.1.1.501.6': ('DICOS Quadrupole Resonance (QR) Storage', 'SOP Class', 'DICOS', '', 'DICOSQuadrupoleResonanceStorage'),
'1.2.840.10008.5.1.4.1.1.601.1': ('Eddy Current Image Storage', 'SOP Class', 'DICONDE ASTM E2934', '', 'EddyCurrentImageStorage'),
'1.2.840.10008.5.1.4.1.1.601.2': ('Eddy Current Multi-frame Image Storage', 'SOP Class', 'DICONDE ASTM E2934', '', 'EddyCurrentMultiFrameImageStorage'),
'1.2.840.10008.5.1.4.1.2.1.1': ('Patient Root Query/Retrieve Information Model - FIND', 'SOP Class', '', '', 'PatientRootQueryRetrieveInformationModelFind'),
'1.2.840.10008.5.1.4.1.2.1.2': ('Patient Root Query/Retrieve Information Model - MOVE', 'SOP Class', '', '', 'PatientRootQueryRetrieveInformationModelMove'),
'1.2.840.10008.5.1.4.1.2.1.3': ('Patient Root Query/Retrieve Information Model - GET', 'SOP Class', '', '', 'PatientRootQueryRetrieveInformationModelGet'),
'1.2.840.10008.5.1.4.1.2.2.1': ('Study Root Query/Retrieve Information Model - FIND', 'SOP Class', '', '', 'StudyRootQueryRetrieveInformationModelFind'),
'1.2.840.10008.5.1.4.1.2.2.2': ('Study Root Query/Retrieve Information Model - MOVE', 'SOP Class', '', '', 'StudyRootQueryRetrieveInformationModelMove'),
'1.2.840.10008.5.1.4.1.2.2.3': ('Study Root Query/Retrieve Information Model - GET', 'SOP Class', '', '', 'StudyRootQueryRetrieveInformationModelGet'),
'1.2.840.10008.5.1.4.1.2.3.1': ('Patient/Study Only Query/Retrieve Information Model - FIND', 'SOP Class', '', 'Retired', 'PatientStudyOnlyQueryRetrieveInformationModelFind'),
'1.2.840.10008.5.1.4.1.2.3.2': ('Patient/Study Only Query/Retrieve Information Model - MOVE', 'SOP Class', '', 'Retired', 'PatientStudyOnlyQueryRetrieveInformationModelMove'),
'1.2.840.10008.5.1.4.1.2.3.3': ('Patient/Study Only Query/Retrieve Information Model - GET', 'SOP Class', '', 'Retired', 'PatientStudyOnlyQueryRetrieveInformationModelGet'),
'1.2.840.10008.5.1.4.1.2.4.2': ('Composite Instance Root Retrieve - MOVE', 'SOP Class', '', '', 'CompositeInstanceRootRetrieveMove'),
'1.2.840.10008.5.1.4.1.2.4.3': ('Composite Instance Root Retrieve - GET', 'SOP Class', '', '', 'CompositeInstanceRootRetrieveGet'),
'1.2.840.10008.5.1.4.1.2.5.3': ('Composite Instance Retrieve Without Bulk Data - GET', 'SOP Class', '', '', 'CompositeInstanceRetrieveWithoutBulkDataGet'),
'1.2.840.10008.5.1.4.20.1': ('Defined Procedure Protocol Information Model - FIND', 'SOP Class', '', '', 'DefinedProcedureProtocolInformationModelFind'),
'1.2.840.10008.5.1.4.20.2': ('Defined Procedure Protocol Information Model - MOVE', 'SOP Class', '', '', 'DefinedProcedureProtocolInformationModelMove'),
'1.2.840.10008.5.1.4.20.3': ('Defined Procedure Protocol Information Model - GET', 'SOP Class', '', '', 'DefinedProcedureProtocolInformationModelGet'),
'1.2.840.10008.5.1.4.31': ('Modality Worklist Information Model - FIND', 'SOP Class', '', '', 'ModalityWorklistInformationModelFind'),
'1.2.840.10008.5.1.4.32': ('General Purpose Worklist Management Meta SOP Class', 'Meta SOP Class', '', 'Retired', 'GeneralPurposeWorklistManagementMeta'),
'1.2.840.10008.5.1.4.32.1': ('General Purpose Worklist Information Model - FIND', 'SOP Class', '', 'Retired', 'GeneralPurposeWorklistInformationModelFind'),
'1.2.840.10008.5.1.4.32.2': ('General Purpose Scheduled Procedure Step SOP Class', 'SOP Class', '', 'Retired', 'GeneralPurposeScheduledProcedureStep'),
'1.2.840.10008.5.1.4.32.3': ('General Purpose Performed Procedure Step SOP Class', 'SOP Class', '', 'Retired', 'GeneralPurposePerformedProcedureStep'),
'1.2.840.10008.5.1.4.33': ('Instance Availability Notification SOP Class', 'SOP Class', '', '', 'InstanceAvailabilityNotification'),
'1.2.840.10008.5.1.4.34.1': ('RT Beams Delivery Instruction Storage - Trial', 'SOP Class', '', 'Retired', 'RTBeamsDeliveryInstructionStorageTrial'),
'1.2.840.10008.5.1.4.34.2': ('RT Conventional Machine Verification - Trial', 'SOP Class', '', 'Retired', 'RTConventionalMachineVerificationTrial'),
'1.2.840.10008.5.1.4.34.3': ('RT Ion Machine Verification - Trial', 'SOP Class', '', 'Retired', 'RTIonMachineVerificationTrial'),
'1.2.840.10008.5.1.4.34.4': ('Unified Worklist and Procedure Step Service Class - Trial', 'Service Class', '', 'Retired', 'UnifiedWorklistAndProcedureStepTrial'),
'1.2.840.10008.5.1.4.34.4.1': ('Unified Procedure Step - Push SOP Class - Trial', 'SOP Class', '', 'Retired', 'UnifiedProcedureStepPushTrial'),
'1.2.840.10008.5.1.4.34.4.2': ('Unified Procedure Step - Watch SOP Class - Trial', 'SOP Class', '', 'Retired', 'UnifiedProcedureStepWatchTrial'),
'1.2.840.10008.5.1.4.34.4.3': ('Unified Procedure Step - Pull SOP Class - Trial', 'SOP Class', '', 'Retired', 'UnifiedProcedureStepPullTrial'),
'1.2.840.10008.5.1.4.34.4.4': ('Unified Procedure Step - Event SOP Class - Trial', 'SOP Class', '', 'Retired', 'UnifiedProcedureStepEventTrial'),
'1.2.840.10008.5.1.4.34.5': ('UPS Global Subscription SOP Instance', 'Well-known SOP Instance', '', '', 'UPSGlobalSubscriptionInstance'),
'1.2.840.10008.5.1.4.34.5.1': ('UPS Filtered Global Subscription SOP Instance', 'Well-known SOP Instance', '', '', 'UPSFilteredGlobalSubscriptionInstance'),
'1.2.840.10008.5.1.4.34.6': ('Unified Worklist and Procedure Step Service Class', 'Service Class', '', '', 'UnifiedWorklistAndProcedureStep'),
'1.2.840.10008.5.1.4.34.6.1': ('Unified Procedure Step - Push SOP Class', 'SOP Class', '', '', 'UnifiedProcedureStepPush'),
'1.2.840.10008.5.1.4.34.6.2': ('Unified Procedure Step - Watch SOP Class', 'SOP Class', '', '', 'UnifiedProcedureStepWatch'),
'1.2.840.10008.5.1.4.34.6.3': ('Unified Procedure Step - Pull SOP Class', 'SOP Class', '', '', 'UnifiedProcedureStepPull'),
'1.2.840.10008.5.1.4.34.6.4': ('Unified Procedure Step - Event SOP Class', 'SOP Class', '', '', 'UnifiedProcedureStepEvent'),
'1.2.840.10008.5.1.4.34.6.5': ('Unified Procedure Step - Query SOP Class', 'SOP Class', '', '', 'UnifiedProcedureStepQuery'),
'1.2.840.10008.5.1.4.34.7': ('RT Beams Delivery Instruction Storage', 'SOP Class', '', '', 'RTBeamsDeliveryInstructionStorage'),
'1.2.840.10008.5.1.4.34.8': ('RT Conventional Machine Verification', 'SOP Class', '', '', 'RTConventionalMachineVerification'),
'1.2.840.10008.5.1.4.34.9': ('RT Ion Machine Verification', 'SOP Class', '', '', 'RTIonMachineVerification'),
'1.2.840.10008.5.1.4.34.10': ('RT Brachy Application Setup Delivery Instruction Storage', 'SOP Class', '', '', 'RTBrachyApplicationSetupDeliveryInstructionStorage'),
'1.2.840.10008.5.1.4.37.1': ('General Relevant Patient Information Query', 'SOP Class', '', '', 'GeneralRelevantPatientInformationQuery'),
'1.2.840.10008.5.1.4.37.2': ('Breast Imaging Relevant Patient Information Query', 'SOP Class', '', '', 'BreastImagingRelevantPatientInformationQuery'),
'1.2.840.10008.5.1.4.37.3': ('Cardiac Relevant Patient Information Query', 'SOP Class', '', '', 'CardiacRelevantPatientInformationQuery'),
'1.2.840.10008.5.1.4.38.1': ('Hanging Protocol Storage', 'SOP Class', '', '', 'HangingProtocolStorage'),
'1.2.840.10008.5.1.4.38.2': ('Hanging Protocol Information Model - FIND', 'SOP Class', '', '', 'HangingProtocolInformationModelFind'),
'1.2.840.10008.5.1.4.38.3': ('Hanging Protocol Information Model - MOVE', 'SOP Class', '', '', 'HangingProtocolInformationModelMove'),
'1.2.840.10008.5.1.4.38.4': ('Hanging Protocol Information Model - GET', 'SOP Class', '', '', 'HangingProtocolInformationModelGet'),
'1.2.840.10008.5.1.4.39.1': ('Color Palette Storage', 'SOP Class', '', '', 'ColorPaletteStorage'),
'1.2.840.10008.5.1.4.39.2': ('Color Palette Query/Retrieve Information Model - FIND', 'SOP Class', '', '', 'ColorPaletteQueryRetrieveInformationModelFind'),
'1.2.840.10008.5.1.4.39.3': ('Color Palette Query/Retrieve Information Model - MOVE', 'SOP Class', '', '', 'ColorPaletteQueryRetrieveInformationModelMove'),
'1.2.840.10008.5.1.4.39.4': ('Color Palette Query/Retrieve Information Model - GET', 'SOP Class', '', '', 'ColorPaletteQueryRetrieveInformationModelGet'),
'1.2.840.10008.5.1.4.41': ('Product Characteristics Query SOP Class', 'SOP Class', '', '', 'ProductCharacteristicsQuery'),
'1.2.840.10008.5.1.4.42': ('Substance Approval Query SOP Class', 'SOP Class', '', '', 'SubstanceApprovalQuery'),
'1.2.840.10008.5.1.4.43.1': ('Generic Implant Template Storage', 'SOP Class', '', '', 'GenericImplantTemplateStorage'),
'1.2.840.10008.5.1.4.43.2': ('Generic Implant Template Information Model - FIND', 'SOP Class', '', '', 'GenericImplantTemplateInformationModelFind'),
'1.2.840.10008.5.1.4.43.3': ('Generic Implant Template Information Model - MOVE', 'SOP Class', '', '', 'GenericImplantTemplateInformationModelMove'),
'1.2.840.10008.5.1.4.43.4': ('Generic Implant Template Information Model - GET', 'SOP Class', '', '', 'GenericImplantTemplateInformationModelGet'),
'1.2.840.10008.5.1.4.44.1': ('Implant Assembly Template Storage', 'SOP Class', '', '', 'ImplantAssemblyTemplateStorage'),
'1.2.840.10008.5.1.4.44.2': ('Implant Assembly Template Information Model - FIND', 'SOP Class', '', '', 'ImplantAssemblyTemplateInformationModelFind'),
'1.2.840.10008.5.1.4.44.3': ('Implant Assembly Template Information Model - MOVE', 'SOP Class', '', '', 'ImplantAssemblyTemplateInformationModelMove'),
'1.2.840.10008.5.1.4.44.4': ('Implant Assembly Template Information Model - GET', 'SOP Class', '', '', 'ImplantAssemblyTemplateInformationModelGet'),
'1.2.840.10008.5.1.4.45.1': ('Implant Template Group Storage', 'SOP Class', '', '', 'ImplantTemplateGroupStorage'),
'1.2.840.10008.5.1.4.45.2': ('Implant Template Group Information Model - FIND', 'SOP Class', '', '', 'ImplantTemplateGroupInformationModelFind'),
'1.2.840.10008.5.1.4.45.3': ('Implant Template Group Information Model - MOVE', 'SOP Class', '', '', 'ImplantTemplateGroupInformationModelMove'),
'1.2.840.10008.5.1.4.45.4': ('Implant Template Group Information Model - GET', 'SOP Class', '', '', 'ImplantTemplateGroupInformationModelGet'),
'1.2.840.10008.7.1.1': ('Native DICOM Model', 'Application Hosting Model', '', '', 'NativeDICOMModel'),
'1.2.840.10008.7.1.2': ('Abstract Multi-Dimensional Image Model', 'Application Hosting Model', '', '', 'AbstractMultiDimensionalImageModel'),
'1.2.840.10008.8.1.1': ('DICOM Content Mapping Resource', 'Mapping Resource', '', '', 'DICOMContentMappingResource'),
'1.2.840.10008.10.1': ('Video Endoscopic Image Real-Time Communication', 'SOP Class', '', '', 'VideoEndoscopicImageRealTimeCommunication'),
'1.2.840.10008.10.2': ('Video Photographic Image Real-Time Communication', 'SOP Class', '', '', 'VideoPhotographicImageRealTimeCommunication'),
'1.2.840.10008.10.3': ('Audio Waveform Real-Time Communication', 'SOP Class', '', '', 'AudioWaveformRealTimeCommunication'),
'1.2.840.10008.10.4': ('Rendition Selection Document Real-Time Communication', 'SOP Class', '', '', 'RenditionSelectionDocumentRealTimeCommunication'),
'1.2.840.10008.15.0.3.1': ('dicomDeviceName', 'LDAP OID', '', '', 'dicomDeviceName'),
'1.2.840.10008.15.0.3.2': ('dicomDescription', 'LDAP OID', '', '', 'dicomDescription'),
'1.2.840.10008.15.0.3.3': ('dicomManufacturer', 'LDAP OID', '', '', 'dicomManufacturer'),
'1.2.840.10008.15.0.3.4': ('dicomManufacturerModelName', 'LDAP OID', '', '', 'dicomManufacturerModelName'),
'1.2.840.10008.15.0.3.5': ('dicomSoftwareVersion', 'LDAP OID', '', '', 'dicomSoftwareVersion'),
'1.2.840.10008.15.0.3.6': ('dicomVendorData', 'LDAP OID', '', '', 'dicomVendorData'),
'1.2.840.10008.15.0.3.7': ('dicomAETitle', 'LDAP OID', '', '', 'dicomAETitle'),
'1.2.840.10008.15.0.3.8': ('dicomNetworkConnectionReference', 'LDAP OID', '', '', 'dicomNetworkConnectionReference'),
'1.2.840.10008.15.0.3.9': ('dicomApplicationCluster', 'LDAP OID', '', '', 'dicomApplicationCluster'),
'1.2.840.10008.15.0.3.10': ('dicomAssociationInitiator', 'LDAP OID', '', '', 'dicomAssociationInitiator'),
'1.2.840.10008.15.0.3.11': ('dicomAssociationAcceptor', 'LDAP OID', '', '', 'dicomAssociationAcceptor'),
'1.2.840.10008.15.0.3.12': ('dicomHostname', 'LDAP OID', '', '', 'dicomHostname'),
'1.2.840.10008.15.0.3.13': ('dicomPort', 'LDAP OID', '', '', 'dicomPort'),
'1.2.840.10008.15.0.3.14': ('dicomSOPClass', 'LDAP OID', '', '', 'dicomSOPClass'),
'1.2.840.10008.15.0.3.15': ('dicomTransferRole', 'LDAP OID', '', '', 'dicomTransferRole'),
'1.2.840.10008.15.0.3.16': ('dicomTransferSyntax', 'LDAP OID', '', '', 'dicomTransferSyntax'),
'1.2.840.10008.15.0.3.17': ('dicomPrimaryDeviceType', 'LDAP OID', '', '', 'dicomPrimaryDeviceType'),
'1.2.840.10008.15.0.3.18': ('dicomRelatedDeviceReference', 'LDAP OID', '', '', 'dicomRelatedDeviceReference'),
'1.2.840.10008.15.0.3.19': ('dicomPreferredCalledAETitle', 'LDAP OID', '', '', 'dicomPreferredCalledAETitle'),
'1.2.840.10008.15.0.3.20': ('dicomTLSCyphersuite', 'LDAP OID', '', '', 'dicomTLSCyphersuite'),
'1.2.840.10008.15.0.3.21': ('dicomAuthorizedNodeCertificateReference', 'LDAP OID', '', '', 'dicomAuthorizedNodeCertificateReference'),
'1.2.840.10008.15.0.3.22': ('dicomThisNodeCertificateReference', 'LDAP OID', '', '', 'dicomThisNodeCertificateReference'),
'1.2.840.10008.15.0.3.23': ('dicomInstalled', 'LDAP OID', '', '', 'dicomInstalled'),
'1.2.840.10008.15.0.3.24': ('dicomStationName', 'LDAP OID', '', '', 'dicomStationName'),
'1.2.840.10008.15.0.3.25': ('dicomDeviceSerialNumber', 'LDAP OID', '', '', 'dicomDeviceSerialNumber'),
'1.2.840.10008.15.0.3.26': ('dicomInstitutionName', 'LDAP OID', '', '', 'dicomInstitutionName'),
'1.2.840.10008.15.0.3.27': ('dicomInstitutionAddress', 'LDAP OID', '', '', 'dicomInstitutionAddress'),
'1.2.840.10008.15.0.3.28': ('dicomInstitutionDepartmentName', 'LDAP OID', '', '', 'dicomInstitutionDepartmentName'),
'1.2.840.10008.15.0.3.29': ('dicomIssuerOfPatientID', 'LDAP OID', '', '', 'dicomIssuerOfPatientID'),
'1.2.840.10008.15.0.3.30': ('dicomPreferredCallingAETitle', 'LDAP OID', '', '', 'dicomPreferredCallingAETitle'),
'1.2.840.10008.15.0.3.31': ('dicomSupportedCharacterSet', 'LDAP OID', '', '', 'dicomSupportedCharacterSet'),
'1.2.840.10008.15.0.4.1': ('dicomConfigurationRoot', 'LDAP OID', '', '', 'dicomConfigurationRoot'),
'1.2.840.10008.15.0.4.2': ('dicomDevicesRoot', 'LDAP OID', '', '', 'dicomDevicesRoot'),
'1.2.840.10008.15.0.4.3': ('dicomUniqueAETitlesRegistryRoot', 'LDAP OID', '', '', 'dicomUniqueAETitlesRegistryRoot'),
'1.2.840.10008.15.0.4.4': ('dicomDevice', 'LDAP OID', '', '', 'dicomDevice'),
'1.2.840.10008.15.0.4.5': ('dicomNetworkAE', 'LDAP OID', '', '', 'dicomNetworkAE'),
'1.2.840.10008.15.0.4.6': ('dicomNetworkConnection', 'LDAP OID', '', '', 'dicomNetworkConnection'),
'1.2.840.10008.15.0.4.7': ('dicomUniqueAETitle', 'LDAP OID', '', '', 'dicomUniqueAETitle'),
'1.2.840.10008.15.0.4.8': ('dicomTransferCapability', 'LDAP OID', '', '', 'dicomTransferCapability'),
'1.2.840.10008.15.1.1': ('Universal Coordinated Time', 'Synchronization Frame of Reference', '', '', 'UTC'),
'1.2.840.10008.1.4.1.1': ('Talairach Brain Atlas Frame of Reference', 'Well-known frame of reference', '', '', 'TalairachBrainAtlas'),
'1.2.840.10008.1.4.1.2': ('SPM2 T1 Frame of Reference', 'Well-known frame of reference', '', '', 'SPM2T1'),
'1.2.840.10008.1.4.1.3': ('SPM2 T2 Frame of Reference', 'Well-known frame of reference', '', '', 'SPM2T2'),
'1.2.840.10008.1.4.1.4': ('SPM2 PD Frame of Reference', 'Well-known frame of reference', '', '', 'SPM2PD'),
'1.2.840.10008.1.4.1.5': ('SPM2 EPI Frame of Reference', 'Well-known frame of reference', '', '', 'SPM2EPI'),
'1.2.840.10008.1.4.1.6': ('SPM2 FIL T1 Frame of Reference', 'Well-known frame of reference', '', '', 'SPM2FILT1'),
'1.2.840.10008.1.4.1.7': ('SPM2 PET Frame of Reference', 'Well-known frame of reference', '', '', 'SPM2PET'),
'1.2.840.10008.1.4.1.8': ('SPM2 TRANSM Frame of Reference', 'Well-known frame of reference', '', '', 'SPM2TRANSM'),
'1.2.840.10008.1.4.1.9': ('SPM2 SPECT Frame of Reference', 'Well-known frame of reference', '', '', 'SPM2SPECT'),
'1.2.840.10008.1.4.1.10': ('SPM2 GRAY Frame of Reference', 'Well-known frame of reference', '', '', 'SPM2GRAY'),
'1.2.840.10008.1.4.1.11': ('SPM2 WHITE Frame of Reference', 'Well-known frame of reference', '', '', 'SPM2WHITE'),
'1.2.840.10008.1.4.1.12': ('SPM2 CSF Frame of Reference', 'Well-known frame of reference', '', '', 'SPM2CSF'),
'1.2.840.10008.1.4.1.13': ('SPM2 BRAINMASK Frame of Reference', 'Well-known frame of reference', '', '', 'SPM2BRAINMASK'),
'1.2.840.10008.1.4.1.14': ('SPM2 AVG305T1 Frame of Reference', 'Well-known frame of reference', '', '', 'SPM2AVG305T1'),
'1.2.840.10008.1.4.1.15': ('SPM2 AVG152T1 Frame of Reference', 'Well-known frame of reference', '', '', 'SPM2AVG152T1'),
'1.2.840.10008.1.4.1.16': ('SPM2 AVG152T2 Frame of Reference', 'Well-known frame of reference', '', '', 'SPM2AVG152T2'),
'1.2.840.10008.1.4.1.17': ('SPM2 AVG152PD Frame of Reference', 'Well-known frame of reference', '', '', 'SPM2AVG152PD'),
'1.2.840.10008.1.4.1.18': ('SPM2 SINGLESUBJT1 Frame of Reference', 'Well-known frame of reference', '', '', 'SPM2SINGLESUBJT1'),
'1.2.840.10008.1.4.2.1': ('ICBM 452 T1 Frame of Reference', 'Well-known frame of reference', '', '', 'ICBM452T1'),
'1.2.840.10008.1.4.2.2': ('ICBM Single Subject MRI Frame of Reference', 'Well-known frame of reference', '', '', 'ICBMSingleSubjectMRI'),
'1.2.840.10008.1.4.3.1': ('IEC 61217 Fixed Coordinate System Frame of Reference', 'Well-known frame of reference', '', '', 'IEC61217FixedCoordinateSystem'),
'1.2.840.10008.1.4.3.2': ('Standard Robotic-Arm Coordinate System Frame of Reference', 'Well-known frame of reference', '', '', 'StandardRoboticArmCoordinateSystem'),
'1.2.840.10008.1.4.3.3': ('IEC 61217 Table Top Coordinate System Frame of Reference', 'Well-known frame of reference', '', '', 'IEC61217TableTopCoordinateSystem'),
'1.2.840.10008.1.4.4.1': ('SRI24 Frame of Reference', 'Well-known frame of reference', '', '', 'SRI24'),
'1.2.840.10008.1.4.5.1': ('Colin27 Frame of Reference', 'Well-known frame of reference', '', '', 'Colin27'),
'1.2.840.10008.1.4.6.1': ('LPBA40/AIR Frame of Reference', 'Well-known frame of reference', '', '', 'LPBA40AIR'),
'1.2.840.10008.1.4.6.2': ('LPBA40/FLIRT Frame of Reference', 'Well-known frame of reference', '', '', 'LPBA40FLIRT'),
'1.2.840.10008.1.4.6.3': ('LPBA40/SPM5 Frame of Reference', 'Well-known frame of reference', '', '', 'LPBA40SPM5')
}

View File

@@ -0,0 +1,23 @@
"""Pure python package for DICOM medical file reading and writing."""
import re
from re import Match
from typing import cast
from importlib.metadata import version

# Version string of the installed "pydicom" distribution, e.g. "3.0.1".
__version__: str = version("pydicom")

# Extract the leading "major.minor.patch" digits from the full version
# string; the cast asserts that a valid release version always matches.
result = cast(Match[str], re.match(r"(\d+\.\d+\.\d+).*", __version__))
__version_info__ = tuple(result[1].split("."))

# DICOM Standard version used for:
#   _dicom_dict.py, _uid_dict.py and uid.py
__dicom_version__: str = "2024c"

# DICOM Standard version used for:
#   sr/_cid_dict.py
#   sr/_concepts_dict.py
#   sr/_snomed_dict.py
__concepts_version__: str = "2024c"

View File

@@ -0,0 +1,841 @@
# Copyright 2008-2021 pydicom authors. See LICENSE file for details.
"""Handle alternate character sets for character strings."""
import codecs
import re
from typing import (
TYPE_CHECKING,
cast,
)
from collections.abc import MutableSequence, Sequence
from pydicom import config
from pydicom.misc import warn_and_log
from pydicom.valuerep import TEXT_VR_DELIMS, PersonName, VR, CUSTOMIZABLE_CHARSET_VR
if TYPE_CHECKING: # pragma: no cover
from pydicom.dataelem import DataElement
# default encoding if no encoding defined - corresponds to ISO IR 6 / ASCII
default_encoding = "iso8859"

# Map DICOM Specific Character Set to python equivalent
# https://docs.python.org/3/library/codecs.html#standard-encodings
python_encoding = {
    # default character set for DICOM
    "": default_encoding,
    # alias for latin_1 too (iso_ir_6 exists as an alias to 'ascii')
    "ISO_IR 6": default_encoding,
    "ISO_IR 13": "shift_jis",
    "ISO_IR 100": "latin_1",
    "ISO_IR 101": "iso8859_2",
    "ISO_IR 109": "iso8859_3",
    "ISO_IR 110": "iso8859_4",
    "ISO_IR 126": "iso_ir_126",  # Greek
    "ISO_IR 127": "iso_ir_127",  # Arabic
    "ISO_IR 138": "iso_ir_138",  # Hebrew
    "ISO_IR 144": "iso_ir_144",  # Russian
    "ISO_IR 148": "iso_ir_148",  # Turkish
    "ISO_IR 166": "iso_ir_166",  # Thai
    "ISO 2022 IR 6": "iso8859",  # alias for latin_1 too
    "ISO 2022 IR 13": "shift_jis",
    "ISO 2022 IR 87": "iso2022_jp",
    "ISO 2022 IR 100": "latin_1",
    "ISO 2022 IR 101": "iso8859_2",
    "ISO 2022 IR 109": "iso8859_3",
    "ISO 2022 IR 110": "iso8859_4",
    "ISO 2022 IR 126": "iso_ir_126",
    "ISO 2022 IR 127": "iso_ir_127",
    "ISO 2022 IR 138": "iso_ir_138",
    "ISO 2022 IR 144": "iso_ir_144",
    "ISO 2022 IR 148": "iso_ir_148",
    "ISO 2022 IR 149": "euc_kr",
    "ISO 2022 IR 159": "iso2022_jp_2",
    "ISO 2022 IR 166": "iso_ir_166",
    "ISO 2022 IR 58": "iso_ir_58",
    "ISO_IR 192": "UTF8",  # from Chinese example, 2008 PS3.5 Annex J p1-4
    "GB18030": "GB18030",
    "ISO 2022 GBK": "GBK",  # from DICOM correction CP1234
    "ISO 2022 58": "GB2312",  # from DICOM correction CP1234
    "GBK": "GBK",  # from DICOM correction CP1234
}

# these encodings cannot be used with code extensions
# see DICOM Standard, Part 3, Table C.12-5
# and DICOM Standard, Part 5, Section 6.1.2.5.4, item d
STAND_ALONE_ENCODINGS = ("ISO_IR 192", "GBK", "GB18030")

# the escape character used to mark the start of escape sequences
ESC = b"\x1b"

# Map escape sequences to the corresponding Python encodings, as defined
# in PS3.3 in tables C.12-3 (single-byte) and C.12-4 (multi-byte
# character sets).
CODES_TO_ENCODINGS = {
    ESC + b"(B": default_encoding,  # used to switch to ASCII G0 code element
    ESC + b"-A": "latin_1",
    ESC + b")I": "shift_jis",  # switches to ISO-IR 13
    ESC + b"(J": "shift_jis",  # switches to ISO-IR 14 (shift_jis handles both)
    ESC + b"$B": "iso2022_jp",
    ESC + b"-B": "iso8859_2",
    ESC + b"-C": "iso8859_3",
    ESC + b"-D": "iso8859_4",
    ESC + b"-F": "iso_ir_126",
    ESC + b"-G": "iso_ir_127",
    ESC + b"-H": "iso_ir_138",
    ESC + b"-L": "iso_ir_144",
    ESC + b"-M": "iso_ir_148",
    ESC + b"-T": "iso_ir_166",
    ESC + b"$)C": "euc_kr",
    ESC + b"$(D": "iso2022_jp_2",
    ESC + b"$)A": "iso_ir_58",
}

# Reverse lookup of the table above; 'shift_jis' maps from two escape
# sequences, so it is pinned explicitly to the ISO IR 13 sequence ESC)I
ENCODINGS_TO_CODES = {v: k for k, v in CODES_TO_ENCODINGS.items()}
ENCODINGS_TO_CODES["shift_jis"] = ESC + b")I"

# Multi-byte character sets except Korean are handled by Python.
# To decode them, the escape sequence shall be preserved in the input byte
# string, and will be removed during decoding by Python.
handled_encodings = ("iso2022_jp", "iso2022_jp_2", "iso_ir_58")
def _encode_to_jis_x_0201(value: str, errors: str = "strict") -> bytes:
"""Convert a unicode string into JIS X 0201 byte string using shift_jis
encodings.
shift_jis is a superset of jis_x_0201. So we can regard the encoded value
as jis_x_0201 if it is single byte character.
Parameters
----------
value : str
The unicode string as presented to the user.
errors : str
The behavior of a character which could not be encoded. If 'strict' is
passed, raise an UnicodeEncodeError. If any other value is passed,
non ISO IR 14 characters are replaced by the ASCII '?'.
Returns
-------
bytes
The encoded string. If some characters in value could not be encoded to
JIS X 0201, and `errors` is not set to 'strict', they are replaced to
'?'.
Raises
------
UnicodeEncodeError
If errors is set to 'strict' and `value` could not be encoded with
JIS X 0201.
"""
encoder_class = codecs.getincrementalencoder("shift_jis")
encoder = encoder_class()
# If errors is not strict, this function is used as fallback.
# In this case, we use only ISO IR 14 to encode given value
# without escape sequence.
if errors != "strict" or value == "":
encoded = b""
for c in value:
try:
b = encoder.encode(c)
except UnicodeEncodeError:
b = b"?"
if len(b) != 1 or 0x80 <= ord(b):
b = b"?"
encoded += b
return encoded
encoded = encoder.encode(value[0])
if len(encoded) != 1:
raise UnicodeEncodeError(
"shift_jis", value, 0, len(value), "illegal multibyte sequence"
)
msb = ord(encoded) & 0x80 # msb is 1 for ISO IR 13, 0 for ISO IR 14
for i, c in enumerate(value[1:], 1):
try:
b = encoder.encode(c)
except UnicodeEncodeError as e:
e.start = i
e.end = len(value)
raise e
if len(b) != 1 or ((ord(b) & 0x80) ^ msb) != 0:
character_set = "ISO IR 14" if msb == 0 else "ISO IR 13"
msg = f"Given character is out of {character_set}"
raise UnicodeEncodeError("shift_jis", value, i, len(value), msg)
encoded += b
return encoded
def _encode_to_jis_x_0208(value: str, errors: str = "strict") -> bytes:
    """Convert a unicode string into JIS X 0208 encoded bytes.

    Thin wrapper delegating to `_encode_to_given_charset` with the
    DICOM term 'ISO 2022 IR 87'.
    """
    return _encode_to_given_charset(value, "ISO 2022 IR 87", errors=errors)


def _encode_to_jis_x_0212(value: str, errors: str = "strict") -> bytes:
    """Convert a unicode string into JIS X 0212 encoded bytes.

    Thin wrapper delegating to `_encode_to_given_charset` with the
    DICOM term 'ISO 2022 IR 159'.
    """
    return _encode_to_given_charset(value, "ISO 2022 IR 159", errors=errors)
def _encode_to_given_charset(
    value: str, character_set: str, errors: str = "strict"
) -> bytes:
    """Encode a unicode string using the given character set.

    The escape sequence which is located at the end of the encoded value has
    to vary depending on the value 1 of SpecificCharacterSet. So we have to
    trim it and append the correct escape sequence manually.

    Parameters
    ----------
    value : str
        The unicode string as presented to the user.
    character_set : str
        Character set for result.
    errors : str
        The behavior of a character which could not be encoded. This value
        is passed to errors argument of str.encode().

    Returns
    -------
    bytes
        The encoded string. If some characters in value could not be encoded to
        given character_set, it depends on the behavior of corresponding python
        encoder.

    Raises
    ------
    UnicodeEncodeError
        If errors is set to 'strict' and `value` could not be encoded with
        given character_set.
    """
    encoding = python_encoding[character_set]
    # If errors is not strict, this function is used as fallback.
    # So keep the tail escape sequence of encoded for backward compatibility.
    if errors != "strict":
        return value.encode(encoding, errors=errors)
    encoder_class = codecs.getincrementalencoder(encoding)
    encoder = encoder_class()
    # the incremental encoder emits the escape sequence selecting
    # `character_set` before the first encoded character
    encoded = encoder.encode(value[0])
    if not encoded.startswith(ENCODINGS_TO_CODES[encoding]):
        raise UnicodeEncodeError(
            encoding, value, 0, len(value), f"Given character is out of {character_set}"
        )
    for i, c in enumerate(value[1:], 1):
        try:
            b = encoder.encode(c)
        except UnicodeEncodeError as e:
            e.start = i
            e.end = len(value)
            raise e
        if b[:1] == ESC:
            # a fresh escape sequence means this character belongs to a
            # different character set than `character_set`
            raise UnicodeEncodeError(
                encoding,
                value,
                i,
                len(value),
                f"Given character is out of {character_set}",
            )
        encoded += b
    return encoded
def _get_escape_sequence_for_encoding(
    encoding: str, encoded: bytes | None = None
) -> bytes:
    """Return the escape sequence that selects `encoding`.

    For ``'shift_jis'`` the sequence depends on the first byte of `encoded`:
    a byte with the high bit set selects ISO IR 13, otherwise ISO IR 14.

    Parameters
    ----------
    encoding : str
        The Python encoding to select an escape sequence for.
    encoded : bytes, optional
        The encoded value, used only to choose between the two
        'shift_jis' sequences.

    Returns
    -------
    bytes
        The escape sequence for the encoding, or ``b""`` if none is defined.
    """
    if encoding != "shift_jis":
        return ENCODINGS_TO_CODES.get(encoding, b"")

    # ESC(J selects ISO IR 14 (the default), ESC)I selects ISO IR 13
    if encoded is not None and encoded[0] & 0x80:
        return ESC + b")I"
    return ESC + b"(J"
# These encodings need escape sequence to handle alphanumeric characters.
need_tail_escape_sequence_encodings = ("iso2022_jp", "iso2022_jp_2")

# Encodings that are not encoded by a plain str.encode() call but by the
# custom encoder functions defined above.
custom_encoders = {
    "shift_jis": _encode_to_jis_x_0201,
    "iso2022_jp": _encode_to_jis_x_0208,
    "iso2022_jp_2": _encode_to_jis_x_0212,
}
def decode_bytes(value: bytes, encodings: Sequence[str], delimiters: set[int]) -> str:
    """Decode an encoded byte `value` into a unicode string using `encodings`.

    Parameters
    ----------
    value : bytes
        The encoded byte string in the DICOM element value.
    encodings : list of str
        The encodings needed to decode the string as a list of Python
        encodings, converted from the encodings in (0008,0005) *Specific
        Character Set*.
    delimiters : set of int
        A set of characters or character codes, each of which resets the
        encoding in `value`.

    Returns
    -------
    str
        The decoded unicode string. If the value could not be decoded,
        and :attr:`~pydicom.config.settings.reading_validation_mode`
        is not ``RAISE``, a warning is issued, and `value` is
        decoded using the first encoding with replacement characters,
        resulting in data loss.

    Raises
    ------
    UnicodeDecodeError
        If :attr:`~pydicom.config.settings.reading_validation_mode`
        is ``RAISE`` and `value` could not be decoded with the given
        encodings.
    LookupError
        If :attr:`~pydicom.config.settings.reading_validation_mode`
        is ``RAISE`` and the given encodings are invalid.
    """
    # shortcut for the common case - no escape sequences present
    if ESC not in value:
        first_encoding = encodings[0]
        try:
            return value.decode(first_encoding)
        except LookupError:
            if config.settings.reading_validation_mode == config.RAISE:
                raise
            # IGNORE is handled as WARN here, as this is
            # not an optional validation check
            warn_and_log(
                f"Unknown encoding '{first_encoding}' - using default "
                "encoding instead"
            )
            first_encoding = default_encoding
            return value.decode(first_encoding)
        except UnicodeError:
            if config.settings.reading_validation_mode == config.RAISE:
                raise
            warn_and_log(
                "Failed to decode byte string with encoding "
                f"'{first_encoding}' - using replacement characters in "
                "decoded string"
            )
            return value.decode(first_encoding, errors="replace")

    # Each part of the value that starts with an escape sequence is decoded
    # separately. If it starts with an escape sequence, the
    # corresponding encoding is used, otherwise (e.g. the first part if it
    # does not start with an escape sequence) the first encoding.
    # See PS3.5, 6.1.2.4 and 6.1.2.5 for the use of code extensions.
    #
    # The following regex splits the value into these parts, by matching
    # the substring until the first escape character, and subsequent
    # substrings starting with an escape character.
    regex = b"(^[^\x1b]+|[\x1b][^\x1b]*)"
    fragments: list[bytes] = re.findall(regex, value)

    # decode each byte string fragment with it's corresponding encoding
    # and join them all together
    return "".join(
        [_decode_fragment(fragment, encodings, delimiters) for fragment in fragments]
    )


# alias kept so external callers using the old name keep working
# (presumably for backwards compatibility - confirm before removing)
decode_string = decode_bytes
def _decode_fragment(
    byte_str: bytes, encodings: Sequence[str], delimiters: set[int]
) -> str:
    """Decode one fragment of a value that uses a single encoding.

    A fragment beginning with an escape sequence is decoded with the
    encoding that sequence selects (if it appears in `encodings`),
    otherwise the first encoding in `encodings` is used. A delimiter
    inside the fragment resets single-byte encodings back to the first
    encoding.

    Parameters
    ----------
    byte_str : bytes
        The encoded string to be decoded.
    encodings : list of str
        The list of Python encodings as converted from the values in the
        Specific Character Set tag.
    delimiters : set of int
        A set of characters or character codes, each of which resets the
        encoding in `byte_str`.

    Returns
    -------
    str
        The decoded unicode string. If decoding fails and
        :attr:`~pydicom.config.settings.reading_validation_mode` is not
        ``RAISE``, a warning is issued and the value is decoded with the
        first encoding using replacement characters (lossy).

    Raises
    ------
    UnicodeDecodeError
        If :attr:`~pydicom.config.settings.reading_validation_mode` is
        ``RAISE`` and `byte_str` could not be decoded.

    References
    ----------
    * DICOM Standard, Part 5,
      :dcm:`Sections 6.1.2.4<part05/chapter_6.html#sect_6.1.2.4>` and
      :dcm:`6.1.2.5<part05/chapter_6.html#sect_6.1.2.5>`
    * DICOM Standard, Part 3,
      :dcm:`Annex C.12.1.1.2<part03/sect_C.12.html#sect_C.12.1.1.2>`
    """
    first = encodings[0]
    try:
        if not byte_str.startswith(ESC):
            # plain fragment - always decoded with the first encoding
            return byte_str.decode(first)
        return _decode_escaped_fragment(byte_str, encodings, delimiters)
    except UnicodeError:
        if config.settings.reading_validation_mode == config.RAISE:
            raise
        warn_and_log(
            "Failed to decode byte string with encodings: "
            f"{', '.join(encodings)} - using replacement characters in "
            "decoded string"
        )
        return byte_str.decode(first, errors="replace")
def _decode_escaped_fragment(
    byte_str: bytes, encodings: Sequence[str], delimiters: set[int]
) -> str:
    """Decodes a byte string starting with an escape sequence.

    See `_decode_fragment` for parameter description and more information.
    """
    # all 4-character escape codes start with one of two character sets
    seq_length = 4 if byte_str.startswith((b"\x1b$(", b"\x1b$)")) else 3
    encoding = CODES_TO_ENCODINGS.get(byte_str[:seq_length], "")
    if encoding in encodings or encoding == default_encoding:
        if encoding in handled_encodings:
            # Python strips the escape sequences for this encoding.
            # Any delimiters must be handled correctly by `byte_str`.
            return byte_str.decode(encoding)

        # Python doesn't know about the escape sequence -
        # we have to strip it before decoding
        byte_str = byte_str[seq_length:]

        # If a delimiter occurs in the string, it resets the encoding.
        # The following returns the first occurrence of a delimiter in
        # the byte string, or None if it does not contain any.
        index = next((idx for idx, ch in enumerate(byte_str) if ch in delimiters), None)
        if index is not None:
            # the part of the string after the first delimiter
            # is decoded with the first encoding
            return byte_str[:index].decode(encoding) + byte_str[index:].decode(
                encodings[0]
            )
        # No delimiter - use the encoding defined by the escape code
        return byte_str.decode(encoding)

    # unknown escape code - use first encoding
    msg = "Found unknown escape sequence in encoded string value"
    if config.settings.reading_validation_mode == config.RAISE:
        raise ValueError(msg)
    warn_and_log(f"{msg} - using encoding {encodings[0]}")
    return byte_str.decode(encodings[0], errors="replace")
def encode_string(value: str, encodings: Sequence[str]) -> bytes:
    """Encode a unicode string `value` into :class:`bytes` using `encodings`.

    Parameters
    ----------
    value : str
        The unicode string as presented to the user.
    encodings : list of str
        The encodings needed to encode the string as a list of Python
        encodings, converted from the encodings in (0008,0005) *Specific
        Character Set*.

    Returns
    -------
    bytes
        The encoded string. If `value` could not be encoded with any of
        the given encodings, and
        :attr:`~pydicom.config.settings.writing_validation_mode` is not
        ``RAISE``, a warning is issued, and `value` is encoded using
        the first encoding with replacement characters, resulting in data loss.

    Raises
    ------
    UnicodeEncodeError
        If :attr:`~pydicom.config.settings.writing_validation_mode`
        is set to ``RAISE`` and `value` could not be encoded with the
        supplied encodings.
    """
    # first, try to encode the whole value with one of the given encodings,
    # in order
    for i, encoding in enumerate(encodings):
        try:
            encoded = _encode_string_impl(value, encoding)

            if i > 0 and encoding not in handled_encodings:
                # code extension used - prepend the escape sequence that
                # selects this encoding
                escape_sequence = _get_escape_sequence_for_encoding(
                    encoding, encoded=encoded
                )
                encoded = escape_sequence + encoded
            if encoding in need_tail_escape_sequence_encodings:
                encoded += _get_escape_sequence_for_encoding(encodings[0])
            return encoded
        except UnicodeError:
            continue

    # if we have more than one encoding, we retry encoding by splitting
    # `value` into chunks that can be encoded with one of the encodings
    if len(encodings) > 1:
        try:
            return _encode_string_parts(value, encodings)
        except ValueError:
            pass

    # all attempts failed - raise or warn and encode with replacement
    # characters
    if config.settings.writing_validation_mode == config.RAISE:
        # force raising a valid UnicodeEncodeError
        value.encode(encodings[0])

    warn_and_log(
        f"Failed to encode value with encodings: {', '.join(encodings)} "
        "- using replacement characters in encoded string"
    )
    return _encode_string_impl(value, encodings[0], errors="replace")
def _encode_string_parts(value: str, encodings: Sequence[str]) -> bytes:
    """Convert a unicode string into a byte string using the given
    list of encodings.

    This is invoked if `encode_string` failed to encode `value` with a single
    encoding. We try instead to use different encodings for different parts
    of the string, using the encoding that can encode the longest part of
    the rest of the string as we go along.

    Parameters
    ----------
    value : str
        The unicode string as presented to the user.
    encodings : list of str
        The encodings needed to encode the string as a list of Python
        encodings, converted from the encodings in Specific Character Set.

    Returns
    -------
    bytes
        The encoded string, including the escape sequences needed to switch
        between different encodings.

    Raises
    ------
    ValueError
        If `value` could not be encoded with the given encodings.
    """
    encoded = bytearray()
    unencoded_part = value
    # tracks the encoding chosen for the most recently encoded chunk
    best_encoding = default_encoding
    while unencoded_part:
        # find the encoding that can encode the longest part of the rest
        # of the string still to be encoded
        max_index = 0
        for encoding in encodings:
            try:
                _encode_string_impl(unencoded_part, encoding)
                # if we get here, the whole rest of the value can be encoded
                best_encoding = encoding
                max_index = len(unencoded_part)
                break
            except (UnicodeDecodeError, UnicodeEncodeError) as err:
                if err.start > max_index:
                    # err.start is the index of first char we failed to encode
                    max_index = err.start
                    best_encoding = encoding
        # none of the given encodings can encode the first character - give up
        if max_index == 0:
            raise ValueError(
                "None of the given encodings can encode the first character"
            )
        # encode the part that can be encoded with the found encoding
        encoded_part = _encode_string_impl(unencoded_part[:max_index], best_encoding)
        if best_encoding not in handled_encodings:
            encoded += _get_escape_sequence_for_encoding(
                best_encoding, encoded=encoded_part
            )
        encoded += encoded_part
        # set remaining unencoded part of the string and handle that
        unencoded_part = unencoded_part[max_index:]
    # unencoded_part is empty - we are done, return the encoded string
    if best_encoding in need_tail_escape_sequence_encodings:
        encoded += _get_escape_sequence_for_encoding(encodings[0])
    return bytes(encoded)
def _encode_string_impl(value: str, encoding: str, errors: str = "strict") -> bytes:
    """Encode `value` with `encoding`, delegating to the registered custom
    encoder when `encoding` appears in `custom_encoders`, and to the plain
    Python codec otherwise.
    """
    encoder = custom_encoders.get(encoding)
    if encoder is not None:
        return encoder(value, errors=errors)
    return value.encode(encoding, errors=errors)
# DICOM PS3.5-2008 6.1.1 (p 18) says:
# default is ISO-IR 6 G0, equiv to common chr set of ISO 8859 (PS3.5 6.1.2.1)
# (0008,0005) value 1 can *replace* the default encoding...
# for VRs of SH, LO, ST, LT, PN and UT (PS3.5 6.1.2.3)...
# with a single-byte character encoding
# if (0008,0005) is multi-valued, then value 1 (or default if blank)...
# is used until code extension escape sequence is hit,
# which can be at start of string, or after CR/LF, FF, or
# in Person Name PN, after ^ or =
# NOTE also that 7.5.3 SEQUENCE INHERITANCE states that if (0008,0005)
# is not present in a sequence item then it is inherited from its parent.
def convert_encodings(encodings: None | str | MutableSequence[str]) -> list[str]:
    """Convert DICOM `encodings` into corresponding Python encodings.

    Handles some common spelling mistakes and issues a warning in this case.

    Handles stand-alone encodings: if they are the first encodings,
    additional encodings are ignored, if they are not the first encoding,
    they are ignored. In both cases, a warning is issued.

    Invalid encodings are replaced with the default encoding with a
    respective warning issued, if
    :attr:`~pydicom.config.settings.reading_validation_mode` is ``WARN``,
    or an exception is raised if it is set to ``RAISE``.

    Parameters
    ----------
    encodings : str or list of str
        The encoding or list of encodings as read from (0008,0005)
        *Specific Character Set*.

    Returns
    -------
    list of str
        A :class:`list` of Python encodings corresponding to the DICOM
        encodings. If an encoding is already a Python encoding, it is returned
        unchanged. Encodings with common spelling errors are replaced by the
        correct encoding, and invalid encodings are replaced with the default
        encoding if :attr:`~pydicom.config.settings.reading_validation_mode`
        is not set to ``RAISE``.

    Raises
    ------
    LookupError
        If `encodings` contains a value that could not be converted and
        :attr:`~pydicom.config.settings.reading_validation_mode` is
        ``RAISE``.
    """
    # normalize the input to a list we own (never mutate the caller's list)
    if not encodings:
        encodings = [""]
    elif isinstance(encodings, str):
        encodings = [encodings]
    else:
        encodings = list(encodings)

    if not encodings[0]:
        encodings[0] = "ISO_IR 6"

    py_encodings = [
        python_encoding[encoding]
        if encoding in python_encoding
        else _python_encoding_for_corrected_encoding(encoding)
        for encoding in encodings
    ]

    if len(encodings) > 1:
        return _handle_illegal_standalone_encodings(encodings, py_encodings)
    return py_encodings
def _python_encoding_for_corrected_encoding(encoding: str) -> str:
    """Map an invalid Specific Character Set value to a Python encoding.

    First try to repair common misspellings of the standard DICOM terms
    and return the Python encoding of the repaired term; failing that,
    accept `encoding` if it is already a valid Python encoding name.
    Otherwise fall back to the default encoding. A warning is issued for
    the invalid value except when it is already a Python encoding.
    """
    # repair common misspellings of the standard defined terms
    if re.match("^ISO[^_]IR", encoding):
        patched = "ISO_IR" + encoding[6:]
    elif re.match("^(?=ISO.2022.IR.)(?!ISO 2022 IR )", encoding):
        patched = "ISO 2022 IR " + encoding[12:]
    else:
        patched = None

    if patched:
        py_encoding = python_encoding.get(patched)
        if py_encoding is not None:
            _warn_about_invalid_encoding(encoding, patched)
            return py_encoding
        _warn_about_invalid_encoding(encoding)
        return default_encoding

    # fallback: assume the value is already a Python encoding
    try:
        codecs.lookup(encoding)
    except LookupError:
        _warn_about_invalid_encoding(encoding)
        return default_encoding
    return encoding
def _warn_about_invalid_encoding(
    encoding: str, patched_encoding: str | None = None
) -> None:
    """Issue a warning for an invalid Specific Character Set value.

    If `patched_encoding` is given it is named as the replacement,
    otherwise the warning states that the default encoding is used. With
    no replacement and
    :attr:`~pydicom.config.settings.reading_validation_mode` set to
    ``RAISE``, a :class:`LookupError` is raised instead.
    """
    if patched_encoding is not None:
        msg = (
            f"Incorrect value for Specific Character Set '{encoding}' - "
            f"assuming '{patched_encoding}'"
        )
    else:
        if config.settings.reading_validation_mode == config.RAISE:
            raise LookupError(f"Unknown encoding '{encoding}'")
        msg = f"Unknown encoding '{encoding}' - using default encoding instead"
    warn_and_log(msg, stacklevel=2)
def _handle_illegal_standalone_encodings(
    encodings: MutableSequence[str], py_encodings: list[str]
) -> list[str]:
    """Remove illegally used stand-alone encodings from multi-valued input.

    A stand-alone encoding in the first position makes all further
    encodings meaningless, so only the first Python encoding is kept; a
    stand-alone encoding in any later position is dropped. A warning is
    issued in either case.
    """
    first = encodings[0]
    if first in STAND_ALONE_ENCODINGS:
        warn_and_log(
            (
                f"Value '{first}' for Specific Character Set does not "
                f"allow code extensions, ignoring: {', '.join(encodings[1:])}"
            ),
            stacklevel=2,
        )
        return py_encodings[:1]

    # walk the code extensions back to front so deletions keep earlier
    # indices valid
    for idx in range(len(encodings) - 1, 0, -1):
        if encodings[idx] in STAND_ALONE_ENCODINGS:
            warn_and_log(
                f"Value '{encodings[idx]}' cannot be used as code extension, ignoring it",
                stacklevel=2,
            )
            del py_encodings[idx]

    return py_encodings
def decode_element(
    elem: "DataElement", dicom_character_set: str | list[str] | None
) -> None:
    """Apply the DICOM character encoding to a data element (in place).

    Parameters
    ----------
    elem : dataelem.DataElement
        The :class:`DataElement<pydicom.dataelem.DataElement>` instance
        containing an encoded byte string value to decode.
    dicom_character_set : str or list of str or None
        The value of (0008,0005) *Specific Character Set*, which may be a
        single value, a multiple value (code extension), or may also be ``''``
        or ``None``, in which case ``'ISO_IR 6'`` will be used.
    """
    if elem.is_empty:
        return
    if not dicom_character_set:
        dicom_character_set = ["ISO_IR 6"]

    encodings = convert_encodings(dicom_character_set)

    # decode the string value to unicode
    # PN is special case as may have 3 components with different chr sets
    if elem.VR == VR.PN:
        if elem.VM == 1:
            # elem.value: PersonName | bytes
            elem.value = cast(PersonName, elem.value).decode(encodings)
        else:
            # elem.value: Iterable[PersonName | bytes]
            elem.value = [cast(PersonName, vv).decode(encodings) for vv in elem.value]
    elif elem.VR in CUSTOMIZABLE_CHARSET_VR:
        # You can't re-decode unicode (string literals in py3)
        if elem.VM == 1:
            if isinstance(elem.value, str):
                # already decoded
                return
            elem.value = decode_bytes(elem.value, encodings, TEXT_VR_DELIMS)
        else:
            output = list()
            for value in elem.value:
                if isinstance(value, str):
                    # already decoded - keep unchanged
                    output.append(value)
                else:
                    output.append(decode_bytes(value, encodings, TEXT_VR_DELIMS))
            elem.value = output

View File

@@ -0,0 +1,29 @@
# Copyright 2020 pydicom authors. See LICENSE file for details.
"""Pydicom command line interface program for codify"""
import argparse
import pydicom.util.codify
# Default upper bound (in bytes) for binary data included in generated code
default_exclude_size = 100


def add_subparser(subparsers: argparse._SubParsersAction) -> None:
    """Register the ``codify`` subcommand with the CLI subparsers."""
    codify_parser = subparsers.add_parser(
        "codify",
        description=(
            "Read a DICOM file and produce the pydicom (Python) "
            "code which can create that file"
        ),
        epilog=(
            "Binary data (e.g. pixels) larger than --exclude-size "
            f"(default {default_exclude_size} bytes) is not included. "
            "A dummy line with a syntax error is produced. "
            "Private data elements are not included by default."
        ),
    )

    # Codify existed previously as a stand-alone script; re-use its
    # argument definitions and callback here
    pydicom.util.codify.set_parser_arguments(codify_parser, default_exclude_size)
    codify_parser.set_defaults(func=pydicom.util.codify.do_codify)

View File

@@ -0,0 +1,232 @@
# Copyright 2020 pydicom authors. See LICENSE file for details.
"""Pydicom command line interface program
Each subcommand is a module within pydicom.cli, which
defines an add_subparser(subparsers) function to set argparse
attributes, and calls set_defaults(func=callback_function)
"""
import argparse
from importlib.metadata import entry_points
import re
import sys
from typing import cast, Any
from collections.abc import Callable
from pydicom import dcmread
from pydicom.data.data_manager import get_charset_files, get_testdata_file
from pydicom.dataset import Dataset
# Set by `main` once the argument parser is built; `help_command` uses it
# to look up per-subcommand help
subparsers: argparse._SubParsersAction | None = None

# Restrict the allowed syntax tightly, since we use Python `eval`
# on the expression. Do not allow callables, or assignment, for example.
re_kywd_or_item = (
    r"\w+"  # Keyword (\w allows underscore, needed for file_meta)
    r"(\[(-)?\d+\])?"  # Optional [index] or [-index]
)

re_file_spec_object = re.compile(re_kywd_or_item + r"(\." + re_kywd_or_item + r")*$")

filespec_help = (
    "File specification, in format [pydicom::]filename[::element]. "
    "If `pydicom::` prefix is present, then use the pydicom "
    "test file with that name. If `element` is given, "
    "use only that data element within the file. "
    "Examples: "
    "path/to/your_file.dcm, "
    "your_file.dcm::StudyDate, "
    "pydicom::rtplan.dcm::BeamSequence[0], "
    "yourplan.dcm::BeamSequence[0].BeamNumber"
)
def eval_element(ds: Dataset, element: str) -> Any:
    """Return the value of the data-element expression `element` in `ds`.

    Raises
    ------
    argparse.ArgumentTypeError
        If the element is not in the dataset or an index in the
        expression is out of range.
    """
    # Callers in this module validate `element` against
    # `re_file_spec_object` first, restricting the eval'd expression to
    # attribute and index access on `ds`
    try:
        return eval(f"ds.{element}", {"ds": ds})
    except AttributeError:
        raise argparse.ArgumentTypeError(
            f"Data element '{element}' is not in the dataset"
        )
    except IndexError as e:
        raise argparse.ArgumentTypeError(f"'{element}' has an index error: {e}")
def filespec_parts(filespec: str) -> tuple[str, str, str]:
    """Split a filespec of the form ``[prefix::]filename[::element]``.

    A single ':' may legitimately appear inside a filename
    (e.g. ``c:\\temp\\test.dcm`` on Windows); only the '::' separator
    delimits components.

    Returns
    -------
    tuple[str, str, str]
        The ``(prefix, filename, element)`` components, where prefix is
        either ``"pydicom"`` or ``""`` and element is ``""`` when absent.
    """
    parts = filespec.split("::")
    if len(parts) == 1:
        # no separator at all - filename only
        return "", parts[0], ""

    prefix = ""
    if parts[0] == "pydicom":
        prefix = "pydicom"
        parts = parts[1:]
        if len(parts) == 1:
            # pydicom::filename with no element
            return prefix, parts[0], ""

    return prefix, "".join(parts[:-1]), parts[-1]
def filespec_parser(filespec: str) -> list[tuple[Dataset, Any]]:
    """Utility to return a dataset and an optional data element value within it

    Note: this is used as an argparse 'type' for adding parsing arguments.

    Parameters
    ----------
    filespec: str
        A filename with optional `pydicom::` prefix and optional data element,
        in format:
            [pydicom::]<filename>[::<element>]
        If an element is specified, it must be a path to a data element,
        sequence item (dataset), or a sequence.
        Examples:
            your_file.dcm
            your_file.dcm::StudyDate
            pydicom::rtplan.dcm::BeamSequence[0]
            pydicom::rtplan.dcm::BeamSequence[0].BeamLimitingDeviceSequence

    Returns
    -------
    List[Tuple[Dataset, Any]]
        Matching pairs of (dataset, data element value)
        This usually is a single pair, but a list is returned for future
        ability to work across multiple files.

    Note
    ----
    This function is meant to be used in a call to an `argparse` library's
    `add_argument` call for subparsers, with name="filespec" and
    `type=filespec_parser`. When used that way, the resulting args.filespec
    will contain the return values of this function
    (e.g. use `ds, element_val = args.filespec` after parsing arguments)
    See the `pydicom.cli.show` module for an example.

    Raises
    ------
    argparse.ArgumentTypeError
        If the filename does not exist in local path or in pydicom test files,
        or if the optional element is not a valid expression,
        or if the optional element is a valid expression but does not exist
        within the dataset
    """
    prefix, filename, element = filespec_parts(filespec)

    # Get the pydicom test filename even without prefix, in case user forgot it
    try:
        pydicom_filename = cast(str, get_testdata_file(filename))
    except ValueError:  # will get this if absolute path passed
        pydicom_filename = ""

    # Check if filename is in charset files
    if not pydicom_filename:
        try:
            char_filenames = get_charset_files(filename)
            if char_filenames:
                pydicom_filename = char_filenames[0]
        except NotImplementedError:  # will get this if absolute path passed
            pass

    if prefix == "pydicom":
        filename = pydicom_filename

    # Check element syntax first to avoid unnecessary load of file
    if element and not re_file_spec_object.match(element):
        raise argparse.ArgumentTypeError(
            f"Component '{element}' is not valid syntax for a "
            "data element, sequence, or sequence item"
        )

    # Read DICOM file
    try:
        ds = dcmread(filename, force=True)
    except FileNotFoundError:
        # Name the requested file in the error; if a matching pydicom test
        # file exists, suggest it (the user may have forgotten `pydicom::`)
        extra = (
            f", \nbut 'pydicom::{filename}' test data file is available"
            if pydicom_filename
            else ""
        )
        raise argparse.ArgumentTypeError(f"File '{filename}' not found{extra}")
    except Exception as e:
        raise argparse.ArgumentTypeError(f"Error reading '{filename}': {e}")

    if not element:
        return [(ds, None)]

    data_elem_val = eval_element(ds, element)

    return [(ds, data_elem_val)]
def help_command(args: argparse.Namespace) -> None:
    """Print help for the requested subcommand, or list all subcommands."""
    if subparsers is None:
        print("No subcommands are available")
        return

    available = list(subparsers.choices.keys())
    if args.subcommand and args.subcommand in available:
        # A known subcommand was named: show its own parser's help
        subparsers.choices[args.subcommand].print_help()
        return

    print("Use pydicom help [subcommand] to show help for a subcommand")
    available.remove("help")
    print(f"Available subcommands: {', '.join(available)}")
# Maps a subcommand name to the callable that registers its subparser.
SubCommandType = dict[str, Callable[[argparse._SubParsersAction], None]]
def get_subcommand_entry_points() -> SubCommandType:
    """Load all entry points registered under 'pydicom_subcommands'.

    Returns a mapping of entry-point name to the loaded registration
    callable.
    """
    return {
        ep.name: ep.load()
        for ep in entry_points(group="pydicom_subcommands")
    }
def main(args: list[str] | None = None) -> None:
    """Entry point for 'pydicom' command line interface

    Parameters
    ----------
    args : List[str], optional
        Command-line arguments to parse.  If ``None``, then :attr:`sys.argv`
        is used.
    """
    global subparsers

    parser = argparse.ArgumentParser(
        prog="pydicom",
        description=(
            f"pydicom command line utilities (Python {sys.version.split()[0]})"
        ),
    )
    subparsers = parser.add_subparsers(help="subcommand help")

    # Built-in "help" subcommand
    help_parser = subparsers.add_parser("help", help="display help for subcommands")
    help_parser.add_argument(
        "subcommand", nargs="?", help="Subcommand to show help for"
    )
    help_parser.set_defaults(func=help_command)

    # Let each registered subcommand add its own subparser
    for register in get_subcommand_entry_points().values():
        register(subparsers)

    ns = parser.parse_args(args)
    if vars(ns):
        # A subcommand was chosen; dispatch to its handler
        ns.func(ns)
    else:
        parser.print_help()

View File

@@ -0,0 +1,162 @@
# Copyright 2019 pydicom authors. See LICENSE file for details.
"""Pydicom command line interface program for `pydicom show`"""
import argparse
from collections.abc import Callable
from pydicom.dataset import Dataset
from pydicom.cli.main import filespec_help, filespec_parser
def add_subparser(subparsers: argparse._SubParsersAction) -> None:
    """Register the ``show`` subcommand and its arguments."""
    parser = subparsers.add_parser(
        "show", description="Display all or part of a DICOM file"
    )
    parser.add_argument("filespec", help=filespec_help, type=filespec_parser)
    parser.add_argument(
        "-x",
        "--exclude-private",
        help="Don't show private data elements",
        action="store_true",
    )
    parser.add_argument(
        "-t", "--top", help="Only show top level", action="store_true"
    )
    parser.add_argument(
        "-q",
        "--quiet",
        help="Only show basic information",
        action="store_true",
    )
    parser.set_defaults(func=do_command)
def do_command(args: argparse.Namespace) -> None:
    """Execute the ``show`` subcommand for parsed command-line arguments."""
    if len(args.filespec) != 1:
        raise NotImplementedError("Show can only work on a single DICOM file input")

    ds, element_val = args.filespec[0]
    # Default to showing the whole dataset when no element was specified
    element_val = element_val or ds

    if args.exclude_private:
        ds.remove_private_tags()

    if isinstance(element_val, Dataset):
        if args.quiet:
            show_quiet(element_val)
            return
        if args.top:
            print(element_val.top())
            return
    print(str(element_val))
def SOPClassname(ds: Dataset) -> str | None:
    """Return a display line for the SOP Class name, or ``None`` if absent."""
    uid = ds.get("SOPClassUID")
    return None if uid is None else f"SOPClassUID: {uid.name}"
def quiet_rtplan(ds: Dataset) -> str | None:
    """Return a multi-line summary of an RT Plan dataset.

    Returns ``None`` if `ds` has no *BeamSequence*, i.e. it does not look
    like an RT Plan.  Otherwise summarises the plan label/name, each
    fraction group with its referenced beams, and each beam's main
    characteristics.
    """
    if "BeamSequence" not in ds:
        return None

    plan_label = ds.get("RTPlanLabel")
    plan_name = ds.get("RTPlanName")
    line = f"Plan Label: {plan_label} "
    if plan_name:
        line += f"Plan Name: {plan_name}"
    lines = [line]

    if "FractionGroupSequence" in ds:  # it should be, is mandatory
        for fraction_group in ds.FractionGroupSequence:
            fraction_group_num = fraction_group.get("FractionGroupNumber", "")
            descr = fraction_group.get("FractionGroupDescription", "")
            fractions = fraction_group.get("NumberOfFractionsPlanned")
            fxn_info = f"{fractions} fraction(s) planned" if fractions else ""
            lines.append(f"Fraction Group {fraction_group_num} {descr} {fxn_info}")
            num_brachy = fraction_group.get("NumberOfBrachyApplicationSetups")
            lines.append(f" Brachy Application Setups: {num_brachy}")
            # One summary line per beam referenced by this fraction group
            for refd_beam in fraction_group.ReferencedBeamSequence:
                ref_num = refd_beam.get("ReferencedBeamNumber")
                dose = refd_beam.get("BeamDose")
                mu = refd_beam.get("BeamMeterset")
                line = f" Beam {ref_num} "
                if dose or mu:
                    line += f"Dose {dose} Meterset {mu}"
                lines.append(line)

    # One summary line per beam in the plan itself
    for beam in ds.BeamSequence:
        beam_num = beam.get("BeamNumber")
        beam_name = beam.get("BeamName")
        beam_type = beam.get("BeamType")
        beam_delivery = beam.get("TreatmentDeliveryType")
        beam_radtype = beam.get("RadiationType")
        line = (
            f"Beam {beam_num} '{beam_name}' {beam_delivery} "
            f"{beam_type} {beam_radtype}"
        )
        if beam_type == "STATIC":
            # Static beams: report geometry from the first control point
            cp = beam.ControlPointSequence[0]
            if cp:
                energy = cp.get("NominalBeamEnergy")
                gantry = cp.get("GantryAngle")
                bld = cp.get("BeamLimitingDeviceAngle")
                couch = cp.get("PatientSupportAngle")
                line += f" energy {energy} gantry {gantry}, coll {bld}, couch {couch}"
        wedges = beam.get("NumberOfWedges")
        comps = beam.get("NumberOfCompensators")
        boli = beam.get("NumberOfBoli")
        blocks = beam.get("NumberOfBlocks")
        line += f" ({wedges} wedges, {comps} comps, {boli} boli, {blocks} blocks)"
        lines.append(line)

    return "\n".join(lines)
def quiet_image(ds: Dataset) -> str | None:
    """Return a short summary of an image dataset.

    Returns ``None`` if `ds` is not an "Image Storage" SOP class.
    """
    if "SOPClassUID" not in ds or "Image Storage" not in ds.SOPClassUID.name:
        return None

    keywords = (
        "BitsStored",
        "Modality",
        "Rows",
        "Columns",
        "SliceLocation",
    )
    lines = []
    for keyword in keywords:
        lines.append(f"{keyword}: {ds.get(keyword, 'N/A')}")
    return "\n".join(lines)
# Items displayed by show_quiet(), in order.  Each item is either a DICOM
# keyword (shown as "Keyword: value") or a callable taking the Dataset and
# returning a display string or None (None suppresses the line).
quiet_items: list[Callable[[Dataset], str | None] | str] = [
    SOPClassname,
    "PatientName",
    "PatientID",
    # Images
    "StudyID",
    "StudyDate",
    "StudyTime",
    "StudyDescription",
    quiet_image,
    quiet_rtplan,
]
def show_quiet(ds: Dataset) -> None:
    """Print a brief summary of the dataset driven by ``quiet_items``."""
    for item in quiet_items:
        if not callable(item):
            # Plain DICOM keyword: print "Keyword: value"
            print(f"{item}: {ds.get(item, 'N/A')}")
            continue
        summary = item(ds)
        if summary:
            print(summary)

View File

@@ -0,0 +1,586 @@
# Copyright 2008-2023 pydicom authors. See LICENSE file for details.
"""Pydicom configuration options."""
# doc strings following items are picked up by sphinx for documentation
import logging
import os
from contextlib import contextmanager
from typing import Optional, Any, TYPE_CHECKING
from collections.abc import Generator
have_numpy = True
try:
import numpy # noqa: F401
except ImportError:
have_numpy = False
if TYPE_CHECKING: # pragma: no cover
from pydicom.dataelem import RawDataElement
from typing import Protocol
class ElementCallback(Protocol):
def __call__(
self,
raw_elem: "RawDataElement",
**kwargs: Any,
) -> "RawDataElement": ...
_use_future = False
_use_future_env = os.getenv("PYDICOM_FUTURE")
# Logging system and debug function to change logging level
logger = logging.getLogger("pydicom")
logger.addHandler(logging.NullHandler())
debugging: bool
def debug(debug_on: bool = True, default_handler: bool = True) -> None:
    """Turn on/off debugging of DICOM file reading and writing.

    When debugging is on, file location and details about the elements read
    at that location are logged to the 'pydicom' logger using Python's
    :mod:`logging` module.

    Parameters
    ----------
    debug_on : bool, optional
        If ``True`` (default) then turn on debugging, ``False`` to turn off.
    default_handler : bool, optional
        If ``True`` (default) then use :class:`logging.StreamHandler` as the
        handler for log messages.
    """
    global logger, debugging

    if default_handler:
        # NOTE: each call with default_handler=True attaches another handler
        stream_handler = logging.StreamHandler()
        stream_handler.setFormatter(logging.Formatter("%(message)s"))
        logger.addHandler(stream_handler)

    logger.setLevel(logging.DEBUG if debug_on else logging.WARNING)
    debugging = bool(debug_on)
# force level=WARNING, in case logging default is set differently (issue 103)
debug(False, False)
# Set the type used to hold DS values
# default False; was decimal-based in pydicom 0.9.7
use_DS_decimal: bool = False
"""Set using :func:`DS_decimal` to control if elements with a
VR of **DS** are represented as :class:`~decimal.Decimal`.
Default ``False``.
"""
data_element_callback: Optional["ElementCallback"] = None
"""Set to a callable function to be called from
:func:`~pydicom.filereader.dcmread` every time a
:class:`~pydicom.dataelem.RawDataElement` has been returned,
before it is added to the :class:`~pydicom.dataset.Dataset`.
Default ``None``.
.. deprecated:: 3.0
``data_element_callback`` will be removed in v4.0, use
:meth:`~pydicom.hooks.Hooks.register_callback` instead.
"""
data_element_callback_kwargs: dict[str, Any] = {}
"""Set the keyword arguments passed to :func:`data_element_callback`.
Default ``{}``.
.. deprecated:: 3.0
``data_element_callback_kwargs`` will be removed in v4.0, use
:meth:`~pydicom.hooks.Hooks.register_kwargs` instead.
"""
def reset_data_element_callback() -> None:
    """Reset the :func:`data_element_callback` function to the default.

    .. deprecated:: 3.0

        ``reset_data_element_callback()`` will be removed in v4.0, use
        :meth:`pydicom.hooks.Hooks.reset` instead.
    """
    global data_element_callback, data_element_callback_kwargs

    data_element_callback = None
    data_element_callback_kwargs = {}
def DS_numpy(use_numpy: bool = True) -> None:
    """Set whether multi-valued elements with VR of **DS** will be numpy arrays

    .. versionadded:: 2.0

    Parameters
    ----------
    use_numpy : bool, optional
        ``True`` (default) to read multi-value **DS** elements as
        :class:`~numpy.ndarray`, ``False`` to read multi-valued **DS**
        data elements as type :class:`~pydicom.multival.MultiValue`.

        Note: once a value has been accessed, changing this setting will
        no longer change its type.

    Raises
    ------
    ValueError
        If :data:`use_DS_decimal` and `use_numpy` are both True.
    """
    global use_DS_numpy

    if use_numpy and use_DS_decimal:
        raise ValueError(
            "Cannot use numpy arrays to read DS elements if `use_DS_decimal` is True"
        )

    use_DS_numpy = use_numpy
def DS_decimal(use_Decimal_boolean: bool = True) -> None:
    """Set DS class to be derived from :class:`decimal.Decimal` or
    :class:`float`.

    If this function is never called, the default in *pydicom* >= 0.9.8
    is for DS to be based on :class:`float`.

    Parameters
    ----------
    use_Decimal_boolean : bool, optional
        ``True`` (default) to derive :class:`~pydicom.valuerep.DS` from
        :class:`decimal.Decimal`, ``False`` to derive it from :class:`float`.

    Raises
    ------
    ValueError
        If `use_Decimal_boolean` and :data:`use_DS_numpy` are
        both ``True``.
    """
    global use_DS_decimal

    # NOTE: the flag is set before the conflict check, matching the
    # historical behavior (the flag stays set even when ValueError is raised)
    use_DS_decimal = use_Decimal_boolean
    if use_DS_decimal and use_DS_numpy:
        raise ValueError("Cannot set use_DS_decimal True if use_DS_numpy is True")

    import pydicom.valuerep

    # Swap the class used for DS element values module-wide
    if use_DS_decimal:
        pydicom.valuerep.DSclass = pydicom.valuerep.DSdecimal
    else:
        pydicom.valuerep.DSclass = pydicom.valuerep.DSfloat
# Configuration flags
use_DS_numpy = False
"""Set using the function :func:`DS_numpy` to control
whether arrays of VR **DS** are returned as numpy arrays.
Default: ``False``.
.. versionadded:: 2.0
"""
use_IS_numpy = False
"""Set to False to avoid IS values being returned as numpy ndarray objects.
Default: ``False``.
.. versionadded:: 2.0
"""
allow_DS_float = False
"""Set to ``True`` to allow :class:`~pydicom.valuerep.DSdecimal`
instances to be created using :class:`floats<float>`; otherwise, they must be
explicitly converted to :class:`str`, with the user explicitly setting the
precision of digits and rounding.
Default ``False``.
"""
enforce_valid_values = False
"""Deprecated.
Use :attr:`Settings.reading_validation_mode` instead.
"""
# Constants used to define how data element values shall be validated
IGNORE = 0
"""If one of the validation modes is set to this value, no value validation
will be performed.
"""
WARN = 1
"""If one of the validation modes is set to this value, a warning is issued if
a value validation error occurs.
"""
RAISE = 2
"""If one of the validation modes is set to this value, an exception is raised
if a value validation error occurs.
"""
class Settings:
    """Collection of several configuration values.

    Accessed via the singleton :attr:`settings`.

    .. versionadded:: 2.3
    """

    def __init__(self) -> None:
        # None means "unset": the property getter then falls back to the
        # legacy `enforce_valid_values` module flag.
        self._reading_validation_mode: int | None = None
        # in future version, writing invalid values will raise by default,
        # currently the default value depends on enforce_valid_values
        self._writing_validation_mode: int | None = RAISE if _use_future else None
        self._infer_sq_for_un_vr: bool = True
        # Chunk size to use when reading from buffered DataElement values
        self._buffered_read_size = 8192

    @property
    def buffered_read_size(self) -> int:
        """Get or set the chunk size when reading from buffered
        :class:`~pydicom.dataelem.DataElement` values.

        Parameters
        ----------
        size : int
            The chunk size to use, must be greater than 0 (default 8192).
        """
        return self._buffered_read_size

    @buffered_read_size.setter
    def buffered_read_size(self, size: int) -> None:
        if size <= 0:
            raise ValueError("The read size must be greater than 0")
        self._buffered_read_size = size

    @property
    def reading_validation_mode(self) -> int:
        """Defines behavior of validation while reading values, compared with
        the DICOM standard, e.g. that DS strings are not longer than
        16 characters and contain only allowed characters.

        * :attr:`WARN` will emit a warning in the case of an invalid value (default)
        * :attr:`RAISE` will raise an error instead
        * :attr:`IGNORE` will bypass the validation (with the exception of some
          encoding errors).
        """
        # upwards compatibility: fall back to the legacy module-level
        # enforce_valid_values flag when the mode has not been set explicitly
        if self._reading_validation_mode is None:
            return RAISE if enforce_valid_values else WARN
        return self._reading_validation_mode

    @reading_validation_mode.setter
    def reading_validation_mode(self, value: int) -> None:
        self._reading_validation_mode = value

    @property
    def writing_validation_mode(self) -> int:
        """Defines behavior for value validation while writing a value.

        See :attr:`Settings.reading_validation_mode`.
        """
        # Same legacy fallback as reading_validation_mode
        if self._writing_validation_mode is None:
            return RAISE if enforce_valid_values else WARN
        return self._writing_validation_mode

    @writing_validation_mode.setter
    def writing_validation_mode(self, value: int) -> None:
        self._writing_validation_mode = value

    @property
    def infer_sq_for_un_vr(self) -> bool:
        """If ``True``, and the VR of a known data element is encoded as
        **UN** in an explicit encoding for an undefined length data element,
        the VR is changed to SQ per PS 3.5, section 6.2.2. Can be set to
        ``False`` where the content of the tag shown as **UN** is not DICOM
        conformant and would lead to a failure if accessing it.
        """
        return self._infer_sq_for_un_vr

    @infer_sq_for_un_vr.setter
    def infer_sq_for_un_vr(self, value: bool) -> None:
        self._infer_sq_for_un_vr = value
settings = Settings()
"""The global configuration object of type :class:`Settings` to access some
of the settings. More settings may move here in later versions.
.. versionadded:: 2.3
"""
@contextmanager
def disable_value_validation() -> Generator:
    """Context manager to temporarily disable value validation
    both for reading and writing.

    Can be used for performance reasons if the values are known to be valid.
    """
    # Stash the raw (possibly None) backing values so that restoring them
    # also restores the "unset" state, not just the effective mode.
    saved_reading = settings._reading_validation_mode
    saved_writing = settings._writing_validation_mode
    try:
        settings.reading_validation_mode = IGNORE
        settings.writing_validation_mode = IGNORE
        yield
    finally:
        settings._reading_validation_mode = saved_reading
        settings._writing_validation_mode = saved_writing
@contextmanager
def strict_reading() -> Generator:
    """Context manager to temporarily enable strict value validation
    for reading."""
    # Save the raw (possibly None) backing value so the "unset" state is
    # restored exactly on exit.
    original_reading_mode = settings._reading_validation_mode
    try:
        settings.reading_validation_mode = RAISE
        yield
    finally:
        settings._reading_validation_mode = original_reading_mode
convert_wrong_length_to_UN = False
"""Convert a field VR to "UN" and return bytes if bytes length is invalid.
Default ``False``.
"""
datetime_conversion = False
"""Set to ``True`` to convert the value(s) of elements with a VR of DA, DT and
TM to :class:`datetime.date`, :class:`datetime.datetime` and
:class:`datetime.time` respectively.
Note that when datetime conversion is enabled then range matching in
C-GET/C-FIND/C-MOVE queries is not possible anymore. So if you need range
matching we recommend to do the conversion manually.
Default ``False``
References
----------
* :dcm:`Range Matching<part04/sect_C.2.2.2.5.html>`
"""
use_none_as_empty_text_VR_value = False
""" If ``True``, the value of a decoded empty data element with
a text VR is ``None``, otherwise (the default), it is is an empty string.
For all other VRs the behavior does not change - the value is en empty
list for VR **SQ** and ``None`` for all other VRs.
Note that the default of this value may change to ``True`` in a later version.
"""
replace_un_with_known_vr = True
""" If ``True``, and the VR of a known data element is encoded as **UN** in
an explicit encoding, the VR is changed to the known value.
Can be set to ``False`` where the content of the tag shown as **UN** is
not DICOM conformant and would lead to a failure if accessing it.
.. versionadded:: 2.0
"""
show_file_meta = True
"""
If ``True`` (default), the 'str' and 'repr' methods
of :class:`~pydicom.dataset.Dataset` begin with a separate section
displaying the file meta information data elements
.. versionadded:: 2.0
"""
import pydicom.pixel_data_handlers.numpy_handler as np_handler # noqa
import pydicom.pixel_data_handlers.rle_handler as rle_handler # noqa
import pydicom.pixel_data_handlers.pillow_handler as pillow_handler # noqa
import pydicom.pixel_data_handlers.jpeg_ls_handler as jpegls_handler # noqa
import pydicom.pixel_data_handlers.gdcm_handler as gdcm_handler # noqa
import pydicom.pixel_data_handlers.pylibjpeg_handler as pylibjpeg_handler # noqa
pixel_data_handlers = [
np_handler,
gdcm_handler,
pillow_handler,
jpegls_handler,
pylibjpeg_handler,
rle_handler,
]
"""Handlers for converting (7FE0,0010) *Pixel Data*.
.. currentmodule:: pydicom.dataset
This is an ordered list of *Pixel Data* handlers that the
:meth:`~Dataset.convert_pixel_data` method will use to try to extract a
correctly sized numpy array from the *Pixel Data* element.
Handlers shall have four methods:
def supports_transfer_syntax(transfer_syntax: UID)
Return ``True`` if the handler supports the transfer syntax indicated in
:class:`Dataset` `ds`, ``False`` otherwise.
def is_available():
Return ``True`` if the handler's dependencies are installed, ``False``
otherwise.
def get_pixeldata(ds):
Return a correctly sized 1D :class:`numpy.ndarray` derived from the
*Pixel Data* in :class:`Dataset` `ds` or raise an exception. Reshaping the
returned array to the correct dimensions is handled automatically.
def needs_to_convert_to_RGB(ds):
Return ``True`` if the *Pixel Data* in the :class:`Dataset` `ds` needs to
be converted to the RGB colourspace, ``False`` otherwise.
The first handler that both announces that it supports the transfer syntax
and does not raise an exception, either in getting the data or when the data
is reshaped to the correct dimensions, is the handler that will provide the
data.
If they all fail only the last exception is raised.
If none raise an exception, but they all refuse to support the transfer
syntax, then this fact is announced in a :class:`NotImplementedError`
exception.
"""
APPLY_J2K_CORRECTIONS = True
"""Use the information within JPEG 2000 data to correct the returned pixel data
.. versionadded:: 2.1
If ``True`` (default), then for handlers that support JPEG 2000 pixel data,
use the component precision and sign to correct the returned ndarray when
using the pixel data handlers. If ``False`` then only rely on the element
values within the dataset when applying corrections.
"""
assume_implicit_vr_switch = True
"""If invalid VR encountered, assume file switched to implicit VR
.. versionadded:: 2.2
If ``True`` (default), when reading an explicit VR file,
if a VR is encountered that is not a valid two bytes within A-Z,
then assume the original writer switched to implicit VR. This has been
seen in particular in some sequences. This does not test that
the VR is a valid DICOM VR, just that it has valid characters.
"""
INVALID_KEYWORD_BEHAVIOR = "WARN"
"""Control the behavior when setting a :class:`~pydicom.dataset.Dataset`
attribute that's not a known element keyword.
.. versionadded:: 2.1
If ``"WARN"`` (default), then warn when an element value is set using
``Dataset.__setattr__()`` and the keyword is camel case but doesn't match a
known DICOM element keyword. If ``"RAISE"`` then raise a :class:`ValueError`
exception. If ``"IGNORE"`` then neither warn nor raise.
Examples
--------
>>> from pydicom import config
>>> config.INVALID_KEYWORD_BEHAVIOR = "WARN"
>>> ds = Dataset()
>>> ds.PatientName = "Citizen^Jan" # OK
>>> ds.PatientsName = "Citizen^Jan"
../pydicom/dataset.py:1895: UserWarning: Camel case attribute 'PatientsName'
used which is not in the element keyword data dictionary
"""
INVALID_KEY_BEHAVIOR = "WARN"
"""Control the behavior when invalid keys are used with
:meth:`~pydicom.dataset.Dataset.__contains__` (e.g. ``'invalid' in ds``).
.. versionadded:: 2.1
Invalid keys are objects that cannot be converted to a
:class:`~pydicom.tag.BaseTag`, such as unknown element keywords or invalid
element tags like ``0x100100010``.
If ``"WARN"`` (default), then warn when an invalid key is used, if ``"RAISE"``
then raise a :class:`ValueError` exception. If ``"IGNORE"`` then neither warn
nor raise.
Examples
--------
>>> from pydicom import config
>>> config.INVALID_KEY_BEHAVIOR = "RAISE"
>>> ds = Dataset()
>>> 'PatientName' in ds # OK
False
>>> 'PatientsName' in ds
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File ".../pydicom/dataset.py", line 494, in __contains__
raise ValueError(msg) from exc
ValueError: Invalid value used with the 'in' operator: must be an
element tag as a 2-tuple or int, or an element keyword
"""
# Resolve the PYDICOM_FUTURE environment variable (read at import time above)
# into the _use_future flag; anything other than a recognized boolean-like
# string is an error.
if _use_future_env:
    if _use_future_env.lower() in ["true", "yes", "on", "1"]:
        _use_future = True
    elif _use_future_env.lower() in ["false", "no", "off", "0"]:
        _use_future = False
    else:
        raise ValueError(
            "Unknown setting for environment variable "
            "PYDICOM_FUTURE. Use True or False."
        )
def future_behavior(enable_future: bool = True) -> None:
    """Imitate the behavior for the next major version of *pydicom*.

    .. versionadded:: 2.1

    This can be used to ensure your code is "future-proof" for known
    upcoming changes in the next major version of *pydicom*. Typically,
    deprecations become errors, and default values of config flags may
    change.

    Parameters
    ----------
    enable_future: bool
        Set ``True`` (default) to emulate future pydicom behavior,
        ``False`` to reset to current pydicom behavior.

    See also
    --------
    :attr:`INVALID_KEYWORD_BEHAVIOR`
    :attr:`INVALID_KEY_BEHAVIOR`
    """
    global _use_future, INVALID_KEYWORD_BEHAVIOR

    _use_future = enable_future
    if enable_future:
        INVALID_KEYWORD_BEHAVIOR = "RAISE"
        settings._writing_validation_mode = RAISE
    else:
        INVALID_KEYWORD_BEHAVIOR = "WARN"
        settings._writing_validation_mode = None
# Apply the future behavior immediately if it was enabled via PYDICOM_FUTURE
if _use_future:
    future_behavior()

View File

@@ -0,0 +1,20 @@
# Copyright 2008-2018 pydicom authors. See LICENSE file for details.
"""pydicom data manager"""
from .data_manager import (
get_charset_files,
get_testdata_file,
get_testdata_files,
get_palette_files,
DATA_ROOT,
external_data_sources,
fetch_data_files,
)
__all__ = [
"fetch_data_files",
"get_charset_files",
"get_palette_files",
"get_testdata_files",
"get_testdata_file",
]

View File

@@ -0,0 +1,23 @@
Filename Character Sets "Patient's Name"
-------- -------------- '--------------'
chrArab.dcm ISO_IR 127 '\xe2\xc8\xc7\xe6\xea^\xe4\xe6\xd2\xc7\xd1'
chrFren.dcm ISO_IR 100 'Buc^J\xe9r\xf4me'
chrFrenMulti.dcm ISO_IR 100 'Buc^J\xe9r\xf4me'
chrGerm.dcm ISO_IR 100 '\xc4neas^R\xfcdiger'
chrGreek.dcm ISO_IR 126 '\xc4\xe9\xef\xed\xf5\xf3\xe9\xef\xf2'
chrH31.dcm ['', 'ISO 2022 IR 87'] 'Yamada^Tarou=\x1b$B;3ED\x1b(B^\x1b$BB@O:\x1b(B=\x1b$B$d$^$@\x1b(B^\x1b$B$?$m$&\x1b(B'
chrH32.dcm ['ISO 2022 IR 13', 'ISO 2022 IR 87'] '\xd4\xcf\xc0\xde^\xc0\xdb\xb3=\x1b$B;3ED\x1b(J^\x1b$BB@O:\x1b(J=\x1b$B$d$^$@\x1b(J^\x1b$B$?$m$&\x1b(J'
chrHbrw.dcm ISO_IR 138 '\xf9\xf8\xe5\xef^\xe3\xe1\xe5\xf8\xe4'
chrI2.dcm ['', 'ISO 2022 IR 149'] 'Hong^Gildong=\x1b$)C\xfb\xf3^\x1b$)C\xd1\xce\xd4\xd7=\x1b$)C\xc8\xab^\x1b$)C\xb1\xe6\xb5\xbf'
chrRuss.dcm ISO_IR 144 '\xbb\xee\xdace\xdc\xd1yp\xd3'
chrX1.dcm ISO_IR 192 'Wang^XiaoDong=\xe7\x8e\x8b^\xe5\xb0\x8f\xe6\x9d\xb1='
chrX2.dcm GB18030 'Wang^XiaoDong=\xcd\xf5^\xd0\xa1\xb6\xab='
Other
=====
chrFrenMulti.dcm is a modified version of chrFren.dcm, with multi-valued PN and LO elements, for testing decoding.
chrSQEncoding.dcm is a minimal constructed dataset with a sequence that has
another encoding (['ISO 2022 IR 13', 'ISO 2022 IR 87']) than the dataset (ISO_IR 192)
chrSQEncoding1.dcm is the same dataset with the encoding ['ISO 2022 IR 13', 'ISO 2022 IR 87']
defined in the dataset, but not in the sequence

View File

@@ -0,0 +1,436 @@
# Copyright 2008-2020 pydicom authors. See LICENSE file for details.
"""Management of pydicom's data files.
External Data Sources
---------------------
*pydicom* can also search third-party data sources for matching data. To do so
your project should register its entry points in its `setup.py` file. For
example, a project named "mydata" with the interface class ``MyInterface``
should register:
.. codeblock: python
from setuptools import setup
setup(
...,
entry_points={
"pydicom.data.external_sources": "mydata = mydata:MyInterface",
},
)
The interface class should have, at a minimum, the following two methods:
* ``get_path(self, name: str, dtype: int) -> str`` - returns the absolute path
to the first file with a filename `name` or raises a ``ValueError`` if no
matching file found.
* ``get_paths(self, pattern: str, dtype: int) -> List[str]`` - returns a list
of absolute paths to filenames matching `pattern`.
Where `name` is the name of the filename to search for, `dtype` is an int
that indicates the type of data to search for and should be one of the
following:
* ``0`` - DICOM dataset
* ``1`` - Character set file
* ``2`` - Palette file
* ``3`` - DICOMDIR file
* ``4`` - JPEG file
And lastly, `pattern` is a str used to filter files against when searching.
For a real-life example of an external data source you can look at the
`pydicom-data <https://github.com/pydicom/pydicom-data>`_ repository.
"""
from enum import IntEnum
import fnmatch
import os
from pathlib import Path
from typing import TYPE_CHECKING
from pydicom.data.download import (
data_path_with_download,
calculate_file_hash,
get_cached_filehash,
get_url_map,
get_data_dir,
)
from pydicom.misc import warn_and_log
if TYPE_CHECKING: # pragma: no cover
from pydicom import Dataset
DATA_ROOT = os.fspath(Path(__file__).parent.resolve())
"""The absolute path to the pydicom/data directory."""
class DataTypes(IntEnum):
    """Constants for data types."""

    # DICOM dataset files
    DATASET = 0
    # Character set test files
    CHARSET = 1
    # Colour palette files
    PALETTE = 2
    # DICOMDIR files
    DICOMDIR = 3
    # JPEG files
    JPEG = 4
def _check_data_hash(fpath: str) -> bool:
    """Return ``True`` if the SHA256 checksum of the file at ``fpath`` is OK.

    Parameters
    ----------
    fpath : str
        The absolute path to the file to perform the checksum for.

    Returns
    -------
    bool
        ``True`` if the checksum matches those in ``hashes.json``, ``False``
        otherwise.

    Raises
    ------
    pydicom.data.download.NoHashFound
        If the file is missing from ``hashes.json``.
    """
    path = Path(fpath)
    return calculate_file_hash(path) == get_cached_filehash(path.name)
def get_external_sources() -> dict:
"""Return a :class:`dict` of external data source interfaces.
Returns
-------
dict
A dict of ``{'source name': <interface class instance>}``.
"""
from importlib.metadata import entry_points
# Prefer pydicom-data as the source
sources = {
vv.name: vv.load()()
for vv in entry_points(group="pydicom.data.external_sources")
}
out = {}
if "pydicom-data" in sources:
out["pydicom-data"] = sources["pydicom-data"]
out.update(sources)
return out
# Cache for external_data_sources(); populated on first call.
_EXTERNAL_DATA_SOURCES: dict | None = None
def external_data_sources() -> dict:
    """Return the available external data sources - loaded once."""
    global _EXTERNAL_DATA_SOURCES

    if _EXTERNAL_DATA_SOURCES is not None:
        return _EXTERNAL_DATA_SOURCES

    # First use: discover and cache the sources
    _EXTERNAL_DATA_SOURCES = get_external_sources()
    return _EXTERNAL_DATA_SOURCES
def online_test_file_dummy_paths() -> dict[str, str]:
    """Return a :class:`dict` of dummy paths to the downloadable test files.

    Returns
    -------
    dict
        A dict of dummy paths to the test files available via download.
    """
    root = os.path.join(DATA_ROOT, "test_files")
    # Map "would-be local path" -> filename for every downloadable file
    return {os.path.join(root, name): name for name in get_url_map()}
def fetch_data_files() -> None:
    """Download missing test files to the local cache."""
    cache = get_data_dir()
    targets = [cache / fname for fname in get_url_map()]

    failed = []
    for target in targets:
        # Download missing files or files that don't match the hash
        try:
            data_path_with_download(target.name)
        except Exception:
            failed.append(target.name)

    if failed:
        raise RuntimeError(
            f"An error occurred downloading the following files: {', '.join(failed)}"
        )
def get_files(
    base: str | os.PathLike, pattern: str = "**/*", dtype: int = DataTypes.DATASET
) -> list[str]:
    """Return all matching file paths from the available data sources.

    First searches the local *pydicom* data store, then any locally available
    external sources, and finally the files available in the
    pydicom/pydicom-data repository.

    .. versionchanged: 2.1

        Added the `dtype` keyword parameter, modified to search locally
        available external data sources and the pydicom/pydicom-data repository

    Parameters
    ----------
    base : str or os.PathLike
        Base directory to recursively search.
    pattern : str, optional
        The pattern to pass to :meth:`~pathlib.Path.glob`, default
        (``'**/*'``).
    dtype : int, optional
        The type of data to search for when using an external source, one of:

        * ``0`` - DICOM dataset
        * ``1`` - Character set file
        * ``2`` - Palette file
        * ``3`` - DICOMDIR file
        * ``4`` - JPEG file

    Returns
    -------
    list of str
        A list of absolute paths to matching files.
    """
    base = Path(base)

    # Search locally
    files = [os.fspath(m) for m in base.glob(pattern)]

    # Search external sources
    for lib, source in external_data_sources().items():
        fpaths = source.get_paths(pattern, dtype)
        if lib == "pydicom-data":
            # For pydicom-data, check the hash against hashes.json
            fpaths = [p for p in fpaths if _check_data_hash(p)]
        files.extend(fpaths)

    # Search https://github.com/pydicom/pydicom-data or local cache
    # To preserve backwards compatibility filter the downloaded files
    # as if they are stored within DATA_ROOT/test_files/*.dcm
    dummy_online_file_path_map = online_test_file_dummy_paths()
    dummy_online_file_path_filtered = fnmatch.filter(
        dummy_online_file_path_map.keys(), os.path.join(base, pattern)
    )
    download_names = [
        os.fspath(dummy_online_file_path_map[dummy_path])
        for dummy_path in dummy_online_file_path_filtered
    ]

    real_online_file_paths = []
    download_error = False
    for filename in download_names:
        # Best-effort download: record failures but keep collecting paths
        try:
            real_online_file_paths.append(os.fspath(data_path_with_download(filename)))
        except Exception:
            download_error = True

    files += real_online_file_paths

    if download_error:
        warn_and_log(
            "One or more download failures occurred, the list of matching "
            "file paths may be incomplete"
        )

    return files
def get_palette_files(pattern: str = "**/*") -> list[str]:
    """Return a list of absolute paths to palettes with filenames matching
    `pattern`.

    Parameters
    ----------
    pattern : str, optional
        The pattern to pass to :meth:`~pathlib.Path.glob`, default
        (``'**/*'``).

    Returns
    -------
    list of str
        A list of absolute paths to matching files.
    """
    palette_dir = Path(DATA_ROOT) / "palettes"
    matches = get_files(base=palette_dir, pattern=pattern, dtype=DataTypes.PALETTE)

    # Exclude python source files found alongside the palette data
    return [fname for fname in matches if not fname.endswith(".py")]
def get_testdata_file(
    name: str,
    read: bool = False,
    download: bool = True,
) -> "str | Dataset | None":
    """Return an absolute path to the first matching dataset with filename
    `name` that is found in a local or external pydicom datastore.

    First searches the local *pydicom* data store, then any locally available
    external sources, and finally the files available in the
    pydicom/pydicom-data repository.

    .. versionchanged:: 2.1

        Modified to search locally available external data sources and the
        pydicom/pydicom-data repository

    .. versionchanged:: 2.2

        Added the `read` keyword parameter.

    .. versionchanged:: 2.3

        Added the `download` keyword parameter.

    Parameters
    ----------
    name : str
        The full file name (without path)
    read : bool, optional
        If ``True`` then use :func:`~pydicom.filereader.dcmread` to read the
        file and return the corresponding
        :class:`~pydicom.dataset.FileDataset`. Default ``False``.
    download : bool, optional
        If ``True`` (default) download the file if missed locally.

    Returns
    -------
    str, pydicom.dataset.Dataset or None
        The absolute path of the file if found, the dataset itself if `read` is
        ``True``, or ``None`` if the file is not found.

    Raises
    ------
    ValueError
        If `name` is an absolute path.
    """
    # Absolute paths are rejected: this helper only resolves bundled or
    # downloadable pydicom test data, so a full path almost certainly
    # means the caller wanted dcmread() instead.
    if os.path.isabs(name):
        raise ValueError(
            f"'get_testdata_file' does not support absolute paths, as it only works"
            f" with internal pydicom test data - did you mean 'dcmread(\"{name}\")'?"
        )
    path = _get_testdata_file(name=name, download=download)
    if read and path is not None:
        # Local import avoids a circular import at module load time
        from pydicom.filereader import dcmread

        return dcmread(path, force=True)
    return path
def _get_testdata_file(name: str, download: bool = True) -> str | None:
    """Search the local, external and (optionally) online stores for `name`."""
    # The data bundled with pydicom is checked first
    local_store = Path(DATA_ROOT) / "test_files"
    local_matches = list(local_store.rglob(name))
    if local_matches:
        return os.fspath(local_matches[0])

    # Then any locally installed external data sources
    fpath: str | None
    for lib, source in external_data_sources().items():
        try:
            fpath = source.get_path(name, dtype=DataTypes.DATASET)
        except ValueError:
            fpath = None

        if not fpath:
            continue

        # Files from pydicom-data must also pass the hash check
        if lib != "pydicom-data" or _check_data_hash(fpath):
            return fpath

    # Finally, try the online store (or its local download cache)
    if download:
        if name in get_url_map():
            try:
                return os.fspath(data_path_with_download(name))
            except Exception:
                warn_and_log(
                    f"A download failure occurred while attempting to "
                    f"retrieve {name}"
                )

    return None
def get_testdata_files(pattern: str = "**/*") -> list[str]:
    """Return a list of absolute paths to datasets with filenames matching
    `pattern`.

    Parameters
    ----------
    pattern : str, optional
        The pattern to pass to :meth:`~pathlib.Path.glob`, default
        (``'**/*'``).

    Returns
    -------
    list of str
        A list of absolute paths to matching files.

    Raises
    ------
    ValueError
        If `pattern` matches an absolute path.
    """
    # Absolute patterns are rejected: this helper only searches the
    # bundled pydicom test data.
    if os.path.isabs(pattern):
        raise ValueError(
            "'get_testdata_files' does not support absolute paths, as it only works"
            " with internal pydicom test data."
        )
    data_path = Path(DATA_ROOT) / "test_files"
    files = get_files(base=data_path, pattern=pattern, dtype=DataTypes.DATASET)
    # Exclude any Python sources stored alongside the data
    files = [filename for filename in files if not filename.endswith(".py")]
    return files
def get_charset_files(pattern: str = "**/*") -> list[str]:
    """Return the absolute paths of character set files matching `pattern`.

    Parameters
    ----------
    pattern : str, optional
        The pattern to pass to :meth:`~pathlib.Path.glob`, default
        (``'**/*'``).

    Returns
    -------
    list of str
        A list of absolute paths to matching files.
    """
    charset_dir = Path(DATA_ROOT) / "charset_files"
    matches = get_files(base=charset_dir, pattern=pattern, dtype=DataTypes.CHARSET)
    # Exclude any Python sources stored alongside the data
    return [fname for fname in matches if not fname.endswith(".py")]

View File

@@ -0,0 +1,295 @@
# Copyright 2020 pydicom authors. See LICENSE file for details.
# Copyright 2018-2019 Cancer Care Associates.
# Relicensed under pydicom LICENSE by Simon Biggs.
import functools
import hashlib
import json
import os
import pathlib
from typing import cast
import urllib.request
import urllib.error
try:
import requests
HAVE_REQUESTS = True
except ImportError:
HAVE_REQUESTS = False
try:
import tqdm
if HAVE_REQUESTS is False:
class DownloadProgressBar(tqdm.tqdm):
def update_to(
self, b: int = 1, bsize: int = 1, tsize: int | None = None
) -> None:
if tsize is not None:
self.total = tsize
self.update(b * bsize - self.n)
USE_PROGRESS_BAR = True
except ImportError:
USE_PROGRESS_BAR = False
from . import retry
from pydicom.misc import warn_and_log
HERE = pathlib.Path(__file__).resolve().parent
_SIMULATE_NETWORK_OUTAGE = False # For testing network outages
def calculate_file_hash(fpath: pathlib.Path) -> str:
    """Return the SHA256 checksum for the file at `fpath`.

    Parameters
    ----------
    fpath : pathlib.Path
        The absolute path to the file that is to be checksummed.

    Returns
    -------
    str
        The SHA256 checksum of the file.
    """
    # Read in fixed-size chunks so large files aren't loaded whole
    chunk_size = 65536
    checksum = hashlib.sha256()
    with open(fpath, "rb") as stream:
        while chunk := stream.read(chunk_size):
            checksum.update(chunk)

    return checksum.hexdigest()
def get_config_dir() -> pathlib.Path:
    """Return the path to the pydicom config directory, creating it if required

    The config directory will be named ``.pydicom`` and will be created in the
    local user's home directory.
    """
    path = pathlib.Path.home() / ".pydicom"
    # Idempotent: creating an already-existing directory is not an error
    path.mkdir(exist_ok=True)
    return path
@retry.retry(
    (urllib.error.HTTPError, urllib.error.URLError),
    exc_msg=("Installing the `requests` package may help"),
)
def download_with_progress(url: str, fpath: pathlib.Path) -> None:
    """Download the file at `url` to `fpath` with a progress bar.

    Retried with exponential backoff on HTTP/URL errors via the ``retry``
    decorator above.  Uses :mod:`requests` when it is installed, otherwise
    falls back to :mod:`urllib`; a :mod:`tqdm` progress bar is shown when
    that package is available.

    Parameters
    ----------
    url : str
        The URL to download the file from.
    fpath : pathlib.Path
        The absolute path where the file will be written to.
    """
    filename = os.fspath(fpath)
    if HAVE_REQUESTS:
        if USE_PROGRESS_BAR:
            # Stream the body in 4 KiB chunks so tqdm can track progress
            r = requests.get(url, stream=True)
            # Servers that omit Content-Length yield a total of 0 (unknown)
            total_size_in_bytes = int(r.headers.get("content-length", 0))
            with open(fpath, "wb") as file:
                for data in tqdm.tqdm(
                    r.iter_content(chunk_size=4096),
                    total=total_size_in_bytes,
                    unit="B",
                    unit_scale=True,
                    miniters=1,
                    desc=url.split("/")[-1],
                ):
                    file.write(data)
        else:
            # No progress bar: fetch the whole body in a single request
            r = requests.get(url)
            with open(filename, "wb") as f:
                f.write(r.content)
    else:
        if USE_PROGRESS_BAR:
            # urllib fallback with a tqdm-based reporthook (see the
            # DownloadProgressBar class defined at module level)
            with DownloadProgressBar(
                unit="B", unit_scale=True, miniters=1, desc=url.split("/")[-1]
            ) as t:
                urllib.request.urlretrieve(url, filename, reporthook=t.update_to)
        else:
            urllib.request.urlretrieve(url, filename)
def get_data_dir() -> pathlib.Path:
    """Return the path to the cache directory, creating it if required."""
    cache_dir = get_config_dir() / "data"
    cache_dir.mkdir(exist_ok=True)
    return cache_dir
@functools.lru_cache
def get_url_map() -> dict[str, str]:
    """Return a dict containing the URL mappings from ``urls.json``."""
    # Cached: the mapping file is static for the life of the process
    text = (HERE / "urls.json").read_text()
    return cast(dict[str, str], json.loads(text))
def get_url(filename: str) -> str:
    """Return the download URL corresponding to `filename`.

    The filename:URL mappings are located in the ``urls.json`` file.

    Parameters
    ----------
    filename : str
        The filename of the file to get the corresponding URL for.

    Returns
    -------
    str
        The download URL corresponding to `filename`.

    Raises
    ------
    ValueError
        If `filename` is not in the ``urls.json`` record.
    """
    # Convert filename to lowercase because windows filenames are
    # case-insensitive
    urls = {k.lower(): v for k, v in get_url_map().items()}
    try:
        return urls[filename.lower()]
    except KeyError:
        # Suppress the internal KeyError - it adds nothing beyond the message
        raise ValueError(
            "The file provided isn't within pydicom's urls.json record."
        ) from None
def data_path_with_download(
    filename: str,
    check_hash: bool = True,
    redownload_on_hash_mismatch: bool = True,
    url: str | None = None,
    quiet: bool = True,
) -> pathlib.Path:
    """Return the absolute path to the cached file with `filename`.

    If the file isn't available in the cache then it will be downloaded.

    Parameters
    ----------
    filename : str
        The filename of the file to return the path to.
    check_hash : bool, optional
        ``True`` to perform a SHA256 checksum on the file, ``False`` otherwise.
    redownload_on_hash_mismatch : bool, optional
        ``True`` to redownload the file on checksum failure, ``False``
        otherwise.
    url : str, optional
        The file's corresponding download URL. If ``None`` (default) it is
        looked up in ``urls.json``.
    quiet : bool, optional
        Currently unused by this function.

    Returns
    -------
    pathlib.Path
        The absolute path to the file.

    Raises
    ------
    RuntimeError
        If the (test-only) network outage simulation is active.
    ValueError
        If `check_hash` is ``True``, the file's checksum doesn't match the
        recorded one, and redownloading is disabled or was already attempted.
    """
    if _SIMULATE_NETWORK_OUTAGE:
        raise RuntimeError("No network!")
    filepath = get_data_dir().joinpath(filename)
    # A cached file with no recorded hash is treated as stale
    if check_hash and filepath.exists():
        try:
            get_cached_filehash(filename)
        except NoHashFound:
            filepath.unlink()  # Force a redownload
    if not filepath.exists():
        if url is None:
            url = get_url(filename)
        download_with_progress(url, filepath)
    if check_hash:
        try:
            hash_agrees = data_file_hash_check(filename)
        except NoHashFound:
            # No recorded hash to compare against - accept the file as-is
            return filepath.resolve()
        if not hash_agrees:
            if redownload_on_hash_mismatch:
                # Single retry: delete and redownload, but don't recurse again
                filepath.unlink()
                return data_path_with_download(
                    filename, redownload_on_hash_mismatch=False
                )
            raise ValueError("The file on disk does not match the recorded hash.")
    return filepath.resolve()
class NoHashFound(KeyError):
    """Raised when no checksum is recorded for a file in ``hashes.json``."""
def get_cached_filehash(filename: str) -> str:
    """Return the SHA256 checksum of a cached file.

    Parameters
    ----------
    filename : str
        The filename of the cached file to calculate the checksum for.

    Returns
    -------
    str
        The SHA256 checksum of the cached file.

    Raises
    ------
    NoHashFound
        If `filename` has no recorded checksum in ``hashes.json``.
    """
    with open(HERE / "hashes.json") as hash_file:
        recorded = cast(dict[str, str], json.load(hash_file))

    # Compare filenames case-insensitively because windows filenames are
    # case-insensitive
    lowered = {k.lower(): v for k, v in recorded.items()}
    try:
        return lowered[filename.lower()]
    except KeyError:
        raise NoHashFound
def data_file_hash_check(filename: str) -> bool:
    """Return ``True`` if the SHA256 checksum of the cached file is correct.

    If no checksum is recorded for `filename` then the calculated one is
    written into ``hashes.json`` before :class:`NoHashFound` is re-raised.

    Parameters
    ----------
    filename : str
        The filename of the cached file to check.

    Returns
    -------
    bool
        ``True`` if the cached file has the correct checksum, ``False``
        otherwise.

    Raises
    ------
    NoHashFound
        If `filename` had no recorded checksum (the record is updated with
        the calculated hash before raising).
    """
    filename = os.fspath(filename)
    filepath = get_data_dir().joinpath(filename)
    calculated_filehash = calculate_file_hash(filepath)
    try:
        cached_filehash = get_cached_filehash(filename)
    except NoHashFound:
        warn_and_log("Hash not found in hashes.json. File will be updated.")
        # Record the freshly calculated hash so future checks succeed...
        with open(HERE / "hashes.json") as hash_file:
            hashes = json.load(hash_file)
        hashes[filename] = calculated_filehash
        with open(HERE / "hashes.json", "w") as hash_file:
            json.dump(hashes, hash_file, indent=2, sort_keys=True)
        # ...but still propagate so the caller knows no hash existed
        raise
    return cached_filehash == calculated_filehash

View File

@@ -0,0 +1,81 @@
{
"693_J2KR.dcm": "c392d8bd1f952ed2d9387d5143d34c5a29ac9d74566688169731a50ac6a82aa2",
"693_UNCI.dcm": "42d6c33d6666bf569a53951211be6fca2ab04956db43c3f75a9720d976ab128c",
"693_UNCR.dcm": "cc4cdd599231922ecf63de2ddacf03d51c4588805c9154c2eef1ff49c23b32be",
"JPEG-LL.dcm": "c9d000c75d92b143ce1c0421471a7e9a69c8996d98b2589e533e311615a10079",
"JPEG2000_UNC.dcm": "645ff302c7f7ee6c402d74c7c9e3cb5efdb861a828959cc2adc8775a8260688d",
"JPGLosslessP14SV1_1s_1f_8b.dcm": "1978d4f058e52d3239fae33f261b3dc74605fdd9f89031fffd57bea6218d0dbf",
"MR-SIEMENS-DICOM-WithOverlays.dcm": "094faf56c63bff84c30567e29de0c67d7c5a8ae05cf880ac12175491b6b645d2",
"MR2_J2KI.dcm": "8319846e6ad6dc70dbbaf61748b1987a6807fd02db3da24e7989fd5a5ce19e4e",
"MR2_J2KR.dcm": "707f0a1b648b79f17b61e241af31fb9edea7fe596681a4bcab6cce890300a9a5",
"MR2_UNCI.dcm": "7f79ac33e1ab32e1a8ca10ce62f18e5a2372e78c8a6684af17302b1a0171fc46",
"MR2_UNCR.dcm": "c14c7f0c6e25bd4dfbb822fe264e540fc7142bf1c9d15d4c652ec8f5f97fa9e8",
"OBXXXX1A.dcm": "164a460bebdc15fbe391ad4bfe4c84672eb2bad57adfe7dad372fd7367b0f63e",
"OBXXXX1A_2frame.dcm": "6627f6e46dbf8c16292fb1eaff8807439bcd233dc68099c07f0b83c4093256b1",
"OBXXXX1A_expb.dcm": "0cf6b4b04a1f239755fe9aef2b093b3004a290c262ec2567f555212d6c679c83",
"OBXXXX1A_expb_2frame.dcm": "2ccd16a61d680e85ad82fe1d60c4c50496a91309aa2fd27157fc9660291d6ba5",
"OBXXXX1A_rle.dcm": "aaf57785817dbe35503c6175d677d2efa811f90e931fc5017611ba9ff4c7f92a",
"OBXXXX1A_rle_2frame.dcm": "65bee869c507f535edea93a446a26e941fb9cbc3819e4d73395f11eef56d4687",
"OT-PAL-8-face.dcm": "d5560470077f77ef6a0a52d22f9f61e803436d2b468a9550a4d12c5675ee0a97",
"RG1_J2KI.dcm": "744d01372fdda4e21b507bb7f97329065de961f4f263342079b369b430064d65",
"RG1_J2KR.dcm": "7fbfd29360af806770102fd7c4ffcb2a133075bd00920a4bb63460d516f67ac4",
"RG1_UNCI.dcm": "3561020824868615a93a51078671b3ff73bb2578c966f76def99b4d982897e75",
"RG1_UNCR.dcm": "946f28f48b9fbf360196a9b835c8fce83b0c654bf85a5107663c8a61df02e498",
"RG3_J2KI.dcm": "c90c915c0c373eb6d244151f9476b05e50623c303ac20334ca9ce4aab0dddf19",
"RG3_J2KR.dcm": "ffde92ba154a7d5ed2ab70b7cd37892772f8bef63fb26f9080327c6a089c205b",
"RG3_UNCI.dcm": "9ef0260919de89774da90336ad16c03a5be899a8bb663bbaea52b6d0769bec78",
"RG3_UNCR.dcm": "6babfc42dd404213e1758d6dbb93648c248783cc23f593103fff4295c3374dfb",
"SC_rgb.dcm": "b0f868d6a689a0ff96c39b459caf1b628eacd74134114ce84549573321231138",
"SC_rgb_16bit.dcm": "3dc969768431d1cb2695dcd3f190588b02413798dab8420418d2fbb9cb4d4075",
"SC_rgb_16bit_2frame.dcm": "f251a296e1aa5dde37423a9aacba7f31b0b4869328caa6e42bf6b110f007c401",
"SC_rgb_2frame.dcm": "9b5c0306679675c688c2044d97878a6a14ce9976ecdf022309e5f6e9ceaffd9c",
"SC_rgb_32bit.dcm": "c3dac5c807ab27227c0d36b7cd34bb776103bb08d230ff74e62259eeeef0769a",
"SC_rgb_32bit_2frame.dcm": "33f78c27519f23e0410e9c5d24f55380a431255f00ff95e12a26fd45765a7920",
"SC_rgb_dcmtk_ebcr_dcmd.dcm": "e183a37c833c78da6c516aed9920527d80d7f1bbaf805a92530024e1aa2e74ff",
"SC_rgb_dcmtk_ebcyn1_dcmd.dcm": "a963683216b270b788682dc132a65965406a3100722c2d0c2fd2219a0ea53c66",
"SC_rgb_dcmtk_ebcyn2_dcmd.dcm": "2692a16f99b879c742398f3a5b4b9508165d4fe6b056eaa85642ff6bed80ff62",
"SC_rgb_dcmtk_ebcynp_dcmd.dcm": "6324aa7eb90e57299087a70ff6875b10f4d17b8e359ee2f20f1eaaf3d0876993",
"SC_rgb_dcmtk_ebcys2_dcmd.dcm": "f6334492b38d4494b0e8929c4f6b34e9decba9b2dae4e01749263bf254a8c096",
"SC_rgb_dcmtk_ebcys4_dcmd.dcm": "9fb6b7e5dd1f1097ecb23fcd2afafeee9c5233f75680b0922b723f2f1b7b09ab",
"SC_rgb_expb.dcm": "e92997e0cf83407693478ca6f2ce44f42f50f73751f11c355ce555ef86dc8e84",
"SC_rgb_expb_16bit.dcm": "5e8e2340ba9698deba857f76e0ee007c1acb88de84841519425afe76b5b25c11",
"SC_rgb_expb_16bit_2frame.dcm": "fb88f409d21ca9c08672f32f756d0ba0d57de91f8240cf807971085a600e866b",
"SC_rgb_expb_2frame.dcm": "b8b9adb32b2c3ce33c3136620a9b00c2440e047574305e76d176e28ad374134f",
"SC_rgb_expb_32bit.dcm": "5153bb5df191a2b1ec40f592d433a097523a2979ee2ec22ae47ad2bf823bebd3",
"SC_rgb_expb_32bit_2frame.dcm": "cb4e18465d10d4c60afcf8e591b44687ffac8cfd63ab9ca3b6ad45ec25dc2175",
"SC_rgb_gdcm2k_uncompressed.dcm": "abf72c420b8bb97a29b93cb5d63a633271b65038d8323e28d71334bc56ef1a2b",
"SC_ybr_full_uncompressed.dcm": "3c9f4b2b82a3f88ce5340cb07ce14782dcbb09840938e4489e8c21eac1f02dd6",
"US1_J2KI.dcm": "22340375674ff253196ce8a147acf0458bea3f105ff2c6af81f0eb119729605b",
"US1_J2KR.dcm": "2427fdc82d90cd4ce8a69b5157eecb37549902dce138ac15c6456a7eae70b83d",
"US1_UNCI.dcm": "b7556a5414d5ed6bd0359b8222eda10efcce81762428848d9a3ac6be5b55cb6c",
"US1_UNCR.dcm": "af5a66e40cd49d15dfbf7b78c850eba0662bdc7339339c3fa13f123a57e812cb",
"bad_sequence.dcm": "0677915e5c3e8c98498eb3d1b726ccf38ba0d8ada657c8ca7fe1b8b9b5890f4f",
"color-pl.dcm": "16bfc3134e59d789985efddfc70d924420b16e1c6d1f21c960bb4544c9e9dbf9",
"color-px.dcm": "bf10a89f277743ea337b7c4741efa0709a086f0161e1ff2b94cff01e428047e4",
"color3d_jpeg_baseline.dcm": "c8798b8abf8ae0a18e8c9952e7c7f75f3cc8465234b1b63f9e3ba3bebb9d5625",
"eCT_Supplemental.dcm": "0a4c3aa02d1b0b4826daa5ffe85ef13be83c1433842a9a98b901e075136dd86f",
"emri_small.dcm": "151233ec63f64ebb63b979df51aa827cd612a53422c073f6ef341770c7bc9a56",
"emri_small_RLE.dcm": "93c19bca3fb6b7202dcd067de8d16cb6b3f7c6e9a0632e474aab81175ee45266",
"emri_small_big_endian.dcm": "8e18ed3542bc4df70dc6acda87eab5095b19e2b4c1b7fb72ba457e7c217b1ab7",
"emri_small_jpeg_2k_lossless.dcm": "b2b4063359a08ed3b0afa9f4e4f72f84af79e5116515b446d9a30da9dc7f1888",
"emri_small_jpeg_2k_lossless_too_short.dcm": "8742a49b7d02dedb11e7926d30900a415c42efeff02a64a0aa0f0873cf6da582",
"emri_small_jpeg_ls_lossless.dcm": "24de03c9c0f8b5aa75d7fbcc894f94e612b66702175b4936589a0849ec9f87b4",
"explicit_VR-UN.dcm": "28c4a61022d7dbebec97e2f1bbdad0ed097bee2c62727c26a3f3720248c9c6e7",
"gdcm-US-ALOKA-16.dcm": "f1a2d5f7c4ffe87dc589b12738084099fe44a436f6980f5d7e96a026ad356d65",
"gdcm-US-ALOKA-16_big.dcm": "2a801cbd7bd04ed28b9c14c7a8edb04b43384e38f00574e27c0fab8f4aa62db4",
"liver.dcm": "4f8fb316b6df067bdf2ef7bc2385fd571ad5be67e171aed3ed902a71293d9d5c",
"liver_expb.dcm": "fe3323f3f4a2166e4c5305a2380a035a66504197f3f01a6e2b50bbd9814721d5",
"mlut_18.dcm": "9c65b39df55dc46a4670f76e0ec1093d097206ed46c2d7e23b8051c87ef0228b",
"vlut_04.dcm": "64f54c0f490ce3fa2faac0a90a7ca0166caa025f8fdcfbe181906387a7867c27",
"HTJ2KLossless_08_RGB.dcm": "38f8e8adf46b928a12f1905df1405bc8a32c10286733c47562c75be84ceae00e",
"HTJ2K_08_RGB.dcm": "9a7ae1960f18315c4d58876c2a8333a704e89ca3697edd5b69f600773220eb90",
"JLSL_RGB_ILV0.dcm": "f8836a650728f4f1b014a52905e321490f3eefcc0f71ac27fd2c1bd7fc5bbcc4",
"JLSL_RGB_ILV1.dcm": "281610d528d8e22bd52e79d17261ef0c238ef8cfc50696a2d9875a933108864e",
"JLSL_RGB_ILV2.dcm": "f8d670e9988cbca207d3367d916aff3cb508c076495211a0d132692266e9546d",
"JLSN_RGB_ILV0.dcm": "a377750d24bd3413d21faa343662dfff997db9acf65c0b095c5d8a95beb866fa",
"JLSL_08_07_0_1F.dcm": "308fb028c8fbdd1e9a93e731978ea4da6b15cb55b40451cf6f21e7c9ba35dd8a",
"JLSL_16_15_1_1F.dcm": "61f38f250a7dc82c44529c0face2eeab3ffd02ca8b9dfc756dd818eb252104b6",
"parametric_map_float.dcm": "957f34397c26d82f7a90cad7a653ce0f7238f4be6aa9dfa9a33bae5dc2ce7e23",
"parametric_map_double_float.dcm": "a41e0b78b05e543a2448e22435858f9ca8d5f94807d7b391b93b4bca80e23a22",
"liver_nonbyte_aligned.dcm": "530c6af2a2a0caa6033d99ad407fe1f6e3942c64a8fcfc5649d4d06c26473862"
}

View File

@@ -0,0 +1,29 @@
DICOM Well-known Color Palettes
http://dicom.nema.org/medical/dicom/current/output/chtml/part06/chapter_B.html
+----------------------------+---------------------+------------------+
| (0070,0080) Content Label | SOP Instance UID | Filename |
+============================+=====================+==================+
| HOT_IRON | 1.2.840.10008.1.5.1 | hotiron.dcm |
+----------------------------+---------------------+------------------+
| PET | 1.2.840.10008.1.5.2 | pet.dcm |
+----------------------------+---------------------+------------------+
| HOT_METAL_BLUE | 1.2.840.10008.1.5.3 | hotmetalblue.dcm |
+----------------------------+---------------------+------------------+
| PET_20_STEP | 1.2.840.10008.1.5.4 | pet20step.dcm |
+----------------------------+---------------------+------------------+
| SPRING | 1.2.840.10008.1.5.5 | spring.dcm |
+----------------------------+---------------------+------------------+
| SUMMER | 1.2.840.10008.1.5.6 | summer.dcm |
+----------------------------+---------------------+------------------+
| FALL | 1.2.840.10008.1.5.7 | fall.dcm |
+----------------------------+---------------------+------------------+
| WINTER | 1.2.840.10008.1.5.8 | winter.dcm |
+----------------------------+---------------------+------------------+
* All color palettes have 256 LUT entries, a first mapping of 0, and 8-bit
entries
* HOT_IRON, PET, HOT_METAL_BLUE, and PET_20_STEP use normal color palette LUT
data.
* SPRING, SUMMER, FALL and WINTER use segmented color palette LUT data.

View File

@@ -0,0 +1,62 @@
from functools import wraps
import logging
import time
from typing import Any
from collections.abc import Callable
def retry(
exc: type[Exception] | tuple[type[Exception], ...],
exc_msg: str | None = None,
tries: int = 4,
delay: int = 3,
backoff: int = 2,
logger: logging.Logger | None = None,
) -> Callable[[Callable], Any]:
"""Retry calling the decorated function using an exponential backoff.
https://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
original from: https://wiki.python.org/moin/PythonDecoratorLibrary#Retry
Parameters
----------
exc : Exception or Tuple[Exception, ...]
The exception to check. may be a tuple of exceptions to check.
exc_msg : str, optional
The message to be shown if an exception occurs.
tries : int, optional
The number of times to try (not retry) before giving up, default ``4``.
delay : int, optional
The initial delay between retries in seconds, default ``3``.
backoff : int, optional
The backoff multiplier e.g. value of 2 will double the delay each
retry, default ``2``.
logger : logging.Logger, optional
The logger to use. If ``None`` (default), print to stdout.
"""
def deco_retry(f: Callable) -> Any:
@wraps(f)
def f_retry(*args: Any, **kwargs: Any) -> Any:
mtries, mdelay = tries, delay
while mtries > 1:
try:
return f(*args, **kwargs)
except exc as e:
msg = f"{e}: retrying in {mdelay} seconds..."
if exc_msg:
msg += f" {exc_msg}"
if logger:
logger.warning(msg)
else:
print(msg)
time.sleep(mdelay)
mtries -= 1
mdelay *= backoff
return f(*args, **kwargs)
return f_retry # true decorator
return deco_retry

Some files were not shown because too many files have changed in this diff Show More