Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 8 additions & 2 deletions src/dolphin/interferogram.py
Original file line number Diff line number Diff line change
Expand Up @@ -774,6 +774,7 @@ def convert_pl_to_ifg(
reference_date: DateOrDatetime,
output_dir: Filename,
dry_run: bool = False,
date_format: str = io.DEFAULT_DATETIME_FORMAT,
) -> Path:
"""Convert a phase-linked SLC to an interferogram by conjugating the phase.

Expand All @@ -793,6 +794,11 @@ def convert_pl_to_ifg(
Default = False (the ifgs will be created/written to disk.)
`dry_run=True` is used to plan out which ifgs will be formed
before actually running the workflow.
date_format : str, optional
``strptime``-compatible format used both to parse the date from
``phase_linked_slc`` and to format the output filename. Must match
the format of the input filename so the time-of-day component is
preserved when callers use formats like ``"%Y%m%d%H%M%S"``.

Returns
-------
Expand All @@ -802,8 +808,8 @@ def convert_pl_to_ifg(
"""
# The phase_linked_slc will be named with the secondary date.
# Make the output from that, plus the given reference date
secondary_date = get_dates(phase_linked_slc)[-1]
date_str = utils.format_date_pair(reference_date, secondary_date)
secondary_date = get_dates(phase_linked_slc, fmt=date_format)[-1]
date_str = utils.format_date_pair(reference_date, secondary_date, fmt=date_format)
out_name = Path(output_dir) / f"{date_str}.int.vrt"
if dry_run:
return out_name
Expand Down
1 change: 1 addition & 0 deletions src/dolphin/stack.py
Original file line number Diff line number Diff line change
Expand Up @@ -547,6 +547,7 @@ def plan(
output_reference_idx=output_reference_idx,
compressed_reference_idx=compressed_reference_idx,
output_folder=cur_output_folder,
file_date_fmt=self.file_date_fmt,
)

output_ministacks.append(cur_ministack)
Expand Down
2 changes: 1 addition & 1 deletion src/dolphin/stitching.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ def merge_by_date(

for dates, cur_images in grouped_images.items():
logger.info(f"{dates}: Stitching {len(cur_images)} images.")
date_str = utils.format_dates(*dates)
date_str = utils.format_dates(*dates, fmt=file_date_fmt)
# If we passed files where different dates have different prefixes,
# we need to use the common prefix before the first date token
# e.g. if we have "temporal_coherence_<dates>,...
Expand Down
11 changes: 7 additions & 4 deletions src/dolphin/timeseries.py
Original file line number Diff line number Diff line change
Expand Up @@ -215,6 +215,7 @@ def run(
wavelength=wavelength,
method=method,
bad_pixel_mask=bad_pixel_mask,
file_date_fmt=file_date_fmt,
)
if extra_reference_date is None:
final_ts_paths = inverted_phase_paths
Expand Down Expand Up @@ -309,7 +310,7 @@ def _redo_reference(
# To create the interferogram (r, r+1), we subtract
# (1, r) from (1, r+1)
cur_img = inverted_phase_paths[idx]
new_stem = format_dates(ref_date, secondary_dates[idx])
new_stem = format_dates(ref_date, secondary_dates[idx], fmt=file_date_fmt)
cur_output_name = extra_out_dir / f"{new_stem}.tif"
cur = io.load_gdal(cur_img, masked=True)
new_out = cur - ref
Expand Down Expand Up @@ -1046,10 +1047,12 @@ def invert_unw_network(
suffix = ".tif"
# Create the `n_sar_dates - 1` output files (skipping the 0 reference raster)
out_paths = [
Path(output_dir) / f"{format_dates(ref_date, d)}{suffix}" for d in sar_dates[1:]
Path(output_dir) / f"{format_dates(ref_date, d, fmt=file_date_fmt)}{suffix}"
for d in sar_dates[1:]
]
out_residuals_paths = [
Path(output_dir) / f"residuals_{format_dates(ref_date, d)}{suffix}"
Path(output_dir)
/ f"residuals_{format_dates(ref_date, d, fmt=file_date_fmt)}{suffix}"
for d in sar_dates[1:]
]
if all(p.exists() for p in out_paths):
Expand Down Expand Up @@ -1585,7 +1588,7 @@ def create_nonzero_conncomp_counts(

# Create output paths for each date
suffix = "_valid_count.tif"
out_paths = [output_dir / f"{d.strftime('%Y%m%d')}{suffix}" for d in sar_dates]
out_paths = [output_dir / f"{d.strftime(file_date_fmt)}{suffix}" for d in sar_dates]

if all(p.exists() for p in out_paths):
logger.info("All output files exist, skipping counting")
Expand Down
6 changes: 6 additions & 0 deletions src/dolphin/unwrap/_unwrap.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ def run(
scratchdir: PathOrStr | None = None,
delete_intermediate: bool = True,
overwrite: bool = False,
file_date_fmt: str = "%Y%m%d",
) -> tuple[list[Path], list[Path]]:
"""Run snaphu on all interferograms in a directory.

Expand Down Expand Up @@ -94,6 +95,10 @@ def run(
Must specify `scratchdir` for this option to be used.
overwrite : bool, optional, default = False
Overwrite existing unwrapped files.
file_date_fmt : str, optional
The strftime format used to parse acquisition dates from input
filenames and to write the date portion of output filenames.
Default is "%Y%m%d".

Returns
-------
Expand Down Expand Up @@ -129,6 +134,7 @@ def run(
mask_filename=mask_filename,
options=unwrap_options.spurt_options,
scratchdir=scratchdir,
file_date_fmt=file_date_fmt,
)
for f in unw_paths:
io.set_raster_units(f, "radians")
Expand Down
7 changes: 6 additions & 1 deletion src/dolphin/unwrap/_unwrap_3d.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ def unwrap_spurt(
options: SpurtOptions = DEFAULT_OPTIONS,
scratchdir: PathOrStr | None = None,
num_retries: int = 3,
file_date_fmt: str = "%Y%m%d",
) -> tuple[list[Path], list[Path]]:
"""Perform 3D unwrapping using `spurt` via subprocess call."""
# NOTE: we are working around spurt currently wanting "temporal_coherence.tif",
Expand Down Expand Up @@ -86,6 +87,8 @@ def unwrap_spurt(
str(scratch_path / "emcf_tmp"),
"-c",
str(0.5), # arbitrary, since we are passing a 0/1 file anyway
"--date-fmt",
file_date_fmt,
]
if not options.general_settings.use_tiles:
cmd.append("--singletile")
Expand Down Expand Up @@ -157,7 +160,9 @@ def run_with_retry(cmd: list[str], num_retries: int = 3) -> int:
)

if options.run_ambiguity_interpolation:
filled_masked_unw_regions(unw_filenames, ifg_filenames)
filled_masked_unw_regions(
unw_filenames, ifg_filenames, file_date_fmt=file_date_fmt
)
return unw_filenames, conncomp_filenames


Expand Down
2 changes: 2 additions & 0 deletions src/dolphin/workflows/displacement.py
Original file line number Diff line number Diff line change
Expand Up @@ -262,6 +262,7 @@ def run(
nlooks=nlooks,
unwrap_options=cfg.unwrap_options,
mask_file=cfg.mask_file,
file_date_fmt=cfg.input_options.cslc_date_fmt,
)

# ##############################################
Expand Down Expand Up @@ -294,6 +295,7 @@ def run(
wavelength=cfg.input_options.wavelength,
add_overviews=cfg.output_options.add_overviews,
extra_reference_date=cfg.output_options.extra_reference_date,
file_date_fmt=cfg.input_options.cslc_date_fmt,
)

else:
Expand Down
6 changes: 6 additions & 0 deletions src/dolphin/workflows/sequential.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,12 @@ def run_wrapped_phase_sequential(
max_num_compressed=max_num_compressed,
output_reference_idx=output_reference_idx,
compressed_slc_plan=compressed_slc_plan,
# Propagate the caller's date format so phase-linked SLC and CRLB
# filenames preserve any time-of-day component. Without this, output
# filenames are written with the default ``%Y%m%d`` and ``create_ifgs``
# later fails to extract dates when ``cslc_date_fmt`` includes hours
# (e.g. ``%Y%m%d%H%M%S`` for non-SSO cadences with same-day repeats).
file_date_fmt=cslc_date_fmt,
)
ministacks = ministack_planner.plan(
ministack_size, compressed_idx=new_compressed_reference_idx
Expand Down
6 changes: 6 additions & 0 deletions src/dolphin/workflows/unwrapping.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ def run(
similarity_filename: Path | str | None = None,
mask_file: Path | str | None = None,
add_overviews: bool = True,
file_date_fmt: str = "%Y%m%d",
) -> tuple[list[Path], list[Path]]:
"""Run the displacement workflow on a stack of SLCs.

Expand All @@ -51,6 +52,10 @@ def run(
add_overviews : bool, default = True
If True, creates overviews of the unwrapped phase and connected component
labels.
file_date_fmt : str, optional
The strftime format used to parse acquisition dates from input
filenames and to write the date portion of output filenames.
Default is "%Y%m%d".

Returns
-------
Expand Down Expand Up @@ -92,6 +97,7 @@ def run(
similarity_filename=similarity_filename,
mask_filename=output_mask,
scratchdir=unwrap_scratchdir,
file_date_fmt=file_date_fmt,
)

if add_overviews:
Expand Down
11 changes: 10 additions & 1 deletion src/dolphin/workflows/wrapped_phase.py
Original file line number Diff line number Diff line change
Expand Up @@ -400,6 +400,7 @@ def create_ifgs(
outdir=ifg_dir,
write=not dry_run,
verify_slcs=not dry_run,
date_format=file_date_fmt,
)
if len(network.ifg_list) == 0:
msg = "No interferograms were created"
Expand All @@ -419,7 +420,11 @@ def create_ifgs(
# a `.conj()` on the phase-linked SLCs (currently `day1.conj() * day2`)
single_ref_ifgs = [
interferogram.convert_pl_to_ifg(
f, reference_date=reference_date, output_dir=ifg_dir, dry_run=dry_run
f,
reference_date=reference_date,
output_dir=ifg_dir,
dry_run=dry_run,
date_format=file_date_fmt,
)
for f in phase_linked_slcs
]
Expand All @@ -434,6 +439,7 @@ def create_ifgs(
reference_date=reference_date, # this is the `phase_linking.output_idx`
output_dir=ifg_dir,
dry_run=dry_run,
date_format=file_date_fmt,
)
for f in phase_linked_slcs[: manual_reference_idx + 1]
]
Expand All @@ -446,6 +452,7 @@ def create_ifgs(
outdir=ifg_dir,
write=not dry_run,
verify_slcs=not dry_run,
date_format=file_date_fmt,
)
single_ref_ifgs.append(v.path) # type: ignore[arg-type]

Expand All @@ -472,6 +479,7 @@ def create_ifgs(
dates=secondary_dates,
write=not dry_run,
verify_slcs=not dry_run,
date_format=file_date_fmt,
)
# Using `cast` to assert that the paths are not None
if len(network.ifg_list) == 0:
Expand All @@ -495,6 +503,7 @@ def create_ifgs(
dates=secondary_dates,
write=not dry_run,
verify_slcs=not dry_run,
date_format=file_date_fmt,
)
# Using `cast` to assert that the paths are not None
ifgs_others = cast(list[Path], [ifg.path for ifg in network_rest.ifg_list])
Expand Down
59 changes: 59 additions & 0 deletions tests/test_workflows_displacement.py
Original file line number Diff line number Diff line change
Expand Up @@ -285,6 +285,65 @@ def test_displacement_run_extra_reference_date(
log_file.unlink()


def test_displacement_run_preserves_time_of_day_in_filenames(
    opera_slc_files_official: list[Path], tmpdir
):
    """Datetime ``cslc_date_fmt`` must propagate to all output filenames.

    Regression test covering three independent propagation points:
    - ``MiniStackPlanner`` -> ``MiniStackInfo.file_date_fmt`` (PL SLC names),
    - ``create_ifgs`` -> ``convert_pl_to_ifg(..., date_format=...)``
      (per-burst ifg VRT names),
    - ``stitching.merge_by_date`` formatting the grouped dates with
      ``file_date_fmt`` (stitched ifg names).

    Without all three, a caller using ``cslc_date_fmt="%Y%m%dT%H%M%S"`` ends up
    with output filenames stripped to ``%Y%m%d``, which then breaks downstream
    date extraction (e.g. same-day repeats collide).
    """
    # ``opera_slc_files_official`` fixture uses ``datetime(2022, 1, 1, 1, 2, 3)``
    # as the base, incremented by one day per acquisition. So every real-SLC
    # filename should embed ``T010203`` once propagation works end-to-end.
    time_token = "T010203"
    with tmpdir.as_cwd():
        # Full-stack ministack (size 500 >> number of SLCs) and a fixed
        # reference index keep the workflow configuration minimal; unwrapping
        # is skipped since only filename formatting is under test.
        cfg = config.DisplacementWorkflow(
            cslc_file_list=opera_slc_files_official,
            input_options={
                "subdataset": "/data/VV",
                "cslc_date_fmt": "%Y%m%dT%H%M%S",
            },
            interferogram_network={"reference_idx": 0},
            phase_linking={"ministack_size": 500},
            unwrap_options={"run_unwrap": False},
        )
        paths = displacement.run(cfg)

        # Per-burst phase-linked SLCs (named by ``MiniStackInfo.get_date_str_list``)
        # and per-burst ifgs (named by ``convert_pl_to_ifg``) must keep the
        # time-of-day component end-to-end through ``create_ifgs``.
        # NOTE(review): ``.parent.parent`` assumes comp-SLC outputs live at
        # ``<burst_dir>/<subdir>/<file>`` — confirm against OutputPaths layout.
        burst_dir = next(iter(paths.comp_slc_dict.values()))[0].parent.parent
        pl_slcs = list((burst_dir / "linked_phase").glob("*.slc.tif"))
        assert len(pl_slcs) > 0
        for p in pl_slcs:
            assert time_token in p.stem, f"Missing datetime in PL SLC {p.name}"

        burst_ifgs = list((burst_dir / "interferograms").glob("*.int.*"))
        assert len(burst_ifgs) > 0
        for p in burst_ifgs:
            # Two tokens: one for the reference date, one for the secondary.
            assert (
                p.stem.count(time_token) == 2
            ), f"Expected datetime preserved in both dates of {p.name}"

        # Stitched (cross-burst) ifgs must also keep the time-of-day in both
        # the reference and secondary date tokens.
        assert len(paths.stitched_ifg_paths) > 0
        for p in paths.stitched_ifg_paths:
            assert p.exists()
            assert (
                p.stem.count(time_token) == 2
            ), f"Expected datetime preserved in both dates of {p.name}"


def test_displacement_run_different_epsg(opera_slc_files: list[Path], tmpdir):
with tmpdir.as_cwd():
cfg = config.DisplacementWorkflow(
Expand Down
Loading