"""Utilities for plotting."""
############################################################
# Program is part of MintPy                                #
# Copyright (c) 2013, Zhang Yunjun, Heresh Fattahi         #
# Author: Zhang Yunjun, 2018                               #
############################################################
# Recommend import:
# from mintpy.utils import plot as pp
import datetime as dt
import os
import warnings
import h5py
import matplotlib as mpl
import numpy as np
from matplotlib import dates as mdates, pyplot as plt, ticker
from mpl_toolkits.axes_grid1 import make_axes_locatable
from scipy import stats
from mintpy.objects import TIMESERIES_DSET_NAMES, TIMESERIES_KEY_NAMES
from mintpy.objects.colors import ColormapExt
from mintpy.objects.coord import coordinate
from mintpy.utils import network as pnet, ptime, readfile, utils0 as ut0
from mintpy.utils.map import draw_lalo_label, draw_scalebar
min_figsize_single = 6.0 # default min size in inch, for single plot
max_figsize_single = 10.0 # default max size in inch, for single plot
# default size in inch, for multiple subplots
default_figsize_multi = [15.0, 8.0]
max_figsize_height = 8.0 # max figure size in vertical direction in inch
# default color names in matplotlib
# ref: https://matplotlib.org/users/dflt_style_changes.html
MPL_COLORS = [
'#1f77b4', # C0
'#ff7f0e', # C1
'#2ca02c', # ...
'#d62728',
'#9467bd',
'#8c564b',
'#e377c2',
'#7f7f7f',
'#bcbd22',
'#17becf',
]
# ensure UTM coordinate plot axes do not use scientific notation
plt.rcParams["axes.formatter.limits"] = (-1e10, 1e10)
########################################### Parser utilities ##############################################
def read_pts2inps(inps, coord_obj):
"""Read pts_* options"""
## 1. merge pts_file/lalo/yx into pts_yx
# pts_file --> pts_lalo
if inps.pts_file and os.path.isfile(inps.pts_file):
print(f'read points lat/lon from text file: {inps.pts_file}')
inps.pts_lalo = np.loadtxt(inps.pts_file, usecols=(0,1), dtype=bytes).astype(float)
# pts_lalo --> pts_yx
if inps.pts_lalo is not None:
# format pts_lalo to 2D array in size of (num_pts, 2)
inps.pts_lalo = np.array(inps.pts_lalo).reshape(-1, 2)
# pts_lalo --> pts_yx
inps.pts_yx = coord_obj.geo2radar(
inps.pts_lalo[:, 0],
inps.pts_lalo[:, 1],
print_msg=False,
)[:2]
inps.pts_yx = np.array(inps.pts_yx).T.reshape(-1, 2)
## 2. pts_yx --> pts_yx/lalo
if inps.pts_yx is not None:
# format pts_yx to 2D array in size of (num_pts, 2)
inps.pts_yx = np.array(inps.pts_yx).reshape(-1, 2)
# pts_yx --> pts_lalo
try:
inps.pts_lalo = coord_obj.radar2geo(
inps.pts_yx[:, 0],
inps.pts_yx[:, 1],
print_msg=False,
)[:2]
inps.pts_lalo = np.array(inps.pts_lalo).T.reshape(-1, 2)
except ValueError:
pass
return inps
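# A minimal usage sketch (hypothetical inputs): `inps` is assumed to be an
# argparse.Namespace carrying pts_file / pts_lalo / pts_yx attributes, and
# `coord_obj` a coordinate object built from the file metadata, e.g.:
#   coord_obj = coordinate(readfile.read_attribute('velocity.h5'))
#   inps = read_pts2inps(inps, coord_obj)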
############################################ Plot Utilities #############################################
def add_inner_title(ax, title, loc, prop=None, **kwargs):
from matplotlib.offsetbox import AnchoredText
from matplotlib.patheffects import withStroke
if prop is None:
prop = dict(size=plt.rcParams['legend.fontsize'])
at = AnchoredText(title, loc=loc, prop=prop,
pad=0., borderpad=0.5,
frameon=False, **kwargs)
ax.add_artist(at)
at.txt._text.set_path_effects([withStroke(foreground="w", linewidth=3)])
return at
def auto_figure_size(ds_shape, scale=1.0, disp_cbar=False, disp_slider=False,
cbar_ratio=0.25, slider_ratio=0.15, print_msg=True):
"""Get auto figure size based on input data shape
Adjust if the colorbar is displayed on the right and/or the slider on the bottom
Parameters: ds_shape - tuple/list of 2 int for the 2D matrix shape in [length, width]
scale - float, scale the final figure size
disp_cbar/slider - bool, plot colorbar on the right / slider on the bottom
cbar/slider_ratio - float, size ratio of the additional colorbar / slider
Returns: figsize - list of 2 float for the figure size in [width, length] in inches
"""
# figure shape
fig_shape = list(ds_shape)[::-1]
fig_shape[0] *= 1 if not disp_cbar else 1 + cbar_ratio
fig_shape[1] *= 1 if not disp_slider else 1 + slider_ratio
# get scale to meet the min/max figure size constraint
fig_scale = min(
min_figsize_single / min(fig_shape),
max_figsize_single / max(fig_shape),
max_figsize_height / fig_shape[1],
)
# fig_shape/scale --> fig_size
fig_size = [x * fig_scale * scale for x in fig_shape]
fig_size = [float(f'{x:.1f}') for x in fig_size]
if print_msg:
print(f'figure size : [{fig_size[0]}, {fig_size[1]}]')
return fig_size
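# Usage sketch (hypothetical shape): figure size for a 3000 x 6000 (length x width)
# dataset displayed with a colorbar on the right:
#   fig_size = auto_figure_size(ds_shape=(3000, 6000), disp_cbar=True)
#   fig, ax = plt.subplots(figsize=fig_size)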
def auto_figure_title(fname, datasetNames=[], inps_dict=None):
"""Get auto figure title from meta dict and input options
Parameters: fname : str, input file name
datasetNames : list of str, optional, dataset to read for multi dataset/group files
inps_dict : dict, optional, processing attributes, including:
ref_date
pix_box
wrap
Returns: fig_title : str, output figure title
Example: 'geo_velocity.h5' = auto_figure_title('geo_velocity.h5', None, vars(inps))
'101020-110220_ERA5_ramp_demErr' = auto_figure_title('timeseries_ERA5_ramp_demErr.h5', '110220')
"""
if not datasetNames:
datasetNames = []
if isinstance(datasetNames, str):
datasetNames = [datasetNames]
fbase, fext = os.path.splitext(os.path.basename(fname))
atr = readfile.read_attribute(fname)
k = atr['FILE_TYPE']
num_pixel = int(atr['WIDTH']) * int(atr['LENGTH'])
if k == 'ifgramStack':
if len(datasetNames) == 1:
fig_title = datasetNames[0]
if 'unwCor' in fname:
fig_title += '_unwCor'
else:
fig_title = datasetNames[0].split('-')[0]
# for ionStack.h5 file
if fbase.lower().startswith('ion'):
fig_title += '_ion'
elif k in TIMESERIES_KEY_NAMES and len(datasetNames) == 1:
if 'ref_date' in inps_dict.keys():
ref_date = inps_dict['ref_date']
elif 'REF_DATE' in atr.keys():
ref_date = atr['REF_DATE']
else:
ref_date = None
if not ref_date:
fig_title = datasetNames[0]
else:
fig_title = f'{ref_date}_{datasetNames[0]}'
# grab info of applied phase corrections from filename
if 'timeseries' in fname:
proc_suffix = os.path.basename(fname).split('timeseries')[1].split(fext)[0]
if proc_suffix:
fig_title += proc_suffix if proc_suffix.startswith('_') else f'_{proc_suffix}'
elif k == 'geometry':
if len(datasetNames) == 1:
fig_title = datasetNames[0]
elif datasetNames[0].startswith('bperp'):
fig_title = 'bperp'
else:
fig_title = fbase
elif fext in ['.h5','.he5']:
# for generic HDF5 file, e.g. velocity, masks, horz/vert decomposed file, etc.
num_dset = len(readfile.get_dataset_list(fname))
if num_dset > 1 and len(datasetNames) == 1:
# for single subplot from a multi-dataset file
# keep meaningful suffix, e.g. geo_, sub_, etc.
fparts = os.path.basename(fname).rsplit('_', 1)
suffix = fparts[0] + '_' if len(fparts) > 1 else ''
fig_title = suffix + datasetNames[0]
else:
# for single subplot from a single-dataset file OR multi-subplots
fig_title = fbase
else:
fig_title = os.path.basename(fname)
# show dataset name for multi-band binary files
num_band = int(atr.get('BANDS', '1'))
if num_band > 1 and len(datasetNames) == 1:
fig_title += f' - {datasetNames[0]}'
# suffix for subset
if inps_dict.get('pix_box', None):
box = inps_dict['pix_box']
if (box[2] - box[0]) * (box[3] - box[1]) < num_pixel:
fig_title += '_sub'
# suffix for re-wrapping
if inps_dict.get('wrap', False):
fig_title += '_wrap'
wrap_range = inps_dict.get('wrap_range', [-1.*np.pi, np.pi])
wrap_range = wrap_range[1] - wrap_range[0]
if wrap_range != 2*np.pi:
if wrap_range == int(wrap_range):
wrap_range = int(wrap_range)
fig_title += str(wrap_range)
return fig_title
def auto_flip_direction(metadata, ax=None, print_msg=True):
"""Check flip left-right and up-down based on attribute dict, for radar-coded file only"""
# default value
flip_lr = False
flip_ud = False
# auto flip for file in radar coordinates
if 'Y_FIRST' not in metadata.keys() and 'ORBIT_DIRECTION' in metadata.keys():
msg = '{} orbit'.format(metadata['ORBIT_DIRECTION'])
if metadata['ORBIT_DIRECTION'].lower().startswith('a'):
flip_ud = True
msg += ' -> flip up-down'
else:
flip_lr = True
msg += ' -> flip left-right'
if print_msg:
print(msg)
if ax is not None:
if flip_lr:
ax.invert_xaxis()
if flip_ud:
ax.invert_yaxis()
return ax
return flip_lr, flip_ud
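# Usage sketch: flip the axes of a radar-coded plot in place based on its metadata
# ('velocity.h5' is a hypothetical file name):
#   atr = readfile.read_attribute('velocity.h5')
#   ax = auto_flip_direction(atr, ax=ax)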
def auto_multilook_num(box, num_time, max_memory=4.0, print_msg=True):
"""Calculate the default/auto multilook number based on the input 3D shape.
Parameters: box - tuple of 4 int in (x0, y0, x1, y1) for the spatial bounding box
num_time - int, the 3rd / time dimension size
max_memory - float, max memory in GB
Returns: multilook_num - int, multilook number
"""
# calc total number of pixels
num_pixel = (box[2] - box[0]) * (box[3] - box[1]) * num_time
# calc auto multilook_num
if num_pixel > (64e6*320): multilook_num = 32; # 16k * 4k image with 320 subplots
elif num_pixel > (50e6*160): multilook_num = 20; # 10k * 5k image with 160 subplots
elif num_pixel > (32e6*160): multilook_num = 16; # 8k * 4k image with 160 subplots
elif num_pixel > (18e6*160): multilook_num = 12; # 9k * 2k image with 160 subplots
elif num_pixel > ( 8e6*160): multilook_num = 8; # 4k * 2k image with 160 subplots
elif num_pixel > ( 4e6*180): multilook_num = 6; # 2k * 2k image with 180 subplots
elif num_pixel > ( 4e6*80) : multilook_num = 4; # 2k * 2k image with 80 subplots
elif num_pixel > ( 4e6*45) : multilook_num = 3; # 2k * 2k image with 45 subplots
elif num_pixel > ( 4e6*20) : multilook_num = 2; # 2k * 2k image with 20 subplots
else: multilook_num = 1;
# do not apply multilooking if the spatial size <= 180 * 360
# corresponding to 1x1 deg for world map
if (box[2] - box[0]) * (box[3] - box[1]) <= 180 * 360:
multilook_num = 1
## scale based on memory
# The auto calculation above uses ~1.5 GB in reserved memory and ~700 MB in actual memory.
if max_memory <= 2.0:
# With a lower max memory from manual input, we increase the multilook_num (lower resolution)
multilook_num *= np.sqrt(4.0 / max_memory)
elif max_memory <= 4.0:
# Do nothing if input max memory is between 2.0-4.0 GB.
pass
else:
# With a larger max memory from manual input, we decrease the multilook_num (higher resolution)
multilook_num /= np.sqrt(max_memory / 4.0)
multilook_num = int(np.ceil(multilook_num))
# print out msg
if multilook_num > 1 and print_msg:
print(f'total number of pixels: {num_pixel:.1E}')
print('* multilook {0} by {0} with nearest interpolation for display to save memory'.format(multilook_num))
return multilook_num
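# Usage sketch (hypothetical sizes): multilook number for a 3000 x 4000 pixel box
# with 200 acquisitions under the default 4 GB memory limit:
#   mli_num = auto_multilook_num((0, 0, 3000, 4000), num_time=200, max_memory=4.0)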
def auto_row_col_num(subplot_num, data_shape, fig_size, fig_num=1):
"""Get optimal row and column number given figure size number of subplots
Parameters: subplot_num : int, total number of subplots
data_shape : list of 2 float, data size in pixel in row and column direction of each plot
fig_size : list of 2 float, figure window size in inches
fig_num : int, number of figure windows, optional, default = 1.
Returns: row_num : number of subplots in row direction per figure
col_num : number of subplots in column direction per figure
"""
subplot_num_per_fig = int(np.ceil(float(subplot_num) / float(fig_num)))
data_shape_ratio = float(data_shape[0]) / float(data_shape[1])
num_ratio = fig_size[1] / fig_size[0] / data_shape_ratio
row_num = max(np.sqrt(subplot_num_per_fig * num_ratio), 1.)
col_num = max(np.sqrt(subplot_num_per_fig / num_ratio), 1.)
if row_num == 1.:
col_num = subplot_num_per_fig
elif col_num == 1.:
row_num = subplot_num_per_fig
while np.rint(row_num) * np.rint(col_num) < subplot_num_per_fig:
if row_num % 1 > col_num % 1:
row_num += 0.5
else:
col_num += 0.5
row_num = int(np.rint(row_num))
col_num = int(np.rint(col_num))
return row_num, col_num
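# Usage sketch (hypothetical numbers): rows/columns to arrange 160 subplots of
# 2000 x 3000 pixel maps within a 15 x 8 inch figure window:
#   row_num, col_num = auto_row_col_num(160, [2000, 3000], [15.0, 8.0])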
def auto_shared_lalo_location(axs, loc=(1,0,0,1), flatten=False):
"""Return the auto lat/lon label location of subplots
Parameters: axs : 2D np.ndarray of matplotlib.axes._subplots.AxesSubplot object
loc : tuple of 4 bool, for (left, right, top, bottom)
flatten : bool, return variable in 2D np.ndarray or in list of flattened array
Returns: locs : 2D np.ndarray of tuple of 4 bool.
"""
nrows, ncols = axs.shape
locs = np.zeros([nrows, ncols, 4], dtype=int)
locs[ :, 0,0] = loc[0]
locs[ :,-1,1] = loc[1]
locs[ 0, :,2] = loc[2]
locs[-1, :,3] = loc[3]
if flatten:
loc_list = list(locs.tolist())
locs = []
for i in range(nrows):
for j in range(ncols):
locs.append(loc_list[i][j])
return locs
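# Usage sketch: label lat on the left-most column and lon on the bottom row only,
# for a 2D grid of subplots:
#   fig, axs = plt.subplots(nrows=3, ncols=4, squeeze=False)
#   locs = auto_shared_lalo_location(axs, loc=(1, 0, 0, 1), flatten=True)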
def auto_colormap_name(metadata, cmap_name=None, datasetName=None, print_msg=True):
"""Get auto/default colormap name based on input metadata."""
if not cmap_name:
ds_names = [metadata['FILE_TYPE'], str(datasetName).split('-')[0]]
# SLC stack
if metadata['FILE_TYPE'] == 'timeseries' and metadata['DATA_TYPE'].startswith('complex'):
ds_names += ['.slc']
gray_ds_names = [
'coherence', 'temporalCoherence', 'waterMask', 'shadowMask',
'.cor', '.mli', '.slc', '.amp', '.ramp',
]
cmap_name = 'gray' if any(i in gray_ds_names for i in ds_names) else 'jet'
if print_msg:
print('colormap:', cmap_name)
return cmap_name
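# Usage sketch: pick a default colormap name from the file metadata
# ('temporalCoherence.h5' is a hypothetical file name):
#   atr = readfile.read_attribute('temporalCoherence.h5')
#   cmap_name = auto_colormap_name(atr)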
def auto_adjust_colormap_lut_and_disp_limit(data, num_multilook=1, max_discrete_num_step=20, print_msg=True):
# max step size / min step number for a uniform colormap
unique_values = np.unique(data[~np.isnan(data) * np.isfinite(data)])
min_val = np.min(unique_values).astype(float)
max_val = np.max(unique_values).astype(float)
if min_val == max_val:
cmap_lut = 256
vlim = [min_val, max_val]
unique_values = None
else:
vstep = np.min(np.abs(np.diff(unique_values))).astype(float)
min_num_step = int((max_val - min_val) / vstep + 1)
# use discrete colormap for data with uniform AND limited unique values
# e.g. unwrapError time-series
if min_num_step <= max_discrete_num_step:
cmap_lut = min_num_step
vlim = [min_val - vstep/2, max_val + vstep/2]
if print_msg:
msg = f'data has uniform and limited number ({unique_values.size} <= {max_discrete_num_step})'
msg += ' of unique values --> discretize colormap'
print(msg)
else:
if min(data.shape[1:]) >= 100 and num_multilook > 1:
# multilook data (by 10X for 3D matrix by default) to ignore extreme values
from mintpy.multilook import multilook_data
data_mli = multilook_data(data, num_multilook, num_multilook)
else:
data_mli = np.array(data)
cmap_lut = 256
vlim = [np.nanmin(data_mli), np.nanmax(data_mli)]
unique_values = None
# convert near-pi value to pi
vlim[0] = vlim[0] if abs(vlim[0] + np.pi) / np.pi >= 0.001 else np.pi * -1
vlim[1] = vlim[1] if abs(vlim[1] - np.pi) / np.pi >= 0.001 else np.pi
return cmap_lut, vlim, unique_values
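# Usage sketch (hypothetical 2D array `data`): pick the colormap LUT size and the
# display limits before calling imshow():
#   cmap_lut, vlim, unique_values = auto_adjust_colormap_lut_and_disp_limit(data)
#   im = ax.imshow(data, vmin=vlim[0], vmax=vlim[1])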
def auto_adjust_xaxis_date(ax, datevector, fontsize=12, every_year=None, buffer_year=0.2,
every_month=None):
"""Adjust X axis
Parameters: ax - matplotlib figure axes object
datevector - list of float, date in years
i.e. [2007.013698630137, 2007.521917808219, 2007.6463470319634]
OR list of datetime.datetime objects
every_year - int, number of years per major locator
buffer_year - float in years; None to keep the original xlim range.
Returns: ax - matplotlib figure axes object
xmin/max - datetime.datetime object, X-axis min/max value
"""
# convert datetime.datetime format into date in years in float
if isinstance(datevector[0], dt.datetime):
datevector = [i.year + (i.timetuple().tm_yday - 1) / 365.25 for i in datevector]
# xmin/max
if buffer_year is not None:
# refine with buffer_year
t0 = datevector[0] - buffer_year;
t1 = datevector[-1] + buffer_year + 0.1;
y0 = int(t0); m0 = int((t0 - y0) * 12.0)
y1 = int(t1); m1 = int((t1 - y1) * 12.0)
if m0 > 12: y0 += 1; m0 = 1
if m1 > 12: y1 += 1; m1 = 1
if m0 < 1 : y0 -= 1; m0 = 12
if m1 < 1 : y1 -= 1; m1 = 12
xmin = dt.datetime(y0, m0, 1, 0, 0)
xmax = dt.datetime(y1, m1, 1, 0, 0)
else:
(xmin, xmax) = mdates.num2date(ax.get_xlim())
ax.set_xlim(xmin, xmax)
# auto param
if not every_year:
# take axes width into account
fig = ax.get_figure()
bbox = ax.get_window_extent().transformed(fig.dpi_scale_trans.inverted())
scale = 6.2 / bbox.width
every_year = max(1, np.rint(scale * (xmax - xmin).days / 365.25 / 5).astype(int))
if not every_month:
if every_year <= 3 : every_month = 1
elif every_year <= 6 : every_month = 3
elif every_year <= 12: every_month = 6
else: every_month = None
# Label/Tick format
ax.fmt_xdata = mdates.DateFormatter('%Y-%m-%d %H:%M:%S')
ax.xaxis.set_major_locator(mdates.YearLocator(every_year))
ax.xaxis.set_major_formatter(mdates.DateFormatter('%Y'))
if every_month:
ax.xaxis.set_minor_locator(mdates.MonthLocator(range(1,13,every_month)))
# Label font size
ax.tick_params(labelsize=fontsize)
# fig2.autofmt_xdate() # adjust x overlap by rotating, may enable again
return ax, xmin, xmax
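# Usage sketch: format a time-series X axis from a list of datetime.datetime objects
# (hypothetical `dates` / `dis` arrays):
#   ax.plot(dates, dis, '.')
#   ax, xmin, xmax = auto_adjust_xaxis_date(ax, dates, fontsize=12)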
def auto_adjust_yaxis(ax, data_list, fontsize=12, ymin=None, ymax=None):
"""Adjust Y axis lower/upper limit.
Parameters: ax - matplotlib figure axes object
data_list - list(float), Y-axis values
fontsize - float, font size
ymin/max - float, lower/upper Y-axis limit
Returns: ax - matplotlib figure axes object
"""
# check
if np.all(np.isnan(data_list)):
raise ValueError('All NaN values detected in the input data_list!')
# calc ymin/max
dmin = np.nanmin(data_list)
dmax = np.nanmax(data_list)
drange = dmax - dmin
ymin = ymin if ymin is not None else dmin - 0.1 * drange
ymax = ymax if ymax is not None else dmax + 0.1 * drange
# set ymin/max
ax.set_ylim([ymin, ymax])
# Tick/Label setting
#xticklabels = plt.getp(ax, 'xticklabels')
#yticklabels = plt.getp(ax, 'yticklabels')
#plt.setp(yticklabels, 'color', 'k', fontsize=fontsize)
#plt.setp(xticklabels, 'color', 'k', fontsize=fontsize)
return ax
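# Usage sketch: pad the Y-axis limits by 10% of the data range (hypothetical `dis` array):
#   ax = auto_adjust_yaxis(ax, dis, fontsize=12)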
####################################### Plot ################################################
def plot_coherence_history(ax, date12List, cohList, p_dict={}):
"""Plot min/max Coherence of all interferograms for each date"""
# Figure Setting
if 'ds_name' not in p_dict.keys(): p_dict['ds_name'] = 'Coherence'
if 'fontsize' not in p_dict.keys(): p_dict['fontsize'] = 12
if 'linewidth' not in p_dict.keys(): p_dict['linewidth'] = 2
if 'markercolor' not in p_dict.keys(): p_dict['markercolor'] = 'orange'
if 'markersize' not in p_dict.keys(): p_dict['markersize'] = 12
if 'disp_title' not in p_dict.keys(): p_dict['disp_title'] = True
if 'every_year' not in p_dict.keys(): p_dict['every_year'] = 1
if 'vlim' not in p_dict.keys(): p_dict['vlim'] = [0.2, 1.0]
if 'cbar_label' not in p_dict.keys(): p_dict['cbar_label'] = p_dict['ds_name'] # used as y-axis label below
# Get date list
date12List = ptime.yyyymmdd_date12(date12List)
m_dates = [date12.split('_')[0] for date12 in date12List]
s_dates = [date12.split('_')[1] for date12 in date12List]
dateList = sorted(ptime.yyyymmdd(list(set(m_dates + s_dates))))
dates, datevector = ptime.date_list2vector(dateList)
bar_width = ut0.most_common(np.diff(dates).tolist())*3/4
x_list = [i-bar_width/2 for i in dates]
coh_mat = pnet.coherence_matrix(date12List, cohList)
ax.bar(x_list, np.nanmax(coh_mat, axis=0), bar_width.days, label='Max {}'.format(p_dict['ds_name']))
ax.bar(x_list, np.nanmin(coh_mat, axis=0), bar_width.days, label='Min {}'.format(p_dict['ds_name']))
# axis format
if p_dict['disp_title']:
ax.set_title('{} History: Min/Max of All Related Pairs'.format(p_dict['ds_name']))
ax = auto_adjust_xaxis_date(ax, datevector, fontsize=p_dict['fontsize'],
every_year=p_dict['every_year'])[0]
ax.set_ylim([p_dict['vlim'][0], p_dict['vlim'][1]])
#ax.set_xlabel('Time [years]', fontsize=p_dict['fontsize'])
ax.set_ylabel(p_dict['cbar_label'], fontsize=p_dict['fontsize'])
ax.legend(loc='best')
ax.tick_params(which='both', direction='in', labelsize=p_dict['fontsize'],
bottom=True, top=True, left=True, right=True)
return ax
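# Usage sketch (hypothetical lists): `date12_list` in YYYYMMDD_YYYYMMDD format with
# one average coherence value per interferogram:
#   fig, ax = plt.subplots()
#   ax = plot_coherence_history(ax, date12_list, coh_list, p_dict={'every_year': 2})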
def plot_network(ax, date12List, dateList, pbaseList, p_dict={}, date12List_drop=[], print_msg=True):
"""Plot Temporal-Perp baseline Network
Parameters: ax - matplotlib axes object
date12List - list(str) for date12 in YYYYMMDD_YYYYMMDD format
dateList - list(str), for date in YYYYMMDD format
pbaseList - list(float), perp baseline, len = number of acquisitions
p_dict - dictionary with the following items:
fontsize
linewidth
markercolor
markersize
cohList - list(float), coherence value of each interferogram, len = number of ifgrams
colormap - str, colormap name
disp_title - bool, show figure title or not, default: True
disp_drop - bool, show dropped interferograms or not, default: True
Returns: ax - matplotlib axes object
"""
# Figure Setting
if 'fontsize' not in p_dict.keys(): p_dict['fontsize'] = 12
if 'linewidth' not in p_dict.keys(): p_dict['linewidth'] = 2
if 'markercolor' not in p_dict.keys(): p_dict['markercolor'] = 'orange'
if 'markersize' not in p_dict.keys(): p_dict['markersize'] = 12
# For colorful display of coherence
if 'cohList' not in p_dict.keys(): p_dict['cohList'] = None
if 'xlabel' not in p_dict.keys(): p_dict['xlabel'] = None #'Time [years]'
if 'ylabel' not in p_dict.keys(): p_dict['ylabel'] = 'Perp Baseline [m]'
if 'cbar_label' not in p_dict.keys(): p_dict['cbar_label'] = 'Average Spatial Coherence'
if 'cbar_size' not in p_dict.keys(): p_dict['cbar_size'] = '3%'
if 'disp_cbar' not in p_dict.keys(): p_dict['disp_cbar'] = True
if 'colormap' not in p_dict.keys(): p_dict['colormap'] = 'RdBu'
if 'vlim' not in p_dict.keys(): p_dict['vlim'] = [0.2, 1.0]
if 'disp_title' not in p_dict.keys(): p_dict['disp_title'] = True
if 'disp_drop' not in p_dict.keys(): p_dict['disp_drop'] = True
if 'disp_legend' not in p_dict.keys(): p_dict['disp_legend'] = True
if 'every_year' not in p_dict.keys(): p_dict['every_year'] = 1
if 'number' not in p_dict.keys(): p_dict['number'] = None
# support input colormap: string for colormap name, or colormap object directly
if isinstance(p_dict['colormap'], str):
cmap = ColormapExt(p_dict['colormap']).colormap
elif isinstance(p_dict['colormap'], mpl.colors.LinearSegmentedColormap):
cmap = p_dict['colormap']
else:
raise ValueError('unrecognized colormap input: {}'.format(p_dict['colormap']))
cohList = p_dict['cohList']
transparency = 0.7
# Date Convert
dateList = ptime.yyyymmdd(sorted(dateList))
dates, datevector = ptime.date_list2vector(dateList)
tbaseList = ptime.date_list2tbase(dateList)[0]
## maxBperp and maxBtemp
date12List = ptime.yyyymmdd_date12(date12List)
ifgram_num = len(date12List)
pbase12 = np.zeros(ifgram_num)
tbase12 = np.zeros(ifgram_num)
for i in range(ifgram_num):
m_date, s_date = date12List[i].split('_')
m_idx = dateList.index(m_date)
s_idx = dateList.index(s_date)
pbase12[i] = pbaseList[s_idx] - pbaseList[m_idx]
tbase12[i] = tbaseList[s_idx] - tbaseList[m_idx]
if print_msg:
print(f'max perpendicular baseline: {np.max(np.abs(pbase12)):.2f} m')
print(f'max temporal baseline: {np.max(tbase12)} days')
## Keep/Drop - date12
date12List_keep = sorted(list(set(date12List) - set(date12List_drop)))
if not date12List_drop:
p_dict['disp_drop'] = False
## Keep/Drop - date
m_dates = [i.split('_')[0] for i in date12List_keep]
s_dates = [i.split('_')[1] for i in date12List_keep]
dateList_keep = ptime.yyyymmdd(sorted(list(set(m_dates + s_dates))))
dateList_drop = sorted(list(set(dateList) - set(dateList_keep)))
idx_date_keep = [dateList.index(i) for i in dateList_keep]
idx_date_drop = [dateList.index(i) for i in dateList_drop]
# Plotting
if cohList is not None:
data_min = min(cohList)
data_max = max(cohList)
disp_min = p_dict['vlim'][0]
disp_max = p_dict['vlim'][1]
if print_msg:
print('showing coherence')
print(f'data range: {[data_min, data_max]}')
print('display range: {}'.format(p_dict['vlim']))
if p_dict['disp_cbar']:
cax = make_axes_locatable(ax).append_axes("right", p_dict['cbar_size'], pad=p_dict['cbar_size'])
norm = mpl.colors.Normalize(vmin=disp_min, vmax=disp_max)
cbar = mpl.colorbar.ColorbarBase(cax, cmap=cmap, norm=norm)
cbar.ax.tick_params(labelsize=p_dict['fontsize'])
cbar.set_label(p_dict['cbar_label'], fontsize=p_dict['fontsize'])
# plot low coherent ifgram first and high coherence ifgram later
cohList_keep = [cohList[date12List.index(i)] for i in date12List_keep]
date12List_keep = [x for _, x in sorted(zip(cohList_keep, date12List_keep))]
# Dot - SAR Acquisition
if idx_date_keep:
x_list = [dates[i] for i in idx_date_keep]
y_list = [pbaseList[i] for i in idx_date_keep]
ax.plot(x_list, y_list, 'ko', alpha=0.7, ms=p_dict['markersize'], mfc=p_dict['markercolor'])
if idx_date_drop:
x_list = [dates[i] for i in idx_date_drop]
y_list = [pbaseList[i] for i in idx_date_drop]
ax.plot(x_list, y_list, 'ko', alpha=0.7, ms=p_dict['markersize'], mfc='gray')
## Line - Pair/Interferogram
# interferograms dropped
if p_dict['disp_drop']:
for date12 in date12List_drop:
date1, date2 = date12.split('_')
idx1 = dateList.index(date1)
idx2 = dateList.index(date2)
x = np.array([dates[idx1], dates[idx2]])
y = np.array([pbaseList[idx1], pbaseList[idx2]])
if cohList is not None:
val = cohList[date12List.index(date12)]
val_norm = (val - disp_min) / (disp_max - disp_min)
ax.plot(x, y, '--', lw=p_dict['linewidth'], alpha=transparency, c=cmap(val_norm))
else:
ax.plot(x, y, '--', lw=p_dict['linewidth'], alpha=transparency, c='k')
# interferograms kept
for date12 in date12List_keep:
date1, date2 = date12.split('_')
idx1 = dateList.index(date1)
idx2 = dateList.index(date2)
x = np.array([dates[idx1], dates[idx2]])
y = np.array([pbaseList[idx1], pbaseList[idx2]])
if cohList is not None:
val = cohList[date12List.index(date12)]
val_norm = (val - disp_min) / (disp_max - disp_min)
ax.plot(x, y, '-', lw=p_dict['linewidth'], alpha=transparency, c=cmap(val_norm))
else:
ax.plot(x, y, '-', lw=p_dict['linewidth'], alpha=transparency, c='k')
if p_dict['disp_title']:
ax.set_title('Interferogram Network', fontsize=p_dict['fontsize'])
# axis format
ax = auto_adjust_xaxis_date(ax, datevector, fontsize=p_dict['fontsize'],
every_year=p_dict['every_year'])[0]
ax = auto_adjust_yaxis(ax, pbaseList, fontsize=p_dict['fontsize'])
ax.set_xlabel(p_dict['xlabel'], fontsize=p_dict['fontsize'])
ax.set_ylabel(p_dict['ylabel'], fontsize=p_dict['fontsize'])
ax.tick_params(which='both', direction='in', labelsize=p_dict['fontsize'],
bottom=True, top=True, left=True, right=True)
if p_dict['number'] is not None:
ax.annotate(p_dict['number'], xy=(0.03, 0.92), color='k',
xycoords='axes fraction', fontsize=p_dict['fontsize'])
# Legend
if p_dict['disp_drop'] and p_dict['disp_legend']:
solid_line = mpl.lines.Line2D([], [], color='k', ls='solid', label='Ifgram used')
dash_line = mpl.lines.Line2D([], [], color='k', ls='dashed', label='Ifgram dropped')
ax.legend(handles=[solid_line, dash_line])
return ax
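# Usage sketch (hypothetical lists): `date12_list` (pairs), `date_list` (acquisitions)
# and `pbase_list` (one perpendicular baseline per acquisition), colored by coherence:
#   fig, ax = plt.subplots()
#   ax = plot_network(ax, date12_list, date_list, pbase_list,
#                     p_dict={'cohList': coh_list, 'colormap': 'RdBu'})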
def plot_perp_baseline_hist(ax, dateList, pbaseList, p_dict={}, dateList_drop=[]):
""" Plot Perpendicular Spatial Baseline History
Parameters: ax - matplotlib axes object
dateList - list(str), date in YYYYMMDD format
pbaseList - list(float), perp baseline
p_dict - dictionary with the following items:
fontsize
linewidth
markercolor
markersize
disp_title : bool, show figure title or not, default: True
every_year : int, number of years for the major tick on xaxis
dateList_drop - list(str), date dropped in YYYYMMDD format
e.g. ['20080711', '20081011']
Returns: ax - matplotlib axes object
"""
# Figure Setting
if 'fontsize' not in p_dict.keys(): p_dict['fontsize'] = 12
if 'linewidth' not in p_dict.keys(): p_dict['linewidth'] = 2
if 'markercolor' not in p_dict.keys(): p_dict['markercolor'] = 'orange'
if 'markersize' not in p_dict.keys(): p_dict['markersize'] = 12
if 'disp_title' not in p_dict.keys(): p_dict['disp_title'] = True
if 'every_year' not in p_dict.keys(): p_dict['every_year'] = 1
transparency = 0.7
# Date Convert
dateList = ptime.yyyymmdd(dateList)
dates, datevector = ptime.date_list2vector(dateList)
# Get index of date used and dropped
# dateList_drop = ['20080711', '20081011'] # for debug
idx_keep = list(range(len(dateList)))
idx_drop = []
for i in dateList_drop:
idx = dateList.index(i)
idx_keep.remove(idx)
idx_drop.append(idx)
# Plot
# ax=fig.add_subplot(111)
# Plot date used
if idx_keep:
x_list = [dates[i] for i in idx_keep]
y_list = [pbaseList[i] for i in idx_keep]
ax.plot(x_list, y_list, '-ko', alpha=transparency, lw=p_dict['linewidth'],
ms=p_dict['markersize'], mfc=p_dict['markercolor'])
# Plot date dropped
if idx_drop:
x_list = [dates[i] for i in idx_drop]
y_list = [pbaseList[i] for i in idx_drop]
ax.plot(x_list, y_list, 'ko', alpha=transparency,
ms=p_dict['markersize'], mfc='gray')
if p_dict['disp_title']:
ax.set_title('Perpendicular Baseline History', fontsize=p_dict['fontsize'])
# axis format
ax = auto_adjust_xaxis_date(ax, datevector, fontsize=p_dict['fontsize'],
every_year=p_dict['every_year'])[0]
ax = auto_adjust_yaxis(ax, pbaseList, fontsize=p_dict['fontsize'])
#ax.set_xlabel('Time [years]', fontsize=p_dict['fontsize'])
ax.set_ylabel('Perpendicular Baseline [m]', fontsize=p_dict['fontsize'])
return ax
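# Usage sketch: perpendicular baseline history of all acquisitions
# (hypothetical `date_list` / `pbase_list`):
#   fig, ax = plt.subplots()
#   ax = plot_perp_baseline_hist(ax, date_list, pbase_list)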
def plot_rotate_diag_coherence_matrix(ax, coh_list, date12_list, date12_list_drop=[],
rotate_deg=-45., cmap='RdBu', disp_half=False, disp_min=0.2):
"""Plot Rotated Coherence Matrix, suitable for Sentinel-1 data with sequential network"""
# support input colormap: string for colormap name, or colormap object directly
if isinstance(cmap, str):
cmap = ColormapExt(cmap).colormap
elif isinstance(cmap, mpl.colors.LinearSegmentedColormap):
pass
else:
raise ValueError(f'unrecognized colormap input: {cmap}')
#calculate coherence matrix
coh_mat = pnet.coherence_matrix(date12_list, coh_list)
#for date12_list_drop, set value to nan in upper triangle
if date12_list_drop:
m_dates = [i.split('_')[0] for i in date12_list]
s_dates = [i.split('_')[1] for i in date12_list]
date_list = sorted(list(set(m_dates + s_dates)))
for date12 in date12_list_drop:
idx1, idx2 = (date_list.index(i) for i in date12.split('_'))
coh_mat[idx2, idx1] = np.nan
#aux info
num_img = coh_mat.shape[0]
idx1, idx2 = np.where(~np.isnan(coh_mat))
num_conn = np.max(np.abs(idx1 - idx2))
#plot diagonal - black
diag_mat = np.diag(np.ones(num_img))
diag_mat[diag_mat == 0.] = np.nan
im = ax.imshow(diag_mat, cmap='gray_r', vmin=0.0, vmax=1.0)
im.set_transform(mpl.transforms.Affine2D().rotate_deg(rotate_deg) + ax.transData)
im = ax.imshow(coh_mat, vmin=disp_min, vmax=1, cmap=cmap)
im.set_transform(mpl.transforms.Affine2D().rotate_deg(rotate_deg) + ax.transData)
#axis format
ymax = np.sqrt(num_conn**2 / 2.) + 0.9
ax.set_xlim(-1, np.sqrt(num_img**2 * 2)-0.7)
if disp_half:
ax.set_ylim(0, ymax)
else:
ax.set_ylim(-1.*ymax, ymax)
ax.get_xaxis().set_ticks([])
ax.get_yaxis().set_ticks([])
ax.axis('off')
return ax, im
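# Usage sketch: rotated coherence matrix for a sequential network (e.g. Sentinel-1),
# using hypothetical `coh_list` / `date12_list`:
#   fig, ax = plt.subplots()
#   ax, im = plot_rotate_diag_coherence_matrix(ax, coh_list, date12_list, disp_half=True)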
def plot_coherence_matrix(ax, date12List, cohList, date12List_drop=[], p_dict={}):
"""Plot Coherence Matrix of input network
if date12List_drop is not empty, plot KEPT pairs in the upper triangle and
ALL pairs in the lower triangle.
Parameters: ax : matplotlib.pyplot.Axes,
date12List : list of date12 in YYYYMMDD_YYYYMMDD format
cohList : list of float, coherence value
date12List_drop : list of date12 for date12 marked as dropped
p_dict : dict of plot setting
Returns: ax : matplotlib.pyplot.Axes
coh_mat : 2D np.array in size of [num_date, num_date]
im : mappable
"""
# Figure Setting
if 'ds_name' not in p_dict.keys(): p_dict['ds_name'] = 'Coherence'
if 'fontsize' not in p_dict.keys(): p_dict['fontsize'] = 12
if 'linewidth' not in p_dict.keys(): p_dict['linewidth'] = 2
if 'markercolor' not in p_dict.keys(): p_dict['markercolor'] = 'orange'
if 'markersize' not in p_dict.keys(): p_dict['markersize'] = 12
if 'disp_title' not in p_dict.keys(): p_dict['disp_title'] = True
if 'fig_title' not in p_dict.keys(): p_dict['fig_title'] = '{} Matrix'.format(p_dict['ds_name'])
if 'colormap' not in p_dict.keys(): p_dict['colormap'] = 'jet'
if 'cbar_label' not in p_dict.keys(): p_dict['cbar_label'] = p_dict['ds_name']
if 'vlim' not in p_dict.keys(): p_dict['vlim'] = (0.2, 1.0)
if 'disp_cbar' not in p_dict.keys(): p_dict['disp_cbar'] = True
if 'legend_loc' not in p_dict.keys(): p_dict['legend_loc'] = 'best'
if 'disp_legend' not in p_dict.keys(): p_dict['disp_legend'] = True
# support input colormap: string for colormap name, or colormap object directly
if isinstance(p_dict['colormap'], str):
cmap = ColormapExt(p_dict['colormap']).colormap
elif isinstance(p_dict['colormap'], mpl.colors.LinearSegmentedColormap):
cmap = p_dict['colormap']
else:
raise ValueError('unrecognized colormap input: {}'.format(p_dict['colormap']))
date12List = ptime.yyyymmdd_date12(date12List)
coh_mat = pnet.coherence_matrix(date12List, cohList)
if date12List_drop:
# Date Convert
m_dates = [i.split('_')[0] for i in date12List]
s_dates = [i.split('_')[1] for i in date12List]
dateList = sorted(list(set(m_dates + s_dates)))
# Set dropped pairs' value to nan, in upper triangle only.
for date12 in date12List_drop:
idx1, idx2 = (dateList.index(i) for i in date12.split('_'))
coh_mat[idx1, idx2] = np.nan
# Show diagonal value as black, to be distinguished from un-selected interferograms
diag_mat = np.diag(np.ones(coh_mat.shape[0]))
diag_mat[diag_mat == 0.] = np.nan
im = ax.imshow(diag_mat, cmap='gray_r', vmin=0.0, vmax=1.0, interpolation='nearest')
im = ax.imshow(
coh_mat,
cmap=cmap,
vmin=p_dict['vlim'][0],
vmax=p_dict['vlim'][1],
interpolation='nearest',
)
date_num = coh_mat.shape[0]
if date_num < 30: tick_step = 5
elif date_num < 50: tick_step = 10
elif date_num < 100: tick_step = 20
else: tick_step = 30
tick_list = list(range(0, date_num, tick_step))
ax.get_xaxis().set_ticks(tick_list)
ax.get_yaxis().set_ticks(tick_list)
ax.set_xlabel('Image Number', fontsize=p_dict['fontsize'])
ax.set_ylabel('Image Number', fontsize=p_dict['fontsize'])
ax.tick_params(which='both', direction='in', labelsize=p_dict['fontsize'],
bottom=True, top=True, left=True, right=True)
if p_dict['disp_title']:
ax.set_title(p_dict['fig_title'])
# Colorbar
if p_dict['disp_cbar']:
divider = make_axes_locatable(ax)
cax = divider.append_axes("right", "3%", pad="3%")
cbar = plt.colorbar(im, cax=cax)
cbar.set_label(p_dict['cbar_label'], fontsize=p_dict['fontsize'])
# Legend
if date12List_drop and p_dict['disp_legend']:
ax.plot([], [], label='Upper: Ifgrams used')
ax.plot([], [], label='Lower: Ifgrams all')
ax.legend(loc=p_dict['legend_loc'], handlelength=0)
return ax, coh_mat, im
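# Usage sketch: coherence matrix with dropped pairs masked out in the upper triangle
# (hypothetical lists):
#   fig, ax = plt.subplots()
#   ax, coh_mat, im = plot_coherence_matrix(ax, date12_list, coh_list,
#                                           date12List_drop=date12_drop_list)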
def plot_num_triplet_with_nonzero_integer_ambiguity(fname, disp_fig=False, font_size=12, fig_size=[9,3]):
"""Plot the histogram for the number of triplets with non-zero integer ambiguity.
Fig. 3d-e in Yunjun et al. (2019, CAGEO).
Parameters: fname - str, path to the numTriNonzeroIntAmbiguity.h5 file.
"""
# matplotlib backend setting
if not disp_fig:
plt.switch_backend('Agg')
# read data
data, atr = readfile.read(fname)
vmax = int(np.nanmax(data))
# plot
fig, axs = plt.subplots(nrows=1, ncols=2, figsize=fig_size)
# subplot 1 - map
ax = axs[0]
im = ax.imshow(data, cmap='RdBu_r', interpolation='nearest')
# reference point
if all(key in atr.keys() for key in ['REF_Y','REF_X']):
ax.plot(int(atr['REF_X']), int(atr['REF_Y']), 's', color='white', ms=3)
# format
auto_flip_direction(atr, ax=ax, print_msg=False)
fig.colorbar(im, ax=ax)
ax.set_title(r'$T_{int}$', fontsize=font_size)
# subplot 2 - histogram
ax = axs[1]
ax.hist(data[~np.isnan(data)].flatten(), range=(0, vmax), log=True, bins=vmax)
# axis format
ax.set_xlabel(r'# of triplets w non-zero int ambiguity $T_{int}$', fontsize=font_size)
ax.set_ylabel('# of pixels', fontsize=font_size)
ax.xaxis.set_minor_locator(ticker.AutoMinorLocator())
ax.yaxis.set_major_locator(ticker.LogLocator(base=10.0, numticks=15))
ax.yaxis.set_minor_locator(ticker.LogLocator(base=10.0, numticks=15,
subs=(0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9)))
ax.yaxis.set_minor_formatter(ticker.NullFormatter())
for ax in axs:
ax.tick_params(which='both', direction='in', labelsize=font_size,
bottom=True, top=True, left=True, right=True)
fig.tight_layout()
# output
out_fig = f'{os.path.splitext(fname)[0]}.png'