# weddell_polynya_paper.py
# -*- coding: utf-8 -*-
print('Script is loading dependencies.')
# external imports
import os
from numpy import *
import scipy.io as sio
import scipy.interpolate as spin
from scipy import stats
import pandas as pd
import pandas.plotting._converter as pandacnv # FIXME: only necessary due to Pandas 0.21.0 bug with Datetime plotting
pandacnv.register() # FIXME: only necessary due to Pandas 0.21.0 bug with Datetime plotting
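# note: on newer pandas releases the public equivalent of the two lines above is
# pd.plotting.register_matplotlib_converters(), which avoids importing the private converter module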
import xarray as xr
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.colors as mcolors
import matplotlib.cbook as mcbook
import matplotlib.dates as mdates
import matplotlib.ticker as pltick
import matplotlib.legend as mlegend
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
from matplotlib.patches import Polygon, Rectangle, ConnectionPatch
if os.path.isdir('/Applications/anaconda/share/proj'): # needed for Basemap import on my machine, but probably not yours
os.environ['PROJ_LIB'] = '/Applications/anaconda/share/proj'
from mpl_toolkits.basemap import Basemap
from matplotlib import gridspec
from datetime import datetime, timedelta
from collections import OrderedDict
from Circles.circles import circle # from https://github.com/urschrei/Circles
import pickle
import warnings
import time
import gsw
# import custom functions
import download_product as dlp
import load_product as ldp
import time_tools as tt
import plot_tools as pt
import geo_tools as gt
# custom settings
set_printoptions(threshold=100) # speeds up printing of large dicts containing NumPy arrays during debugging
plt.ion() # necessary for interactive contour label placement
# prettier font for plots
# note: before uncommenting, Helvetica must first be 'installed' using fondu (see instructions at https://goo.gl/crpbt2)
# mpl.rc('font',**{'family':'sans-serif','sans-serif':'Helvetica'})
####################################################################################################
# download data
download_argo = False
download_soccom = False
download_amsr2 = False
download_amsre = False
download_dmsp_nrt = False
download_dmsp_v3 = False
download_nimbus5 = False
download_ecmwf = False
download_isd = False
# data preparation/processing routines
argo_cal_5904468 = False
process_ecmwf = False
load_erai_land_sea_mask = True # always keep this True!
# analyze data and generate paper figures
plot_fig_1 = False # note: this routine requires manual input for contour labels
plot_fig_2_ED_figs_3_4 = False
plot_fig_3 = False
plot_fig_4_ED_figs_6_7 = False
plot_fig_5_ED_fig_9_ED_table_1 = False
plot_ED_fig_1 = False
plot_ED_figs_2_8 = False
plot_ED_fig_5 = False
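# usage note: this script is driven by the boolean flags above; set the download/processing/plotting
# routines you want to run to True (keeping load_erai_land_sea_mask = True), then run the script directly,
# e.g.:  python weddell_polynya_paper.py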
# directory for plotting output
current_results_dir = os.getcwd() + '/Results/'
# directory for h4toh5 executable
script_dir = os.getcwd() + '/'
# root directory for data files
data_dir = os.getcwd() + '/Data/'
# sub-directories for data files
argo_gdac_dir = data_dir + 'Argo/'
soccom_dir = argo_gdac_dir + 'SOCCOM/'
uw_o2_dir = argo_gdac_dir + 'UW-O2/'
shipboard_dir = data_dir + 'Shipboard/'
wod_dir = shipboard_dir + 'WOD/'
waghc_dir = shipboard_dir + 'WAGHC2017/'
amsr2_dir = data_dir + 'Sea ice concentration/AMSR2/'
amsre_dir = data_dir + 'Sea ice concentration/AMSR-E/'
dmsp_nrt_dir = data_dir + 'Sea ice concentration/DMSP_NRT/'
dmsp_v3_dir = data_dir + 'Sea ice concentration/DMSP_v3/'
nimbus5_dir = data_dir + 'Sea ice concentration/Nimbus-5/'
amsr_gridfile = data_dir + 'Sea ice concentration/AMSR_grid/LongitudeLatitudeGrid-s6250-Antarctic.h5'
amsr_areafile = data_dir + 'Sea ice concentration/AMSR_grid/pss06area_v3.dat'
nsidc_ps25_grid_dir = data_dir + 'Sea ice concentration/NSIDC_polar_stereo_25km_grid/'
coastline_filename_prefix = data_dir + 'GSHHG coast shapefiles/l/GSHHS_l_L5'
climate_indices_dir = data_dir + 'Climate indices/'
reanalysis_dir = data_dir + 'Reanalysis/'
era_new_dir = data_dir + 'Reanalysis/ECMWF_Weddell_unprocessed/'
era_custom_dir = data_dir + 'Reanalysis/ECMWF_Weddell_processed/'
era_processed_gwk_moore_dir = data_dir + 'Reanalysis/ECMWF_processed_GWKMoore/'
isd_dir = data_dir + 'ISD station records/'
reader_dir = data_dir + 'READER station records/'
# sub-directories for serialized ("pickled") processed data
figure_pickle_dir = data_dir + 'Processed_pickle_archives/'
argo_index_pickle_dir = argo_gdac_dir + 'Argo_index_pickles/'
############################################ DATA DOWNLOAD ##########################################################
print('Script is starting.')
# download Argo data from GDAC and update pickles
if download_argo:
# note: set overwrite_global_index to True when checking for new/updated profiles
dlp.argo_gdac((1990,1,1),(2018,10,1),[-80,-55],[-70,50],argo_gdac_dir,overwrite_global_index=False,
overwrite_profs=False,bypass_download=False,
only_download_wmoids=[])
argo_gdac_index = ldp.argo_gdac_load_index(argo_gdac_dir)
pickle.dump(argo_gdac_index,open(argo_index_pickle_dir + 'argo_gdac_index.pickle','wb'))
# correct float 5904468 for salinity drift
# note: run this after downloading new Argo data; see #FIXME notes in ldp.argo_float_data()
if argo_cal_5904468:
argo_gdac_index = pickle.load(open(argo_index_pickle_dir + 'argo_gdac_index.pickle','rb'))
argo_soccom_index = pickle.load(open(argo_index_pickle_dir + 'argo_soccom_index.pickle','rb'))
# export calibration based on average salinity between 1600-1700 m
gdac_data = ldp.argo_float_data(5904468,argo_gdac_dir,argo_gdac_index,argo_soccom_index,
prof_nums='all',verbose=False,use_unadjusted=False,correct_5904468_interim=False)
gdac_num_profs = len(gdac_data['profiles'])
gdac_prof_nums = array([gdac_data['profiles'][p]['prof_num'] for p in range(gdac_num_profs)])
gdac_time_coord = array([tt.convert_tuple_to_datetime(tt.convert_14_to_tuple(gdac_data['profiles'][p]['datetime']))
for p in range(gdac_num_profs)])
gdac_sal_series = array([gt.vert_prof_eval(gdac_data['profiles'][p],'psal',(1600,1700),z_coor='depth',
interp_method='linear',extrap='NaN',avg_method='interp',avg_spacing=1.0,
avg_nan_tolerance=1.0) for p in range(gdac_num_profs)])
last_good_sal = gdac_sal_series[where(gdac_prof_nums == 83)[0][0]]
prof_idx_mask_for_trend = logical_and(gdac_prof_nums >= 83, gdac_prof_nums <= 118)
prof_idx_mask_to_cal = logical_and(gdac_prof_nums >= 84, gdac_prof_nums <= 118)
[cal_slope,cal_intercept] = stats.linregress(mdates.date2num(gdac_time_coord[prof_idx_mask_for_trend]),
gdac_sal_series[prof_idx_mask_for_trend])[0:2]
sal_trend = cal_intercept + cal_slope * mdates.date2num(gdac_time_coord)
sal_deltas = -1 * (sal_trend - sal_trend[where(gdac_prof_nums == 83)[0][0]])
# note: to avoid zeroing the trend at its leftmost y-value, subtract last_good_sal instead (a subjective choice)
sal_deltas_within_cal_period_only = zeros(gdac_num_profs)
sal_deltas_within_cal_period_only[prof_idx_mask_to_cal] = sal_deltas[prof_idx_mask_to_cal]
pickle.dump([gdac_prof_nums,sal_deltas_within_cal_period_only],
open(argo_index_pickle_dir + 'argo_5904468_cal.pickle','wb'))
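# minimal sketch (an assumption about how ldp.argo_float_data() consumes this pickle when
# correct_5904468_interim=True, not taken from that function): each profile's reported salinities
# would simply be offset by its per-profile delta, e.g.
#     delta = sal_deltas_within_cal_period_only[where(gdac_prof_nums == this_prof_num)[0][0]]
#     psal_corrected = psal_reported + delta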
# download and/or process SOCCOM data and update pickles
# note: it is preferred to download quarterly SOCCOM snapshots, which have a DOI, rather than the latest data
# so this offers two options:
# 1) to process already-downloaded quarterly DOI snapshots, keep the ldp.argo_soccom() call active (OPTION 1 below)
# 2) to download near-real-time FTP files and process them, uncomment the dlp.argo_soccom() call instead (OPTION 2 below)
if download_soccom:
# dlp.argo_soccom(argo_gdac_dir,overwrite_profs=True) # OPTION 2
ldp.argo_soccom(soccom_dir) # OPTION 1
argo_soccom_index = ldp.argo_soccom_load_index(soccom_dir,uw_o2_dir,verbose=True)
pickle.dump(argo_soccom_index,open(argo_index_pickle_dir + 'argo_soccom_index.pickle','wb'))
# download AMSR2 sea ice data and convert from HDF4 to HDF5
if download_amsr2:
dlp.amsr(2,(2012,7,4),(2019,2,26),amsr2_dir,get_pdfs=False,overwrite=False,convert=True,
conversion_script_dir=script_dir)
# download AMSR-E sea ice data and convert from HDF4 to HDF5
if download_amsre:
dlp.amsr(1,(2002,6,1),(2011,10,4),amsre_dir,get_pdfs=False,overwrite=False,convert=True,
conversion_script_dir=script_dir)
# download NSIDC v1 NRT CDR sea ice data
# note: check FTP link for new data availability; update satellite abbreviations/dates accordingly within functions,
# including gt.identify_polynyas_magic()
if download_dmsp_nrt:
dlp.dmsp_nrt((2018,1,1),(2019,2,26),dmsp_nrt_dir,overwrite=True)
# download NSIDC v3 GSFC Merged/CDR sea ice data
# note: check FTP link for new data availability; update satellite abbreviations/dates accordingly within functions,
# including gt.identify_polynyas_magic()
# note: if a new year of data is released, delete the corresponding NSIDC NRT CDR data
if download_dmsp_v3:
dlp.dmsp_v3((1978,11,1),(2017,12,31),dmsp_v3_dir,overwrite=False)
# download and unzip Nimbus-5 sea ice data
# note: this requires an Earthdata login (username and password) and a local key; see dlp.nimbus5() for instructions
if download_nimbus5:
dlp.nimbus5((1972,12,12),(1977,5,11),nimbus5_dir,convert=True,conversion_script_dir=script_dir)
# download station meteorological data from Integrated Surface Database
if download_isd:
dlp.isd_station(895120,1973,2019,isd_dir,overwrite=True) # Novolazarevskaya
dlp.isd_station(890010,1973,1994,isd_dir,overwrite=True) # SANAE SAF
dlp.isd_station(890040,1997,2019,isd_dir,overwrite=True) # SANAE AWS
dlp.isd_station(890020,1981,2019,isd_dir,overwrite=True) # Neumayer
dlp.isd_station(895140,1990,2019,isd_dir,overwrite=True) # Maitri
# submit MARS requests for ERA-Interim reanalysis fields
# note: submit one request at a time; cancel using Ctrl-C (or the "stop" button) immediately after seeing "Request is queued",
#       then download the results using Chrome from: http://apps.ecmwf.int/webmars/joblist/
#       save them under the filenames given in the comments below into the folder 'ECMWF_Weddell_unprocessed',
#       and finally run the 'process_ecmwf' routine below
if download_ecmwf:
which_to_download = 1 # change to submit one at a time (see note above) - recommend order 3, 4, 1, 2
# daily fields
if which_to_download == 1: # analysis; save as 'erai_daily_weddell.nc'
dlp.ecmwf(date_range='1979-01-01/to/2018-12-31',area='-40/-90/-90/90',output_filename=None,type='an',
step='0',time='00/06/12/18',params=['msl','sst','skt','t2m','d2m','u10','v10'])
elif which_to_download == 2: # forecast; save as 'erai_daily_weddell_forecast.nc'
dlp.ecmwf(date_range='1979-01-01/to/2018-12-31',area='-40/-90/-90/90',output_filename=None,type='fc',
step='6/12',time='00/12',params=['sf','sshf','slhf','ssr','str','strd','e','tp','iews','inss'])
# monthly means
elif which_to_download == 3: # analysis; save as 'erai_monthly_mean_weddell.nc'
dlp.ecmwf(date_range=[datetime(1979,1,1),datetime(2018,12,1)],area='-40/-90/-90/90',output_filename=None,
type='an',step=None,time=None,params=['msl','sp','sst','skt','t2m','u10','v10','si10'])
elif which_to_download == 4: # forecast; save as 'erai_monthly_mean_weddell_forecast.nc'
dlp.ecmwf(date_range=[datetime(1979,1,1),datetime(2018,12,1)],area='-40/-90/-90/90',output_filename=None,
type='fc',step=None,time=None,params=['iews','inss'])
# process newly downloaded ECMWF reanalysis files (calculate derived quantities, de-accumulate, and re-export)
# note: once finished, manually delete unprocessed files and any processed chunks
if process_ecmwf:
for filename in os.listdir(path=era_new_dir):
if filename == '.DS_Store': continue
ldp.load_ecmwf(era_new_dir,filename,export_to_dir=era_custom_dir,verbose=True)
# load ERA-Interim land-sea mask
if load_erai_land_sea_mask:
erai_mask = ldp.load_ecmwf_mask(reanalysis_dir,'erai_land_sea_mask.nc')
###################################### ANALYSIS ROUTINES ######################################################
# Fig. 1. Polynyas of 1974, 2016 and 2017 in relation to profiling float trajectories near Maud Rise.
if plot_fig_1:
# NOTE: bathymetry contour labels require manual input to select their locations (click each location, then press Return)
plot_fig_1a = True
plot_fig_1b = True
plot_fig_1c = True
use_fig_1a_pickle = True # must be set to False after changing the polynya dates used for the contours (recomputing is not a big slowdown)
# establish Antarctic coastline
circumant_lons,circumant_lats = gt.establish_coastline(coastline_filename_prefix)
# load sea ice concentration metadata
[sea_ice_grids,sea_ice_data_avail,sea_ice_all_dates] = ldp.sea_ice_data_prep(nimbus5_dir,dmsp_v3_dir,dmsp_nrt_dir,
amsre_dir,amsr2_dir,amsr_gridfile,
amsr_areafile,nsidc_ps25_grid_dir)
# plot parameters
open_sic_plotting = 50 # SIC to be plotted as open
open_sic_polynya = 50 # SIC threshold for polynyas
extent_threshold_weddell = 30000 # minimum polynya extent in km^2 to identify/plot
map_params_weddell = [1700000,1800000,-67.0,0] # width,height,lat_center,lon_center
map_params_floats_2016 = [475000,520000,-65.25,4.25]
map_params_floats_2017 = [465000,520000,-65.25,2.75]
labelsize = 5
station_names = ['Novolazarevskaya Station']
station_locs = [[11.8,-70.8]] # lon, lat
station_colors = ['w']
station_markers = ['^']
motoi_fuji_loc = [1.2,-66.5] # lon, lat of R/V Fuji MLS observation on 1974-02-27 from Motoi et al. 1987
mr_center = [-65.0,3.0] # summit of Maud Rise (65.0°S, 3.0°E)
mr_hydro_radius = 250 # search radius (km) from Maud Rise
mr_bathy_contours = arange(-3250,760,750)
ocean_color = '#bce6fc' # light blue
ocean_color_light = '#ddf2fd' # lighter version of the above
polynya_2016_date = (2016,8,6)
polynya_color_2016 = '#ffae1a'
polynya_color_2016_light = '#ffce75'
polynya_color_2016_dark = '#AA7411'
polynya_line_2016 = '-'
polynya_2017_date_sic = (2017,11,21)
polynya_2017_date_string = '2017-11-21'
polynya_2017_date = (2017,9,25)
polynya_color_2017 = '#0000cd'
polynya_color_2017_medium = '#4c4cdc'
polynya_color_2017_light = '#8484e7'
polynya_color_2017_dark = '#000089'
polynya_line_2017 = '-'
polynya_1974_date = (1974,10,12)
polynya_color_1974 = 'maroon'
polynya_line_1974 = '-'
float_wmoids = [5903616,5904468,5904471]
float_markers = ['^','o','s']
float_lines = [(0,(1,1)),'-.','--']
float_line_colors = ['0.2','k','k']
float_start_colors = ['lime','lime','lime']
float_start_sizes = [10,10,10]
polynya_marker_sizes = [None,25,25]
toi_5903616 = [20111218000000,20160603000000]
toi_5904468a = [20150118000000,20170110000000]
toi_5904468b = [20161231000000,20180509000000]
toi_5904471a = [20141220000000,20170110000000]
toi_5904471b = [20161231000000,20180623000000]
float_polynya_plot_dates = [polynya_2016_date,polynya_2017_date]
polynya_colors = [polynya_color_2016,polynya_color_2017]
# establish figure
if plot_fig_1a or plot_fig_1b or plot_fig_1c:
fig = plt.figure(figsize=(6.4,2.5))
subplot_grid_left \
= gridspec.GridSpec(1,3,width_ratios=[1.1*map_params_weddell[0]*map_params_floats_2016[1]/map_params_weddell[1],
map_params_floats_2016[0],map_params_floats_2017[0]],wspace=0.20)
subplot_grid_right \
= gridspec.GridSpec(1,3,width_ratios=[1.1*map_params_weddell[0]*map_params_floats_2016[1]/map_params_weddell[1],
map_params_floats_2016[0],map_params_floats_2017[0]],wspace=0.05)
# 1974, 2016, and 2017 polynyas
if plot_fig_1a:
ax1 = plt.subplot(subplot_grid_left[0])
sic_grid = sea_ice_grids['amsr2']
sic_field = ldp.load_amsr(sea_ice_data_avail['amsr2'][polynya_2017_date_sic][0],regrid_to_25km=False)
m1,pcm = pt.sea_ice_argo_spatial(data_dir,polynya_2017_date_sic,sic_grid,sic_field,None,None,None,None,None,
*map_params_weddell,plot_floats=False,polynya_grid=None,open_sic=0,
rasterized=True,as_subplot=True,create_subplot=False,
subplot_add_colorbar=False,
bathy_contours=[],subplot_lon_labels=[0,0,1,0],subplot_lat_labels=[1,0,0,0],
grid_lats=arange(-80,60,5),grid_lons=arange(-80,50,10),
subplot_labelsize=labelsize,grid_color='0.7',
which_ice_cmap=5,cmap_bad_color=ocean_color,cmap_ocean_color=ocean_color,
continent_color='0.7',boundary_width=0.5,coastline_width=0.5,
return_basemap=True,return_pcolor=True)
sic_grid = sea_ice_grids['nimbus5']
polynya_grid_1974 = gt.identify_polynyas_magic('nimbus5',polynya_1974_date,sea_ice_grids,sea_ice_data_avail,
circumant_lons,circumant_lats,open_threshold=open_sic_polynya,
extent_threshold=extent_threshold_weddell,identify_bad=True)[7]
m1.contour(*m1(sic_grid['lons'],sic_grid['lats']),polynya_grid_1974,levels=[0.999],
colors=polynya_color_1974,linestyles=polynya_line_1974,linewidths=1.0,alpha=0.9,zorder=3)
sic_grid = sea_ice_grids['amsr2']
if use_fig_1a_pickle:
[polynya_grid_2016,polynya_grid_2017] = pickle.load(open(figure_pickle_dir + 'fig_1a','rb'))
else:
polynya_grid_2016 = gt.identify_polynyas_magic('amsr2',polynya_2016_date,sea_ice_grids,sea_ice_data_avail,
circumant_lons,circumant_lats,
open_threshold=open_sic_polynya,
extent_threshold=extent_threshold_weddell,identify_bad=True,
regrid_amsr_to_25km=False)[7]
polynya_grid_2017 = gt.identify_polynyas_magic('amsr2',polynya_2017_date,sea_ice_grids,sea_ice_data_avail,
circumant_lons,circumant_lats,
open_threshold=open_sic_polynya,
extent_threshold=extent_threshold_weddell,identify_bad=True,
regrid_amsr_to_25km=False)[7]
pickle.dump([polynya_grid_2016,polynya_grid_2017],open(figure_pickle_dir + 'fig_1a','wb'))
m1.contour(*m1(sic_grid['lons'],sic_grid['lats']),polynya_grid_2017,levels=[0.999],
colors=polynya_color_2017_dark,linestyles=polynya_line_2017,linewidths=0.5,alpha=0.9,zorder=3)
m1.contourf(*m1(sic_grid['lons'],sic_grid['lats']),polynya_grid_2017,levels=[0.999,1.1],
colors=polynya_color_2017,alpha=0.25,zorder=3)
m1.contour(*m1(sic_grid['lons'],sic_grid['lats']),polynya_grid_2016,levels=[0.999],
colors=polynya_color_2016,linestyles=polynya_line_2016,linewidths=0.5,alpha=0.9,zorder=3)
m1.contourf(*m1(sic_grid['lons'],sic_grid['lats']),polynya_grid_2016,levels=[0.999,1.1],
colors=polynya_color_2016,alpha=0.25,zorder=3)
m1.scatter(*m1(*station_locs[0]),s=14,c=station_colors[0],marker=station_markers[0],
edgecolors='k',linewidths=0.25,zorder=3)
m1.scatter(*m1(*motoi_fuji_loc),s=17,c='maroon',marker='*',
edgecolors='maroon',linewidths=0.25,zorder=3)
c1, = plt.plot(0,NaN,ls=polynya_line_1974,color=polynya_color_1974,linewidth=1.0,alpha=0.8,
label='{0}-{1:02d}-{2:02d}'.format(*polynya_1974_date)) # dummy handles for legend
c4, = plt.plot([0,0],[NaN,NaN],c='maroon',marker='*',ms=5,mec='maroon',mew=0.25,ls='none',
label=r'R/V $\it{Fuji}$, 1974-02-27')
c5, = plt.plot([0,0],[NaN,NaN],c=station_colors[0],marker=station_markers[0],ms=3,mec='k',mew=0.25,ls='none',
label=station_names[0])
leg1 = ax1.legend(handles=[c4,c1],ncol=2,loc='upper center',bbox_to_anchor=(0.5,-0.010),
fontsize=labelsize,columnspacing=1.5,handletextpad=0.6,handlelength=1.5,frameon=False)
leg2 = mlegend.Legend(ax1,[c5],[station_names[0]],ncol=1,fontsize=labelsize,
columnspacing=1.5,handletextpad=0.6,handlelength=1.5,frameon=False)
leg1._legend_box._children.append(leg2._legend_box._children[1])
leg1._legend_box.align = 'center'
m1_ax = plt.gca()
plt.gca().set_anchor('C')
plt.text(0.96,0.97,polynya_2017_date_string,color='0.2',size=labelsize+1,fontweight='bold',
horizontalalignment='right',verticalalignment='top',transform=ax1.transAxes)
# 2016 polynya
if plot_fig_1b:
ax2 = plt.subplot(subplot_grid_right[1])
argo_gdac_index = pickle.load(open(argo_index_pickle_dir + 'argo_gdac_index.pickle','rb'))
float_data_all = []
for wmoid in float_wmoids:
this_float_meta = ldp.argo_gdac_float_meta(argo_gdac_index['local_prof_index'],wmoid)
if wmoid == 5903616: toi_mask = logical_and(this_float_meta['prof_datetimes'] >= toi_5903616[0],
this_float_meta['prof_datetimes'] <= toi_5903616[1])
if wmoid == 5904468: toi_mask = logical_and(this_float_meta['prof_datetimes'] >= toi_5904468a[0],
this_float_meta['prof_datetimes'] <= toi_5904468a[1])
if wmoid == 5904471: toi_mask = logical_and(this_float_meta['prof_datetimes'] >= toi_5904471a[0],
this_float_meta['prof_datetimes'] <= toi_5904471a[1])
float_data_all.append([wmoid,this_float_meta['prof_lons'][toi_mask],this_float_meta['prof_lats'][toi_mask],
this_float_meta['prof_position_flags'][toi_mask],
this_float_meta['prof_datetimes'][toi_mask]])
_,m2 = pt.bathy_basemap(data_dir,*map_params_floats_2016,create_new_fig=False,
labelsize=labelsize,boundary_width=0.5,lon_labels_on_top=True,
grid_color='0.4',label_contours=True,cmap='Greys_r',bathy_alpha=0.6,
grid_lats=arange(-70,-60,2),grid_lons=arange(-80,50,5))
for f in range(len(float_data_all)):
zob = f * 7 # zorder baseline
mk = float_markers[f]
fl = float_line_colors[f]
fls = float_lines[f]
start_mark = float_markers[f]
start_color = float_start_colors[f]
start_size = float_start_sizes[f]
polynya_marker_size = polynya_marker_sizes[f]
wmoid = float_data_all[f][0]
lons = float_data_all[f][1]
lats = float_data_all[f][2]
position_flags = float_data_all[f][3]
datetimes = array([datetime(*tt.convert_14_to_tuple(dt)[0:3]) for dt in float_data_all[f][4]])
lonx,laty = m2(lons,lats)
plt.plot(lonx[position_flags != 9],laty[position_flags != 9],color=fl,lw=0.75,ls=fls,zorder=zob+3)
plt.scatter(lonx[0],laty[0],s=start_size,c=start_color,marker=start_mark,edgecolors='none',zorder=zob+6)
if wmoid == 5904468 or wmoid == 5904471:
date_int = tt.convert_tuple_to_8_int(polynya_2016_date) * 1000000
if sum((float_data_all[f][4] - date_int) == 0) >= 1:
polynya_prof_idx = where((float_data_all[f][4] - date_int) == 0)[0][0]
else:
polynya_prof_idx = where((float_data_all[f][4] - date_int) < 0)[0][-1]
polynya_lon = lons[polynya_prof_idx]
polynya_lat = lats[polynya_prof_idx]
plt.scatter(*m2(polynya_lon,polynya_lat),s=polynya_marker_size,c=polynya_color_2016,marker='*',
edgecolor='k',linewidths=0.5,zorder=zob+6)
c1, = plt.plot([0,0],[NaN,NaN],lw=0.75,ls=float_lines[0],c=float_line_colors[0],label='{0}'.format(float_wmoids[0]))
c2, = plt.plot([0,0],[NaN,NaN],lw=0.75,ls=float_lines[1],c=float_line_colors[1],label='{0}'.format(float_wmoids[1]))
c3, = plt.plot([0,0],[NaN,NaN],lw=0.75,ls=float_lines[2],c=float_line_colors[2],label='{0}'.format(float_wmoids[2]))
leg1 = ax2.legend(handles=[c1,c2,c3],ncol=3,loc='upper center',bbox_to_anchor=(1.025,-0.010),
fontsize=labelsize,columnspacing=2.5,handletextpad=0.6,handlelength=2.25,frameon=False,
markerscale=1,scatterpoints=1)
c4, = plt.plot([0,0],[NaN,NaN],c=polynya_color_2016_light,ls='-',lw=3,
label='{0}-{1:02d}-{2:02d}'.format(*polynya_2016_date),
marker='*',ms=5,markerfacecolor=polynya_color_2016,markeredgecolor='k',markeredgewidth=0.5)
c5, = plt.plot([0,0],[NaN,NaN],c=polynya_color_2017_light,ls='-',lw=3,
label='{0}-{1:02d}-{2:02d}'.format(*polynya_2017_date),
marker='*',ms=5,markerfacecolor=polynya_color_2017,markeredgecolor='k',markeredgewidth=0.5)
leg2 = mlegend.Legend(ax2,[c4,c5],['{0}-{1:02d}-{2:02d}'.format(*polynya_2016_date),
'{0}-{1:02d}-{2:02d}'.format(*polynya_2017_date)],ncol=2,fontsize=labelsize,
columnspacing=3.0,handletextpad=0.6,handlelength=2.25,frameon=False)
leg1._legend_box._children.append(leg2._legend_box._children[1])
leg1._legend_box.align = 'center'
sic_grid = sea_ice_grids['amsr2']
sic_field = ldp.load_amsr(sea_ice_data_avail['amsr2'][polynya_2016_date][0],regrid_to_25km=False)
m2.contour(*m2(sic_grid['lons'],sic_grid['lats']),sic_field,levels=[open_sic_plotting+1],
colors=polynya_color_2016_dark,linewidths=0.5,alpha=0.9,zorder=4)
sic_field = ma.masked_where(sic_field > open_sic_plotting,sic_field)
m2.contourf(*m2(sic_grid['lons'],sic_grid['lats']),sic_field,levels=[0,open_sic_plotting-1],
colors=polynya_color_2016,alpha=0.40,zorder=3)
plt.text(0.96,0.97,'2011–2016',color='w',size=labelsize+1,fontweight='bold',
horizontalalignment='right',verticalalignment='top',transform=ax2.transAxes)
# 2017 polynya
if plot_fig_1c:
ax3 = plt.subplot(subplot_grid_right[2])
argo_gdac_index = pickle.load(open(argo_index_pickle_dir + 'argo_gdac_index.pickle','rb'))
float_data_all = []
for wmoid in float_wmoids:
this_float_meta = ldp.argo_gdac_float_meta(argo_gdac_index['local_prof_index'],wmoid)
if wmoid == 5903616: toi_mask = logical_and(this_float_meta['prof_datetimes'] >= toi_5903616[0],
this_float_meta['prof_datetimes'] <= toi_5903616[1]) # dummy
if wmoid == 5904468: toi_mask = logical_and(this_float_meta['prof_datetimes'] >= toi_5904468b[0],
this_float_meta['prof_datetimes'] <= toi_5904468b[1])
if wmoid == 5904471: toi_mask = logical_and(this_float_meta['prof_datetimes'] >= toi_5904471b[0],
this_float_meta['prof_datetimes'] <= toi_5904471b[1])
float_data_all.append([wmoid,this_float_meta['prof_lons'][toi_mask],this_float_meta['prof_lats'][toi_mask],
this_float_meta['prof_position_flags'][toi_mask],
this_float_meta['prof_datetimes'][toi_mask]])
_,m3 = pt.bathy_basemap(data_dir,*map_params_floats_2017,create_new_fig=False,
labelsize=labelsize,boundary_width=0.5,lon_labels_on_top=True,
grid_color='0.4',label_contours=False,cmap='Greys_r',bathy_alpha=0.6,
grid_lats=arange(-70,-60,2),grid_lons=arange(-80,50,5),force_lat_labels=[0,0,0,0])
for f in range(len(float_data_all)):
if f == 0: continue
zob = f * 7 # zorder baseline
mk = float_markers[f]
fl = float_line_colors[f]
fls = float_lines[f]
start_mark = float_markers[f]
start_color = float_start_colors[f]
start_size = float_start_sizes[f]
polynya_marker_size = polynya_marker_sizes[f]
wmoid = float_data_all[f][0]
lons = float_data_all[f][1]
lats = float_data_all[f][2]
position_flags = float_data_all[f][3]
datetimes = array([datetime(*tt.convert_14_to_tuple(dt)[0:3]) for dt in float_data_all[f][4]])
lonx,laty = m3(lons,lats)
plt.plot(lonx[position_flags != 9],laty[position_flags != 9],color=fl,lw=0.75,ls=fls,zorder=zob+3)
plt.scatter(lonx[0],laty[0],s=start_size-6,c=start_color,marker=start_mark,edgecolors='none',zorder=zob+6)
if wmoid == 5904468 or wmoid == 5904471:
date_int = tt.convert_tuple_to_8_int(polynya_2017_date) * 1000000
if sum((float_data_all[f][4] - date_int) == 0) >= 1:
polynya_prof_idx = where((float_data_all[f][4] - date_int) == 0)[0][0]
else:
polynya_prof_idx = where((float_data_all[f][4] - date_int) < 0)[0][-1]
polynya_lon = lons[polynya_prof_idx]
polynya_lat = lats[polynya_prof_idx]
plt.scatter(*m3(polynya_lon,polynya_lat),s=polynya_marker_size,c=polynya_color_2017_medium,marker='*',
edgecolor='k',linewidths=0.5,zorder=zob+6)
sic_grid = sea_ice_grids['amsr2']
sic_field = ldp.load_amsr(sea_ice_data_avail['amsr2'][polynya_2017_date][0],regrid_to_25km=False)
m3.contour(*m3(sic_grid['lons'],sic_grid['lats']),sic_field,levels=[open_sic_plotting+1],
colors=polynya_color_2017_dark,linewidths=0.5,alpha=0.70,zorder=4)
sic_field = ma.masked_where(sic_field > open_sic_plotting,sic_field)
m3.contourf(*m3(sic_grid['lons'],sic_grid['lats']),sic_field,levels=[0,open_sic_plotting-1],
colors=polynya_color_2017,alpha=0.30,zorder=3)
plt.text(0.04,0.97,'2017–2018',color='w',size=labelsize+1,fontweight='bold',
horizontalalignment='left',verticalalignment='top',transform=ax3.transAxes)
# inset box on first subplot
if plot_fig_1a and plot_fig_1b and plot_fig_1c:
slon = 3.25 # switchover longitude: the Fig. 1b frame is outlined east of slon, the Fig. 1c frame west of it
# representing Fig. 1b: top, right, bottom
top_lons,top_lats = m2(linspace(0,map_params_floats_2016[0],100),
linspace(map_params_floats_2016[1],map_params_floats_2016[1],100),inverse=True) # left to right
right_lons,right_lats = m2(linspace(map_params_floats_2016[0],map_params_floats_2016[0],100),
linspace(map_params_floats_2016[1],0,100),inverse=True) # downwards
bottom_lons,bottom_lats = m2(linspace(map_params_floats_2016[0],0,100),
linspace(0,0,100),inverse=True) # right to left
patch_lons = concatenate((top_lons[top_lons >= slon],right_lons,bottom_lons[bottom_lons >= slon]))
patch_lats = concatenate((top_lats[top_lons >= slon],right_lats,bottom_lats[bottom_lons >= slon]))
plonx,platy = m1(patch_lons,patch_lats)
m1_ax.plot(plonx,platy,c='k',lw=0.5,ls='-',alpha=0.7,zorder=2)
# representing Fig. 1c: bottom, left, top
top_lons,top_lats = m3(linspace(0,map_params_floats_2017[0],100),
linspace(map_params_floats_2017[1],map_params_floats_2017[1],100),inverse=True) # left to right
bottom_lons,bottom_lats = m3(linspace(map_params_floats_2017[0],0,100),
linspace(0,0,100),inverse=True) # right to left
left_lons,left_lats = m3(linspace(0,0,100),
linspace(0,map_params_floats_2017[1],100),inverse=True) # upwards
patch_lons = concatenate((bottom_lons[bottom_lons <= slon],left_lons,top_lons[top_lons <= slon]))
patch_lats = concatenate((bottom_lats[bottom_lons <= slon],left_lats,top_lats[top_lons <= slon]))
plonx,platy = m1(patch_lons,patch_lats)
m1_ax.plot(plonx,platy,c='k',lw=0.5,ls='-',alpha=0.7,zorder=2)
# add ice colorbar to Fig. 1a
if plot_fig_1a:
original_axes = plt.gca()
cbar_ax = inset_axes(m1_ax,width='100%',height='100%',loc=3,
bbox_to_anchor=(0.25,0.0,0.60,0.017),bbox_transform=m1_ax.transAxes)
# left, bottom, width, height
cbar = plt.gcf().colorbar(pcm,ticks=arange(0,101,20),format='%.0f%%',extend='neither',
orientation='horizontal',cax=cbar_ax)
cbar.ax.tick_params(labelsize=labelsize,top='on',bottom=False,labeltop='on',labelbottom=False)
cbar.outline.set_linewidth(0.5)
plt.sca(original_axes)
# add inset map to Fig. 1a
if plot_fig_1a:
original_axes = plt.gca()
m4_ax = inset_axes(m1_ax,width='100%',height='100%',
bbox_to_anchor=(0.08,0.70,0.32,0.32),bbox_transform=m1_ax.transAxes)
m4 = Basemap(projection='spstere',boundinglat=-56,lon_0=180,resolution='i',round=True)
circle = m4.drawmapboundary(linewidth=0.5,fill_color=ocean_color_light)
circle.set_clip_on(False)
m4.fillcontinents(color='0.7')
m4.drawcoastlines(linewidth=0.25,color='k')
m4.drawparallels(arange(-80,0,10),color='0.6',linewidth=0.25,labels=[0,0,0,0],zorder=2)
m4.drawmeridians(arange(0,359,30),color='0.6',linewidth=0.25,labels=[0,0,0,0],zorder=2)
top_lons,top_lats = m1(linspace(0,map_params_weddell[0],100),
linspace(map_params_weddell[1],map_params_weddell[1],100),inverse=True) # left to right
right_lons,right_lats = m1(linspace(map_params_weddell[0],map_params_weddell[0],100),
linspace(map_params_weddell[1],0,100),inverse=True) # downwards
bottom_lons,bottom_lats = m1(linspace(map_params_weddell[0],0,100),
linspace(0,0,100),inverse=True) # right to left
left_lons,left_lats = m1(linspace(0,0,100),
linspace(0,map_params_weddell[1],100),inverse=True) # upwards
patch_lons = concatenate((top_lons,right_lons,bottom_lons,left_lons))
patch_lats = concatenate((top_lats,right_lats,bottom_lats,left_lats))
plonx,platy = m4(patch_lons,patch_lats)
patchxy = list(zip(plonx,platy))
poly = Polygon(patchxy,linewidth=0.5,linestyle='-',edgecolor='k',facecolor='none',alpha=0.7,zorder=3)
m4_ax.add_patch(poly)
plt.sca(original_axes)
# save figure
if plot_fig_1a or plot_fig_1b or plot_fig_1c:
plt.savefig(current_results_dir + 'figure_1.pdf',dpi=450)
plt.close()
# Fig. 2. Storms, sea ice concentration and mixed-layer salinity at Maud Rise in 2016 and 2017.
# Extended Data Fig. 3. Evolution of sea ice concentration, air temperature and upper ocean properties at Maud Rise in
# 2016 and 2017.
# Extended Data Fig. 4. Correspondence of sea ice loss episodes and major storms near Maud Rise.
if plot_fig_2_ED_figs_3_4:
verbose = True
use_ice_pickle = True
use_hydro_pickle = True
use_erai_pickle = True
mr_box_small = [0,10,-67,-63] # for reanalysis series
erai_toi_2016 = [datetime(2016,1,1),datetime(2016,12,31)]
erai_toi_2017 = [datetime(2017,1,1),datetime(2017,12,31)]
polynya_datetimes = [[datetime(2016,7,27),datetime(2016,8,17)],[datetime(2017,9,3),datetime(2017,11,28)]]
mr_obs_dist = 250 # maximum obs distance (in km) from Maud Rise center
e_weddell_obs_dist = 500 # for comparison
mr_center = [-65.0,3.0] # summit of Maud Rise (65.0°S, 3.0°E)
dpb = 21 # days per bin for climatological envelope
si10_crit = 20 # storm criterion (wind speed ≥ 20 m/s)
msl_crit = 950 # storm criterion (surface pressure ≤ 950 hPa)
polynya_sats = ['dmsp','amsr2']
pickle_names = ['fig_2_ice','fig_2_ice_with_amsr_polynya_extent']
if not use_ice_pickle:
for run_idx, polynya_sat in enumerate(polynya_sats):
sic_lon_bounds = [0,10]
sic_lat_bounds = [-67,-63]
polynya_lon_bounds = array([-15,20])
polynya_lat_bounds = array([-68,-62])
circumant_lons,circumant_lats = gt.establish_coastline(coastline_filename_prefix)
[sea_ice_grids,sea_ice_data_avail,sea_ice_all_dates] = ldp.sea_ice_data_prep(nimbus5_dir,dmsp_v3_dir,
dmsp_nrt_dir,
amsre_dir,amsr2_dir,amsr_gridfile,
amsr_areafile,nsidc_ps25_grid_dir)
sic_doy_2016_dmsp = pd.Series()
sic_doy_2017_dmsp = pd.Series()
sic_doy_dmsp = dict()
sic_doy_2016_amsr = pd.Series()
sic_doy_2017_amsr = pd.Series()
sic_doy_amsr = dict()
polynya_extent_doy_2016 = pd.Series()
polynya_extent_doy_2017 = pd.Series()
for index, date in enumerate(tt.dates_in_range((1978,11,1),tt.now())):
if verbose: print(date)
date_as_datetime = datetime(*date)
doy = date_as_datetime.timetuple().tm_yday
# SIC average
[sic_dmsp,open_area,day_offset] = ldp.sea_ice_concentration(date,sic_lat_bounds,sic_lon_bounds,sea_ice_grids,
sea_ice_data_avail,use_only=['dmsp'])
[sic_amsr,open_area,day_offset] = ldp.sea_ice_concentration(date,sic_lat_bounds,sic_lon_bounds,sea_ice_grids,
sea_ice_data_avail,use_only=['amsre','amsr2'])
if doy not in sic_doy_dmsp: sic_doy_dmsp[doy] = []
if doy not in sic_doy_amsr: sic_doy_amsr[doy] = []
sic_doy_dmsp[doy].append(sic_dmsp)
sic_doy_amsr[doy].append(sic_amsr)
if date[0] == 2016: sic_doy_2016_dmsp = sic_doy_2016_dmsp.append(pd.Series(index=[doy],data=[sic_dmsp]))
if date[0] == 2016: sic_doy_2016_amsr = sic_doy_2016_amsr.append(pd.Series(index=[doy],data=[sic_amsr]))
if date[0] == 2017: sic_doy_2017_dmsp = sic_doy_2017_dmsp.append(pd.Series(index=[doy],data=[sic_dmsp]))
if date[0] == 2017: sic_doy_2017_amsr = sic_doy_2017_amsr.append(pd.Series(index=[doy],data=[sic_amsr]))
if date[0] == 2016 or date[0] == 2017:
# polynya identification
if verbose: print('polynya ID for ',date)
sat_string,polynya_string,filename_abbrev,sic_grid,sic_field, \
polynya_stats,polynya_grid,polynya_grid_binary,open_ocean_grid,error_code \
= gt.identify_polynyas_magic(polynya_sat,date,sea_ice_grids,sea_ice_data_avail,circumant_lons,
circumant_lats,open_threshold=50,
extent_threshold=0,regrid_amsr_to_25km=True)
# no errors in identifying polynyas
if error_code == 0:
total_polynya_extent = 0
for polynya_index in range(len(polynya_stats)):
if polynya_lat_bounds[0] <= polynya_stats[polynya_index]['centroid'][0] <= polynya_lat_bounds[1] \
and polynya_lon_bounds[0] <= polynya_stats[polynya_index]['centroid'][1] <= \
polynya_lon_bounds[1]:
total_polynya_extent += polynya_stats[polynya_index]['total_extent']
if date[0] == 2016: polynya_extent_doy_2016 \
= polynya_extent_doy_2016.append(pd.Series(index=[doy],data=[total_polynya_extent]))
if date[0] == 2017: polynya_extent_doy_2017 \
= polynya_extent_doy_2017.append(pd.Series(index=[doy],data=[total_polynya_extent]))
if verbose: print('>>> polynya extent: ',total_polynya_extent)
# fully or partially bad SIC field
else:
if date[0] == 2016: polynya_extent_doy_2016 \
= polynya_extent_doy_2016.append(pd.Series(index=[doy],data=[NaN]))
if date[0] == 2017: polynya_extent_doy_2017 \
= polynya_extent_doy_2017.append(pd.Series(index=[doy],data=[NaN]))
for doy in sic_doy_dmsp.keys():
sic_doy_dmsp[doy] = [nanmean(sic_doy_dmsp[doy]),nanstd(sic_doy_dmsp[doy]),nanmedian(sic_doy_dmsp[doy]),
stats.iqr(sic_doy_dmsp[doy],rng=(25,50),nan_policy='omit'),
stats.iqr(sic_doy_dmsp[doy],rng=(50,75),nan_policy='omit')]
for doy in sic_doy_amsr.keys():
sic_doy_amsr[doy] = [nanmean(sic_doy_amsr[doy]),nanstd(sic_doy_amsr[doy]),nanmedian(sic_doy_amsr[doy]),
stats.iqr(sic_doy_amsr[doy],rng=(25,50),nan_policy='omit'),
stats.iqr(sic_doy_amsr[doy],rng=(50,75),nan_policy='omit')]
pickle.dump([sic_doy_dmsp,sic_doy_amsr,sic_doy_2016_dmsp,sic_doy_2016_amsr,sic_doy_2017_dmsp,sic_doy_2017_amsr,
polynya_extent_doy_2016,polynya_extent_doy_2017],
open(figure_pickle_dir + pickle_names[run_idx],'wb'))
sic_doy_dmsp,sic_doy_amsr,sic_doy_2016_dmsp,sic_doy_2016_amsr,sic_doy_2017_dmsp,sic_doy_2017_amsr, \
polynya_extent_doy_2016,polynya_extent_doy_2017 \
= pickle.load(open(figure_pickle_dir + pickle_names[0],'rb'))
_,_,_,_,_,_,polynya_extent_doy_2016_amsr,polynya_extent_doy_2017_amsr \
= pickle.load(open(figure_pickle_dir + pickle_names[1],'rb'))
sic_doys = arange(1,366+1)
sic_doy_dmsp_mean = pd.Series(index=sic_doys,data=[sic_doy_dmsp[doy][0] for doy in sic_doys])
sic_doy_dmsp_median \
= pd.Series(index=sic_doys,data=[sic_doy_dmsp[doy][2] for doy in sic_doys]).rolling(window=7,center=True).mean()
sic_doy_dmsp_iqr_25 \
= pd.Series(index=sic_doys,data=[sic_doy_dmsp[doy][3] for doy in sic_doys]).rolling(window=7,center=True).mean()
sic_doy_dmsp_iqr_75 \
= pd.Series(index=sic_doys,data=[sic_doy_dmsp[doy][4] for doy in sic_doys]).rolling(window=7,center=True).mean()
sic_doy_amsr_median \
= pd.Series(index=sic_doys,data=[sic_doy_amsr[doy][2] for doy in sic_doys]).rolling(window=7,center=True).mean()
sic_doy_amsr_iqr_25 \
= pd.Series(index=sic_doys,data=[sic_doy_amsr[doy][3] for doy in sic_doys]).rolling(window=7,center=True).mean()
sic_doy_amsr_iqr_75 \
= pd.Series(index=sic_doys,data=[sic_doy_amsr[doy][4] for doy in sic_doys]).rolling(window=7,center=True).mean()
# pickle SIC climatology for other purposes...
pickle.dump(sic_doy_dmsp_mean,open(figure_pickle_dir + 'fig_2_sic_climatology_dmsp','wb'))
if use_hydro_pickle:
mr_obs = pickle.load(open(figure_pickle_dir + 'fig_2_mr_obs','rb'))
e_weddell_obs = pickle.load(open(figure_pickle_dir + 'fig_2_e_weddell_obs','rb'))
else:
mr_obs \
= ldp.compile_hydrographic_obs(argo_index_pickle_dir,argo_gdac_dir,wod_dir,lon_bounds=[-20,25],
lat_bounds=[-75,-59],toi_bounds=[datetime(1900,1,1),datetime.today()],
distance_check=mr_obs_dist,distance_center=mr_center,
include_argo=True,include_wod=True,params=['ptmp','psal','sigma_theta'],
compute_extras=False,max_cast_min_depth=30,min_cast_max_depth=250,
reject_mld_below=250,interp_spacing=0.1,interp_depths=(0,500),calc_mld=True,
calc_ml_avg=True,calc_at_depths=[20,200,250],
calc_depth_avgs=[(0,200),(0,250),(200,500),(250,500)],
calc_sd=[200,250,(0,250,34.8),(250,1650,34.8)],calc_tb=[200,250],
pickle_dir=figure_pickle_dir,pickle_filename='fig_2_mr_obs',
prof_count_dir=current_results_dir,
prof_count_filename='figure_2_prof_counts_mr',verbose=verbose)
e_weddell_obs \
= ldp.compile_hydrographic_obs(argo_index_pickle_dir,argo_gdac_dir,wod_dir,lon_bounds=[-20,25],
lat_bounds=[-75,-59],toi_bounds=[datetime(1900,1,1),datetime.today()],
distance_check=[mr_obs_dist,e_weddell_obs_dist],distance_center=mr_center,
include_argo=True,include_wod=True,params=['ptmp','psal','sigma_theta'],
compute_extras=False,max_cast_min_depth=30,min_cast_max_depth=250,
reject_mld_below=250,interp_spacing=0.1,interp_depths=(0,500),calc_mld=True,
calc_ml_avg=True,calc_at_depths=[20,200,250],
calc_depth_avgs=[(0,200),(0,250),(200,500),(250,500)],
calc_sd=[250,(0,250,34.8)],calc_tb=None,
pickle_dir=figure_pickle_dir,pickle_filename='fig_2_e_weddell_obs',
prof_count_dir=current_results_dir,
prof_count_filename='figure_2_prof_counts_e_weddell',verbose=verbose)
# calculate and export statistics on compiled hydrography
text_file = open(current_results_dir + 'figure_2_mr_hydrography_stats.txt','w')
text_file.write('Statistics on Maud Rise hydrography compiled for MLS, MLT, SD climatologies:\n'
'- Number of obs: {0}\n'
'- Fraction of obs collected before 1970: {1:.2f}%\n'
'- Mean date: {2}\n'
'- Median date: {3}\n'
.format(len(mr_obs['datetimes']),
100 * sum(mr_obs['datetimes'] < datetime(1970,1,1)) / len(mr_obs['datetimes']),
datetime.fromtimestamp(mean(array([dt.timestamp() for dt in mr_obs['datetimes']]))),
datetime.fromtimestamp(median(array([dt.timestamp() for dt in mr_obs['datetimes']])))))
text_file.close()
text_file = open(current_results_dir + 'figure_2_ew_hydrography_stats.txt','w')
text_file.write('Statistics on Eastern Weddell hydrography compiled for MLS, MLT, SD climatologies:\n'
'- Number of obs: {0}\n'
'- Fraction of obs collected before 1970: {1:.2f}%\n'
'- Mean date: {2}\n'
'- Median date: {3}\n'
.format(len(e_weddell_obs['datetimes']),
100 * sum(e_weddell_obs['datetimes'] < datetime(1970,1,1)) / len(e_weddell_obs['datetimes']),
datetime.fromtimestamp(mean(array([dt.timestamp() for dt in e_weddell_obs['datetimes']]))),
datetime.fromtimestamp(median(array([dt.timestamp() for dt in e_weddell_obs['datetimes']])))))
text_file.close()
# load reanalysis data and create time series of interest
if use_erai_pickle:
erai_series = pickle.load(open(figure_pickle_dir + 'fig_2_erai_series','rb'))
else:
erai_daily = ldp.load_ecmwf(era_custom_dir,'erai_daily_weddell.nc')
erai_daily_toi_2016 = erai_daily.sel(time=slice(erai_toi_2016[0],erai_toi_2016[1]))
erai_daily_toi_2017 = erai_daily.sel(time=slice(erai_toi_2017[0],erai_toi_2017[1]))
erai_daily_toi_2016_2017 = erai_daily.sel(time=slice(erai_toi_2016[0],erai_toi_2017[1]))
erai_series = dict()
erai_series['msl_min_2016'] \
= ldp.create_reanalysis_index(erai_daily_toi_2016,param_name='msl',avg_box=mr_box_small,
min_not_mean=True)[0]
erai_series['msl_min_2017'] \
= ldp.create_reanalysis_index(erai_daily_toi_2017,param_name='msl',avg_box=mr_box_small,
min_not_mean=True)[0]
erai_series['msl_min_climo_2016'], erai_series['msl_min_climo_iqr_25_2016'], erai_series['msl_min_climo_iqr_75_2016'] \
= ldp.create_reanalysis_index(erai_daily,param_name='msl',avg_box=mr_box_small,
min_not_mean=True,create_climo_iqr=True,make_year=2016)
erai_series['msl_min_climo_2017'], erai_series['msl_min_climo_iqr_25_2017'], erai_series['msl_min_climo_iqr_75_2017'] \
= ldp.create_reanalysis_index(erai_daily,param_name='msl',avg_box=mr_box_small,
min_not_mean=True,create_climo_iqr=True,make_year=2017)
erai_series['si10_max_2016'] \
= ldp.create_reanalysis_index(erai_daily_toi_2016,param_name='si10',avg_box=mr_box_small,
max_not_mean=True)[0]
erai_series['si10_max_2017'] \
= ldp.create_reanalysis_index(erai_daily_toi_2017,param_name='si10',avg_box=mr_box_small,
max_not_mean=True)[0]
erai_series['si10_max_climo_2016'], erai_series['si10_max_climo_iqr_25_2016'], erai_series['si10_max_climo_iqr_75_2016'] \
= ldp.create_reanalysis_index(erai_daily,param_name='si10',avg_box=mr_box_small,
max_not_mean=True,create_climo_iqr=True,make_year=2016)
erai_series['si10_max_climo_2017'], erai_series['si10_max_climo_iqr_25_2017'], erai_series['si10_max_climo_iqr_75_2017'] \
= ldp.create_reanalysis_index(erai_daily,param_name='si10',avg_box=mr_box_small,
max_not_mean=True,create_climo_iqr=True,make_year=2017)
erai_series['t2m_2016'] \
= ldp.create_reanalysis_index(erai_daily_toi_2016,param_name='t2m',avg_box=mr_box_small)[0]
erai_series['t2m_2017'] \
= ldp.create_reanalysis_index(erai_daily_toi_2017,param_name='t2m',avg_box=mr_box_small)[0]
erai_series['t2m_2016_2017'] \
= ldp.create_reanalysis_index(erai_daily_toi_2016_2017,param_name='t2m',avg_box=mr_box_small)[0]
erai_series['t2m_climo_2016'], erai_series['t2m_climo_iqr_25_2016'], erai_series['t2m_climo_iqr_75_2016'] \
= ldp.create_reanalysis_index(erai_daily,param_name='t2m',avg_box=mr_box_small,
create_climo_iqr=True,make_year=2016)
erai_series['t2m_climo_2017'], erai_series['t2m_climo_iqr_25_2017'], erai_series['t2m_climo_iqr_75_2017'] \
= ldp.create_reanalysis_index(erai_daily,param_name='t2m',avg_box=mr_box_small,
create_climo_iqr=True,make_year=2017)
pickle.dump(erai_series,open(figure_pickle_dir + 'fig_2_erai_series','wb'))
erai_daily_toi_2016.close()
erai_daily_toi_2017.close()
erai_daily.close()
##################################################################################################################
################################### FIG. 2 PLOTTING ROUTINE ##################################################
##################################################################################################################
fontsize = 5
shade_color='k'
shade_alpha=0.15
shade_alpha_dark=0.25
shade_centerline_alpha=0.5
ew_shade_color='saddlebrown'
ew_shade_centerline_alpha=0.5
color_accent='k'
color_polynya='navy'
lw=0.5
ms=1.5
mew=0.1 # marker edge width
# create multiyear series that repeats a single-year climatology
def plot_doy_series(series,ref_datetime=datetime(2013,12,31),N_years=5,smooth_N_days_betw_yrs=7,
just_return_series=False):
if N_years == 1: smooth_N_days_betw_yrs = 0
full_index = array([])
full_data = array([])
for y in range(N_years):
new_index = array([timedelta(days=int(doy)) for doy in series.index]) \
+ ref_datetime.replace(year=ref_datetime.year+y)
single_year_mask = array([dt.year == ref_datetime.year+1+y for dt in new_index])
full_index = append(full_index,new_index[single_year_mask])
if smooth_N_days_betw_yrs != 0: full_data[-1*smooth_N_days_betw_yrs:] = NaN
new_data = series.values[single_year_mask]
if smooth_N_days_betw_yrs != 0: new_data[:smooth_N_days_betw_yrs] = NaN
full_data = append(full_data,new_data)
full_series = pd.Series(index=full_index,data=full_data).interpolate(method='linear').bfill().ffill()
if just_return_series: return full_series
return full_series.index,full_series.values
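# usage note (descriptive, based on the defaults above): plot_doy_series(series) maps a day-of-year-indexed
# climatology onto a multi-year datetime index (2014-2018 for the default ref_datetime and N_years), repeating
# the climatology each year and linearly interpolating across the first/last smooth_N_days_betw_yrs days at
# each year boundary; it returns (index, values) ready for plt.plot(*...) as used further below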
def series_change_year(series,new_year):
new_index = array([dt.replace(year=new_year) for dt in series.index])
return pd.Series(index=new_index,data=series.values)
def series_daily_interp(orig_series,new_index_toi=[datetime(2016,1,1),datetime(2018,1,1)]):
interpolator = spin.interp1d(tt.datetime_to_datenum(orig_series.index),orig_series.values,
bounds_error=False,fill_value=NaN)
new_index = arange(*new_index_toi,timedelta(days=1))
interp_data = interpolator(tt.datetime_to_datenum(new_index))
return pd.Series(index=new_index,data=interp_data)
def fig_2_axis_prep(ylabel='',xlabel_top=False,xlabel_bottom=False,add_xtick_top=False,add_xtick_bottom=False,
spines=[],ylabel_pos='left',ylabel_color='k',remove_yticks=[],
xgrid=False,ygrid=False,ylabel_offset=0.0,ylabel_pad=0.0,
xbounds=[datetime(2016,1,1),datetime(2017,12,31)]):
plt.xlim(xbounds)
plt.gca().xaxis.set_major_locator(mdates.MonthLocator())
plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%b'))
if add_xtick_top: xtick_top = True
else: xtick_top = xlabel_top
if add_xtick_bottom: xtick_bottom = True
else: xtick_bottom = xlabel_bottom
plt.tick_params(axis='x',which='both',bottom=xtick_bottom,top=xtick_top,
labeltop=xlabel_top,labelbottom=xlabel_bottom)
if xgrid: plt.gca().grid(which='major',axis='x',linewidth=0.5,alpha=0.3)
if ygrid: plt.gca().grid(which='major',axis='y',linewidth=0.5,alpha=0.3)
plt.gca().tick_params(axis='both',which='major',labelsize=fontsize)
[plt.gca().spines[side].set_linewidth(0.25) for side in plt.gca().spines.keys()]
plt.gca().tick_params(width=0.25)
if 'top' not in spines: plt.gca().spines['top'].set_visible(False)
if 'bottom' not in spines: plt.gca().spines['bottom'].set_visible(False)
for tick in remove_yticks:
plt.gca().get_yticklabels()[tick].set_visible(False)
plt.gca().yaxis.get_major_ticks()[tick].set_visible(False)
if ylabel_pos == 'right':
plt.gca().yaxis.tick_right()
plt.gca().yaxis.set_label_position('right')
plt.gca().patch.set_alpha(0.0)
if ylabel_pos == 'left': ylabel_rot = 90
if ylabel_pos == 'right': ylabel_rot = 90 # previously -90
plt.ylabel(ylabel,fontsize=fontsize,rotation=ylabel_rot,color=ylabel_color)
if ylabel_pos == 'left': plt.gca().get_yaxis().set_label_coords(-0.07-ylabel_pad,0.5+ylabel_offset)
if ylabel_pos == 'right': plt.gca().get_yaxis().set_label_coords( 1.09+ylabel_pad,0.5+ylabel_offset)
plt.setp(plt.gca().get_yticklabels(),color=ylabel_color)
def fig_2_plot_sic(plot_2016=True,plot_2017=True,plot_legend=True,legend_bbox=(0.003,1.00),
xbounds=[datetime(2016,1,1),datetime(2017,12,31)]):
factor = 1.7
sic_doy_dmsp_low = sic_doy_dmsp_median-sic_doy_dmsp_iqr_25
sic_doy_dmsp_low[sic_doy_dmsp_low < 0.0] = 0.0
plt.fill_between(*plot_doy_series(sic_doy_dmsp_low**factor),
plot_doy_series(sic_doy_dmsp_median+sic_doy_dmsp_iqr_75)[1]**factor,
facecolor=shade_color,alpha=shade_alpha,zorder=4)
plt.plot(*plot_doy_series(sic_doy_dmsp_median**factor),
c='k',linewidth=0.5,alpha=shade_centerline_alpha,zorder=5)
# han1 = plt.fill_between([0,1],[nan,nan],facecolor=shade_color,alpha=shade_alpha,
# label='Maud Rise') # dummy handle # (±1$\sigma$)
han2, = plt.plot([0,1],[nan,nan],c='k',lw=lw,ls='-',label='NSIDC Merged') # dummy handle
han3, = plt.plot([0,1],[nan,nan],c='k',lw=lw,ls=':',label='AMSR2-ASI') # dummy handle
if plot_2016:
# han4, = plt.plot([0,1],[nan,nan],c=color_accent,lw=1.0,ls='-',label='2016') # dummy handle
plt.plot(*plot_doy_series(sic_doy_2016_dmsp**factor,N_years=1,ref_datetime=datetime(2015,12,31)),
c=color_accent,lw=lw,ls='-',zorder=6)
plt.plot(*plot_doy_series(sic_doy_2016_amsr**factor,N_years=1,ref_datetime=datetime(2015,12,31)),
c=color_accent,lw=lw,ls=':',zorder=7)
if plot_2017:
# han5, = plt.plot([0,1],[nan,nan],c=color_accent,lw=1.0,ls='-',label='2017') # dummy handle
plt.plot(*plot_doy_series(sic_doy_2017_dmsp**factor,N_years=1,ref_datetime=datetime(2016,12,31)),
c=color_accent,lw=lw,ls='-',zorder=6)
plt.plot(*plot_doy_series(sic_doy_2017_amsr**factor,N_years=1,ref_datetime=datetime(2016,12,31)),
c=color_accent,lw=lw,ls=':',zorder=7)
# plt.plot([0,1],[nan,nan],c='k',lw=lw,ls='-',label=' ',visible=False)
plt.plot([datetime(2016,1,1),datetime(2017,12,31)],[0,0],c='k',linestyle='-',linewidth=0.5,zorder=15)
plt.ylim([-0.01,1.0])
plt.gca().set_yticks(array([0,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.0])**factor)
plt.gca().set_yticklabels(['','30','40','50','60','70','80','90','100'])
if plot_legend:
plt.legend(handles=[han2,han3],bbox_to_anchor=legend_bbox,handlelength=1.75,loc='upper left',
ncol=1,frameon=False,fontsize=fontsize - 1) # labelspacing=1.075
fig_2_axis_prep(ylabel='Sea ice\nconcentration (%)',spines=['top'],xbounds=xbounds,add_xtick_bottom=True)
def find_storms(si10_series,msl_series,si10_crit=si10_crit,msl_crit=msl_crit,after=datetime(2016,5,1)):
storm_dates_all = []
for date in si10_series.index:
if date < after: continue
if si10_series[date] >= si10_crit or msl_series[date] <= msl_crit:
storm_dates_all.append(date)
return storm_dates_all
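# example (a sketch; assumes the erai_series entries built above are daily pandas Series indexed by datetime):
#     candidate_dates_2016 = find_storms(erai_series['si10_max_2016'],erai_series['msl_min_2016'])
#     candidate_dates_2017 = find_storms(erai_series['si10_max_2017'],erai_series['msl_min_2017'])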
# NOTE: storms were identified using find_storms(); the dates were then manually aggregated into multi-day events (where applicable) and listed below:
storm_datetimes_2016 = [[datetime(2016,5,20,0),datetime(2016,5,26,0)],
[datetime(2016,6,12,0),datetime(2016,6,15,0)],
[datetime(2016,6,30,0),datetime(2016,7,1,0)],
[datetime(2016,7,9,0),datetime(2016,7,13,0)],
[datetime(2016,7,26,0),datetime(2016,7,28,0)],
[datetime(2016,8,2,0),datetime(2016,8,6,0)],
[datetime(2016,8,21,0),datetime(2016,8,23,0)],
[datetime(2016,8,28,0),datetime(2016,8,31,0)],
[datetime(2016,10,17,0),datetime(2016,10,19,0)],
[datetime(2016,10,26,0),datetime(2016,10,28,0)]]
storm_datetimes_2017 = [[datetime(2017,7,3,0),datetime(2017,7,8,0)],
[datetime(2017,7,23,0),datetime(2017,7,27,0)],
[datetime(2017,7,30,0),datetime(2017,8,3,0)],
[datetime(2017,8,7,0),datetime(2017,8,8,0)],
[datetime(2017,8,14,0),datetime(2017,8,15,0)],
[datetime(2017,8,18,0),datetime(2017,8,19,0)],
[datetime(2017,8,30,0),datetime(2017,9,2,0)],
[datetime(2017,9,13,0),datetime(2017,9,19,0)],
[datetime(2017,11,4,0),datetime(2017,11,5,0)],
[datetime(2017,11,30,0),datetime(2017,12,1,0)]]
# extract hydrographic quantities
mr_obs_mask_5903616 = mr_obs['platforms'] == str(5903616)
mr_obs_mask_5904471 = mr_obs['platforms'] == str(5904471)
mr_obs_mask_5904468 = mr_obs['platforms'] == str(5904468)
ml_avg_ptmp_median \
= ldp.hydro_obs_to_doy_series(mr_obs,'ptmp','ml_avg',doy_median=True,days_per_bin=dpb,rm_days=dpb)
ml_avg_ptmp_iqr_25 \
= ldp.hydro_obs_to_doy_series(mr_obs,'ptmp','ml_avg',doy_iqr_25=True,days_per_bin=dpb,rm_days=dpb)
ml_avg_ptmp_iqr_75 \
= ldp.hydro_obs_to_doy_series(mr_obs,'ptmp','ml_avg',doy_iqr_75=True,days_per_bin=dpb,rm_days=dpb)