Some first SMT-WAVE diagnostics#
[35]:
# NOTE: if this comment is not separated from the magics below, this cell
# has to be executed twice (notebook front-end quirk).
# %matplotlib notebook
%matplotlib inline
# Auto-reload edited modules (e.g. pyicon) without restarting the kernel.
%load_ext autoreload
%autoreload 2
The autoreload extension is already loaded. To reload it, use:
%reload_ext autoreload
[2]:
import pyicon as pyic
import numpy as np
from netCDF4 import Dataset
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import cartopy.crs as ccrs
import xarray as xr
import glob
import pandas as pd
import seawater as sw
import matplotlib.transforms as transforms
import matplotlib
import os
import cmocean
-----calc
sys glob os
numpy
netcdf
Done modules calc.
-----calc_xr
sys glob os
numpy
netcdf
xarray
Done modules calc.
-----tb
sys
json
numpy
scipy
netcdf datetime
matplotlib
mybreak
pnadas
xarray
done xarray
-----IconData
-----plotting
-----accessor
-----view
-----calc
-----calc_xr
-----tb
-----IconData
-----plotting
-----accessor
-----view
-----quickplots
-----quickplots
[3]:
# Start a local dask cluster and attach a client so the lazy xarray/dask
# computations below run in parallel on this node.
from dask.distributed import Client, LocalCluster
cluster = LocalCluster()
client = Client(cluster)
[4]:
client
[4]:
Client
Client-e067845b-3d19-11ed-a69c-080038c0762f
Connection method: Cluster object | Cluster type: distributed.LocalCluster |
Dashboard: http://127.0.0.1:8787/status |
Cluster Info
LocalCluster
8f6e1566
Dashboard: http://127.0.0.1:8787/status | Workers: 16 |
Total threads: 256 | Total memory: 195.31 GiB |
Status: running | Using processes: True |
Scheduler Info
Scheduler
Scheduler-e2499b77-8b1a-490f-b891-472bcd613c81
Comm: tcp://127.0.0.1:44855 | Workers: 16 |
Dashboard: http://127.0.0.1:8787/status | Total threads: 256 |
Started: Just now | Total memory: 195.31 GiB |
Workers
Worker: 0
Comm: tcp://127.0.0.1:42749 | Total threads: 16 |
Dashboard: http://127.0.0.1:40173/status | Memory: 12.21 GiB |
Nanny: tcp://127.0.0.1:41261 | |
Local directory: /tmp/dask-worker-space/worker-gnxph412 |
Worker: 1
Comm: tcp://127.0.0.1:39887 | Total threads: 16 |
Dashboard: http://127.0.0.1:41321/status | Memory: 12.21 GiB |
Nanny: tcp://127.0.0.1:40943 | |
Local directory: /tmp/dask-worker-space/worker-0g0fwjyv |
Worker: 2
Comm: tcp://127.0.0.1:46409 | Total threads: 16 |
Dashboard: http://127.0.0.1:41203/status | Memory: 12.21 GiB |
Nanny: tcp://127.0.0.1:33149 | |
Local directory: /tmp/dask-worker-space/worker-esj5u6jv |
Worker: 3
Comm: tcp://127.0.0.1:46791 | Total threads: 16 |
Dashboard: http://127.0.0.1:43793/status | Memory: 12.21 GiB |
Nanny: tcp://127.0.0.1:45601 | |
Local directory: /tmp/dask-worker-space/worker-svzbu4_e |
Worker: 4
Comm: tcp://127.0.0.1:43849 | Total threads: 16 |
Dashboard: http://127.0.0.1:35163/status | Memory: 12.21 GiB |
Nanny: tcp://127.0.0.1:38605 | |
Local directory: /tmp/dask-worker-space/worker-7t9i5ggw |
Worker: 5
Comm: tcp://127.0.0.1:34779 | Total threads: 16 |
Dashboard: http://127.0.0.1:38361/status | Memory: 12.21 GiB |
Nanny: tcp://127.0.0.1:36009 | |
Local directory: /tmp/dask-worker-space/worker-hel0zlla |
Worker: 6
Comm: tcp://127.0.0.1:37277 | Total threads: 16 |
Dashboard: http://127.0.0.1:41497/status | Memory: 12.21 GiB |
Nanny: tcp://127.0.0.1:42869 | |
Local directory: /tmp/dask-worker-space/worker-7u2q8092 |
Worker: 7
Comm: tcp://127.0.0.1:33999 | Total threads: 16 |
Dashboard: http://127.0.0.1:36295/status | Memory: 12.21 GiB |
Nanny: tcp://127.0.0.1:41239 | |
Local directory: /tmp/dask-worker-space/worker-cim9vb22 |
Worker: 8
Comm: tcp://127.0.0.1:46239 | Total threads: 16 |
Dashboard: http://127.0.0.1:44493/status | Memory: 12.21 GiB |
Nanny: tcp://127.0.0.1:43415 | |
Local directory: /tmp/dask-worker-space/worker-3itrbua1 |
Worker: 9
Comm: tcp://127.0.0.1:33253 | Total threads: 16 |
Dashboard: http://127.0.0.1:42139/status | Memory: 12.21 GiB |
Nanny: tcp://127.0.0.1:40129 | |
Local directory: /tmp/dask-worker-space/worker-jltwfdvg |
Worker: 10
Comm: tcp://127.0.0.1:37065 | Total threads: 16 |
Dashboard: http://127.0.0.1:38683/status | Memory: 12.21 GiB |
Nanny: tcp://127.0.0.1:45839 | |
Local directory: /tmp/dask-worker-space/worker-rctvc_dl |
Worker: 11
Comm: tcp://127.0.0.1:46201 | Total threads: 16 |
Dashboard: http://127.0.0.1:37633/status | Memory: 12.21 GiB |
Nanny: tcp://127.0.0.1:33231 | |
Local directory: /tmp/dask-worker-space/worker-_qfbyx1r |
Worker: 12
Comm: tcp://127.0.0.1:37219 | Total threads: 16 |
Dashboard: http://127.0.0.1:33869/status | Memory: 12.21 GiB |
Nanny: tcp://127.0.0.1:40845 | |
Local directory: /tmp/dask-worker-space/worker-coyqd3h2 |
Worker: 13
Comm: tcp://127.0.0.1:46003 | Total threads: 16 |
Dashboard: http://127.0.0.1:39529/status | Memory: 12.21 GiB |
Nanny: tcp://127.0.0.1:45673 | |
Local directory: /tmp/dask-worker-space/worker-3c8y1qx0 |
Worker: 14
Comm: tcp://127.0.0.1:33303 | Total threads: 16 |
Dashboard: http://127.0.0.1:35683/status | Memory: 12.21 GiB |
Nanny: tcp://127.0.0.1:38345 | |
Local directory: /tmp/dask-worker-space/worker-i98dbtx_ |
Worker: 15
Comm: tcp://127.0.0.1:35423 | Total threads: 16 |
Dashboard: http://127.0.0.1:43205/status | Memory: 12.21 GiB |
Nanny: tcp://127.0.0.1:45575 | |
Local directory: /tmp/dask-worker-space/worker-jd5fk7xm |
[86]:
def savefig(prfx, run='smtwv0004', path_pics='../pics'):
    """Save the current matplotlib figure as a PDF in the pics directory.

    Parameters
    ----------
    prfx : str
        Short tag identifying the plot (appended to the file name).
    run : str, optional
        Experiment id used in the file name. Previously hard-coded, which
        produced stale file names when the notebook was reused for another
        run (a saved output in this notebook still shows 'smtwv0002').
    path_pics : str, optional
        Output directory for the figure files.
    """
    fpath = f'{path_pics}/smtwv_diagnose_{run}_{prfx}.pdf'
    print(f'Saving figure {fpath}')
    plt.savefig(fpath)
    return
[6]:
# Corner coordinates [deg] of the two SONETT campaign boxes, used below.
lon_s2 = [-12, -7]
lat_s2 = [-37.5, -34.5]
lon_s1 = [1, 10]
lat_s1 = [-35, -30]
from matplotlib.patches import Rectangle
def add_sonett(ax, color='k'):
    """Overlay the SONETT I/II boxes and a Walvis Ridge label on `ax`."""
    rectprop = dict(facecolor='none', edgecolor=color, lw=2)
    # SONETT II box. NOTE(review): height 4 deg does not match lat_s2
    # ([-37.5, -34.5] -> 3 deg) -- confirm which is intended.
    ax.add_patch(Rectangle((-12,-37.5), 5, 4, **rectprop))
    ax.text(-9.5, -33.5, 'SONETT II', ha='center', va='bottom', fontsize=8, color=color)
    rectprop = dict(facecolor='none', edgecolor=color, lw=2)
    # SONETT I box matches lon_s1/lat_s1 ([1, 10] x [-35, -30]).
    ax.add_patch(Rectangle((1,-35), 9, 5, **rectprop))
    ax.text(5.5, -30., 'SONETT I', ha='center', va='bottom', fontsize=8, color=color)
    ax.text(-2.5, -35, 'Walvis Ridge', va='center', ha='center', rotation=45, fontsize=8, color=color)
    return
Load data#
[38]:
# Paths to the ICON grid description and the ckdtree interpolation index
# used for regridding to a regular lon/lat grid.
gname = 'smtwv_oce_2022'
path_grid = '/work/mh0033/m300602/icon/grids/' + gname + '/'
fpath_tgrid = path_grid + gname + '_tgrid.nc'
fpath_ckdtree = path_grid + 'ckdtree/rectgrids/' + gname + '_res0.02_180W-180E_90S-90N.nc'
# fpath_ckdtree = f'{path_grid}ckdtree/rectgrids/{gname}_res0.30_180W-180E_90S-90N.npz'
[8]:
# Keyword arguments for xr.open_mfdataset: concatenate files along time
# and keep variable/coordinate handling minimal for fast opening.
mfdset_kwargs = {
    'data_vars': 'minimal',
    'coords': 'minimal',
    'compat': 'override',
    'combine': 'nested',
    'concat_dim': 'time',
}
[9]:
# Experiment id and the directory holding its model output.
run = 'smtwv0004'
path_data = '/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/' + run + '/'
[19]:
#!ls -d /work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/*
[22]:
# Hourly 3D output files of this run, sorted by date; drop the last
# (potentially still-being-written) file.
flist = sorted(glob.glob(f'{path_data}/{run}_oce_3d_to_PT1H_*.nc'))[:-1]
flist
[22]:
['/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190701T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190702T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190703T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190704T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190705T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190706T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190707T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190708T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190709T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190710T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190711T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190712T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190713T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190714T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190715T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190716T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190717T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190718T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190719T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190720T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190721T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190722T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190723T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190724T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190725T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190726T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190727T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190728T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190729T001500Z.nc',
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_3d_to_PT1H_20190730T001500Z.nc']
[23]:
# Lazily open all hourly 3D files as one time-concatenated dataset;
# chunking by single time step and level keeps dask tasks small.
ds_3d = xr.open_mfdataset(flist, **mfdset_kwargs, chunks=dict(time=1, depth=1, depth_2=1))
[24]:
# Hourly 2D output files; drop the last (potentially incomplete) file
# and show the newest remaining one.
flist = sorted(glob.glob(f'{path_data}/{run}_oce_2d_PT1H_*.nc'))[:-1]
flist[-1]
[24]:
'/work/bm1102/m300602/proj_smtwv/icon-oes-zstar4/experiments/smtwv0004/smtwv0004_oce_2d_PT1H_20190730T001500Z.nc'
[25]:
# Lazily open all hourly 2D files as one time-concatenated dataset.
ds_2d = xr.open_mfdataset(flist, **mfdset_kwargs, chunks=dict(time=1, depth=1, depth_2=1))
[26]:
%%time
# Open the triangular-grid description file (single file; open_mfdataset
# keeps the call consistent with the data loads above).
ds_tg = xr.open_mfdataset(fpath_tgrid, **mfdset_kwargs)
# tgrid = tgrid.rename(dict(ncells='cell'))
ds_tg['dtype'] = 'float32'
# Coordinates are kept in radians here; degree conversion happens below.
# ds_tg['clon'] *= 180./np.pi
# ds_tg['clat'] *= 180./np.pi
# ds_tg['elon'] *= 180./np.pi
# ds_tg['elat'] *= 180./np.pi
# ds_tg['vlon'] *= 180./np.pi
# ds_tg['vlat'] *= 180./np.pi
ds_tg
CPU times: user 215 ms, sys: 352 ms, total: 567 ms
Wall time: 610 ms
[26]:
<xarray.Dataset> Dimensions: (cell: 66877252, nv: 3, vertex: 33484696, ne: 6, edge: 100363924, no: 4, nc: 2, two_grf: 2, cell_grf: 14, max_chdom: 1, edge_grf: 24, vert_grf: 13) Coordinates: clon (cell) float64 dask.array<chunksize=(66877252,), meta=np.ndarray> clat (cell) float64 dask.array<chunksize=(66877252,), meta=np.ndarray> vlon (vertex) float64 dask.array<chunksize=(33484696,), meta=np.ndarray> vlat (vertex) float64 dask.array<chunksize=(33484696,), meta=np.ndarray> elon (edge) float64 dask.array<chunksize=(100363924,), meta=np.ndarray> elat (edge) float64 dask.array<chunksize=(100363924,), meta=np.ndarray> Dimensions without coordinates: cell, nv, vertex, ne, edge, no, nc, two_grf, cell_grf, max_chdom, edge_grf, vert_grf Data variables: (12/85) clon_vertices (cell, nv) float64 dask.array<chunksize=(66877252, 3), meta=np.ndarray> clat_vertices (cell, nv) float64 dask.array<chunksize=(66877252, 3), meta=np.ndarray> vlon_vertices (vertex, ne) float64 dask.array<chunksize=(33484696, 6), meta=np.ndarray> vlat_vertices (vertex, ne) float64 dask.array<chunksize=(33484696, 6), meta=np.ndarray> elon_vertices (edge, no) float64 dask.array<chunksize=(100363924, 4), meta=np.ndarray> elat_vertices (edge, no) float64 dask.array<chunksize=(100363924, 4), meta=np.ndarray> ... ... edge_dual_normal_cartesian_y (edge) float64 dask.array<chunksize=(100363924,), meta=np.ndarray> edge_dual_normal_cartesian_z (edge) float64 dask.array<chunksize=(100363924,), meta=np.ndarray> cell_circumcenter_cartesian_x (cell) float64 dask.array<chunksize=(66877252,), meta=np.ndarray> cell_circumcenter_cartesian_y (cell) float64 dask.array<chunksize=(66877252,), meta=np.ndarray> cell_circumcenter_cartesian_z (cell) float64 dask.array<chunksize=(66877252,), meta=np.ndarray> dtype <U7 'float32' Attributes: (12/34) title: ICON grid description institution: Max Planck Institute for Meteorology/Deutscher ... 
source: git@gitlab.dkrz.de:mpim-sw/grid-generator.git revision: ac0519bd41b0a9f5ae428ac946646dd39c6268b2 history: /work/mh0033/leonidas/grid-generator/intel/bin/... date: 20220816 at 083853 ... ... mean_cell_area: 6080879.452321341 mean_dual_cell_area: 12161758.324725308 domain_length: 40031612.44147649 domain_height: 40031612.44147649 sphere_radius: 6371229.0 domain_cartesian_center: [0. 0. 0.]
Prepare plotting on original grid#
[27]:
# Zoom region [deg] in the south-east Atlantic, used for cropping,
# interpolation, and plotting below.
lon_reg = [3.5, 9.5]
lat_reg = [-31.5, -28.5]
[28]:
%%time
# Convert cell/vertex coordinates from radians (grid-file convention)
# to degrees.
clon = ds_tg.clon.compute().data * 180./np.pi
clat = ds_tg.clat.compute().data * 180./np.pi
vlon = ds_tg.vlon.compute().data * 180./np.pi
vlat = ds_tg.vlat.compute().data * 180./np.pi
# Indices of grid cells and vertices that fall inside the zoom region.
ireg_c = np.where(
  (clon>lon_reg[0]) & (clon<=lon_reg[1]) & (clat>lat_reg[0]) & (clat<=lat_reg[1])
)[0]
ireg_v = np.where(
  (vlon>lon_reg[0]) & (vlon<=lon_reg[1]) & (vlat>lat_reg[0]) & (vlat<=lat_reg[1])
)[0]
CPU times: user 1.14 s, sys: 3.46 s, total: 4.6 s
Wall time: 7.65 s
[15]:
%%time
# clon = ds_tg.clon.compute().data * 180./np.pi
# clat = ds_tg.clat.compute().data * 180./np.pi
# ireg_c = np.where(
#   (clon>lon_reg[0]) & (clon<=lon_reg[1]) & (clat>lat_reg[0]) & (clat<=lat_reg[1])
# )[0]
# Crop the triangular grid to the region. NOTE(review): this overwrites
# the ireg_v computed in the previous cell with the cropped grid's
# vertex indices.
ds_tg_cut = pyic.xr_crop_tgrid(ds_tg, ireg_c)
ireg_v = ds_tg_cut['ireg_v'].data
CPU times: user 13.5 s, sys: 37.1 s, total: 50.6 s
Wall time: 51.1 s
[ ]:
%%time
# Derive cell/vertex polygon boundaries of the cropped grid and build
# matplotlib patch collections for plotting data on the native grid.
clon_bnds, clat_bnds, vlon_bnds, vlat_bnds, cells_of_vertex = pyic.patch_plot_derive_bnds(ds_tg_cut)
patches_c, patches_v = pyic.patch_plot_patches_from_bnds(
  clon_bnds.compute(), clat_bnds.compute(),
  vlon_bnds.compute(), vlat_bnds.compute(),
  cells_of_vertex#.compute()
)
[57]:
%%time
# Matplotlib triangulation of the grid within the region, for
# tripcolor-style plots of cell data (used by pyic.shade(Tri, ...)).
ireg_c_triang, Tri = pyic.triangulation(ds_tg, lon_reg=lon_reg, lat_reg=lat_reg)
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/worker.py:3871: UserWarning: Large object of size 4.63 MiB detected in task graph:
("('truediv-66d4d8bf90621c329810dbacfd95fcf3', 0)" ... , 10426289]),))
Consider scattering large objects ahead of time
with client.scatter to reduce scheduler burden and
keep data on workers
future = client.submit(func, big_data) # bad
big_future = client.scatter(big_data) # good
future = client.submit(func, big_future) # good
warnings.warn(
CPU times: user 6.9 s, sys: 4.42 s, total: 11.3 s
Wall time: 25.4 s
First plots#
[29]:
# Snapshot selection: last available time step, surface level.
its, iz = -1, 0
[31]:
# List the available 2D output variables.
list(ds_2d)
[31]:
['clon_bnds',
'clat_bnds',
'vlon_bnds',
'vlat_bnds',
'zos',
'stretch_c',
'vort',
'to',
'so',
'u',
'v',
'mlotst',
'hi',
'hs',
'conc',
'ice_u',
'ice_v']
[39]:
%%time
# Interpolate surface fields at the last time step to the regular
# 0.02-deg grid within the zoom region (via the ckdtree index file).
toi = pyic.interp_to_rectgrid_xr(ds_2d.to[its,iz,:], fpath_ckdtree, lon_reg=lon_reg, lat_reg=lat_reg)
soi = pyic.interp_to_rectgrid_xr(ds_2d.so[its,iz,:], fpath_ckdtree, lon_reg=lon_reg, lat_reg=lat_reg)
# Kinetic energy derived from the horizontal velocity components.
kini = pyic.interp_to_rectgrid_xr(0.5*(ds_2d.u[its,iz,:]**2+ds_2d.v[its,iz,:]**2), fpath_ckdtree, lon_reg=lon_reg, lat_reg=lat_reg)
# Vorticity is on the vertex grid, hence the different coordinate names.
vorti = pyic.interp_to_rectgrid_xr(ds_2d.vort[its,iz,:], fpath_ckdtree, lon_reg=lon_reg, lat_reg=lat_reg, coordinates='vlat vlon')
CPU times: user 871 ms, sys: 1.49 s, total: 2.36 s
Wall time: 4.17 s
[40]:
# Regular-grid coordinates of the interpolated fields.
lon, lat = toi.lon, toi.lat
[41]:
# Four-panel overview of surface temperature, salinity, kinetic energy,
# and vorticity on the regular grid.
projection = ccrs.PlateCarree()
hca, hcb = pyic.arrange_axes(2,2, plot_cb=True, asp=0.5, fig_size_fac=2,
                             sharex=True, sharey=True, xlabel="", ylabel="",
                             projection=projection,
                            )
ii=-1

ii+=1; ax=hca[ii]; cax=hcb[ii]
hm = pyic.shade(lon, lat, toi, ax=ax, cax=cax, clim=[15.5, 18.5], projection=projection)
ax.set_title('temperature')

ii+=1; ax=hca[ii]; cax=hcb[ii]
hm = pyic.shade(lon, lat, soi, ax=ax, cax=cax, clim=[35.1, 35.8], projection=projection,
                cmap=cmocean.cm.haline)
ax.set_title('salinity')

ii+=1; ax=hca[ii]; cax=hcb[ii]
hm = pyic.shade(lon, lat, kini.data, ax=ax, cax=cax, clim=[-4,0], logplot=True, projection=projection)
ax.set_title('log$_{10}$(kin. energy) [m$^2$/s$^2$]')

ii+=1; ax=hca[ii]; cax=hcb[ii]
hm = pyic.shade(lon, lat, vorti, ax=ax, cax=cax, clim=1e-4, projection=projection)
# BUG FIX: this panel shows vorticity but was titled 'temperature'.
ax.set_title('vorticity')

for ax in hca:
    pyic.plot_settings(ax, xlim=lon_reg, ylim=lat_reg)
    # add_sonett(ax)
# savefig('mld')

Overview plot#
[69]:
%%time
# Larger region for the overview plot.
# NOTE(review): this overwrites the lon_reg/lat_reg used by earlier
# cells -- re-running those cells afterwards uses the new region.
# lon_reg = [-14, 22]
# lat_reg = [-45, -13]
lon_reg = [-30, 30]
lat_reg = [-60, -10]
kini = pyic.interp_to_rectgrid_xr(0.5*(ds_2d.u[its,iz,:]**2+ds_2d.v[its,iz,:]**2), fpath_ckdtree, lon_reg=lon_reg, lat_reg=lat_reg)
CPU times: user 3.14 s, sys: 1.18 s, total: 4.32 s
Wall time: 4.93 s
[70]:
# Figure aspect ratio: latitudinal over longitudinal extent of the region.
dlat = lat_reg[1] - lat_reg[0]
dlon = lon_reg[1] - lon_reg[0]
asp = dlat / dlon
asp
[70]:
0.8333333333333334
[71]:
%%time
# sqrt(cell area) as a measure of the local grid resolution [m],
# interpolated to the regular grid for contouring.
res = np.sqrt(ds_tg.cell_area).compute()
resi = pyic.interp_to_rectgrid_xr(res, fpath_ckdtree, lon_reg=lon_reg, lat_reg=lat_reg)
CPU times: user 757 ms, sys: 1.58 s, total: 2.33 s
Wall time: 2.8 s
[ ]:
# Overview map of surface kinetic energy with grid-resolution contours
# and the SONETT campaign boxes.
projection = ccrs.PlateCarree()
hca, hcb = pyic.arrange_axes(1,1, plot_cb=True, asp=asp, fig_size_fac=3,
                             sharex=True, sharey=True, xlabel="", ylabel="",
                             projection=projection, axlab_kw=None,
                            )
ii=-1

ii+=1; ax=hca[ii]; cax=hcb[ii]
hm = pyic.shade(kini.lon, kini.lat, kini.data, ax=ax, cax=cax, clim=[-4,0], logplot=True, cmap='RdBu_r')
# Contours of grid resolution between 500 m and 900 m, labelled in meters.
Cl = ax.contour(resi.lon, resi.lat, resi, np.arange(500,1000,100), colors='0.3')
ax.clabel(Cl, fmt='%.fm', fontsize=8, zorder=2)
# Move the colorbar into the plot area (upper right).
# FIX: removed dead assignment `pos_cax = cax.get_position()` -- the
# value was never used.
cax.set_position([0.72,0.58,0.03,0.32])
cax.set_ylabel('log$_{10}$(kin. energy) [m$^2$/s$^2$]')
ax.set_title('kin. energy from spinup')
for ax in hca:
    pyic.plot_settings(ax, xlim=lon_reg, ylim=lat_reg)
    add_sonett(ax)
savefig('kin_ov')
Saving figure ../pics/smtwv_diagnose_smtwv0004_kin_ov.pdf
Plot on original grid#
[37]:
%%time
to_rg = ds_2d.to.isel(depth=0, time=-1, ncells_2=ireg_c).compute()
so_rg = ds_2d.so.isel(depth=0, time=-1, ncells_2=ireg_c).compute()
kin_rg = ds_2d.kin.isel(depth=0, time=-1, ncells_2=ireg_c).compute()
vort_rg = ds_2d.vort.isel(depth=0, time=-1, ncells=ireg_v).compute()
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
File <timed exec>:1, in <module>
File ~/miniconda3/envs/pyicon_py39_cartopy19/lib/python3.9/site-packages/xarray/core/dataarray.py:1291, in DataArray.isel(self, indexers, drop, missing_dims, **indexers_kwargs)
1286 return self._from_temp_dataset(ds)
1288 # Much faster algorithm for when all indexers are ints, slices, one-dimensional
1289 # lists, or zero or one-dimensional np.ndarray's
-> 1291 variable = self._variable.isel(indexers, missing_dims=missing_dims)
1292 indexes, index_variables = isel_indexes(self.xindexes, indexers)
1294 coords = {}
File ~/miniconda3/envs/pyicon_py39_cartopy19/lib/python3.9/site-packages/xarray/core/variable.py:1223, in Variable.isel(self, indexers, missing_dims, **indexers_kwargs)
1199 """Return a new array indexed along the specified dimension(s).
1200
1201 Parameters
(...)
1219 indexer, in which case the data will be a copy.
1220 """
1221 indexers = either_dict_or_kwargs(indexers, indexers_kwargs, "isel")
-> 1223 indexers = drop_dims_from_indexers(indexers, self.dims, missing_dims)
1225 key = tuple(indexers.get(dim, slice(None)) for dim in self.dims)
1226 return self[key]
File ~/miniconda3/envs/pyicon_py39_cartopy19/lib/python3.9/site-packages/xarray/core/utils.py:850, in drop_dims_from_indexers(indexers, dims, missing_dims)
848 invalid = indexers.keys() - set(dims)
849 if invalid:
--> 850 raise ValueError(
851 f"Dimensions {invalid} do not exist. Expected one or more of {dims}"
852 )
854 return indexers
856 elif missing_dims == "warn":
857
858 # don't modify input
ValueError: Dimensions {'ncells_2'} do not exist. Expected one or more of ('time', 'depth', 'ncells')
[54]:
%%time
clon_rg = clon[ireg_c]
clat_rg = clat[ireg_c]
vlon_rg = vlon[ireg_v]
vlat_rg = vlat[ireg_v]
CPU times: user 8 ms, sys: 3 ms, total: 11 ms
Wall time: 6.9 ms
[55]:
# projection = ccrs.PlateCarree()
# hca, hcb = pyic.arrange_axes(2,2, plot_cb=True, asp=0.5, fig_size_fac=2,
# sharex=True, sharey=True, xlabel="", ylabel="",
# projection=projection,
# )
# ii=-1
# ns = 10
# ii+=1; ax=hca[ii]; cax=hcb[ii]
# ax.scatter(clon_rg[::ns], clat_rg[::ns], c=to_rg[::ns], s=2, vmin=15.5, vmax=18.5, cmap='RdYlBu_r')
# ax.set_title(f'temperature [$^o$C]')
# # ii+=1; ax=hca[ii]; cax=hcb[ii]
# # hm = pyic.shade(Tri, so_rg, ax=ax, cax=cax, clim=[35.1, 35.8], projection=projection)
# # ax.set_title(f'salinity [kg/m$^3$]')
# # ii+=1; ax=hca[ii]; cax=hcb[ii]
# # hm = pyic.shade(Tri, kin_rg.data, ax=ax, cax=cax, clim=[-4,0], logplot=True, projection=projection)
# # ax.set_title('log$_{10}$(kin. energy) [m$^2$/s$^2$]')
# # ii+=1; ax=hca[ii]; cax=hcb[ii]
# # hm3 = pyic.patch_plot_shade(patches_v, vort_rg, ax=ax, cax=cax, clim=1e-4)
# # ax.set_title(f'vorticity')
# for ax in hca:
# pyic.plot_settings(ax, xlim=lon_reg, ylim=lat_reg)
# # add_sonett(ax)
# savefig('overview')
[58]:
# Four-panel plot of the surface fields on the native (triangular) grid
# of the cropped region.
projection = ccrs.PlateCarree()
hca, hcb = pyic.arrange_axes(2,2, plot_cb=True, asp=0.5, fig_size_fac=2,
                             sharex=True, sharey=True, xlabel="", ylabel="",
                             projection=projection,
                            )
ii=-1
ii+=1; ax=hca[ii]; cax=hcb[ii]
hm = pyic.shade(Tri, to_rg, ax=ax, cax=cax, clim=[15.5, 18.5], projection=projection)
ax.set_title(f'temperature [$^o$C]')
ii+=1; ax=hca[ii]; cax=hcb[ii]
hm = pyic.shade(Tri, so_rg, ax=ax, cax=cax, clim=[35.1, 35.8], projection=projection)
# NOTE(review): salinity is usually reported in g/kg or psu, not
# kg/m$^3$ -- confirm the unit in this label.
ax.set_title(f'salinity [kg/m$^3$]')
ii+=1; ax=hca[ii]; cax=hcb[ii]
hm = pyic.shade(Tri, kin_rg.data, ax=ax, cax=cax, clim=[-4,0], logplot=True, projection=projection)
ax.set_title('log$_{10}$(kin. energy) [m$^2$/s$^2$]')
ii+=1; ax=hca[ii]; cax=hcb[ii]
# Vorticity lives on the vertex grid, hence the patch plot.
hm3 = pyic.patch_plot_shade(patches_v, vort_rg, ax=ax, cax=cax, clim=1e-4)
ax.set_title(f'vorticity')
for ax in hca:
    pyic.plot_settings(ax, xlim=lon_reg, ylim=lat_reg)
    # add_sonett(ax)
savefig('overview')
distributed.utils_perf - WARNING - full garbage collections took 12% CPU time recently (threshold: 10%)
Saving figure ../pics/smtwv_diagnose_smtwv0002_overview.pdf
Vertical velocity#
[78]:
%%time
# Vertical velocity at four target depths (nearest model levels) at the
# last time step, on the cropped native grid.
wvel_rg = ds_3d.w.isel(time=-1, ncells=ireg_c).sel(depth_2=[50, 100, 1000, 2000], method='nearest').compute()
CPU times: user 232 ms, sys: 2.81 s, total: 3.04 s
Wall time: 3.28 s
[79]:
wvel_rg
[79]:
<xarray.DataArray 'w' (depth_2: 4, ncells: 265943)> array([[ 0.00047756, 0.00036307, 0.00062074, ..., -0.00162562, -0.00107059, -0.00150702], [ 0.00053502, 0.00038434, 0.00084729, ..., -0.00240545, -0.00203855, -0.00197635], [ 0.00070504, 0.0005771 , 0.00093624, ..., 0.00079223, 0.00099528, 0.00043519], [ 0.00132668, 0.00137925, 0.00134183, ..., 0.00304148, 0.00283049, 0.00320326]], dtype=float32) Coordinates: time datetime64[ns] 2019-07-25T22:15:00 clon (ncells) float32 0.1346 0.1346 0.1347 ... 0.102 0.1019 0.1021 clat (ncells) float32 -0.5521 -0.5522 -0.5521 ... -0.5237 -0.5236 * depth_2 (depth_2) float64 49.4 101.3 990.3 2.025e+03 Dimensions without coordinates: ncells Attributes: standard_name: w long_name: vertical velocity at cells units: m/s code: 255 CDI_grid_type: unstructured number_of_grid_in_reference: 1
xarray.DataArray
'w'
- depth_2: 4
- ncells: 265943
- 0.0004776 0.0003631 0.0006207 0.0003 ... 0.003041 0.00283 0.003203
array([[ 0.00047756, 0.00036307, 0.00062074, ..., -0.00162562, -0.00107059, -0.00150702], [ 0.00053502, 0.00038434, 0.00084729, ..., -0.00240545, -0.00203855, -0.00197635], [ 0.00070504, 0.0005771 , 0.00093624, ..., 0.00079223, 0.00099528, 0.00043519], [ 0.00132668, 0.00137925, 0.00134183, ..., 0.00304148, 0.00283049, 0.00320326]], dtype=float32)
- time()datetime64[ns]2019-07-25T22:15:00
- standard_name :
- time
- units :
- aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
- calendar :
- proleptic_gregorian
- axis :
- T
array('2019-07-25T22:15:00.000000000', dtype='datetime64[ns]')
- clon(ncells)float320.1346 0.1346 ... 0.1019 0.1021
- standard_name :
- longitude
- long_name :
- center longitude
- units :
- radian
- bounds :
- clon_bnds
array([0.1345952 , 0.13455328, 0.13468698, ..., 0.10198068, 0.10194211, 0.10207573], dtype=float32)
- clat(ncells)float32-0.5521 -0.5522 ... -0.5237 -0.5236
- standard_name :
- latitude
- long_name :
- center latitude
- units :
- radian
- bounds :
- clat_bnds
array([-0.552108 , -0.5521776 , -0.55210406, ..., -0.52360636, -0.5236784 , -0.52360374], dtype=float32)
- depth_2(depth_2)float6449.4 101.3 990.3 2.025e+03
- standard_name :
- depth
- long_name :
- depth_below_sea
- units :
- m
- positive :
- down
- axis :
- Z
array([ 49.4, 101.3, 990.3, 2025.1])
- standard_name :
- w
- long_name :
- vertical velocity at cells
- units :
- m/s
- code :
- 255
- CDI_grid_type :
- unstructured
- number_of_grid_in_reference :
- 1
[80]:
# One panel of vertical velocity per selected depth level.
projection = ccrs.PlateCarree()
hca, hcb = pyic.arrange_axes(2,2, plot_cb=True, asp=0.5, fig_size_fac=2,
                             sharex=True, sharey=True, xlabel="", ylabel="",
                             projection=projection,
                            )
ii=-1
for kk in range(wvel_rg.depth_2.size):
    ii+=1; ax=hca[ii]; cax=hcb[ii]
    hm = pyic.shade(Tri, wvel_rg[kk,:], ax=ax, cax=cax, clim=2.5e-3, projection=projection)
    # Depth on the right of the title line, variable name in the centre.
    ax.set_title(f'depth = {wvel_rg.depth_2[kk].data:.1f}m', loc='right')
    ax.set_title('vert. vel. [m/s]')
for ax in hca:
    pyic.plot_settings(ax, xlim=lon_reg, ylim=lat_reg)
Plotting a section#
[59]:
lon_reg, lat_reg
[59]:
([3.5, 9.5], [-31.5, -28.5])
[60]:
%%time
# End points [lon, lat] of the vertical section.
p1 = [3.5, -31.5]
p2 = [9.5, -28.5]
# p1 = [-13.5, -39]
# p2 = [15, -28]
lon_sec, lat_sec, dist_sec = pyic.derive_section_points(p1, p2, 101)
# Nearest-neighbour cell indices along the section (expensive on the
# full grid; could be cached as sketched in the commented block below).
dist, inds = pyic.calc_ckdtree(clon, clat, lon_sec, lat_sec, use_npconcatenate=True)
# if False:
#   dist, inds = pyic.calc_ckdtree(ds_tg.clon.compute().data, ds_tg.clat.compute().data, lon_sec, lat_sec, use_npconcatenate=True)
#   np.savez('tmp_smtwave_section_points.npz', dist=dist, inds=inds)
# else:
#   ddnpz = np.load('tmp_smtwave_section_points.npz')
#   dist = ddnpz['dist']
#   inds = ddnpz['inds']
CPU times: user 47.1 s, sys: 4.46 s, total: 51.6 s
Wall time: 44.1 s
[61]:
# Show the section path on top of the surface temperature map.
projection = ccrs.PlateCarree()
hca, hcb = pyic.arrange_axes(1,1, plot_cb=True, asp=0.5, fig_size_fac=2,
                             sharex=True, sharey=True, xlabel="", ylabel="",
                             projection=projection,
                            )
ii=-1
ii+=1; ax=hca[ii]; cax=hcb[ii]
hm = pyic.shade(lon, lat, toi, ax=ax, cax=cax, clim=[10, 25], projection=projection)
ax.plot(lon_sec, lat_sec)
ax.set_title(f'section')
for ax in hca:
    pyic.plot_settings(ax, xlim=[-20,30], ylim=[-45,-20])
    add_sonett(ax)
# savefig('mld')
[62]:
# Variables to be extracted along the section.
ds_sec = ds_3d[['to', 'tke', 'w']]
[41]:
%%time
# Select the section cells at the last time step (still lazy; the
# actual load happens in the .compute() cell below).
ds_sec = ds_sec.isel(ncells=inds, time=-1)#.compute()
CPU times: user 434 ms, sys: 1.1 s, total: 1.53 s
Wall time: 1.33 s
[68]:
# Inspect model depth level 96 (about 2025 m, see output).
ds_3d.depth_2[96]
[68]:
<xarray.DataArray 'depth_2' ()> array(2025.1) Coordinates: depth_2 float64 2.025e+03 Attributes: standard_name: depth long_name: depth_below_sea units: m positive: down axis: Z
xarray.DataArray
'depth_2'
- 2.025e+03
array(2025.1)
- depth_2()float642.025e+03
- standard_name :
- depth
- long_name :
- depth_below_sea
- units :
- m
- positive :
- down
- axis :
- Z
array(2025.1)
- standard_name :
- depth
- long_name :
- depth_below_sea
- units :
- m
- positive :
- down
- axis :
- Z
[63]:
%%time
# Load the section data into memory (triggers the dask computation;
# the traceback below shows this run was interrupted by the user).
ds_sec = ds_sec.compute()
ERROR! Session/line number was not unique in database. History logging moved to new session 1112
distributed.nanny - WARNING - Worker process still alive after 3 seconds, killing
distributed.nanny - WARNING - Worker process still alive after 3 seconds, killing
distributed.nanny - WARNING - Worker process still alive after 3 seconds, killing
distributed.nanny - WARNING - Worker process still alive after 3 seconds, killing
distributed.nanny - WARNING - Worker process still alive after 3 seconds, killing
distributed.nanny - WARNING - Worker process still alive after 3 seconds, killing
distributed.nanny - WARNING - Worker process still alive after 3 seconds, killing
distributed.comm.tcp - WARNING - Closing dangling stream in <TCP local=tcp://127.0.0.1:60430 remote=tcp://127.0.0.1:38653>
ERROR:root:Internal Python error in the inspect module.
Below is the traceback from this internal error.
Traceback (most recent call last):
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/client.py", line 2707, in get
results = self.gather(packed, asynchronous=asynchronous, direct=direct)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/client.py", line 2021, in gather
return self.sync(
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/client.py", line 862, in sync
return sync(
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/utils.py", line 335, in sync
e.wait(10)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/threading.py", line 574, in wait
signaled = self._cond.wait(timeout)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/threading.py", line 316, in wait
gotit = waiter.acquire(True, timeout)
KeyboardInterrupt
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/selector_events.py", line 140, in _write_to_self
csock.send(b'\0')
BlockingIOError: [Errno 11] Resource temporarily unavailable
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/IPython/core/magics/execution.py", line 1321, in time
exec(code, glob, local_ns)
File "<timed exec>", line 1, in <module>
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/xarray/core/dataset.py", line 1016, in compute
return new.load(**kwargs)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/xarray/core/dataset.py", line 850, in load
evaluated_data = da.compute(*lazy_data.values(), **kwargs)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/dask/base.py", line 567, in compute
results = schedule(dsk, keys, **kwargs)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/client.py", line 2710, in get
f.release()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/client.py", line 362, in release
self.client.loop.add_callback(self.client._dec_ref, stringify(self.key))
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/platform/asyncio.py", line 227, in add_callback
call_soon(self._run_callback, functools.partial(callback, *args, **kwargs))
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py", line 797, in call_soon_threadsafe
self._write_to_self()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/selector_events.py", line 140, in _write_to_self
csock.send(b'\0')
KeyboardInterrupt
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/IPython/core/interactiveshell.py", line 2061, in showtraceback
stb = value._render_traceback_()
AttributeError: 'KeyboardInterrupt' object has no attribute '_render_traceback_'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/IPython/core/ultratb.py", line 1101, in get_records
return _fixed_getinnerframes(etb, number_of_lines_of_context, tb_offset)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/IPython/core/ultratb.py", line 248, in wrapped
return f(*args, **kwargs)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/IPython/core/ultratb.py", line 281, in _fixed_getinnerframes
records = fix_frame_records_filenames(inspect.getinnerframes(etb, context))
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/inspect.py", line 1541, in getinnerframes
frameinfo = (tb.tb_frame,) + getframeinfo(tb, context)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/inspect.py", line 1503, in getframeinfo
lines, lnum = findsource(frame)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/IPython/core/ultratb.py", line 182, in findsource
lines = linecache.getlines(file, globals_dict)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/linecache.py", line 46, in getlines
return updatecache(filename, module_globals)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/linecache.py", line 137, in updatecache
lines = fp.readlines()
KeyboardInterrupt
---------------------------------------------------------------------------
KeyboardInterrupt Traceback (most recent call last)
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/client.py in get(self, dsk, keys, workers, allow_other_workers, resources, sync, asynchronous, direct, retries, priority, fifo_timeout, actors, **kwargs)
2706 try:
-> 2707 results = self.gather(packed, asynchronous=asynchronous, direct=direct)
2708 finally:
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/client.py in gather(self, futures, errors, direct, asynchronous)
2020 local_worker = None
-> 2021 return self.sync(
2022 self._gather,
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/client.py in sync(self, func, asynchronous, callback_timeout, *args, **kwargs)
861 else:
--> 862 return sync(
863 self.loop, func, *args, callback_timeout=callback_timeout, **kwargs
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/utils.py in sync(loop, func, callback_timeout, *args, **kwargs)
334 while not e.is_set():
--> 335 e.wait(10)
336 if error[0]:
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/threading.py in wait(self, timeout)
573 if not signaled:
--> 574 signaled = self._cond.wait(timeout)
575 return signaled
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/threading.py in wait(self, timeout)
315 if timeout > 0:
--> 316 gotit = waiter.acquire(True, timeout)
317 else:
KeyboardInterrupt:
During handling of the above exception, another exception occurred:
BlockingIOError Traceback (most recent call last)
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/selector_events.py in _write_to_self(self)
139 try:
--> 140 csock.send(b'\0')
141 except OSError:
BlockingIOError: [Errno 11] Resource temporarily unavailable
During handling of the above exception, another exception occurred:
KeyboardInterrupt Traceback (most recent call last)
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/IPython/core/magics/execution.py in time(self, line, cell, local_ns)
1320 try:
-> 1321 exec(code, glob, local_ns)
1322 out=None
<timed exec> in <module>
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/xarray/core/dataset.py in compute(self, **kwargs)
1015 new = self.copy(deep=False)
-> 1016 return new.load(**kwargs)
1017
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/xarray/core/dataset.py in load(self, **kwargs)
849 # evaluate all the dask arrays simultaneously
--> 850 evaluated_data = da.compute(*lazy_data.values(), **kwargs)
851
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/dask/base.py in compute(*args, **kwargs)
566
--> 567 results = schedule(dsk, keys, **kwargs)
568 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/client.py in get(self, dsk, keys, workers, allow_other_workers, resources, sync, asynchronous, direct, retries, priority, fifo_timeout, actors, **kwargs)
2709 for f in futures.values():
-> 2710 f.release()
2711 if getattr(thread_state, "key", False) and should_rejoin:
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/client.py in release(self, _in_destructor)
361 try:
--> 362 self.client.loop.add_callback(self.client._dec_ref, stringify(self.key))
363 except TypeError:
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/platform/asyncio.py in add_callback(self, callback, *args, **kwargs)
226 try:
--> 227 call_soon(self._run_callback, functools.partial(callback, *args, **kwargs))
228 except RuntimeError:
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py in call_soon_threadsafe(self, callback, context, *args)
796 del handle._source_traceback[-1]
--> 797 self._write_to_self()
798 return handle
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/selector_events.py in _write_to_self(self)
139 try:
--> 140 csock.send(b'\0')
141 except OSError:
KeyboardInterrupt:
During handling of the above exception, another exception occurred:
AttributeError Traceback (most recent call last)
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/IPython/core/interactiveshell.py in showtraceback(self, exc_tuple, filename, tb_offset, exception_only, running_compiled_code)
2060 # in the engines. This should return a list of strings.
-> 2061 stb = value._render_traceback_()
2062 except Exception:
AttributeError: 'KeyboardInterrupt' object has no attribute '_render_traceback_'
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
/tmp/ipykernel_21444/2422761924.py in <module>
----> 1 get_ipython().run_cell_magic('time', '', 'ds_sec = ds_sec.compute()\n')
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/IPython/core/interactiveshell.py in run_cell_magic(self, magic_name, line, cell)
2401 with self.builtin_trap:
2402 args = (magic_arg_s, cell)
-> 2403 result = fn(*args, **kwargs)
2404 return result
2405
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/decorator.py in fun(*args, **kw)
230 if not kwsyntax:
231 args, kw = fix(args, kw, sig)
--> 232 return caller(func, *(extras + args), **kw)
233 fun.__name__ = func.__name__
234 fun.__doc__ = func.__doc__
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/IPython/core/magic.py in <lambda>(f, *a, **k)
185 # but it's overkill for just that one bit of state.
186 def magic_deco(arg):
--> 187 call = lambda f, *a, **k: f(*a, **k)
188
189 if callable(arg):
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/IPython/core/magics/execution.py in time(self, line, cell, local_ns)
1326 out = eval(code_2, glob, local_ns)
1327 except:
-> 1328 self.shell.showtraceback()
1329 return
1330 end = clock2()
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/IPython/core/interactiveshell.py in showtraceback(self, exc_tuple, filename, tb_offset, exception_only, running_compiled_code)
2061 stb = value._render_traceback_()
2062 except Exception:
-> 2063 stb = self.InteractiveTB.structured_traceback(etype,
2064 value, tb, tb_offset=tb_offset)
2065
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/IPython/core/ultratb.py in structured_traceback(self, etype, value, tb, tb_offset, number_of_lines_of_context)
1365 else:
1366 self.tb = tb
-> 1367 return FormattedTB.structured_traceback(
1368 self, etype, value, tb, tb_offset, number_of_lines_of_context)
1369
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/IPython/core/ultratb.py in structured_traceback(self, etype, value, tb, tb_offset, number_of_lines_of_context)
1265 if mode in self.verbose_modes:
1266 # Verbose modes need a full traceback
-> 1267 return VerboseTB.structured_traceback(
1268 self, etype, value, tb, tb_offset, number_of_lines_of_context
1269 )
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/IPython/core/ultratb.py in structured_traceback(self, etype, evalue, etb, tb_offset, number_of_lines_of_context)
1122 """Return a nice text document describing the traceback."""
1123
-> 1124 formatted_exception = self.format_exception_as_a_whole(etype, evalue, etb, number_of_lines_of_context,
1125 tb_offset)
1126
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/IPython/core/ultratb.py in format_exception_as_a_whole(self, etype, evalue, etb, number_of_lines_of_context, tb_offset)
1080
1081
-> 1082 last_unique, recursion_repeat = find_recursion(orig_etype, evalue, records)
1083
1084 frames = self.format_records(records, last_unique, recursion_repeat)
/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/IPython/core/ultratb.py in find_recursion(etype, value, records)
380 # first frame (from in to out) that looks different.
381 if not is_recursion_error(etype, value, records):
--> 382 return len(records), 0
383
384 # Select filename, lineno, func_name to track frames with
TypeError: object of type 'NoneType' has no len()
[43]:
# to_sec = to_sec.where(to_sec!=0)
[62]:
# Three-panel vertical section along the SMT-WAVE transect:
# temperature, TKE (log scale), and vertical velocity.
hca, hcb = pyic.arrange_axes(1,3, plot_cb=True, asp=0.5, fig_size_fac=2,
                             sharex=True, sharey=True,
                             xlabel="longitude", ylabel="depth [m]",
                             )

# One entry per panel: (variable name, depth coordinate, pass raw .data?, shade kwargs).
# NOTE(review): 'tke' is passed as da.data while the other panels pass the
# DataArray itself — reproduced exactly as in the original; confirm intentional.
panels = [
    ('to',  ds_sec.depth,   False, dict(clim=[4, 18.5])),
    ('tke', ds_sec.depth_2, True,  dict(clim=[-6, -2], logplot=True)),
    ('w',   ds_sec.depth_2, False, dict(clim=5e-3)),
]

for (varname, depth, use_raw, shade_kwargs), ax, cax in zip(panels, hca, hcb):
    da = ds_sec[varname]
    field = da.data if use_raw else da
    pyic.shade(lon_sec, depth, field, ax=ax, cax=cax, **shade_kwargs)
    ax.set_title(f'{da.long_name} [{da.units}]')

# Depth axis increases downward: show the upper 1000 m with the surface on top.
for ax in hca:
    ax.set_ylim(1000, 0)
distributed.utils_perf - WARNING - full garbage collections took 12% CPU time recently (threshold: 10%)
Process Dask Worker process (from Nanny):
Traceback (most recent call last):
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/multiprocessing/process.py", line 315, in _bootstrap
self.run()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/multiprocessing/process.py", line 108, in run
self._target(*self._args, **self._kwargs)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/process.py", line 191, in _run
target(*args, **kwargs)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/nanny.py", line 835, in _run
loop.run_sync(do_stop)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/ioloop.py", line 524, in run_sync
self.start()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/platform/asyncio.py", line 199, in start
self.asyncio_loop.run_forever()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py", line 596, in run_forever
self._run_once()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py", line 1890, in _run_once
handle._run()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/events.py", line 80, in _run
self._context.run(self._callback, *self._args)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/ioloop.py", line 688, in <lambda>
lambda f: self._run_callback(functools.partial(callback, future))
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/ioloop.py", line 741, in _run_callback
ret = callback()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/ioloop.py", line 765, in _discard_future_result
future.result()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/nanny.py", line 828, in _run
loop.run_sync(run)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/ioloop.py", line 524, in run_sync
self.start()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/platform/asyncio.py", line 199, in start
self.asyncio_loop.run_forever()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py", line 596, in run_forever
self._run_once()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py", line 1890, in _run_once
handle._run()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/events.py", line 80, in _run
self._context.run(self._callback, *self._args)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/worker.py", line 3042, in execute
self.transition(ts, "memory", value=value)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/worker.py", line 1702, in transition
state = func(ts, **kwargs)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/worker.py", line 1988, in transition_executing_done
self.put_key_in_memory(ts, value, transition=False)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/worker.py", line 2241, in put_key_in_memory
self.data[ts.key] = value
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/spill.py", line 69, in __setitem__
super().__setitem__(key, value)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/zict/buffer.py", line 87, in __setitem__
self.fast[key] = value
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/zict/lru.py", line 70, in __setitem__
self.evict()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/zict/lru.py", line 89, in evict
cb(k, v)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/zict/buffer.py", line 60, in fast_to_slow
self.slow[key] = value
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/zict/func.py", line 41, in __setitem__
self.d[key] = self.dump(value)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/zict/file.py", line 82, in __setitem__
f.write(v)
KeyboardInterrupt
Process Dask Worker process (from Nanny):
Process Dask Worker process (from Nanny):
Process Dask Worker process (from Nanny):
Traceback (most recent call last):
Traceback (most recent call last):
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/nanny.py", line 828, in _run
loop.run_sync(run)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/nanny.py", line 828, in _run
loop.run_sync(run)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/ioloop.py", line 524, in run_sync
self.start()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/ioloop.py", line 524, in run_sync
self.start()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/platform/asyncio.py", line 199, in start
self.asyncio_loop.run_forever()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/platform/asyncio.py", line 199, in start
self.asyncio_loop.run_forever()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py", line 596, in run_forever
self._run_once()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py", line 596, in run_forever
self._run_once()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py", line 1854, in _run_once
event_list = self._selector.select(timeout)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py", line 1854, in _run_once
event_list = self._selector.select(timeout)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/selectors.py", line 469, in select
fd_event_list = self._selector.poll(timeout, max_ev)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/selectors.py", line 469, in select
fd_event_list = self._selecERROR:root:Internal Python error in the inspect module.
Below is the traceback from this internal error.
tor.poll(timeout, max_ev)
KeyboardInterrupt
KeyboardInterrupt
During handling of the above exception, another exception occurred:
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
Traceback (most recent call last):
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/multiprocessing/process.py", line 315, in _bootstrap
self.run()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/multiprocessing/process.py", line 315, in _bootstrap
self.run()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/multiprocessing/process.py", line 108, in run
self._target(*self._args, **self._kwargs)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/multiprocessing/process.py", line 108, in run
self._target(*self._args, **self._kwargs)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/process.py", line 191, in _run
target(*args, **kwargs)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/process.py", line 191, in _run
target(*args, **kwargs)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/nanny.py", line 835, in _run
loop.run_sync(do_stop)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/nanny.py", line 835, in _run
loop.run_sync(do_stop)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/ioloop.py", line 524, in run_sync
self.start()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/ioloop.py", line 524, in run_sync
self.start()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/platform/asyncio.py", line 199, in start
self.asyncio_loop.run_forever()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/platform/asyncio.py", line 199, in start
self.asyncio_loop.run_forever()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py", line 596, in run_forever
self._run_once()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py", line 596, in run_forever
self._run_once()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py", line 1890, in _run_once
handle._run()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py", line 1890, in _run_once
handle._run()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/events.py", line 80, in _run
self._context.run(self._callback, *self._args)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/events.py", line 80, in _run
self._context.run(self._callback, *self._args)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/nanny.py", line 751, in do_stop
await worker.close(
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/nanny.py", line 751, in do_stop
await worker.close(
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/worker.py", line 1311, in close
executor.shutdown(wait=executor_wait, timeout=timeout)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/threadpoolexecutor.py", line 105, in shutdown
t.join(timeout=timeout2)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/worker.py", line 1311, in close
executor.shutdown(wait=executor_wait, timeout=timeout)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/threading.py", line 1057, in join
self._wait_for_tstate_lock(timeout=max(timeout, 0))
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/threadpoolexecutor.py", line 105, in shutdown
t.join(timeout=timeout2)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/threading.py", line 1069, in _wait_for_tstate_lock
elif lock.acquire(block, timeout):
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/threading.py", line 1057, in join
self._wait_for_tstate_lock(timeout=max(timeout, 0))
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/threading.py", line 1069, in _wait_for_tstate_lock
elif lock.acquire(block, timeout):
KeyboardInterrupt
KeyboardInterrupt
Traceback (most recent call last):
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/nanny.py", line 828, in _run
loop.run_sync(run)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/ioloop.py", line 524, in run_sync
self.start()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/platform/asyncio.py", line 199, in start
self.asyncio_loop.run_forever()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py", line 596, in run_forever
self._run_once()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py", line 1854, in _run_once
event_list = self._selector.select(timeout)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/selectors.py", line 469, in select
fd_event_list = self._selector.poll(timeout, max_ev)
KeyboardInterrupt
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/multiprocessing/process.py", line 315, in _bootstrap
self.run()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/multiprocessing/process.py", line 108, in run
self._target(*self._args, **self._kwargs)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/process.py", line 191, in _run
target(*args, **kwargs)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/nanny.py", line 835, in _run
loop.run_sync(do_stop)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/ioloop.py", line 524, in run_sync
self.start()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/platform/asyncio.py", line 199, in start
self.asyncio_loop.run_forever()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py", line 596, in run_forever
self._run_once()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py", line 1890, in _run_once
handle._run()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/events.py", line 80, in _run
self._context.run(self._callback, *self._args)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/nanny.py", line 751, in do_stop
await worker.close(
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/worker.py", line 1311, in close
executor.shutdown(wait=executor_wait, timeout=timeout)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/threadpoolexecutor.py", line 105, in shutdown
t.join(timeout=timeout2)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/threading.py", line 1057, in join
self._wait_for_tstate_lock(timeout=max(timeout, 0))
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/threading.py", line 1069, in _wait_for_tstate_lock
elif lock.acquire(block, timeout):
KeyboardInterrupt
Process Dask Worker process (from Nanny):
Traceback (most recent call last):
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/nanny.py", line 828, in _run
loop.run_sync(run)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/ioloop.py", line 524, in run_sync
self.start()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/platform/asyncio.py", line 199, in start
self.asyncio_loop.run_forever()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py", line 596, in run_forever
self._run_once()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py", line 1854, in _run_once
event_list = self._selector.select(timeout)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/selectors.py", line 469, in select
fd_event_list = self._selector.poll(timeout, max_ev)
KeyboardInterrupt
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/multiprocessing/process.py", line 315, in _bootstrap
self.run()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/multiprocessing/process.py", line 108, in run
self._target(*self._args, **self._kwargs)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/process.py", line 191, in _run
target(*args, **kwargs)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/nanny.py", line 835, in _run
loop.run_sync(do_stop)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/ioloop.py", line 524, in run_sync
self.start()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/tornado/platform/asyncio.py", line 199, in start
self.asyncio_loop.run_forever()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py", line 596, in run_forever
self._run_once()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/base_events.py", line 1890, in _run_once
handle._run()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/asyncio/events.py", line 80, in _run
self._context.run(self._callback, *self._args)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/nanny.py", line 751, in do_stop
await worker.close(
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/worker.py", line 1291, in close
self._workdir.release()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/diskutils.py", line 90, in release
self._finalizer()
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/weakref.py", line 580, in __call__
return info.func(*info.args, **(info.kwargs or {}))
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/diskutils.py", line 95, in _finalize
workspace._purge_directory(dir_path)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/diskutils.py", line 186, in _purge_directory
shutil.rmtree(dir_path, onerror=self._on_remove_error)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/shutil.py", line 718, in rmtree
_rmtree_safe_fd(fd, path, onerror)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/shutil.py", line 655, in _rmtree_safe_fd
_rmtree_safe_fd(dirfd, fullname, onerror)
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/shutil.py", line 673, in _rmtree_safe_fd
os.unlink(entry.name, dir_fd=topfd)
KeyboardInterrupt
Error in atexit._run_exitfuncs:
Traceback (most recent call last):
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/distributed/deploy/local.py", line 266, in close_clusters
KeyboardInterrupt
ERROR:root:Internal Python error in the inspect module.
Below is the traceback from this internal error.
ERROR:root:Internal Python error in the inspect module.
Below is the traceback from this internal error.
Traceback (most recent call last):
File "/work/mh0033/m300602/miniconda3/envs/pyicon_py39/lib/python3.9/site-packages/IPython/core/interactiveshell.py", line 3441, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "/tmp/ipykernel_21444/426675411.py", line 9, in <module>
pyic.shade(lon_sec, ds_sec.depth, da, ax=ax, cax=cax, clim=[4,18.5])
KeyboardInterrupt
[ ]: