Codebase list netcdf4-python / f3ee518
New upstream snapshot. Debian Janitor 1 year, 7 months ago
15 changed file(s) with 354 addition(s) and 3004 deletion(s). Raw diff Collapse all Expand all
+0
-104
.github/workflows/build.yml less more
---
# Build netcdf4-python against PnetCDF and a from-source netCDF-C (parallel
# HDF5 via mpich), then run the serial and MPI test suites and check the
# source distribution.
name: Build and Test Linux
on: [push, pull_request]
jobs:
  build-linux:
    name: Python (${{ matrix.python-version }})
    runs-on: ubuntu-latest
    env:
      PNETCDF_VERSION: "1.12.1"
      NETCDF_VERSION: "4.9.0"
      # Libraries are installed one level above the checkout.
      NETCDF_DIR: ${{ github.workspace }}/..
      NETCDF_EXTRA_CONFIG: --enable-pnetcdf
      CC: mpicc.mpich
      # NO_NET: 1
    strategy:
      matrix:
        python-version: ["3.9"]
    steps:
      - uses: actions/checkout@v2

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install Ubuntu Dependencies
        run: |
          sudo apt-get update
          sudo apt-get install mpich libmpich-dev libhdf5-mpich-dev libcurl4-openssl-dev bzip2 libsnappy-dev libblosc-dev libzstd-dev
          echo "Download and build PnetCDF version ${PNETCDF_VERSION}"
          wget https://parallel-netcdf.github.io/Release/pnetcdf-${PNETCDF_VERSION}.tar.gz
          tar -xzf pnetcdf-${PNETCDF_VERSION}.tar.gz
          pushd pnetcdf-${PNETCDF_VERSION}
          ./configure --prefix $NETCDF_DIR --enable-shared --disable-fortran --disable-cxx
          make -j 2
          make install
          popd
          echo "Download and build netCDF version ${NETCDF_VERSION}"
          # Fixed: the directory component of this URL was hard-coded to
          # 4.9.0; it now tracks NETCDF_VERSION so a version bump needs a
          # single edit (the old form would 404 after any bump).
          wget https://downloads.unidata.ucar.edu/netcdf-c/${NETCDF_VERSION}/netcdf-c-${NETCDF_VERSION}.tar.gz
          tar -xzf netcdf-c-${NETCDF_VERSION}.tar.gz
          pushd netcdf-c-${NETCDF_VERSION}
          export CPPFLAGS="-I/usr/include/hdf5/mpich -I${NETCDF_DIR}/include"
          export LDFLAGS="-L${NETCDF_DIR}/lib"
          export LIBS="-lhdf5_mpich_hl -lhdf5_mpich -lm -lz"
          ./configure --prefix $NETCDF_DIR --enable-netcdf-4 --enable-shared --enable-dap --enable-parallel4 $NETCDF_EXTRA_CONFIG
          make -j 2
          make install
          popd

      # - name: The job has failed
      #   if: ${{ failure() }}
      #   run: |
      #     cd netcdf-c-${NETCDF_VERSION}
      #     cat config.log

      - name: Install python dependencies via pip
        run: |
          python -m pip install --upgrade pip
          pip install numpy cython cftime pytest twine wheel check-manifest mpi4py

      - name: Install netcdf4-python
        run: |
          export PATH=${NETCDF_DIR}/bin:${PATH}
          export NETCDF_PLUGIN_DIR=${{ github.workspace }}/netcdf-c-${NETCDF_VERSION}/plugins/plugindir
          python setup.py install

      - name: Test
        run: |
          export PATH=${NETCDF_DIR}/bin:${PATH}
          python checkversion.py
          # serial
          cd test
          python run_all.py
          # parallel (hdf5 for netcdf4, pnetcdf for netcdf3)
          cd ../examples
          # NOTE(review): the default Actions shell runs bash with -e, so a
          # failing mpirun aborts the step before the $? checks below fire;
          # kept verbatim to preserve behavior.
          mpirun.mpich -np 4 python mpi_example.py
          if [ $? -ne 0 ] ; then
            echo "hdf5 mpi test failed!"
            exit 1
          else
            echo "hdf5 mpi test passed!"
          fi
          mpirun.mpich -np 4 python mpi_example_compressed.py
          if [ $? -ne 0 ] ; then
            echo "hdf5 compressed mpi test failed!"
            exit 1
          else
            echo "hdf5 compressed mpi test passed!"
          fi
          mpirun.mpich -np 4 python mpi_example.py NETCDF3_64BIT_DATA
          if [ $? -ne 0 ] ; then
            echo "pnetcdf mpi test failed!"
            exit 1
          else
            echo "pnetcdf mpi test passed!"
          fi

      - name: Tarball
        run: |
          export PATH=${NETCDF_DIR}/bin:${PATH}
          python setup.py --version
          check-manifest --version
          check-manifest --verbose
          pip wheel . -w dist --no-deps
          twine check dist/*
+0
-78
.github/workflows/build_master.yml less more
---
# Build netcdf4-python against the netcdf-c GitHub master branch (parallel
# HDF5 via mpich) and run the serial and MPI test suites.
name: Build and Test on Linux with netcdf-c github master
on: [push, pull_request]
jobs:
  build-linux:
    name: Python (${{ matrix.python-version }})
    runs-on: ubuntu-latest
    env:
      # Libraries are installed one level above the checkout.
      NETCDF_DIR: ${{ github.workspace }}/..
      CC: mpicc.mpich
      # NO_NET: 1
    strategy:
      matrix:
        python-version: ["3.9"]
    steps:
      - uses: actions/checkout@v2

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install Ubuntu Dependencies
        run: |
          sudo apt-get update
          sudo apt-get install mpich libmpich-dev libhdf5-mpich-dev libcurl4-openssl-dev bzip2 libsnappy-dev libblosc-dev libzstd-dev
          echo "Download and build netCDF github master"
          git clone https://github.com/Unidata/netcdf-c
          pushd netcdf-c
          export CPPFLAGS="-I/usr/include/hdf5/mpich -I${NETCDF_DIR}/include"
          export LDFLAGS="-L${NETCDF_DIR}/lib"
          export LIBS="-lhdf5_mpich_hl -lhdf5_mpich -lm -lz"
          autoreconf -i
          ./configure --prefix $NETCDF_DIR --enable-netcdf-4 --enable-shared --enable-dap --enable-parallel4
          make -j 2
          make install
          popd

      # Fixed: this debug step previously referenced netcdf-c-${NETCDF_VERSION},
      # but this workflow defines no NETCDF_VERSION and clones plain "netcdf-c"
      # (stale copy-paste from build.yml).
      # - name: The job has failed
      #   if: ${{ failure() }}
      #   run: |
      #     cd netcdf-c
      #     cat config.log

      - name: Install python dependencies via pip
        run: |
          python -m pip install --upgrade pip
          pip install numpy cython cftime pytest twine wheel check-manifest mpi4py

      - name: Install netcdf4-python
        run: |
          export PATH=${NETCDF_DIR}/bin:${PATH}
          export NETCDF_PLUGIN_DIR=${{ github.workspace }}/netcdf-c/plugins/plugindir
          python setup.py install

      - name: Test
        run: |
          export PATH=${NETCDF_DIR}/bin:${PATH}
          # export HDF5_PLUGIN_PATH=${NETCDF_DIR}/plugins/plugindir
          python checkversion.py
          # serial
          cd test
          python run_all.py
          # parallel
          cd ../examples
          mpirun.mpich -np 4 python mpi_example.py
          if [ $? -ne 0 ] ; then
            echo "hdf5 mpi test failed!"
            exit 1
          else
            echo "hdf5 mpi test passed!"
          fi
          mpirun.mpich -np 4 python mpi_example_compressed.py
          if [ $? -ne 0 ] ; then
            echo "hdf5 compressed mpi test failed!"
            exit 1
          else
            echo "hdf5 compressed mpi test passed!"
          fi
+0
-98
.github/workflows/miniconda.yml less more
---
# Build and test netcdf4-python in micromamba environments: a serial matrix
# across OSes/Python versions, plus one MPI job on Linux.
name: Build and Test

on:
  pull_request:
  push:
    branches: [master]

jobs:
  run-serial:
    runs-on: ${{ matrix.os }}
    # env:
    #   NO_NET: 1
    strategy:
      matrix:
        python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
        os: [windows-latest, ubuntu-latest, macos-latest]
        platform: [x64, x32]
        exclude:
          - os: macos-latest
            platform: x32
      fail-fast: false

    steps:
      - uses: actions/checkout@v3

      - name: Setup Micromamba
        uses: mamba-org/provision-with-micromamba@v13
        with:
          environment-file: false

      - name: Python ${{ matrix.python-version }}
        shell: bash -l {0}
        run: |
          micromamba create --name TEST python=${{ matrix.python-version }} numpy cython pip pytest hdf5 libnetcdf cftime zlib --channel conda-forge
          micromamba activate TEST
          export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH" # so setup.py finds nc-config
          pip install -e . --no-deps --force-reinstall

      - name: Debug conda
        shell: bash -l {0}
        run: |
          micromamba activate TEST
          micromamba info --all
          micromamba list

      - name: Tests
        shell: bash -l {0}
        run: |
          micromamba activate TEST
          cd test && python run_all.py

  run-mpi:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        python-version: ["3.9"]
        os: [ubuntu-latest]
        platform: [x64]
    steps:
      # Fixed: aligned with run-serial above — checkout was @v2 here and
      # provision-with-micromamba was the unpinned @main (non-reproducible).
      - uses: actions/checkout@v3

      - name: Setup Micromamba
        uses: mamba-org/provision-with-micromamba@v13
        with:
          environment-file: false

      - name: Python ${{ matrix.python-version }}
        shell: bash -l {0}
        run: |
          micromamba create --name TEST python=${{ matrix.python-version }} numpy cython pip pytest mpi4py hdf5=*=mpi* libnetcdf=*=mpi* cftime zlib --channel conda-forge
          micromamba activate TEST
          export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH" # so setup.py finds nc-config
          pip install -e . --no-deps --force-reinstall

      - name: Debug conda
        shell: bash -l {0}
        run: |
          micromamba activate TEST
          micromamba info --all
          micromamba list

      - name: Tests
        shell: bash -l {0}
        run: |
          micromamba activate TEST
          cd test && python run_all.py
          cd ../examples
          export PATH="${CONDA_PREFIX}/bin:${CONDA_PREFIX}/Library/bin:$PATH"
          which mpirun
          mpirun --version
          #mpirun -np 4 --oversubscribe python mpi_example.py # for openmpi
          mpirun -np 4 python mpi_example.py
          if [ $? -ne 0 ] ; then
            echo "hdf5 mpi test failed!"
            exit 1
          else
            echo "hdf5 mpi test passed!"
          fi
0 Metadata-Version: 2.1
1 Name: netCDF4
2 Version: 1.6.1
3 Summary: Provides an object-oriented python interface to the netCDF version 4 library.
4 Home-page: http://github.com/Unidata/netcdf4-python
5 Author: Jeff Whitaker
6 Author-email: jeffrey.s.whitaker@noaa.gov
7 License: License :: OSI Approved :: MIT License
8 Download-URL: http://python.org/pypi/netCDF4
9 Keywords: numpy,netcdf,data,science,network,oceanography,meteorology,climate
10 Platform: any
11 Classifier: Development Status :: 3 - Alpha
12 Classifier: Programming Language :: Python :: 3
13 Classifier: Programming Language :: Python :: 3.6
14 Classifier: Programming Language :: Python :: 3.7
15 Classifier: Programming Language :: Python :: 3.8
16 Classifier: Intended Audience :: Science/Research
17 Classifier: License :: OSI Approved :: MIT License
18 Classifier: Topic :: Software Development :: Libraries :: Python Modules
19 Classifier: Topic :: System :: Archiving :: Compression
20 Classifier: Operating System :: OS Independent
21 License-File: LICENSE
22
23 netCDF version 4 has many features not found in earlier versions of the library, such as hierarchical groups, zlib compression, multiple unlimited dimensions, and new data types. It is implemented on top of HDF5. This module implements most of the new features, and can read and write netCDF files compatible with older versions of the library. The API is modelled after Scientific.IO.NetCDF, and should be familiar to users of that module.
24
25 This project is hosted on a `GitHub repository <https://github.com/Unidata/netcdf4-python>`_ where you may access the most up-to-date source.
26
0 netcdf4-python (1.6.1+git20220915.1.e8f2673-1) UNRELEASED; urgency=low
1
2 * New upstream snapshot.
3
4 -- Debian Janitor <janitor@jelmer.uk> Fri, 16 Sep 2022 05:56:41 -0000
5
06 netcdf4-python (1.6.1-1) unstable; urgency=medium
17
28 * New upstream release.
+0
-1293
examples/reading_netCDF.ipynb less more
0 {
1 "cells": [
2 {
3 "cell_type": "markdown",
4 "metadata": {
5 "internals": {
6 "slide_helper": "subslide_end",
7 "slide_type": "subslide"
8 },
9 "slide_helper": "slide_end",
10 "slideshow": {
11 "slide_type": "slide"
12 }
13 },
14 "source": [
15 "# Reading netCDF data\n",
16 "- requires [numpy](http://numpy.scipy.org) and netCDF/HDF5 C libraries.\n",
17 "- Github site: https://github.com/Unidata/netcdf4-python\n",
18 "- Online docs: http://unidata.github.io/netcdf4-python/\n",
19 "- Based on Konrad Hinsen's old [Scientific.IO.NetCDF](http://dirac.cnrs-orleans.fr/plone/software/scientificpython/) API, with lots of added netcdf version 4 features.\n",
20 "- Developed by Jeff Whitaker at NOAA, with many contributions from users."
21 ]
22 },
23 {
24 "cell_type": "markdown",
25 "metadata": {
26 "internals": {
27 "slide_type": "subslide"
28 },
29 "slideshow": {
30 "slide_type": "slide"
31 }
32 },
33 "source": [
34 "## Interactively exploring a netCDF File\n",
35 "\n",
36 "Let's explore a netCDF file from the *Atlantic Real-Time Ocean Forecast System*\n",
37 "\n",
38 "first, import netcdf4-python and numpy"
39 ]
40 },
41 {
42 "cell_type": "code",
43 "execution_count": 5,
44 "metadata": {
45 "collapsed": false,
46 "internals": {
47 "frag_number": 2,
48 "slide_helper": "subslide_end"
49 },
50 "slide_helper": "slide_end",
51 "slideshow": {
52 "slide_type": "fragment"
53 }
54 },
55 "outputs": [],
56 "source": [
57 "import netCDF4\n",
58 "import numpy as np"
59 ]
60 },
61 {
62 "cell_type": "markdown",
63 "metadata": {
64 "internals": {
65 "frag_helper": "fragment_end",
66 "frag_number": 2,
67 "slide_type": "subslide"
68 },
69 "slideshow": {
70 "slide_type": "slide"
71 }
72 },
73 "source": [
74 "## Create a netCDF4.Dataset object\n",
75 "- **`f`** is a `Dataset` object, representing an open netCDF file.\n",
76 "- printing the object gives you summary information, similar to *`ncdump -h`*."
77 ]
78 },
79 {
80 "cell_type": "code",
81 "execution_count": 6,
82 "metadata": {
83 "collapsed": false,
84 "internals": {
85 "frag_helper": "fragment_end",
86 "frag_number": 4,
87 "slide_helper": "subslide_end"
88 },
89 "slide_helper": "slide_end",
90 "slideshow": {
91 "slide_type": "fragment"
92 }
93 },
94 "outputs": [
95 {
96 "name": "stdout",
97 "output_type": "stream",
98 "text": [
99 "<type 'netCDF4._netCDF4.Dataset'>\n",
100 "root group (NETCDF4_CLASSIC data model, file format HDF5):\n",
101 " Conventions: CF-1.0\n",
102 " title: HYCOM ATLb2.00\n",
103 " institution: National Centers for Environmental Prediction\n",
104 " source: HYCOM archive file\n",
105 " experiment: 90.9\n",
106 " history: archv2ncdf3z\n",
107 " dimensions(sizes): MT(1), Y(850), X(712), Depth(10)\n",
108 " variables(dimensions): float64 \u001b[4mMT\u001b[0m(MT), float64 \u001b[4mDate\u001b[0m(MT), float32 \u001b[4mDepth\u001b[0m(Depth), int32 \u001b[4mY\u001b[0m(Y), int32 \u001b[4mX\u001b[0m(X), float32 \u001b[4mLatitude\u001b[0m(Y,X), float32 \u001b[4mLongitude\u001b[0m(Y,X), float32 \u001b[4mu\u001b[0m(MT,Depth,Y,X), float32 \u001b[4mv\u001b[0m(MT,Depth,Y,X), float32 \u001b[4mtemperature\u001b[0m(MT,Depth,Y,X), float32 \u001b[4msalinity\u001b[0m(MT,Depth,Y,X)\n",
109 " groups: \n",
110 "\n"
111 ]
112 }
113 ],
114 "source": [
115 "f = netCDF4.Dataset('data/rtofs_glo_3dz_f006_6hrly_reg3.nc')\n",
116 "print(f) "
117 ]
118 },
119 {
120 "cell_type": "markdown",
121 "metadata": {
122 "internals": {
123 "frag_helper": "fragment_end",
124 "frag_number": 4,
125 "slide_type": "subslide"
126 },
127 "slideshow": {
128 "slide_type": "slide"
129 }
130 },
131 "source": [
132 "## Access a netCDF variable\n",
133 "- variable objects stored by name in **`variables`** dict.\n",
134 "- print the variable yields summary info (including all the attributes).\n",
135 "- no actual data read yet (just have a reference to the variable object with metadata)."
136 ]
137 },
138 {
139 "cell_type": "code",
140 "execution_count": 7,
141 "metadata": {
142 "collapsed": false,
143 "internals": {
144 "frag_helper": "fragment_end",
145 "frag_number": 6,
146 "slide_helper": "subslide_end"
147 },
148 "slide_helper": "slide_end",
149 "slideshow": {
150 "slide_type": "fragment"
151 }
152 },
153 "outputs": [
154 {
155 "name": "stdout",
156 "output_type": "stream",
157 "text": [
158 "[u'MT', u'Date', u'Depth', u'Y', u'X', u'Latitude', u'Longitude', u'u', u'v', u'temperature', u'salinity']\n",
159 "<type 'netCDF4._netCDF4.Variable'>\n",
160 "float32 temperature(MT, Depth, Y, X)\n",
161 " coordinates: Longitude Latitude Date\n",
162 " standard_name: sea_water_potential_temperature\n",
163 " units: degC\n",
164 " _FillValue: 1.26765e+30\n",
165 " valid_range: [ -5.07860279 11.14989948]\n",
166 " long_name: temp [90.9H]\n",
167 "unlimited dimensions: MT\n",
168 "current shape = (1, 10, 850, 712)\n",
169 "filling on\n"
170 ]
171 }
172 ],
173 "source": [
174 "print(f.variables.keys()) # get all variable names\n",
175 "temp = f.variables['temperature'] # temperature variable\n",
176 "print(temp) "
177 ]
178 },
179 {
180 "cell_type": "markdown",
181 "metadata": {
182 "internals": {
183 "frag_helper": "fragment_end",
184 "frag_number": 6,
185 "slide_type": "subslide"
186 },
187 "slideshow": {
188 "slide_type": "slide"
189 }
190 },
191 "source": [
192 "## List the Dimensions\n",
193 "\n",
194 "- All variables in a netCDF file have an associated shape, specified by a list of dimensions.\n",
195 "- Let's list all the dimensions in this netCDF file.\n",
196 "- Note that the **`MT`** dimension is special (*`unlimited`*), which means it can be appended to."
197 ]
198 },
199 {
200 "cell_type": "code",
201 "execution_count": 8,
202 "metadata": {
203 "collapsed": false,
204 "internals": {
205 "frag_helper": "fragment_end",
206 "frag_number": 8
207 },
208 "slideshow": {
209 "slide_type": "fragment"
210 }
211 },
212 "outputs": [
213 {
214 "name": "stdout",
215 "output_type": "stream",
216 "text": [
217 "(u'MT', <type 'netCDF4._netCDF4.Dimension'> (unlimited): name = 'MT', size = 1\n",
218 ")\n",
219 "(u'Y', <type 'netCDF4._netCDF4.Dimension'>: name = 'Y', size = 850\n",
220 ")\n",
221 "(u'X', <type 'netCDF4._netCDF4.Dimension'>: name = 'X', size = 712\n",
222 ")\n",
223 "(u'Depth', <type 'netCDF4._netCDF4.Dimension'>: name = 'Depth', size = 10\n",
224 ")\n"
225 ]
226 }
227 ],
228 "source": [
229 "for d in f.dimensions.items():\n",
230 " print(d)"
231 ]
232 },
233 {
234 "cell_type": "markdown",
235 "metadata": {
236 "internals": {
237 "frag_helper": "fragment_end",
238 "frag_number": 9
239 },
240 "slideshow": {
241 "slide_type": "fragment"
242 }
243 },
244 "source": [
245 "Each variable has a **`dimensions`** and a **`shape`** attribute."
246 ]
247 },
248 {
249 "cell_type": "code",
250 "execution_count": 9,
251 "metadata": {
252 "collapsed": false,
253 "internals": {
254 "frag_helper": "fragment_end",
255 "frag_number": 10
256 },
257 "slideshow": {
258 "slide_type": "fragment"
259 }
260 },
261 "outputs": [
262 {
263 "data": {
264 "text/plain": [
265 "(u'MT', u'Depth', u'Y', u'X')"
266 ]
267 },
268 "execution_count": 9,
269 "metadata": {},
270 "output_type": "execute_result"
271 }
272 ],
273 "source": [
274 "temp.dimensions"
275 ]
276 },
277 {
278 "cell_type": "code",
279 "execution_count": 10,
280 "metadata": {
281 "collapsed": false,
282 "internals": {
283 "frag_helper": "fragment_end",
284 "frag_number": 11,
285 "slide_helper": "subslide_end"
286 },
287 "slide_helper": "slide_end",
288 "slideshow": {
289 "slide_type": "fragment"
290 }
291 },
292 "outputs": [
293 {
294 "data": {
295 "text/plain": [
296 "(1, 10, 850, 712)"
297 ]
298 },
299 "execution_count": 10,
300 "metadata": {},
301 "output_type": "execute_result"
302 }
303 ],
304 "source": [
305 "temp.shape"
306 ]
307 },
308 {
309 "cell_type": "markdown",
310 "metadata": {
311 "internals": {
312 "frag_helper": "fragment_end",
313 "frag_number": 11,
314 "slide_type": "subslide"
315 },
316 "slideshow": {
317 "slide_type": "slide"
318 }
319 },
320 "source": [
321 "### Each dimension typically has a variable associated with it (called a *coordinate* variable).\n",
322 "- *Coordinate variables* are 1D variables that have the same name as dimensions.\n",
323 "- Coordinate variables and *auxiliary coordinate variables* (named by the *coordinates* attribute) locate values in time and space."
324 ]
325 },
326 {
327 "cell_type": "code",
328 "execution_count": 11,
329 "metadata": {
330 "collapsed": false,
331 "internals": {
332 "frag_helper": "fragment_end",
333 "frag_number": 13,
334 "slide_helper": "subslide_end"
335 },
336 "slide_helper": "slide_end",
337 "slideshow": {
338 "slide_type": "fragment"
339 }
340 },
341 "outputs": [
342 {
343 "name": "stdout",
344 "output_type": "stream",
345 "text": [
346 "<type 'netCDF4._netCDF4.Variable'>\n",
347 "float64 MT(MT)\n",
348 " long_name: time\n",
349 " units: days since 1900-12-31 00:00:00\n",
350 " calendar: standard\n",
351 " axis: T\n",
352 "unlimited dimensions: MT\n",
353 "current shape = (1,)\n",
354 "filling on, default _FillValue of 9.96920996839e+36 used\n",
355 "\n",
356 "<type 'netCDF4._netCDF4.Variable'>\n",
357 "int32 X(X)\n",
358 " point_spacing: even\n",
359 " axis: X\n",
360 "unlimited dimensions: \n",
361 "current shape = (712,)\n",
362 "filling on, default _FillValue of -2147483647 used\n",
363 "\n"
364 ]
365 }
366 ],
367 "source": [
368 "mt = f.variables['MT']\n",
369 "depth = f.variables['Depth']\n",
370 "x,y = f.variables['X'], f.variables['Y']\n",
371 "print(mt)\n",
372 "print(x) "
373 ]
374 },
375 {
376 "cell_type": "markdown",
377 "metadata": {
378 "internals": {
379 "frag_helper": "fragment_end",
380 "frag_number": 13,
381 "slide_type": "subslide"
382 },
383 "slideshow": {
384 "slide_type": "slide"
385 }
386 },
387 "source": [
388 "## Accessing data from a netCDF variable object\n",
389 "\n",
390 "- netCDF variables objects behave much like numpy arrays.\n",
391 "- slicing a netCDF variable object returns a numpy array with the data.\n",
392 "- Boolean array and integer sequence indexing behaves differently for netCDF variables than for numpy arrays. Only 1-d boolean arrays and integer sequences are allowed, and these indices work independently along each dimension (similar to the way vector subscripts work in fortran)."
393 ]
394 },
395 {
396 "cell_type": "code",
397 "execution_count": 12,
398 "metadata": {
399 "collapsed": false,
400 "internals": {
401 "frag_helper": "fragment_end",
402 "frag_number": 15
403 },
404 "slideshow": {
405 "slide_type": "fragment"
406 }
407 },
408 "outputs": [
409 {
410 "name": "stdout",
411 "output_type": "stream",
412 "text": [
413 "[ 41023.25]\n"
414 ]
415 }
416 ],
417 "source": [
418 "time = mt[:] # Reads the netCDF variable MT, array of one element\n",
419 "print(time) "
420 ]
421 },
422 {
423 "cell_type": "code",
424 "execution_count": 13,
425 "metadata": {
426 "collapsed": false,
427 "internals": {
428 "frag_helper": "fragment_end",
429 "frag_number": 16
430 },
431 "slideshow": {
432 "slide_type": "fragment"
433 }
434 },
435 "outputs": [
436 {
437 "name": "stdout",
438 "output_type": "stream",
439 "text": [
440 "[ 0. 100. 200. 400. 700. 1000. 2000. 3000. 4000. 5000.]\n"
441 ]
442 }
443 ],
444 "source": [
445 "dpth = depth[:] # examine depth array\n",
446 "print(dpth) "
447 ]
448 },
449 {
450 "cell_type": "code",
451 "execution_count": 14,
452 "metadata": {
453 "collapsed": false,
454 "internals": {
455 "frag_helper": "fragment_end",
456 "frag_number": 17,
457 "slide_helper": "subslide_end"
458 },
459 "slide_helper": "slide_end",
460 "slideshow": {
461 "slide_type": "fragment"
462 }
463 },
464 "outputs": [
465 {
466 "name": "stdout",
467 "output_type": "stream",
468 "text": [
469 "shape of temp variable: (1, 10, 850, 712)\n",
470 "shape of temp slice: (6, 425, 356)\n"
471 ]
472 }
473 ],
474 "source": [
475 "xx,yy = x[:],y[:]\n",
476 "print('shape of temp variable: %s' % repr(temp.shape))\n",
477 "tempslice = temp[0, dpth > 400, yy > yy.max()/2, xx > xx.max()/2]\n",
478 "print('shape of temp slice: %s' % repr(tempslice.shape))"
479 ]
480 },
481 {
482 "cell_type": "markdown",
483 "metadata": {
484 "internals": {
485 "frag_helper": "fragment_end",
486 "frag_number": 17,
487 "slide_type": "subslide"
488 },
489 "slideshow": {
490 "slide_type": "slide"
491 }
492 },
493 "source": [
494 "## What is the sea surface temperature and salinity at 50N, 140W?\n",
495 "### Finding the latitude and longitude indices of 50N, 140W\n",
496 "\n",
497 "- The `X` and `Y` dimensions don't look like longitudes and latitudes\n",
498 "- Use the auxilary coordinate variables named in the `coordinates` variable attribute, `Latitude` and `Longitude`"
499 ]
500 },
501 {
502 "cell_type": "code",
503 "execution_count": 15,
504 "metadata": {
505 "collapsed": false,
506 "internals": {
507 "frag_helper": "fragment_end",
508 "frag_number": 19
509 },
510 "slideshow": {
511 "slide_type": "fragment"
512 }
513 },
514 "outputs": [
515 {
516 "name": "stdout",
517 "output_type": "stream",
518 "text": [
519 "<type 'netCDF4._netCDF4.Variable'>\n",
520 "float32 Latitude(Y, X)\n",
521 " standard_name: latitude\n",
522 " units: degrees_north\n",
523 "unlimited dimensions: \n",
524 "current shape = (850, 712)\n",
525 "filling on, default _FillValue of 9.96920996839e+36 used\n",
526 "\n"
527 ]
528 }
529 ],
530 "source": [
531 "lat, lon = f.variables['Latitude'], f.variables['Longitude']\n",
532 "print(lat)"
533 ]
534 },
535 {
536 "cell_type": "markdown",
537 "metadata": {
538 "internals": {
539 "frag_helper": "fragment_end",
540 "frag_number": 20,
541 "slide_helper": "subslide_end"
542 },
543 "slide_helper": "slide_end",
544 "slideshow": {
545 "slide_type": "fragment"
546 }
547 },
548 "source": [
549 "Aha! So we need to find array indices `iy` and `ix` such that `Latitude[iy, ix]` is close to 50.0 and `Longitude[iy, ix]` is close to -140.0 ..."
550 ]
551 },
552 {
553 "cell_type": "code",
554 "execution_count": 16,
555 "metadata": {
556 "collapsed": false,
557 "internals": {
558 "frag_helper": "fragment_end",
559 "frag_number": 20,
560 "slide_type": "subslide"
561 },
562 "slideshow": {
563 "slide_type": "slide"
564 }
565 },
566 "outputs": [],
567 "source": [
568 "# extract lat/lon values (in degrees) to numpy arrays\n",
569 "latvals = lat[:]; lonvals = lon[:] \n",
570 "# a function to find the index of the point closest pt\n",
571 "# (in squared distance) to give lat/lon value.\n",
572 "def getclosest_ij(lats,lons,latpt,lonpt):\n",
573 " # find squared distance of every point on grid\n",
574 " dist_sq = (lats-latpt)**2 + (lons-lonpt)**2 \n",
575 " # 1D index of minimum dist_sq element\n",
576 " minindex_flattened = dist_sq.argmin() \n",
577 " # Get 2D index for latvals and lonvals arrays from 1D index\n",
578 " return np.unravel_index(minindex_flattened, lats.shape)\n",
579 "iy_min, ix_min = getclosest_ij(latvals, lonvals, 50., -140)"
580 ]
581 },
582 {
583 "cell_type": "markdown",
584 "metadata": {
585 "internals": {
586 "frag_helper": "fragment_end",
587 "frag_number": 22
588 },
589 "slideshow": {
590 "slide_type": "fragment"
591 }
592 },
593 "source": [
594 "### Now we have all the information we need to find our answer.\n"
595 ]
596 },
597 {
598 "cell_type": "markdown",
599 "metadata": {
600 "internals": {
601 "frag_helper": "fragment_end",
602 "frag_number": 23
603 },
604 "slideshow": {
605 "slide_type": "fragment"
606 }
607 },
608 "source": [
609 "```\n",
610 "|----------+--------|\n",
611 "| Variable | Index |\n",
612 "|----------+--------|\n",
613 "| MT | 0 |\n",
614 "| Depth | 0 |\n",
615 "| Y | iy_min |\n",
616 "| X | ix_min |\n",
617 "|----------+--------|\n",
618 "```"
619 ]
620 },
621 {
622 "cell_type": "markdown",
623 "metadata": {
624 "internals": {
625 "frag_helper": "fragment_end",
626 "frag_number": 24
627 },
628 "slideshow": {
629 "slide_type": "fragment"
630 }
631 },
632 "source": [
633 "### What is the sea surface temperature and salinity at the specified point?"
634 ]
635 },
636 {
637 "cell_type": "code",
638 "execution_count": 17,
639 "metadata": {
640 "collapsed": false,
641 "internals": {
642 "frag_helper": "fragment_end",
643 "frag_number": 25,
644 "slide_helper": "subslide_end"
645 },
646 "slide_helper": "slide_end",
647 "slideshow": {
648 "slide_type": "fragment"
649 }
650 },
651 "outputs": [
652 {
653 "name": "stdout",
654 "output_type": "stream",
655 "text": [
656 " 6.4631 degC\n",
657 "32.6572 psu\n"
658 ]
659 }
660 ],
661 "source": [
662 "sal = f.variables['salinity']\n",
663 "# Read values out of the netCDF file for temperature and salinity\n",
664 "print('%7.4f %s' % (temp[0,0,iy_min,ix_min], temp.units))\n",
665 "print('%7.4f %s' % (sal[0,0,iy_min,ix_min], sal.units))"
666 ]
667 },
668 {
669 "cell_type": "markdown",
670 "metadata": {
671 "internals": {
672 "frag_helper": "fragment_end",
673 "frag_number": 25,
674 "slide_type": "subslide"
675 },
676 "slideshow": {
677 "slide_type": "slide"
678 }
679 },
680 "source": [
681 "## Remote data access via openDAP\n",
682 "\n",
683 "- Remote data can be accessed seamlessly with the netcdf4-python API\n",
684 "- Access happens via the DAP protocol and DAP servers, such as TDS.\n",
685         "- many formats, like GRIB, are supported \"under the hood\"."
686 ]
687 },
688 {
689 "cell_type": "markdown",
690 "metadata": {
691 "internals": {
692 "frag_helper": "fragment_end",
693 "frag_number": 27
694 },
695 "slideshow": {
696 "slide_type": "fragment"
697 }
698 },
699 "source": [
700 "The following example showcases some nice netCDF features:\n",
701 "\n",
702 "1. We are seamlessly accessing **remote** data, from a TDS server.\n",
703 "2. We are seamlessly accessing **GRIB2** data, as if it were netCDF data.\n",
704 "3. We are generating **metadata** on-the-fly."
705 ]
706 },
707 {
708 "cell_type": "code",
709 "execution_count": 19,
710 "metadata": {
711 "collapsed": false,
712 "internals": {
713 "frag_helper": "fragment_end",
714 "frag_number": 28,
715 "slide_helper": "subslide_end"
716 },
717 "slide_helper": "slide_end",
718 "slideshow": {
719 "slide_type": "fragment"
720 }
721 },
722 "outputs": [
723 {
724 "name": "stdout",
725 "output_type": "stream",
726 "text": [
727 "http://thredds.ucar.edu/thredds/dodsC/grib/NCEP/GFS/Global_0p5deg/GFS_Global_0p5deg_20150711_0600.grib2/GC\n"
728 ]
729 }
730 ],
731 "source": [
732 "import datetime\n",
733 "date = datetime.datetime.now()\n",
734 "# build URL for latest synoptic analysis time\n",
735 "URL = 'http://thredds.ucar.edu/thredds/dodsC/grib/NCEP/GFS/Global_0p5deg/GFS_Global_0p5deg_%04i%02i%02i_%02i%02i.grib2/GC' %\\\n",
736 "(date.year,date.month,date.day,6*(date.hour//6),0)\n",
737 "# keep moving back 6 hours until a valid URL found\n",
738 "validURL = False; ncount = 0\n",
739 "while (not validURL and ncount < 10):\n",
740 " print(URL)\n",
741 " try:\n",
742 " gfs = netCDF4.Dataset(URL)\n",
743 " validURL = True\n",
744 " except RuntimeError:\n",
745 " date -= datetime.timedelta(hours=6)\n",
746 " ncount += 1 "
747 ]
748 },
749 {
750 "cell_type": "code",
751 "execution_count": 20,
752 "metadata": {
753 "collapsed": false,
754 "internals": {
755 "frag_helper": "fragment_end",
756 "frag_number": 28,
757 "slide_helper": "subslide_end",
758 "slide_type": "subslide"
759 },
760 "slide_helper": "slide_end",
761 "slideshow": {
762 "slide_type": "slide"
763 }
764 },
765 "outputs": [
766 {
767 "name": "stdout",
768 "output_type": "stream",
769 "text": [
770 "<type 'netCDF4._netCDF4.Variable'>\n",
771 "float32 Temperature_surface(time2, lat, lon)\n",
772 " long_name: Temperature @ Ground or water surface\n",
773 " units: K\n",
774 " abbreviation: TMP\n",
775 " missing_value: nan\n",
776 " grid_mapping: LatLon_Projection\n",
777 " coordinates: reftime time2 lat lon \n",
778 " Grib_Variable_Id: VAR_0-0-0_L1\n",
779 " Grib2_Parameter: [0 0 0]\n",
780 " Grib2_Parameter_Discipline: Meteorological products\n",
781 " Grib2_Parameter_Category: Temperature\n",
782 " Grib2_Parameter_Name: Temperature\n",
783 " Grib2_Level_Type: Ground or water surface\n",
784 " Grib2_Generating_Process_Type: Forecast\n",
785 "unlimited dimensions: \n",
786 "current shape = (93, 361, 720)\n",
787 "filling off\n",
788 "\n",
789 "<type 'netCDF4._netCDF4.Variable'>\n",
790 "float64 time2(time2)\n",
791 " units: Hour since 2015-07-11T06:00:00Z\n",
792 " standard_name: time\n",
793 " long_name: GRIB forecast or observation time\n",
794 " calendar: proleptic_gregorian\n",
795 " _CoordinateAxisType: Time\n",
796 "unlimited dimensions: \n",
797 "current shape = (93,)\n",
798 "filling off\n",
799 "\n",
800 "<type 'netCDF4._netCDF4.Variable'>\n",
801 "float32 lat(lat)\n",
802 " units: degrees_north\n",
803 " _CoordinateAxisType: Lat\n",
804 "unlimited dimensions: \n",
805 "current shape = (361,)\n",
806 "filling off\n",
807 "\n",
808 "<type 'netCDF4._netCDF4.Variable'>\n",
809 "float32 lon(lon)\n",
810 " units: degrees_east\n",
811 " _CoordinateAxisType: Lon\n",
812 "unlimited dimensions: \n",
813 "current shape = (720,)\n",
814 "filling off\n",
815 "\n"
816 ]
817 }
818 ],
819 "source": [
820 "# Look at metadata for a specific variable\n",
821 "# gfs.variables.keys() will show all available variables.\n",
822 "sfctmp = gfs.variables['Temperature_surface']\n",
823 "# get info about sfctmp\n",
824 "print(sfctmp)\n",
825 "# print coord vars associated with this variable\n",
826 "for dname in sfctmp.dimensions: \n",
827 " print(gfs.variables[dname])"
828 ]
829 },
830 {
831 "cell_type": "markdown",
832 "metadata": {
833 "internals": {
834 "frag_helper": "fragment_end",
835 "frag_number": 28,
836 "slide_type": "subslide"
837 },
838 "slideshow": {
839 "slide_type": "slide"
840 }
841 },
842 "source": [
843     "## Missing values\n",
844 "- when `data == var.missing_value` somewhere, a masked array is returned.\n",
845 "- illustrate with soil moisture data (only defined over land)\n",
846 "- white areas on plot are masked values over water."
847 ]
848 },
849 {
850 "cell_type": "code",
851 "execution_count": 21,
852 "metadata": {
853 "collapsed": false,
854 "internals": {
855 "frag_helper": "fragment_end",
856 "frag_number": 31
857 },
858 "slideshow": {
859 "slide_type": "fragment"
860 }
861 },
862 "outputs": [
863 {
864 "name": "stdout",
865 "output_type": "stream",
866 "text": [
867 "shape=(361, 720), type=<class 'numpy.ma.core.MaskedArray'>, missing_value=nan\n"
868 ]
869 },
870 {
871 "data": {
872 "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXIAAAD7CAYAAAB37B+tAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnX+MJkeZ378Ptmf5dbPLgM8/N7dWwCc23NhGwr6EO7Fc\nwNhSgi9ShNlVyAlOEQpZQAQns2sHZ+OLHQb5yEm3AkXHDzmEmeCYHwIFDtvEm3CKzE/bY1gc2zpW\nYn322vHCDhd0s177yR/dNVNvvVXV1d3V3VX9Ph9pNO/bb/+o7q769tNPPfUUMTMEQRCEfHnR0AUQ\nBEEQ2iFCLgiCkDki5IIgCJkjQi4IgpA5IuSCIAiZI0IuCIKQOWcPcVAikphHQRCEBjAz2RY6/wC8\nGMB3ADwI4EcADpXLDwE4DuCB8u9abZuDAB4D8AiAqx37Zd9xU/1T55/jn5Rdyj0LZc+13KFld2mn\n1yJn5r8hojcz86+I6GwAf0FE3wDAAD7OzB/X1yei3QCuB7AbwEUA7iWiS5n5hZAnjSAIglCfSh85\nM/+q/DgH4BwUIg4A0+Y9cB2AVWZ+jpmPAXgcwJURyikIgiA4qBRyInoRET0I4ASAu5n5u+VP7yei\nh4jo00S0o1x2IQqXi+I4Cst8LBwZugAtODJ0AVpwZOgCNOTI0AVowZGhC9CQI0MXoAVHmm4YYpG/\nwMyXA7gYwFVE9HcAfBLAJQAuB/AkgD/27aJp4VKDmY8MXYamSNn7J9dyA/mWPddyA+3KHhy1wsyn\niOg+ANcw86ZwE9GnAHyt/PoEgJ3aZheXy6YgokPa1yM53wBBEIQuIKI9APZUrlf2hLp28ioAZ5j5\nF0T0EgDfBPBRAD9k5qfKdT4E4A3MvK/s7FxB4Re/CMC9AF7NxkGIiNkWQiMIgpAg206ub2rYxsI8\nuZZ1jUs7qyzyCwDcQURnoXDDfIGZv05E/5mILkfhNvkpgPcCADMfJaI7ARwFcAbA+0wRFwRBAADa\nB547vN56PzFEtEqozXX1Yy4tLOPm0oW8fHKpN1HX8VrknR1ULHJBmBlo31Y/WQzhNtGF03UsU1xN\n4XaJtsnSwjJuueE24PYt/VrTBjgudqxrLu0UIR8ZekXmFWuIqCB0gl73gK36p0Ty9P75aEKuhLnN\nQ+KZ7ec6f7v9rBs2P9/w/O0Tvx07+3TlvheZ6WbcxDc8fzvmz9rYbIdt3TEi5DOCaVkM8ZonzB5K\nUE0xNeufWs80MlS9fWb7uZhfKoRyfXkOAHDuqWec+9O3A7C5rULtw0aIIMckhrU+GiE3X2PWLHlb\nun69SRnXK6IIutA1Pp/36f3zU8ts637vldsnvle15ZtxEwPAP6bbwgs6ALE0KVshV0KtLoQp5Eq4\nRKgmMS2k0688Vvywd1FcLkJtaB84pN64LHObkAPAR1ZuAjApxKboTblsLiSs+UauJMjMC7nCaXnf\nUC6/fXatcEGIgcvtUWf7qm1NUX5odXp1m5BPuWxu256FmMf2DoxOyGfZfdIUsxFhdQ3Mi3IdhU6x\nWemq47OOK8UUdHPbFHnDs6eieguaxpEngwh3GKZVNSXelvXnDq9PvfqK+2WchLpIYhwHcNejucPr\neGb7uTimLatq47wC2nZyqz7fxTfihudv773TMpTFD/d3rGws8tQZstO1SqxD2PSla4LeVYPvS0yE\nMFyiW3WffGJt1km9foW4U9pia49D8IZnTwGI14eXvWslZYZw+8QQ7ypEbMcD7QP36Urz1c9Tn9s2\nFSYYu4/rZtzEt9yw1YE6pD89ph6IkHdEn6O6dGILuYj2OKmqJzHvu2tA0JCoqLah/Om7zsxNDAhq\nS/Y+8uS4gVh/yncl4n1Y3nWPozfQtpEOQsesrgF7F50/V90/30jhuiGJLqoGEeWGPr5l+7s2wCvd\nH1Ms8oasP7+Nj519uujQqPlaaLNc9AbV
l3h3QR1BDzlPeUA0x3V9m9Yxm5Db9ml70NdBRbNEGQlZ\nduYvLSwD6H7g0K4zc9j+rg3MHV7v5IHk0s7KiSUEO2rYsG8IsA2fiNt+19erX8r+Mcsfcm5CN/AK\nyKw3Vd+b7N8n8E1QogsUrpHQhFYuTu+fxy24lZZPLuEuvhF38Y1tdudkkZmULrgGQHXFaCzyIQRw\n/fltfPtZN+AW3Bp0jK6FzBZGOAuk+pDr0z9dlzZvgCFRKm3EXYUmzp+1semiWPwwQH/FzuOHsjkS\nvIMBReoNokt346g7O32VJKVGbitnDFeKHtplG3RhivsYBT/0PvcR+tjqfpaRJURrXCfCpGlHo2sc\ngY8mQu5b14USdKDIRrh8cgmn9883atP6fe9SzHedmcO5p57pzM8/qs7OOg1lCJGv45uMaaWHpvEc\nm4gD9fztXdWJKPdy72KxH/VfZ3Vtcx2FyxBwlUUXbfNcQwTdZWmb1zZU2H2W++n989iOjYnyNWWy\nfPPgC7t5luvpb/skK4u8S9dEVQVVy2wDZ0LwNZKYVrkqW9Ny9kFObwR1LNvOClEReVIH/dqrOuKy\nHl1vkFXr2HA9RNtY7rU61mmt2NfeRXxk5aZonZ6LHwZuvv3GYPdqW7J3rYRUmBgCoVc4s/KFZnTz\nlc01HL5LIUhZ1HOgSzEf+t70+VC11fNQC99Ev261QiBX1/AQLmtS/E2GTBeSrWslpsBVCaarUrle\n6WzLbbOgmBaQuY8if0R3jUkEvB0hVmTM8FFXveqCPutG3XDT0OtYq99j7yJ2fW6udn6WXWfqRaf1\nTfJCXgeXYKrlVbkhYuYsUcedO7zuFHDz+9gF13WOuZ97mxBLX0d0zPkt28Z2d1GONmxO9Yb653Lu\nqWfwPYSP9FTzHtTtwPQZgk07bZ3HSt210mTQiC3uVG8cthviO05Ig9L3rzdOU9irlgvD4xO9kIiN\ntuhuOGA89aOucMXulDaF1TdsP4b7ZNvJdT69f6tjVUXIqMgWoP4o1iwHBOk30mXZ6uvqVrVvail9\nPdoH74CDkMbks7yrGmGXjbSLGcubklJZXLgG0FSJhj4wJmR9E73u6P9P759PR8RX16b/GmC2vabF\nafMGra5pVwODFBsL8zR3eH0qxFElDIuap9xnkRPRiwH8TwDbULhh7mLmQ0S0AOALAH4DwDEA72Dm\nX5TbHATwHgDPA/gAM99t2a/TInf5Gn1pVpt2Mrlm9TY7Jc1j2ywml+D36e80yd1l0Qf6/YnZsEJn\njq+qO643vSa06awHYBdvFU1jiayJ5TqIaZnrkWdLC8vO6JWYHZoqnUeMBFqNLHJm/hsAb2bmywFc\nDuAaIroKwAEA9zDzpQC+VX4HEe0GcD2A3QCuAfAJIgqy+s0nrHnzXBMftH0q+ypzEyu7dWNx7KcJ\nqYt41VtWH8ePhRpKHjqk3OYH93WS20Q+5Pp1fn1bWOcmLis7pi+5al+LH44flTJ/1gYtMlPMLIgm\nlZ2dzPyr8uMcgHNQdC68HcCbyuV3ADiCQsyvA7DKzM8BOEZEjwO4EsD9vmOEirFrIEMTfBXcF6Wi\nW+uhQlnVeVV1vLHieuPpki4y7RVW3vTyKmvchms8gHmN6u67ahwDENAOTevbI+AxB1Y1cVf52FiY\nJ9fDdn15rsP4se6o7OwsLeofAvjbAA4z80Ei+jkzv6L8nQCcZOZXENGfArifmT9f/vYpAN9g5i8a\n+5x4PWib68HXuamvH2optbViXA0mNFwxdH8x6KvTtYuHktkpGSJIVYNgmuKqg1VuFeVCLKYxW596\nC43xhucrw8bCfJwxDJqoN528ou/cNHpmROViuYv7G9zThMadncz8QulauRjAVUT0OuN3hj8EKHpY\nTN1IljoZ1Lp4FW1jcbYRv7qv3V27OroKp1Pf1X02j6Ove3r//KD5rtWbnBJpvV6qcsXs5Ky65m0z\nC26y
dxHYu5jVZN68AtpYmKdbcCtdtpdx2V5OWsR9BMeRM/MpIroPwNsAnCCi85n5KSK6AMDT5WpP\nANipbXZxuWwKIjqkfX0z9vJ9QeWwPJX1hqkqZtGgQ/YYH1fcuC+Spsl+ba4J/bPveC6hSM2VY7Oy\nbXVgShCNZP5dJ8qy1UHbOtsOb8Uj07754i1iZXKdybJPjoFQhHSQtiGHvPgxswymlFxPh4j2ANhT\nuV5F1MqrAJxh5l8Q0UsAfBPAR8sdP8vMy0R0AMAOZj5QdnauoPCLXwTgXgCvZuMgtteDqjzdQPVr\ncV3rwvfq28WgjLq4ImNCtnMxpBvHLEOsPg89EgEYfoYZXz1sMxDEPE+1Px9D3e82cd4h+26a7TEn\nNrM0avW56RD9CwDcQURnoXDDfIGZv05E9wO4k4j+EGX4IQAw81EiuhPAUQBnALzPFHEXthth3rDN\nSmdYW1Wjrlwi2KTHP4YPvQ5NHiipDjbyhZA2IVWL0ayLurC3ncihSOfQjBh1V2+TsTupQ98CXNke\nxyjmQNi5eYWcmR8G8HrL8pMA3uLY5jYArVKL+V6ZXB2dIYN6Qmlj0aaALv5mpE0qjLXRuYj1ljD1\nkDi8Fa9uw1YX1PcYuI5b5QoT/Oh6Vtwrf9tNbmTnUFaW2QmVyvDoJg3O1olZtS/z3Ju8wVQRsxNP\n79RM9eG67eQ60z7UztFRB3PfVR3W5u91r531DTlgm1TfnlKlbp1JTshduGJJ9cpkVixdnH0i4sqB\nYvs+BE3FvOp8becdep2aEjMm2IzpHzoixWRjYT5q/LMNZbn5xiK0iWWPSdeCPuaHRVX/X5JJs1QS\neFsoU9XNcg2oCInpNbcfgjox6G2OYRsY00dDiCVsesWOOVAsZ2ydoaF0Ff1SRdsR2iH7zgkz+mpq\nbMErtyP5iSVi53H2WdopEtM378sj49pfVw03ZFRhXcwKnpIl3id6ZIPKtgf4O+pjDTSKQZdCru8/\nB1x1Ws/bg1WyCnkyrpXYN7JuxfS5aOruw+bSGaKh1HGTdFG+0Nd65UtuepxZFXGgOPfQ8+96dG0K\n+xkb6uFcdd+SsMiJ1jjWnIQumligoVZLnY7Rqrj1utuF7qfPeOKQa21bp64gN0n2P3ZsGUN1unhg\ntx081JVVPnfYPvdAqkxY3nBcV4dFPqiQ225cH765VCJSTJqO/jRdSGbfgG9fXVnivjLq6+h+wCYz\nsIglN4kpBn260JrQdVscQ/2Y0MnUhBx73ce1+fFSqnxdYqvYIZ20IftRy6t88XWvdZP+B3V/2wi5\nYCdUzLtoX6HC3KVBoSfJG1udymry5Sr/bRIWtcr21rFLCKjuuFQ0DSF0hVw2cdOEjh7cfGCsVK4q\n1MTWflydnF0fuyuqLO1oycAyITmL3BSRkERRg6DnYu5BzIHq1+SmbimbZWbLh+KaTLaJO0ihLPKx\nWU4poI+QtoVrAsOmc2hybFtKalvdGWNUE+0DJx+1Akze2CYDGTon0kwoMbFZ0+afa11FiPWuRlJW\nNb6QdYR+sHUi6g/qLowhV7u1DQSrGx48d3h9KiV1iIjnjDkXsXO9VCzytlEZvWIKus0ij+R6qftK\n3LaTK3QAlbk/vZyhDXQMVlLqmHOHVrkp24q7+RDXJ85Qy0NT8vr2raiqQ2Ppf9m8ZjlY5FU3MmTA\njEs4msyObtsHr4CYF0kl0rcJNa+AXL/59u0qq0uUQyxiff0qzAbl26aOD95G7g0rR0JCQOsIeEgn\nvBIg3bI03xLNuqzXYf3NssnDpU6cfcq0yn7YF3Vvkm193YI0f1cXwZfjvOr4XYcxFf6vwoonLHpf\nk0JFtO51DWnoIdtWvUWMoWHliK8+NHVjmvsMsex1S91X50IHk+n7bHAK2cArIFq1u1iSda2YQhwy\nWMBVecxp34Dp4a+uY5vb+8oRul4IbV43Q7YLPX7d/fpe3wG3T1PEvR
tc/uJQ11nbDnRfO7a1F1+9\nM+uR2YE7diEvclBdln4cucvPanba1PHjhdxcr5ivrk0k7woR8tg5Y2yEhiTGILTz0vSv28ro65wS\nMY9PlaFi3lvbd58VXyfaJGQWMNf+9bqlt7Oxi7eOL2olKSH3WbT6zYst5EB1ha8l0qtrhX88pFNU\nI1aUTmwxb/t2oJiVMLGUsA3Zd4Ws1nkLcz3cQ+qKWkfPuBly7FkQ7coHW2rZD5WQ+26OLtpmAw8R\n1Lo33vcaau7Le3xXmKIm5K7oEFW5QwfddBnFExIvbltXjz0POY5Y5N2h6pKtztSJSqqD603RjGcP\nmbBa37ZRYTKjUtdyiFox4RWQ6tE2YyltkR2+ikn7wCoG1YxFdVG7Iq+u+a1wi8CbZdZnvnGVp7GI\n14iDt4m4q1y2WPY6DW8skQUpYovmCnXdNcUWUaViwM11q9piEuHGQ1BzzMpgUSuuhm5OKmG6NHS/\nWIjFqCySucOWYxluGn0dMwSvsT/OEU9edzBESEhgbHyRQDEQS3xYTCvdFhYY0j/S1CVYZUzNkiXe\nljQtcldstgc1rZYeH+57TXGFRbnis6dQAt1gtKcZJ2uWRy9HcHksx5iiweAk2/F9I/TEuk4LVz03\nqeNG6wsR8XCSiCOfQHUSrjhGSzrEyBfC5qq4TUXSWq4AbC4IszzW/aprYjl3n5slZlSL6fc2O6Y3\n9ytJsJKi8EsX98fmXtHddXXfEhUhoYx1mNW6pLwRAGobXV6LnIh2EtF9RPRjIvoREX2gXH6IiI4T\n0QPl37XaNgeJ6DEieoSIrq53KuXoSctcnUDhbtEFZGNhnkJcDjYBnVhf+bZX1+xWgMX3zbxIdYfh\nVz04JsqpH8+w/qv6A2JgK2eVheTygwrDYrYRV3toWq+6qItijdfDG7VCROcDOJ+ZHySilwP4AYDf\nB/AOAL9k5o8b6+9G8Rx9A4CLANwL4FJmfsFYzzv5srUsDh+13itvDvyJ+npoxJOrY1dts4kh9iHD\nm13bh4RqBe2/Aa57oB8zxLUi/vF+8WWuDBXi0Le+tsxynZiwygFLsIR9QJDXtcLMTwF4qvz810T0\nExQCDcD6xLwOwCozPwfgGBE9DuBKAPdPFThS+GAxqGB+SuQ3FuaJsCXyXmxuC8sy2/B+575t7haP\na8jsWAQAHN6lLa/XYKoaaNPOSz01qlq2lVq0+M3n5hKLfVhsYYjmwJ++BpoJ0yhjcUrQS0OSyK6b\nwT5yItoF4AoUovxGAO8non8K4PsAPszMvwBwISZF+zi2hL8VPlF3/VYsn7f62rayiRmdluYT0OGX\n9lZsVwiixwUTGtrnI6Z15POZVllMIuLpEyLMrqil2GMYmvrmx8yUe9nWZ6gRNCCodKscAfDvmfkr\nRPTrAJ4pf/4jABcw8x8S0Z8CuJ+ZP19u9ykAX2fmLxn7Y+ChSt9yl36yKSH3UTfaI2DSidAKu7Sw\njOWTS/WOH0DdRhiSLyX0d996QneE5iuqGnDWScemhvjHHffK41qpDD8konMAfBHAf2HmrwAAMz/N\nJQA+hcJ9AgBPANipbX5xuczCJ4HVfw48fAg4cWS6wH1M4hB6DK0z1LptwEhOk9CImToibu6vTeii\nKxzNtT8VdugaraeW29YT0qDJ4KG2A4dMRMQNThwpNPLhQ8DrvuRcraqzkwDcAeBZZv6QtvwCZn6y\n/PwhAG9g5n1aZ+eV2OrsfDUbB9m0yG0YOUpcESxtmLDGldjWfXDYRmzqYYKeiBbXIIy21N2fz9ry\nve7KQI28qcrnYcOXT6VtndORejWdH2cy+KFB9kMi+h0A/wvAGrC58xsB7AVwebnspwDey8wnym1u\nBPAeAGcAfJCZv2nZr1vIHbgEXe8UCBV9Z0WusqxdfnS1zLa9R9Rto+dcVK3X9qFQZ9SeCPk4qCvo\nsUZ2uvYz63VqU8uMCLXT++c1bU
ktjW1NIcfexenEVWbPLsLEvDMh9+3D3Beqc3cPRZWlJW6RceBq\nB02jVprmcJl1AVcQrbHSB/cAwQbhh6mhz6ITYqF70YW3qjMzxOq2LXOIuqsjKYZlHgvfMWQiiJFh\nGCM2N0qMXDsSwljB3kV3u6swENMXciWISnDLyhYs2KGE+MirQhWrsDQUnRCRHlrEhfGgT+zAvEim\nb1YRo0NT6lQYzjQdFaQr5LqAd0FI1InGZqC+/jpqE/aWybRyYGlhGcCtQxdDiACvgKrmiK0TQ25G\nLW25TSbHc7SdRWtsTOmK543eRprZDwH3yMiU8ZXP5n5pcT5tsyM2pRBxYVSsrgXHmPsw66HP/SZ+\n8S3cMeMGHhdwuha5i5ZiXgxzremW2bsIojWe8su7OkGryhjhgTSU9a7i2m9ZGOTwQodUzaHpMxhE\nmJuhd3Bu4rLGPbqRjkXeIFd2E6yCHEJZPj3fuflbG3JxqwjjZajJjGfezRLBhZxe+GHTATo1aGqV\n2zIgKir3V5GUKychl4iVcaInRDNTKlTlFmpiHPmmbpwFmgVsNByi3zlaJEpfVjkQf8To5v7U+diE\nWxAywXxYe3OVN2y3syjeithRd8MLucKM+OhQ1BtdxLJs5rZEa6zPM6pPN8e8OPE3sb+K80s15nZj\nYX7KWhPGievNyxT1NoJsdVUKtRnWtRI5kqMOzry/LfdXB1t+85RRDVsGBM0Oroe2bmiIENejneak\n6loZmAmXSAt0y7xy3X1gWwePaYWnFOonwj2b+CZv7mPKQSGM4SzyveVxB/Qdx7bKzf36sI2iazoK\nrkt3hwi4YE4TZ45AbltHZmnav/Zak6JFnlIHYCSffKiLxZXv24VtsEWsvN7mPiRnuKDDK6A2MwS5\n3kAVUtfak59rJWIn6MTTcYCHSsxp3No2BH3ihzb7EcaHbpHbEmr53gbrxogr0Z/52PKaDCfkDWbV\nmdguoqDH8pPX6fBUVkjosGd9OH5MsdX3JSIuVNG1T3zsPvc2gxF9pGeR15l+LSKxxLwuSjyrJo1Q\n+CIE6gqxvt9tJ9dZwgoFG7wCqnKj2OpOk/q0sTBPekiiqpdSP/2kJ+R9snfRns2wAW0HGOkNpa9Z\nxZvuWxrV7BESYugT3aYhiq52MTMEZkGcbSHHpJg1tcpjjBKtFNXVtaDGYLPKzcZgWuJqO3GtCD6a\ndIK3DVHkFWweU+LV3aSXa6UPLEKtKslENrIaecrbsO3kOsealLbKUraFONYV8FkKFxPc1Hkrk7qy\nxeZMZ74BkS5jcpUyn7MzBo4Z7fXJhEOFPGauFr1BtJ3T0BTZ0MYW2tBExAUTVSdUO1LflxaWN9Me\nm8xq/WkcjVMx+fJwrhVdVG1JpuruoyWhF9iaN6UlMRPwu159Y01CMasNUHDjcn24RBxo1hE601Ro\n5LA+8hABd/1uy5jo25fxm813R/swmavc2Ca2gJt01ZkT0nkqDUuIhTzs+8cr5ES0k4juI6IfE9GP\niOgD5fIFIrqHiB4loruJaIe2zUEieoyIHiGiq2MUspaAOn1LbjdJVWdMF1Z4k3LExHxo1HmISEMV\nqlBW+kxGmgxA1VRvzwH4EDM/SEQvB/ADIroHwLsB3MPMHyOiJQAHABwgot0ArgewG8BFAO4lokuZ\n+QXnEQKmRZuYHDZ0wJBnXZ9gbnZEoHyArPQbVx6bjYV52nZ4ujN17AMvhDQo3C1F3aN94Fmvd7Z8\nNbY35roPQK+QM/NTAJ4qP/81Ef0EhUC/HcCbytXuAHAEhZhfB2CVmZ8DcIyIHgdwJYD7nQep4+f2\nuVkCp0uyVSTzojlnAdJmUKk8UE1U4it9GHTM42wNsLD3BRTXRawnoR9C3+oaT82YKLYEdy5BryPm\nwT5yItoF4AoA3wFwHjOfKH86AeC88vOFAI5rmx1HIfztadKxGTjAZ8pXbk4eMaLcD64GJO4SoWvq
\nGibbTq6zGrQ3phwsrtQc+rK6FnmQkJdulS8C+CAz/1L/jYv4Rd/F7efC61OsuaZbq7EvZ8XpMLlW\nV2Jqy26oVxQRcaFvqhJtmalz9f9q5Cjtw+b/Pso8FCHuqCofOYjoHBQi/jlm/kq5+AQRnc/MTxHR\nBQCeLpc/AWCntvnF5bJpHj609fnX9wDn7aksbFt8FyT0adjXa14st4oabGTuj1dAtG9+kFnThdkl\nJHe+q536ltO+eZ47vJ6NUVJ1HZRr5ex3/necWf4L4OFt3v15BwQREaHwgT/LzB/Sln+sXLZMRAcA\n7GBm1dm5gsIvfhGAewG8mo2DTEwsgTAHf9NOANs+XEzs2zbyquFs4XXQ5/+M4ZPvegCPXhlzaUTC\n8Oj1Uk0d6JtWztV2zd9sRkvKKEPL1Xc3pXuOkZ1VrpU3AvgnAN5MRA+Uf9cA+CiAtxLRowB+r/wO\nZj4K4E4ARwF8A8D7TBE30U/A5Tdqkrc7yjRU+mTQPYg4MBnqGKOHX8RVSBF94FpVHa37Jp2bq6Xu\nG4iNwad6M29ELatZI2S70FSxmxj+8L57z3MYDp9DGYU8iDkoLaf6WOe8T79ye2JD9EvqukraWKk5\nDk6IVeY2qWclba3QBzHFN6f6GuO8B7PI9adKrSdSgNXu2842MMYplppFnnssa1NftnlvcrJ0hPzo\nUoBTr7vmudu0LlmLHJiebqyPnCO+ZWPATPDf5DxzsmoEAfDX89Trs6mDei72qjzwSQg5UG9ig6bi\nW9kBasaI9zztW0z0a9Sk88dW6VO3aIRxsbSwXHubqrf11DtCm07wUhlHPgRFTuNELvjexazCmRT6\nNVTxtbRvvrimK/5tRcSFITDrnS8NblPGmooiGYu8DjFiyq14Zg7KEV18Qy0R1+tn6q+lwjhpYpXP\nIkl0dtrwDQ4wCe38dGUcs5GzgJuYMxDZzs0cEm3DnHXIZqWrwR0Rii3MEH0bCrnWUZd2JulaCRFx\n20CAWCkyxyTiodTNtiYIuZKriPvI0rViI2RAkC7QUUZ+ZkLIDEFVYZ1mj3poFkVxyQhV9F1Hxjgu\nIkmL3EabGW1s27ks0DFa42bnsc39EdrBXCceXa0r7hYhRcY0KjlJi7yJSIdMLKx3+M2aG8E836rc\nzjEmarZtb8a3C8LQjKE+JtvZaU4LtZX9q9lUbiGiNEZrXMfs9AQmz9kUdnUdzU5OfVkVIdN7jcEi\nEtqRipDqOpGiHri0M0mLHCguorIKJ0RcF28jO6GJ2k5EvCAkUkf/c60fW3jHYBEJzaF9mJpTdmiK\nHOeJjGUccxLpAAARaUlEQVQJIFmLHHDEPttm6FFzdqrPLjzrzIKQA2GhiF0f14dY57OFemNrmkMp\nNrZ5M1PShuwscsAirqYQq+nc6k6/1uF0banTViibWimhQ49tgj+WuRoFO6lY467orhzqXtJC3giX\nSFuWq5uV0hO3D1RisiHOOyQpmi7m206uy3R0I0bd26Gs8Sn3rbZcJ3VjIn0h91nPum+8QYKrXGcU\niYFqQOYM5VUVNoao1tmHsuJFzIVQQi38qvVsKa9TJX0hB6bF3JWl0CXmeiepcsf49jej9FVRVedW\njBBHIX/0wIa+aBrinCrJDwhiXiSiNQ4SWyXYtomTAbfQZ5yuti2T4Yfzg76Z6BPwyiCi2SRWp6cr\nbUebFNgFaYp5Hha5z5oGqvOIh0SyCBPhh30eR7fGmmRsFPInZp3T02/k5B5pQ/IW+SZKjH3Ca4sx\nn2Fruw9oH1p1Rtq21S1zQWhKLNE2B8alSB4WuaKJ9RzokiFaE+FoQFfWu5rqqot9C4kS4e3Y5ToJ\n7pPJ9A29UsiJ6DNEdIKIHtaWHSKi40T0QPl3rfbbQSJ6jIgeIaKro5fY5V5pup4gCEmgJjiP2fFZ\naz9G35pu0adsjQNhFvlnAVxjLGMAH2fmK8q/bwAAEe0GcD2A
3eU2nyCieFZ/E193JDEXf209iNaY\naK0IZ1T/HddQrdt3GYUE0dpxXUG3peQIdq849ENNkxhciIGoFFlm/jaAn1t+sp3cdQBWmfk5Zj4G\n4HEAV7Yp4EQDV3HjNQb9VFJD6EXMBaFblFVeB9eAHpegh8xFkINfXKeNtfx+InqIiD5NRDvKZRcC\nOK6tcxzARS2OUeATb9e6dbapEPOQDH7CFlbrus79EGYbo574kt/pYYamQKtcKepPj2Q5vX9+OpNq\nxq7YpkL+SQCXALgcwJMA/tiz7rBWbEQBETG3s+lGsblIQq592YjEvSK4aDOAxzlPb8bCbdJIyJn5\naS4B8ClsuU+eALBTW/XictkUZYep+tvTpBxebKM3Q0MXhTjINRUawLxIrrbqiguPPTI0lVGcRLRH\n10rXeo2EnIgu0L7+IwAqouWrAN5JRHNEdAmA1wD4rm0fzHxI+zvSpBxBBIqJGeq22UlHa5ybv6xP\n6lrR5voT/Q4i/EJJEzG3/eZaVkUqk5Ez8xFdK13rVQ4IIqJVAG8C8Coi+hmAfwtgDxFdjsJt8lMA\n7y0PepSI7gRwFMAZAO/jIRKeA/aZhBwVwxmvvCks4lIx2RRkVxphV/4bLWbf1rElQ/MFhaofRGts\n1iPbUH7faM6mYp7qkHyTpCeWKNaN4Dd1WHq+ASe6pSgW+TSVQq5wpVTwTNMn11kwcUWM6eJdJ69K\nqLinNigty4kloiERE91RdV1Dk52VpPA6KySIx81iRrX4IlnU8pDkXKmJuI98cq00pYE1Lvip/ZZk\nm9nJtk65vG3+FmGEBPafhGZPNMMUzW1zMyiSt8ibDBBwUgpKHZGQkMMtWo/A9I2+NRKeyWhPwUbs\n9ujaX27tPnkhb4XD8qsSCP13W3rVWWJi1iAluF1GlyjLPPMBGkJkSiNgaWG5schWhSrmlFvFJAsh\nb2SVRxKB3J7MMfE+8ERkhYxQ7djlN9fJza0CZCLkQqI0EfOanc6S30bQWT65BKBbAyvH/pn8hDxU\nPOrmW/GQ4xO6LZVD7dtc25rhiiLmgs7SwnL0fW4szJP6i77zHshPyOuixzk3dAfk+IRuTdV0eVXT\n70VGxFz4yMpNmyIeS8xzFm+dvIS8iXhYrL8qn/vm8ODVtZm0xoHAh5d+D0TMhS4x6pdysQgFecWR\ne0YEOlEWuQo9DOw4jRr2OAv4JsCWwVhCW1bXgJW4uxyDJa7IxiLfFFZbDhUbutUuoWzdEeIr18MJ\nG96LWY4eEgp0K3zqTXnGjYW8LPJQRLT7QX9DsvVDtOyb0JlVF5cwzR/tuxVAOhkKUyAbixwIdHd4\nREPcJQ2oM8uS7drLQ1VoCdEazz27C0CFVY7ZfXPLSsitiPtkOKqud8P7YcuDIQiA582srGuhFvqY\n/ONAhkJutaoDBGMmQwhbEu0Npqb/0pa8KGTCXGEGiDg+ZExkJ+RCz8R4y4lgmQszTM3645useSxx\n4yZZCnld61qs8bywWePA+F6HhQYEhrXOWifoOKNWNETE+yF0tqUQZq0RChWsrgGHd7XezZgNgSwt\n8lBExPuh6jq3vQ+n98+PuhEKflTEygSO4AZXR/nY68/ohJxXQOpv6LKMFf0ah15ntZ5rffGHC0EE\ndHLWmbtzLCQ/+bIwPMo1EuvhaHO1uPzip/fPy5vVjLPt5PrkRC+e0d1jrzcu7RQhFwZBF3PdgrLN\nuTj212LBjS7igGFdV+RemiUhr3StENFniOgEET2sLVsgonuI6FEiupuIdmi/HSSix4joESK6Ot4p\nCGPCbGRq+i1xsQhebHnxHcxStswQH/lnAVxjLDsA4B5mvhTAt8rvIKLdAK4HsLvc5hNENDo/vBAH\nU7Rtvs0u/ZvbTq6zafEJ6aFyj5/eP18rdfIYLXIXleGHzPxtItplLH47gDeVn+8AcASFmF8HYJWZ\nnwNwjIgeB3AlgPsjlVcY
IfoADj0RUtcNUR2HELcPQBD6pmkc+XnMfKL8fALAeeXnCzEp2scBXNTw\nGMLI2ViYp+kc0/PR8067MDtYhUzRsmzO6sO49YAgZmYi8r2eyqurkDRzh9elQzVBtp1c59Ap3WY9\ns2lTIT9BROcz81NEdAGAp8vlTwDYqa13cblsCiI6pH09wsxHGpZFEBqxsTBP4iNPm+WTS51MtpwL\nRLQHwJ7K9ULCD0sf+deY+bfK7x8D8CwzLxPRAQA7mPlA2dm5gsIvfhGAewG8mo2DSPih4EIJa5dx\nwOoYSsjFGk8T10PWNjvQrFjkbcIPVwH8bwC/SUQ/I6J3A/gogLcS0aMAfq/8DmY+CuBOAEcBfAPA\n+0wRF4QhMcVBRDxd1L3xWeS8ApoVEfcRErWy1/HTWxzr3wbgtjaFEmYXFbnShTWui7gIeJ5Ix7Qd\nifEWkiN2Y9XjxWfZ3zoGZMCYndGnsRXyouvwseWTS2KNC6NDcq0Io0dcKnni6+yc1Xjxxp2dgiAI\nfSOusHqIa0UYPWKF54cvxr/wk0unp45Y5IIgJItrQJAM5JpELHJBEJJCF2mXa0XesiYRi1wQhKSo\nEmkR8WlEyAVBEDJHwg8FQUgOlw981q1xCT8UBCEbNhbmaWNhnpYWlmuFIM7S9G46YpELgpAsNst8\nlq1yl3ZK1IogCMmzsTBPN+OmUtRvHbYwCSIWuSAIySK+8knERy4IQnYoX/nQ5UgdscgFQRAyQSxy\nQRCEkSJCLgiCkDki5IIgCJkjQi4IgpA5IuSCIAiZI0IuCIKQOa1GdhLRMQDrAJ4H8BwzX0lECwC+\nAOA3ABwD8A5m/kXLcgqCIAgO2lrkDGAPM1/BzFeWyw4AuIeZLwXwrfK7IAiC0BExXCtmcPrbAdxR\nfr4DwO9HOIYgCILgoNXITiL6SwCnULhW/hMz/xkR/ZyZX1H+TgBOqu/adjKyUxCEqKwRTYjZ4gg1\npqvsh29k5ieJ6FwA9xDRI/qPzMxkXFxBEISYmAIOjFPEfbQScmZ+svz/DBF9GcCVAE4Q0fnM/BQR\nXQDgadu2RHRI+3qEmY+0KYuwhV6xVYVeI+JFZrL9Jgi5MnYRJ6I9APZUrtfUtUJELwVwFjP/kohe\nBuBuAP8OwFsAPMvMy0R0AMAOZj5gbJula8VWaRRdVp66x/Wt79uHa7s6xzAfFlXHFISmjF3Ebbi0\ns42QXwLgy+XXswF8npn/Qxl+eCeAvwVH+GFOQl5XFHWUqLWpXG2Onytjb4xCc6raw9jrTnQh76Iw\nqdCHeIZUuFkUcZOxN0yhPkO9GaeACDm2KkAMd0Rs9DINXZYcGHuDFbYw2+0sW+VZC7lPgEO3tSHi\nOR7G3HhnGbNzfpZFHEhQyB/q/ajCrDD2xjwrNDGuxnDvfed9GYAu4sgFITnqRN8IaTIrb8ixzlOE\nXJgZ2kYQCQXyoIxDzIeVCLkgCMH4xCcFKzrFh3Uf10WEXBg9qTXsronlW7aJYuiAr6FI7V73da1E\nyIXRklqj7oJYQuFKOCUpHeozxINOolaEUZKr6KRs7eZG33Wgj3snUSvCzJCjiIuAx2eW3iZEyAWh\nY2wD2kS4+yV2rvLU7p+4VgTBgiQ6E/SO3VQ6eV2uFRFyQRCETHAJeYw5OwVBEIQBESEXBEHIHBFy\nQRCEzBEhFwRByBwRckEQhMwRIRcEQcgcEXJBEITMESEXBEHIHBFyQRCEzOlEyInoGiJ6hIgeI6Kl\nLo4hCIIgFEQXciI6C8BhANcA2A1gLxG9NvZxhuB7QxegBVL2/sm13EC+Zc+13EC7sndhkV8J4HFm\nPsbMzwH4rwCu6+A4vfP9oQvQAil7/+RabiDfsudabqBd2bsQ8osA/Ez7frxcJgiCIHRAF0I+eKpH\nQRCEWSJ6Glsi+m0Ah5j5mvL7QQAvMPOyto6IvSAIQgN6yUdORGcD+D8A/j6AvwLwXQB7mf
knUQ8k\nCIIgAOhgqjdmPkNE+wF8E8BZAD4tIi4IgtAdg8wQJAiCIMSj15GdqQ8UIqLPENEJInpYW7ZARPcQ\n0aNEdDcR7dB+O1ieyyNEdPUwpQaIaCcR3UdEPyaiHxHRBzIq+4uJ6DtE9GBZ9kO5lL0sy1lE9AAR\nfa38nku5jxHRWln275bLki87Ee0goruI6CdEdJSIrsqk3L9ZXmv1d4qIPhCt7Mzcyx8KN8vjAHYB\nOAfAgwBe29fxA8v4uwCuAPCwtuxjAP51+XkJwEfLz7vLczinPKfHAbxooHKfD+Dy8vPLUfRRvDaH\nspfleWn5/2wA9wO4KqOy/0sAnwfw1VzqS1menwJYMJYlX3YAdwB4j1ZftudQbuMcXgTgSQA7Y5W9\nz8L/XQB/rn0/AODA0BfVUs5dmBTyRwCcV34+H8Aj5eeDAJa09f4cwG8PXf6yLF8B8Jbcyg7gpQB+\ngGJQWfJlB3AxgHsBvBnA13KqL6WQv9JYlnTZS9H+S8vypMttKe/VAL4ds+x9ulZyHSh0HjOfKD+f\nAHBe+flCFOegSOJ8iGgXireK7yCTshPRi4joQRRlvJuZv4s8yv4fAfwrAC9oy3IoN1CM97iXiL5P\nRP+sXJZ62S8B8AwRfZaIfkhEf0ZEL0P65TZ5J4DV8nOUsvcp5Nn3qnLxaPSdx6DnSEQvB/BFAB9k\n5l/qv6VcdmZ+gZkvR2HhXkVErzN+T67sRPQPADzNzA8AmIrrBdIst8YbmfkKANcC+BdE9Lv6j4mW\n/WwArwfwCWZ+PYD/h+LNfqtQaZZ7EyKaA/APAfw387c2Ze9TyJ9A4RNS7MTkEydVThDR+QBARBcA\neLpcbp7PxeWyQSCic1CI+OeY+Svl4izKrmDmUwDuA/A2pF/2vwfg7UT0UxTW1e8R0eeQfrkBAMz8\nZPn/GQBfRuHOSr3sxwEcZ2aVX+ouFML+VOLl1rkWwA/K6w5EuuZ9Cvn3AbyGiHaVT6XrAXy1x+M3\n5asA/qD8/Aco/M9q+TuJaI6ILgHwGhSDn3qHiAjApwEcZeY/0X7KoeyvUj31RPQSAG8F8BMkXnZm\nvpGZdzLzJShelf8HM78r9XIDABG9lIh+rfz8MhQ+24eReNmZ+SkAPyOiS8tFbwHwYwBfQ8LlNtiL\nLbcKEOua9+zkvxZFRMXjAA4O3elgKd8qitGop1H4898NYAFFh9ajAO4GsENb/8byXB4B8LYBy/07\nKPy0DwJ4oPy7JpOy/xaAHwJ4CIWY/JtyefJl18rzJmxFrSRfbhS+5gfLvx+ptphJ2S9DkfH1IQBf\nQtEBmny5y7K8DMD/BfBr2rIoZZcBQYIgCJkjU70JgiBkjgi5IAhC5oiQC4IgZI4IuSAIQuaIkAuC\nIGSOCLkgCELmiJALgiBkjgi5IAhC5vx/oWJ9OHx0YTwAAAAASUVORK5CYII=\n",
873 "text/plain": [
874 "<matplotlib.figure.Figure at 0x1125c5a90>"
875 ]
876 },
877 "metadata": {},
878 "output_type": "display_data"
879 }
880 ],
881 "source": [
882 "soilmvar = gfs.variables['Volumetric_Soil_Moisture_Content_depth_below_surface_layer']\n",
883 "# flip the data in latitude so North Hemisphere is up on the plot\n",
884 "soilm = soilmvar[0,0,::-1,:] \n",
885 "print('shape=%s, type=%s, missing_value=%s' % \\\n",
886 " (soilm.shape, type(soilm), soilmvar.missing_value))\n",
887 "import matplotlib.pyplot as plt\n",
888 "%matplotlib inline\n",
889 "cs = plt.contourf(soilm)"
890 ]
891 },
892 {
893 "cell_type": "markdown",
894 "metadata": {
895 "internals": {
896 "frag_helper": "fragment_end",
897 "frag_number": 32,
898 "slide_helper": "subslide_end"
899 },
900 "slide_helper": "slide_end",
901 "slideshow": {
902 "slide_type": "fragment"
903 }
904 },
905 "source": [
906 "##Packed integer data\n",
907 "There is a similar feature for variables with `scale_factor` and `add_offset` attributes.\n",
908 "\n",
909 "- short integer data will automatically be returned as float data, with the scale and offset applied. "
910 ]
911 },
912 {
913 "cell_type": "markdown",
914 "metadata": {
915 "internals": {
916 "frag_helper": "fragment_end",
917 "frag_number": 32,
918 "slide_type": "subslide"
919 },
920 "slideshow": {
921 "slide_type": "slide"
922 }
923 },
924 "source": [
925 "## Dealing with dates and times\n",
926 "- time variables usually measure relative to a fixed date using a certain calendar, with units specified like ***`hours since YY:MM:DD hh-mm-ss`***.\n",
927 "- **`num2date`** and **`date2num`** convenience functions provided to convert between these numeric time coordinates and handy python datetime instances. \n",
928 "- **`date2index`** finds the time index corresponding to a datetime instance."
929 ]
930 },
931 {
932 "cell_type": "code",
933 "execution_count": 22,
934 "metadata": {
935 "collapsed": false,
936 "internals": {
937 "frag_helper": "fragment_end",
938 "frag_number": 34
939 },
940 "slideshow": {
941 "slide_type": "fragment"
942 }
943 },
944 "outputs": [
945 {
946 "name": "stdout",
947 "output_type": "stream",
948 "text": [
949 "name of time dimension = time2\n",
950 "units = Hour since 2015-07-11T06:00:00Z, values = [ 0. 3. 6. 9. 12. 15. 18. 21. 24. 27. 30. 33.\n",
951 " 36. 39. 42. 45. 48. 51. 54. 57. 60. 63. 66. 69.\n",
952 " 72. 75. 78. 81. 84. 87. 90. 93. 96. 99. 102. 105.\n",
953 " 108. 111. 114. 117. 120. 123. 126. 129. 132. 135. 138. 141.\n",
954 " 144. 147. 150. 153. 156. 159. 162. 165. 168. 171. 174. 177.\n",
955 " 180. 183. 186. 189. 192. 195. 198. 201. 204. 207. 210. 213.\n",
956 " 216. 219. 222. 225. 228. 231. 234. 237. 240. 252. 264. 276.\n",
957 " 288. 300. 312. 324. 336. 348. 360. 372. 384.]\n"
958 ]
959 }
960 ],
961 "source": [
962 "from netCDF4 import num2date, date2num, date2index\n",
963 "timedim = sfctmp.dimensions[0] # time dim name\n",
964 "print('name of time dimension = %s' % timedim)\n",
965 "times = gfs.variables[timedim] # time coord var\n",
966 "print('units = %s, values = %s' % (times.units, times[:]))"
967 ]
968 },
969 {
970 "cell_type": "code",
971 "execution_count": 23,
972 "metadata": {
973 "collapsed": false,
974 "internals": {
975 "frag_helper": "fragment_end",
976 "frag_number": 35,
977 "slide_helper": "subslide_end"
978 },
979 "slide_helper": "slide_end",
980 "slideshow": {
981 "slide_type": "fragment"
982 }
983 },
984 "outputs": [
985 {
986 "name": "stdout",
987 "output_type": "stream",
988 "text": [
989 "['2015-07-11 06:00:00', '2015-07-11 09:00:00', '2015-07-11 12:00:00', '2015-07-11 15:00:00', '2015-07-11 18:00:00', '2015-07-11 21:00:00', '2015-07-12 00:00:00', '2015-07-12 03:00:00', '2015-07-12 06:00:00', '2015-07-12 09:00:00']\n"
990 ]
991 }
992 ],
993 "source": [
994 "dates = num2date(times[:], times.units)\n",
995 "print([date.strftime('%Y-%m-%d %H:%M:%S') for date in dates[:10]]) # print only first ten..."
996 ]
997 },
998 {
999 "cell_type": "markdown",
1000 "metadata": {
1001 "internals": {
1002 "frag_helper": "fragment_end",
1003 "frag_number": 35,
1004 "slide_type": "subslide"
1005 },
1006 "slideshow": {
1007 "slide_type": "slide"
1008 }
1009 },
1010 "source": [
1011 "###Get index associated with a specified date, extract forecast data for that date."
1012 ]
1013 },
1014 {
1015 "cell_type": "code",
1016 "execution_count": 24,
1017 "metadata": {
1018 "collapsed": false,
1019 "internals": {
1020 "frag_helper": "fragment_end",
1021 "frag_number": 37
1022 },
1023 "slideshow": {
1024 "slide_type": "fragment"
1025 }
1026 },
1027 "outputs": [
1028 {
1029 "name": "stdout",
1030 "output_type": "stream",
1031 "text": [
1032 "2015-07-14 07:22:39.579246\n",
1033 "index = 24, date = 2015-07-14 06:00:00\n"
1034 ]
1035 }
1036 ],
1037 "source": [
1038 "from datetime import datetime, timedelta\n",
1039 "date = datetime.now() + timedelta(days=3)\n",
1040 "print(date)\n",
1041 "ntime = date2index(date,times,select='nearest')\n",
1042 "print('index = %s, date = %s' % (ntime, dates[ntime]))"
1043 ]
1044 },
1045 {
1046 "cell_type": "markdown",
1047 "metadata": {
1048 "internals": {
1049 "frag_helper": "fragment_end",
1050 "frag_number": 38
1051 },
1052 "slideshow": {
1053 "slide_type": "fragment"
1054 }
1055 },
1056 "source": [
1057 "###Get temp forecast for Boulder (near 40N, -105W)\n",
1058 "- use function **`getcloses_ij`** we created before..."
1059 ]
1060 },
1061 {
1062 "cell_type": "code",
1063 "execution_count": 25,
1064 "metadata": {
1065 "collapsed": false,
1066 "internals": {
1067 "frag_helper": "fragment_end",
1068 "frag_number": 39,
1069 "slide_helper": "subslide_end"
1070 },
1071 "slide_helper": "slide_end",
1072 "slideshow": {
1073 "slide_type": "fragment"
1074 }
1075 },
1076 "outputs": [
1077 {
1078 "name": "stdout",
1079 "output_type": "stream",
1080 "text": [
1081 "Boulder forecast valid at 2015-07-14 06:00:00 UTC = 296.8 K\n"
1082 ]
1083 }
1084 ],
1085 "source": [
1086 "lats, lons = gfs.variables['lat'][:], gfs.variables['lon'][:]\n",
1087 "# lats, lons are 1-d. Make them 2-d using numpy.meshgrid.\n",
1088 "lons, lats = np.meshgrid(lons,lats)\n",
1089 "j, i = getclosest_ij(lats,lons,40,-105)\n",
1090 "fcst_temp = sfctmp[ntime,j,i]\n",
1091 "print('Boulder forecast valid at %s UTC = %5.1f %s' % \\\n",
1092 " (dates[ntime],fcst_temp,sfctmp.units))"
1093 ]
1094 },
1095 {
1096 "cell_type": "markdown",
1097 "metadata": {
1098 "internals": {
1099 "frag_helper": "fragment_end",
1100 "frag_number": 39,
1101 "slide_type": "subslide"
1102 },
1103 "slideshow": {
1104 "slide_type": "slide"
1105 }
1106 },
1107 "source": [
1108 "##Simple multi-file aggregation\n",
1109 "\n",
1110 "What if you have a bunch of netcdf files, each with data for a different year, and you want to access all the data as if it were in one file?"
1111 ]
1112 },
1113 {
1114 "cell_type": "code",
1115 "execution_count": 26,
1116 "metadata": {
1117 "collapsed": false,
1118 "internals": {
1119 "frag_helper": "fragment_end",
1120 "frag_number": 41
1121 },
1122 "slideshow": {
1123 "slide_type": "fragment"
1124 }
1125 },
1126 "outputs": [
1127 {
1128 "name": "stdout",
1129 "output_type": "stream",
1130 "text": [
1131 "-rw-r--r-- 1 jwhitaker staff 8985332 Jul 10 06:43 data/prmsl.2000.nc\r\n",
1132 "-rw-r--r-- 1 jwhitaker staff 8968789 Jul 10 06:43 data/prmsl.2001.nc\r\n",
1133 "-rw-r--r-- 1 jwhitaker staff 8972796 Jul 10 06:43 data/prmsl.2002.nc\r\n",
1134 "-rw-r--r-- 1 jwhitaker staff 8974435 Jul 10 06:43 data/prmsl.2003.nc\r\n",
1135 "-rw-r--r-- 1 jwhitaker staff 8997438 Jul 10 06:43 data/prmsl.2004.nc\r\n",
1136 "-rw-r--r-- 1 jwhitaker staff 8976678 Jul 10 06:43 data/prmsl.2005.nc\r\n",
1137 "-rw-r--r-- 1 jwhitaker staff 8969714 Jul 10 06:43 data/prmsl.2006.nc\r\n",
1138 "-rw-r--r-- 1 jwhitaker staff 8974360 Jul 10 06:43 data/prmsl.2007.nc\r\n",
1139 "-rw-r--r-- 1 jwhitaker staff 8994260 Jul 10 06:43 data/prmsl.2008.nc\r\n",
1140 "-rw-r--r-- 1 jwhitaker staff 8974678 Jul 10 06:43 data/prmsl.2009.nc\r\n",
1141 "-rw-r--r-- 1 jwhitaker staff 8970732 Jul 10 06:43 data/prmsl.2010.nc\r\n",
1142 "-rw-r--r-- 1 jwhitaker staff 8976285 Jul 10 06:43 data/prmsl.2011.nc\r\n"
1143 ]
1144 }
1145 ],
1146 "source": [
1147 "!ls -l data/prmsl*nc"
1148 ]
1149 },
1150 {
1151 "cell_type": "markdown",
1152 "metadata": {
1153 "internals": {
1154 "frag_helper": "fragment_end",
1155 "frag_number": 42
1156 },
1157 "slideshow": {
1158 "slide_type": "fragment"
1159 }
1160 },
1161 "source": [
1162 "**`MFDataset`** uses file globbing to patch together all the files into one big Dataset.\n",
1163 "You can also pass it a list of specific files.\n",
1164 "\n",
1165 "Limitations:\n",
1166 "\n",
1167 "- It can only aggregate the data along the leftmost dimension of each variable.\n",
1168 "- only works with `NETCDF3`, or `NETCDF4_CLASSIC` formatted files.\n",
1169 "- kind of slow."
1170 ]
1171 },
1172 {
1173 "cell_type": "code",
1174 "execution_count": 27,
1175 "metadata": {
1176 "collapsed": false,
1177 "internals": {
1178 "frag_helper": "fragment_end",
1179 "frag_number": 43,
1180 "slide_helper": "subslide_end"
1181 },
1182 "slide_helper": "slide_end",
1183 "slideshow": {
1184 "slide_type": "fragment"
1185 }
1186 },
1187 "outputs": [
1188 {
1189 "name": "stdout",
1190 "output_type": "stream",
1191 "text": [
1192 "starting date = 2000-01-01 00:00:00\n",
1193 "ending date = 2011-12-31 00:00:00\n",
1194 "times shape = 4383\n",
1195 "prmsl dimensions = (u'time', u'lat', u'lon'), prmsl shape = (4383, 91, 180)\n"
1196 ]
1197 }
1198 ],
1199 "source": [
1200 "mf = netCDF4.MFDataset('data/prmsl*nc')\n",
1201 "times = mf.variables['time']\n",
1202 "dates = num2date(times[:],times.units)\n",
1203 "print('starting date = %s' % dates[0])\n",
1204 "print('ending date = %s'% dates[-1])\n",
1205 "prmsl = mf.variables['prmsl']\n",
1206 "print('times shape = %s' % times.shape)\n",
1207 "print('prmsl dimensions = %s, prmsl shape = %s' %\\\n",
1208 " (prmsl.dimensions, prmsl.shape))"
1209 ]
1210 },
1211 {
1212 "cell_type": "markdown",
1213 "metadata": {
1214 "internals": {
1215 "frag_helper": "fragment_end",
1216 "frag_number": 43,
1217 "slide_type": "subslide"
1218 },
1219 "slideshow": {
1220 "slide_type": "slide"
1221 }
1222 },
1223 "source": [
1224 "## Closing your netCDF file\n",
1225 "\n",
1226 "It's good to close netCDF files, but not actually necessary when Dataset is open for read access only.\n"
1227 ]
1228 },
1229 {
1230 "cell_type": "code",
1231 "execution_count": 28,
1232 "metadata": {
1233 "collapsed": false,
1234 "internals": {
1235 "frag_helper": "fragment_end",
1236 "frag_number": 45
1237 },
1238 "slideshow": {
1239 "slide_type": "fragment"
1240 }
1241 },
1242 "outputs": [],
1243 "source": [
1244 "f.close()\n",
1245 "gfs.close()"
1246 ]
1247 },
1248 {
1249 "cell_type": "markdown",
1250 "metadata": {
1251 "internals": {
1252 "frag_helper": "fragment_end",
1253 "frag_number": 45,
1254 "slide_helper": "subslide_end"
1255 },
1256 "slide_helper": "slide_end",
1257 "slideshow": {
1258 "slide_type": "-"
1259 }
1260 },
1261 "source": [
1262 "##That's it!\n",
1263 "\n",
1264 "Now you're ready to start exploring your data interactively.\n",
1265 "\n",
1266 "To be continued with **Writing netCDF data** ...."
1267 ]
1268 }
1269 ],
1270 "metadata": {
1271 "celltoolbar": "Raw Cell Format",
1272 "kernelspec": {
1273 "display_name": "Python 2",
1274 "language": "python",
1275 "name": "python2"
1276 },
1277 "language_info": {
1278 "codemirror_mode": {
1279 "name": "ipython",
1280 "version": 2
1281 },
1282 "file_extension": ".py",
1283 "mimetype": "text/x-python",
1284 "name": "python",
1285 "nbconvert_exporter": "python",
1286 "pygments_lexer": "ipython2",
1287 "version": "2.7.9"
1288 }
1289 },
1290 "nbformat": 4,
1291 "nbformat_minor": 0
1292 }
+0
-1206
examples/writing_netCDF.ipynb less more
0 {
1 "cells": [
2 {
3 "cell_type": "markdown",
4 "metadata": {
5 "internals": {
6 "slide_type": "subslide"
7 },
8 "slideshow": {
9 "slide_type": "slide"
10 }
11 },
12 "source": [
13 "# Writing netCDF data\n",
14 "\n",
15 "**Important Note**: when running this notebook interactively in a browser, you probably will not be able to execute individual cells out of order without getting an error. Instead, choose \"Run All\" from the Cell menu after you modify a cell."
16 ]
17 },
18 {
19 "cell_type": "code",
20 "execution_count": 25,
21 "metadata": {
22 "collapsed": false,
23 "internals": {
24 "frag_number": 1,
25 "slide_helper": "subslide_end"
26 },
27 "slide_helper": "slide_end",
28 "slideshow": {
29 "slide_type": "fragment"
30 }
31 },
32 "outputs": [],
33 "source": [
34 "import netCDF4 # Note: python is case-sensitive!\n",
35 "import numpy as np"
36 ]
37 },
38 {
39 "cell_type": "markdown",
40 "metadata": {
41 "internals": {
42 "frag_helper": "fragment_end",
43 "frag_number": 1,
44 "slide_type": "subslide"
45 },
46 "slideshow": {
47 "slide_type": "slide"
48 }
49 },
50 "source": [
51 "## Opening a file, creating a new Dataset\n",
52 "\n",
53 "Let's create a new, empty netCDF file named 'data/new.nc', opened for writing.\n",
54 "\n",
55 "Be careful, opening a file with 'w' will clobber any existing data (unless `clobber=False` is used, in which case an exception is raised if the file already exists).\n",
56 "\n",
57 "- `mode='r'` is the default.\n",
58 "- `mode='a'` opens an existing file and allows for appending (does not clobber existing data)\n",
59 "- `format` can be one of `NETCDF3_CLASSIC`, `NETCDF3_64BIT`, `NETCDF4_CLASSIC` or `NETCDF4` (default). `NETCDF4_CLASSIC` uses HDF5 for the underlying storage layer (as does `NETCDF4`) but enforces the classic netCDF 3 data model so data can be read with older clients. "
60 ]
61 },
62 {
63 "cell_type": "code",
64 "execution_count": 26,
65 "metadata": {
66 "collapsed": false,
67 "internals": {
68 "frag_helper": "fragment_end",
69 "frag_number": 3,
70 "slide_helper": "subslide_end"
71 },
72 "slide_helper": "slide_end",
73 "slideshow": {
74 "slide_type": "fragment"
75 }
76 },
77 "outputs": [
78 {
79 "name": "stdout",
80 "output_type": "stream",
81 "text": [
82 "<type 'netCDF4._netCDF4.Dataset'>\n",
83 "root group (NETCDF4_CLASSIC data model, file format HDF5):\n",
84 " dimensions(sizes): \n",
85 " variables(dimensions): \n",
86 " groups: \n",
87 "\n"
88 ]
89 }
90 ],
91 "source": [
92 "try: ncfile.close() # just to be safe, make sure dataset is not already open.\n",
93 "except: pass\n",
94 "ncfile = netCDF4.Dataset('data/new.nc',mode='w',format='NETCDF4_CLASSIC') \n",
95 "print(ncfile)"
96 ]
97 },
98 {
99 "cell_type": "markdown",
100 "metadata": {
101 "internals": {
102 "frag_helper": "fragment_end",
103 "frag_number": 3,
104 "slide_type": "subslide"
105 },
106 "slideshow": {
107 "slide_type": "slide"
108 }
109 },
110 "source": [
111 "## Creating dimensions\n",
112 "\n",
113 "The **ncfile** object we created is a container for _dimensions_, _variables_, and _attributes_. First, let's create some dimensions using the [`createDimension`](http://unidata.github.io/netcdf4-python/netCDF4.Dataset-class.html#createDimension) method. \n",
114 "\n",
115 "- Every dimension has a name and a length. \n",
116 "- The name is a string that is used to specify the dimension to be used when creating a variable, and as a key to access the dimension object in the `ncfile.dimensions` dictionary.\n",
117 "\n",
118 "Setting the dimension length to `0` or `None` makes it unlimited, so it can grow. \n",
119 "\n",
120 "- For `NETCDF4` files, any variable's dimension can be unlimited. \n",
 121 "- For `NETCDF4_CLASSIC` and `NETCDF3*` files, only one per variable can be unlimited, and it must be the leftmost (slowest varying) dimension."
122 ]
123 },
124 {
125 "cell_type": "code",
126 "execution_count": 27,
127 "metadata": {
128 "collapsed": false,
129 "internals": {
130 "frag_helper": "fragment_end",
131 "frag_number": 5,
132 "slide_helper": "subslide_end"
133 },
134 "slide_helper": "slide_end",
135 "slideshow": {
136 "slide_type": "fragment"
137 }
138 },
139 "outputs": [
140 {
141 "name": "stdout",
142 "output_type": "stream",
143 "text": [
144 "('lat', <type 'netCDF4._netCDF4.Dimension'>: name = 'lat', size = 73\n",
145 ")\n",
146 "('lon', <type 'netCDF4._netCDF4.Dimension'>: name = 'lon', size = 144\n",
147 ")\n",
148 "('time', <type 'netCDF4._netCDF4.Dimension'> (unlimited): name = 'time', size = 0\n",
149 ")\n"
150 ]
151 }
152 ],
153 "source": [
154 "lat_dim = ncfile.createDimension('lat', 73) # latitude axis\n",
155 "lon_dim = ncfile.createDimension('lon', 144) # longitude axis\n",
156 "time_dim = ncfile.createDimension('time', None) # unlimited axis (can be appended to).\n",
157 "for dim in ncfile.dimensions.items():\n",
158 " print(dim)"
159 ]
160 },
161 {
162 "cell_type": "markdown",
163 "metadata": {
164 "internals": {
165 "frag_helper": "fragment_end",
166 "frag_number": 5,
167 "slide_type": "subslide"
168 },
169 "slideshow": {
170 "slide_type": "slide"
171 }
172 },
173 "source": [
174 "## Creating attributes\n",
175 "\n",
176 "netCDF attributes can be created just like you would for any python object. \n",
177 "\n",
178 "- Best to adhere to established conventions (like the [CF](http://cfconventions.org/) conventions)\n",
179 "- We won't try to adhere to any specific convention here though."
180 ]
181 },
182 {
183 "cell_type": "code",
184 "execution_count": 28,
185 "metadata": {
186 "collapsed": false,
187 "internals": {
188 "frag_helper": "fragment_end",
189 "frag_number": 7
190 },
191 "slideshow": {
192 "slide_type": "fragment"
193 }
194 },
195 "outputs": [
196 {
197 "name": "stdout",
198 "output_type": "stream",
199 "text": [
200 "My model data\n"
201 ]
202 }
203 ],
204 "source": [
205 "ncfile.title='My model data'\n",
206 "print(ncfile.title)"
207 ]
208 },
209 {
210 "cell_type": "markdown",
211 "metadata": {
212 "internals": {
213 "frag_helper": "fragment_end",
214 "frag_number": 8,
215 "slide_helper": "subslide_end"
216 },
217 "slide_helper": "slide_end",
218 "slideshow": {
219 "slide_type": "fragment"
220 }
221 },
222 "source": [
223 "Try adding some more attributes..."
224 ]
225 },
226 {
227 "cell_type": "markdown",
228 "metadata": {
229 "internals": {
230 "frag_helper": "fragment_end",
231 "frag_number": 8,
232 "slide_type": "subslide"
233 },
234 "slideshow": {
235 "slide_type": "slide"
236 }
237 },
238 "source": [
239 "## Creating variables\n",
240 "\n",
241 "Now let's add some variables and store some data in them. \n",
242 "\n",
243 "- A variable has a name, a type, a shape, and some data values. \n",
244 "- The shape of a variable is specified by a tuple of dimension names. \n",
245 "- A variable should also have some named attributes, such as 'units', that describe the data.\n",
246 "\n",
247 "The [`createVariable`](http://unidata.github.io/netcdf4-python/netCDF4.Dataset-class.html#createVariable) method takes 3 mandatory args.\n",
248 "\n",
249 "- the 1st argument is the variable name (a string). This is used as the key to access the variable object from the `variables` dictionary.\n",
250 "- the 2nd argument is the datatype (most numpy datatypes supported). \n",
251 "- the third argument is a tuple containing the dimension names (the dimensions must be created first). Unless this is a `NETCDF4` file, any unlimited dimension must be the leftmost one.\n",
252 "- there are lots of optional arguments (many of which are only relevant when `format='NETCDF4'`) to control compression, chunking, fill_value, etc.\n"
253 ]
254 },
255 {
256 "cell_type": "code",
257 "execution_count": 29,
258 "metadata": {
259 "collapsed": false,
260 "internals": {
261 "frag_helper": "fragment_end",
262 "frag_number": 10,
263 "slide_helper": "subslide_end"
264 },
265 "slide_helper": "slide_end",
266 "slideshow": {
267 "slide_type": "fragment"
268 }
269 },
270 "outputs": [
271 {
272 "name": "stdout",
273 "output_type": "stream",
274 "text": [
275 "<type 'netCDF4._netCDF4.Variable'>\n",
276 "float64 temp(time, lat, lon)\n",
277 " units: K\n",
278 " standard_name: air_temperature\n",
279 "unlimited dimensions: time\n",
280 "current shape = (0, 73, 144)\n",
281 "filling on, default _FillValue of 9.96920996839e+36 used\n",
282 "\n"
283 ]
284 }
285 ],
286 "source": [
287 "# Define two variables with the same names as dimensions,\n",
288 "# a conventional way to define \"coordinate variables\".\n",
289 "lat = ncfile.createVariable('lat', np.float32, ('lat',))\n",
290 "lat.units = 'degrees_north'\n",
291 "lat.long_name = 'latitude'\n",
292 "lon = ncfile.createVariable('lon', np.float32, ('lon',))\n",
293 "lon.units = 'degrees_east'\n",
294 "lon.long_name = 'longitude'\n",
295 "time = ncfile.createVariable('time', np.float64, ('time',))\n",
296 "time.units = 'hours since 1800-01-01'\n",
297 "time.long_name = 'time'\n",
298 "# Define a 3D variable to hold the data\n",
299 "temp = ncfile.createVariable('temp',np.float64,('time','lat','lon')) # note: unlimited dimension is leftmost\n",
300 "temp.units = 'K' # degrees Kelvin\n",
301 "temp.standard_name = 'air_temperature' # this is a CF standard name\n",
302 "print(temp)"
303 ]
304 },
305 {
306 "cell_type": "markdown",
307 "metadata": {
308 "internals": {
309 "frag_helper": "fragment_end",
310 "frag_number": 10,
311 "slide_type": "subslide"
312 },
313 "slideshow": {
314 "slide_type": "slide"
315 }
316 },
317 "source": [
318 "## Pre-defined variable attributes (read only)\n",
319 "\n",
320 "The netCDF4 module provides some useful pre-defined Python attributes for netCDF variables, such as dimensions, shape, dtype, ndim. \n",
321 "\n",
322 "Note: since no data has been written yet, the length of the 'time' dimension is 0."
323 ]
324 },
325 {
326 "cell_type": "code",
327 "execution_count": 30,
328 "metadata": {
329 "collapsed": false,
330 "internals": {
331 "frag_helper": "fragment_end",
332 "frag_number": 12,
333 "slide_helper": "subslide_end"
334 },
335 "slide_helper": "slide_end",
336 "slideshow": {
337 "slide_type": "fragment"
338 }
339 },
340 "outputs": [
341 {
342 "name": "stdout",
343 "output_type": "stream",
344 "text": [
345 "-- Some pre-defined attributes for variable temp:\n",
346 "('temp.dimensions:', (u'time', u'lat', u'lon'))\n",
347 "('temp.shape:', (0, 73, 144))\n",
348 "('temp.dtype:', dtype('float64'))\n",
349 "('temp.ndim:', 3)\n"
350 ]
351 }
352 ],
353 "source": [
354 "print(\"-- Some pre-defined attributes for variable temp:\")\n",
355 "print(\"temp.dimensions:\", temp.dimensions)\n",
356 "print(\"temp.shape:\", temp.shape)\n",
357 "print(\"temp.dtype:\", temp.dtype)\n",
358 "print(\"temp.ndim:\", temp.ndim)"
359 ]
360 },
361 {
362 "cell_type": "markdown",
363 "metadata": {
364 "internals": {
365 "frag_helper": "fragment_end",
366 "frag_number": 12,
367 "slide_type": "subslide"
368 },
369 "slideshow": {
370 "slide_type": "slide"
371 }
372 },
373 "source": [
374 "## Writing data\n",
375 "\n",
376 "To write data to a netCDF variable object, just treat it like a numpy array and assign values to a slice."
377 ]
378 },
379 {
380 "cell_type": "code",
381 "execution_count": 31,
382 "metadata": {
383 "collapsed": false,
384 "internals": {
385 "frag_helper": "fragment_end",
386 "frag_number": 14
387 },
388 "slideshow": {
389 "slide_type": "fragment"
390 }
391 },
392 "outputs": [
393 {
394 "name": "stdout",
395 "output_type": "stream",
396 "text": [
397 "('-- Wrote data, temp.shape is now ', (3, 73, 144))\n",
398 "('-- Min/Max values:', 280.00283562143028, 329.99987991477548)\n"
399 ]
400 }
401 ],
402 "source": [
403 "nlats = len(lat_dim); nlons = len(lon_dim); ntimes = 3\n",
404 "# Write latitudes, longitudes.\n",
405 "# Note: the \":\" is necessary in these \"write\" statements\n",
406 "lat[:] = -90. + (180./nlats)*np.arange(nlats) # south pole to north pole\n",
407 "lon[:] = (180./nlats)*np.arange(nlons) # Greenwich meridian eastward\n",
408 "# create a 3D array of random numbers\n",
409 "data_arr = np.random.uniform(low=280,high=330,size=(ntimes,nlats,nlons))\n",
410 "# Write the data. This writes the whole 3D netCDF variable all at once.\n",
411 "temp[:,:,:] = data_arr # Appends data along unlimited dimension\n",
412 "print(\"-- Wrote data, temp.shape is now \", temp.shape)\n",
413 "# read data back from variable (by slicing it), print min and max\n",
414 "print(\"-- Min/Max values:\", temp[:,:,:].min(), temp[:,:,:].max())"
415 ]
416 },
417 {
418 "cell_type": "markdown",
419 "metadata": {
420 "internals": {
421 "frag_helper": "fragment_end",
422 "frag_number": 15,
423 "slide_helper": "subslide_end"
424 },
425 "slide_helper": "slide_end",
426 "slideshow": {
427 "slide_type": "fragment"
428 }
429 },
430 "source": [
431 "- You can just treat a netCDF Variable object like a numpy array and assign values to it.\n",
432 "- Variables automatically grow along unlimited dimensions (unlike numpy arrays)\n",
433 "- The above writes the whole 3D variable all at once, but you can write it a slice at a time instead.\n",
434 "\n",
435 "Let's add another time slice....\n"
436 ]
437 },
438 {
439 "cell_type": "code",
440 "execution_count": 32,
441 "metadata": {
442 "collapsed": false,
443 "internals": {
444 "frag_helper": "fragment_end",
445 "frag_number": 15,
446 "slide_type": "subslide"
447 },
448 "slideshow": {
449 "slide_type": "slide"
450 }
451 },
452 "outputs": [
453 {
454 "name": "stdout",
455 "output_type": "stream",
456 "text": [
457 "('-- Wrote more data, temp.shape is now ', (4, 73, 144))\n"
458 ]
459 }
460 ],
461 "source": [
462 "# create a 2D array of random numbers\n",
463 "data_slice = np.random.uniform(low=280,high=330,size=(nlats,nlons))\n",
464 "temp[3,:,:] = data_slice # Appends the 4th time slice\n",
465 "print(\"-- Wrote more data, temp.shape is now \", temp.shape)"
466 ]
467 },
468 {
469 "cell_type": "markdown",
470 "metadata": {
471 "internals": {
472 "frag_helper": "fragment_end",
473 "frag_number": 17
474 },
475 "slideshow": {
476 "slide_type": "fragment"
477 }
478 },
479 "source": [
480 "Note that we have not yet written any data to the time variable. It automatically grew as we appended data along the time dimension to the variable `temp`, but the data is missing."
481 ]
482 },
483 {
484 "cell_type": "code",
485 "execution_count": 33,
486 "metadata": {
487 "collapsed": false,
488 "internals": {
489 "frag_helper": "fragment_end",
490 "frag_number": 18,
491 "slide_helper": "subslide_end"
492 },
493 "slide_helper": "slide_end",
494 "slideshow": {
495 "slide_type": "fragment"
496 }
497 },
498 "outputs": [
499 {
500 "name": "stdout",
501 "output_type": "stream",
502 "text": [
503 "<type 'netCDF4._netCDF4.Variable'>\n",
504 "float64 time(time)\n",
505 " units: hours since 1800-01-01\n",
506 " long_name: time\n",
507 "unlimited dimensions: time\n",
508 "current shape = (4,)\n",
509 "filling on, default _FillValue of 9.96920996839e+36 used\n",
510 "\n",
511 "(<class 'numpy.ma.core.MaskedArray'>, masked_array(data = [-- -- -- --],\n",
512 " mask = [ True True True True],\n",
513 " fill_value = 9.96920996839e+36)\n",
514 ")\n"
515 ]
516 }
517 ],
518 "source": [
519 "print(time)\n",
520 "times_arr = time[:]\n",
521 "print(type(times_arr),times_arr) # dashes indicate masked values (where data has not yet been written)"
522 ]
523 },
524 {
525 "cell_type": "markdown",
526 "metadata": {
527 "internals": {
528 "frag_helper": "fragment_end",
529 "frag_number": 18,
530 "slide_type": "subslide"
531 },
532 "slideshow": {
533 "slide_type": "slide"
534 }
535 },
536 "source": [
 537 "Let's write some data into the time variable. \n",
538 "\n",
539 "- Given a set of datetime instances, use date2num to convert to numeric time values and then write that data to the variable."
540 ]
541 },
542 {
543 "cell_type": "code",
544 "execution_count": 34,
545 "metadata": {
546 "collapsed": false,
547 "internals": {
548 "frag_helper": "fragment_end",
549 "frag_number": 20,
550 "slide_helper": "subslide_end"
551 },
552 "slide_helper": "slide_end",
553 "slideshow": {
554 "slide_type": "fragment"
555 }
556 },
557 "outputs": [
558 {
559 "name": "stdout",
560 "output_type": "stream",
561 "text": [
562 "[datetime.datetime(2014, 10, 1, 0, 0), datetime.datetime(2014, 10, 2, 0, 0), datetime.datetime(2014, 10, 3, 0, 0), datetime.datetime(2014, 10, 4, 0, 0)]\n",
563 "(array([ 1882440., 1882464., 1882488., 1882512.]), u'hours since 1800-01-01')\n",
564 "[datetime.datetime(2014, 10, 1, 0, 0) datetime.datetime(2014, 10, 2, 0, 0)\n",
565 " datetime.datetime(2014, 10, 3, 0, 0) datetime.datetime(2014, 10, 4, 0, 0)]\n"
566 ]
567 }
568 ],
569 "source": [
570 "from datetime import datetime\n",
571 "from netCDF4 import date2num,num2date\n",
572 "# 1st 4 days of October.\n",
573 "dates = [datetime(2014,10,1,0),datetime(2014,10,2,0),datetime(2014,10,3,0),datetime(2014,10,4,0)]\n",
574 "print(dates)\n",
575 "times = date2num(dates, time.units)\n",
576 "print(times, time.units) # numeric values\n",
577 "time[:] = times\n",
578 "# read time data back, convert to datetime instances, check values.\n",
579 "print(num2date(time[:],time.units))"
580 ]
581 },
582 {
583 "cell_type": "markdown",
584 "metadata": {
585 "internals": {
586 "frag_helper": "fragment_end",
587 "frag_number": 20,
588 "slide_type": "subslide"
589 },
590 "slideshow": {
591 "slide_type": "slide"
592 }
593 },
594 "source": [
595 "## Closing a netCDF file\n",
596 "\n",
597 "It's **important** to close a netCDF file you opened for writing:\n",
598 "\n",
599 "- flushes buffers to make sure all data gets written\n",
600 "- releases memory resources used by open netCDF files"
601 ]
602 },
603 {
604 "cell_type": "code",
605 "execution_count": 35,
606 "metadata": {
607 "collapsed": false,
608 "internals": {
609 "frag_helper": "fragment_end",
610 "frag_number": 22,
611 "slide_helper": "subslide_end"
612 },
613 "slide_helper": "slide_end",
614 "slideshow": {
615 "slide_type": "fragment"
616 }
617 },
618 "outputs": [
619 {
620 "name": "stdout",
621 "output_type": "stream",
622 "text": [
623 "<type 'netCDF4._netCDF4.Dataset'>\n",
624 "root group (NETCDF4_CLASSIC data model, file format HDF5):\n",
625 " title: My model data\n",
626 " dimensions(sizes): lat(73), lon(144), time(4)\n",
627 " variables(dimensions): float32 \u001b[4mlat\u001b[0m(lat), float32 \u001b[4mlon\u001b[0m(lon), float64 \u001b[4mtime\u001b[0m(time), float64 \u001b[4mtemp\u001b[0m(time,lat,lon)\n",
628 " groups: \n",
629 "\n",
630 "Dataset is closed!\n"
631 ]
632 }
633 ],
634 "source": [
635 "# first print the Dataset object to see what we've got\n",
636 "print(ncfile)\n",
637 "# close the Dataset.\n",
638 "ncfile.close(); print('Dataset is closed!')"
639 ]
640 },
641 {
642 "cell_type": "markdown",
643 "metadata": {
644 "internals": {
645 "frag_helper": "fragment_end",
646 "frag_number": 22,
647 "slide_type": "subslide"
648 },
649 "slideshow": {
650 "slide_type": "slide"
651 }
652 },
653 "source": [
654 "# Advanced features\n",
655 "\n",
656 "So far we've only exercised features associated with the old netCDF version 3 data model. netCDF version 4 adds a lot of new functionality that comes with the more flexible HDF5 storage layer. \n",
657 "\n",
658 "Let's create a new file with `format='NETCDF4'` so we can try out some of these features."
659 ]
660 },
661 {
662 "cell_type": "code",
663 "execution_count": 36,
664 "metadata": {
665 "collapsed": false,
666 "internals": {
667 "frag_helper": "fragment_end",
668 "frag_number": 25,
669 "slide_helper": "subslide_end"
670 },
671 "slide_helper": "slide_end",
672 "slideshow": {
673 "slide_type": "fragment"
674 }
675 },
676 "outputs": [
677 {
678 "name": "stdout",
679 "output_type": "stream",
680 "text": [
681 "<type 'netCDF4._netCDF4.Dataset'>\n",
682 "root group (NETCDF4 data model, file format HDF5):\n",
683 " dimensions(sizes): \n",
684 " variables(dimensions): \n",
685 " groups: \n",
686 "\n"
687 ]
688 }
689 ],
690 "source": [
691 "ncfile = netCDF4.Dataset('data/new2.nc','w',format='NETCDF4')\n",
692 "print(ncfile)"
693 ]
694 },
695 {
696 "cell_type": "markdown",
697 "metadata": {
698 "internals": {
699 "frag_helper": "fragment_end",
700 "frag_number": 25,
701 "slide_type": "subslide"
702 },
703 "slideshow": {
704 "slide_type": "slide"
705 }
706 },
707 "source": [
708 "## Creating Groups\n",
709 "\n",
710 "netCDF version 4 added support for organizing data in hierarchical groups.\n",
711 "\n",
 712 "- analogous to directories in a filesystem. \n",
713 "- Groups serve as containers for variables, dimensions and attributes, as well as other groups. \n",
714 "- A `netCDF4.Dataset` creates a special group, called the 'root group', which is similar to the root directory in a unix filesystem. \n",
715 "\n",
716 "- groups are created using the [`createGroup`](http://unidata.github.io/netcdf4-python/netCDF4.Dataset-class.html#createGroup) method.\n",
717 "- takes a single argument (a string, which is the name of the Group instance). This string is used as a key to access the group instances in the `groups` dictionary.\n",
718 "\n",
719 "Here we create two groups to hold data for two different model runs."
720 ]
721 },
722 {
723 "cell_type": "code",
724 "execution_count": 37,
725 "metadata": {
726 "collapsed": false,
727 "internals": {
728 "frag_helper": "fragment_end",
729 "frag_number": 27,
730 "slide_helper": "subslide_end"
731 },
732 "slide_helper": "slide_end",
733 "slideshow": {
734 "slide_type": "fragment"
735 }
736 },
737 "outputs": [
738 {
739 "name": "stdout",
740 "output_type": "stream",
741 "text": [
742 "('model_run1', <type 'netCDF4._netCDF4.Group'>\n",
743 "group /model_run1:\n",
744 " dimensions(sizes): \n",
745 " variables(dimensions): \n",
746 " groups: \n",
747 ")\n",
748 "('model_run2', <type 'netCDF4._netCDF4.Group'>\n",
749 "group /model_run2:\n",
750 " dimensions(sizes): \n",
751 " variables(dimensions): \n",
752 " groups: \n",
753 ")\n"
754 ]
755 }
756 ],
757 "source": [
758 "grp1 = ncfile.createGroup('model_run1')\n",
759 "grp2 = ncfile.createGroup('model_run2')\n",
760 "for grp in ncfile.groups.items():\n",
761 " print(grp)"
762 ]
763 },
764 {
765 "cell_type": "markdown",
766 "metadata": {
767 "internals": {
768 "frag_helper": "fragment_end",
769 "frag_number": 27,
770 "slide_type": "subslide"
771 },
772 "slideshow": {
773 "slide_type": "slide"
774 }
775 },
776 "source": [
777 "Create some dimensions in the root group."
778 ]
779 },
780 {
781 "cell_type": "code",
782 "execution_count": 38,
783 "metadata": {
784 "collapsed": false,
785 "internals": {
786 "frag_helper": "fragment_end",
787 "frag_number": 29
788 },
789 "slideshow": {
790 "slide_type": "fragment"
791 }
792 },
793 "outputs": [],
794 "source": [
795 "lat_dim = ncfile.createDimension('lat', 73) # latitude axis\n",
796 "lon_dim = ncfile.createDimension('lon', 144) # longitude axis\n",
797 "time_dim = ncfile.createDimension('time', None) # unlimited axis (can be appended to)."
798 ]
799 },
800 {
801 "cell_type": "markdown",
802 "metadata": {
803 "internals": {
804 "frag_helper": "fragment_end",
805 "frag_number": 30
806 },
807 "slideshow": {
808 "slide_type": "fragment"
809 }
810 },
811 "source": [
812 "Now create a variable in grp1 and grp2. The library will search recursively upwards in the group tree to find the dimensions (which in this case are defined one level up).\n",
813 "\n",
 814 "- These variables are created with **zlib compression**, another nifty feature of netCDF 4. \n",
815 "- The data are automatically compressed when data is written to the file, and uncompressed when the data is read. \n",
816 "- This can really save disk space, especially when used in conjunction with the [**least_significant_digit**](http://unidata.github.io/netcdf4-python/netCDF4.Dataset-class.html#createVariable) keyword argument, which causes the data to be quantized (truncated) before compression. This makes the compression lossy, but more efficient."
817 ]
818 },
819 {
820 "cell_type": "code",
821 "execution_count": 39,
822 "metadata": {
823 "collapsed": false,
824 "internals": {
825 "frag_helper": "fragment_end",
826 "frag_number": 31,
827 "slide_helper": "subslide_end"
828 },
829 "slide_helper": "slide_end",
830 "slideshow": {
831 "slide_type": "fragment"
832 }
833 },
834 "outputs": [
835 {
836 "name": "stdout",
837 "output_type": "stream",
838 "text": [
839 "('model_run1', <type 'netCDF4._netCDF4.Group'>\n",
840 "group /model_run1:\n",
841 " dimensions(sizes): \n",
842 " variables(dimensions): float64 \u001b[4mtemp\u001b[0m(time,lat,lon)\n",
843 " groups: \n",
844 ")\n",
845 "('model_run2', <type 'netCDF4._netCDF4.Group'>\n",
846 "group /model_run2:\n",
847 " dimensions(sizes): \n",
848 " variables(dimensions): float64 \u001b[4mtemp\u001b[0m(time,lat,lon)\n",
849 " groups: \n",
850 ")\n"
851 ]
852 }
853 ],
854 "source": [
855 "temp1 = grp1.createVariable('temp',np.float64,('time','lat','lon'),zlib=True)\n",
856 "temp2 = grp2.createVariable('temp',np.float64,('time','lat','lon'),zlib=True)\n",
857 "for grp in ncfile.groups.items(): # shows that each group now contains 1 variable\n",
858 " print(grp)"
859 ]
860 },
861 {
862 "cell_type": "markdown",
863 "metadata": {
864 "internals": {
865 "frag_helper": "fragment_end",
866 "frag_number": 31,
867 "slide_type": "subslide"
868 },
869 "slideshow": {
870 "slide_type": "slide"
871 }
872 },
873 "source": [
 874 "## Creating a variable with a compound data type\n",
875 "\n",
 876 "- Compound data types map directly to numpy structured (a.k.a. 'record') arrays. \n",
877 "- Structured arrays are akin to C structs, or derived types in Fortran. \n",
878 "- They allow for the construction of table-like structures composed of combinations of other data types, including other compound types. \n",
879 "- Might be useful for representing multiple parameter values at each point on a grid, or at each time and space location for scattered (point) data. \n",
880 "\n",
881 "Here we create a variable with a compound data type to represent complex data (there is no native complex data type in netCDF). \n",
882 "\n",
883 "- The compound data type is created with the [`createCompoundType`](http://unidata.github.io/netcdf4-python/netCDF4.Dataset-class.html#createCompoundType) method."
884 ]
885 },
886 {
887 "cell_type": "code",
888 "execution_count": 40,
889 "metadata": {
890 "collapsed": false,
891 "internals": {
892 "frag_helper": "fragment_end",
893 "frag_number": 33,
894 "slide_helper": "subslide_end"
895 },
896 "slide_helper": "slide_end",
897 "slideshow": {
898 "slide_type": "fragment"
899 }
900 },
901 "outputs": [
902 {
903 "name": "stdout",
904 "output_type": "stream",
905 "text": [
906 "<type 'netCDF4._netCDF4.Variable'>\n",
907 "compound cmplx_var(time, lat, lon)\n",
908 "compound data type: [('real', '<f8'), ('imag', '<f8')]\n",
909 "path = /model_run1\n",
910 "unlimited dimensions: time\n",
911 "current shape = (1, 73, 144)\n",
912 "\n",
913 "(dtype([('real', '<f8'), ('imag', '<f8')]), (73, 144), (0.578177705604801, 0.18086070805676357))\n"
914 ]
915 }
916 ],
917 "source": [
918 "# create complex128 numpy structured data type\n",
919 "complex128 = np.dtype([('real',np.float64),('imag',np.float64)])\n",
920 "# using this numpy dtype, create a netCDF compound data type object\n",
921 "# the string name can be used as a key to access the datatype from the cmptypes dictionary.\n",
922 "complex128_t = ncfile.createCompoundType(complex128,'complex128')\n",
923 "# create a variable with this data type, write some data to it.\n",
924 "cmplxvar = grp1.createVariable('cmplx_var',complex128_t,('time','lat','lon'))\n",
925 "# write some data to this variable\n",
926 "# first create some complex random data\n",
927 "nlats = len(lat_dim); nlons = len(lon_dim)\n",
928 "data_arr_cmplx = np.random.uniform(size=(nlats,nlons))+1.j*np.random.uniform(size=(nlats,nlons))\n",
929 "# write this complex data to a numpy complex128 structured array\n",
930 "data_arr = np.empty((nlats,nlons),complex128)\n",
931 "data_arr['real'] = data_arr_cmplx.real; data_arr['imag'] = data_arr_cmplx.imag\n",
932 "cmplxvar[0] = data_arr # write the data to the variable (appending to time dimension)\n",
933 "print(cmplxvar)\n",
934 "data_out = cmplxvar[0] # read one value of data back from variable\n",
935 "print(data_out.dtype, data_out.shape, data_out[0,0])"
936 ]
937 },
938 {
939 "cell_type": "markdown",
940 "metadata": {
941 "internals": {
942 "frag_helper": "fragment_end",
943 "frag_number": 33,
944 "slide_type": "subslide"
945 },
946 "slideshow": {
947 "slide_type": "slide"
948 }
949 },
950 "source": [
 951 "## Creating a variable with a variable-length (vlen) data type\n",
952 "\n",
953 "netCDF 4 has support for variable-length or \"ragged\" arrays. These are arrays of variable length sequences having the same type. \n",
954 "\n",
955 "- To create a variable-length data type, use the [`createVLType`](http://unidata.github.io/netcdf4-python/netCDF4.Dataset-class.html#createVLType) method.\n",
956 "- The numpy datatype of the variable-length sequences and the name of the new datatype must be specified. "
957 ]
958 },
959 {
960 "cell_type": "code",
961 "execution_count": 41,
962 "metadata": {
963 "collapsed": false,
964 "internals": {
965 "frag_helper": "fragment_end",
966 "frag_number": 35
967 },
968 "slideshow": {
969 "slide_type": "fragment"
970 }
971 },
972 "outputs": [],
973 "source": [
974 "vlen_t = ncfile.createVLType(np.int64, 'phony_vlen')"
975 ]
976 },
977 {
978 "cell_type": "markdown",
979 "metadata": {
980 "internals": {
981 "frag_helper": "fragment_end",
982 "frag_number": 36
983 },
984 "slideshow": {
985 "slide_type": "fragment"
986 }
987 },
988 "source": [
989 "A new variable can then be created using this datatype."
990 ]
991 },
992 {
993 "cell_type": "code",
994 "execution_count": 42,
995 "metadata": {
996 "collapsed": false,
997 "internals": {
998 "frag_helper": "fragment_end",
999 "frag_number": 37,
1000 "slide_helper": "subslide_end"
1001 },
1002 "slide_helper": "slide_end",
1003 "slideshow": {
1004 "slide_type": "fragment"
1005 }
1006 },
1007 "outputs": [],
1008 "source": [
1009 "vlvar = grp2.createVariable('phony_vlen_var', vlen_t, ('time','lat','lon'))"
1010 ]
1011 },
1012 {
1013 "cell_type": "markdown",
1014 "metadata": {
1015 "internals": {
1016 "frag_helper": "fragment_end",
1017 "frag_number": 37,
1018 "slide_type": "subslide"
1019 },
1020 "slideshow": {
1021 "slide_type": "slide"
1022 }
1023 },
1024 "source": [
1025 "Since there is no native vlen datatype in numpy, vlen arrays are represented in python as object arrays (arrays of dtype `object`). \n",
1026 "\n",
1027 "- These are arrays whose elements are Python object pointers, and can contain any type of python object. \n",
1028 "- For this application, they must contain 1-D numpy arrays all of the same type but of varying length. \n",
1029 "- Fill with 1-D random numpy int64 arrays of random length between 1 and 10."
1030 ]
1031 },
1032 {
1033 "cell_type": "code",
1034 "execution_count": 43,
1035 "metadata": {
1036 "collapsed": false,
1037 "internals": {
1038 "frag_helper": "fragment_end",
1039 "frag_number": 39,
1040 "slide_helper": "subslide_end"
1041 },
1042 "slide_helper": "slide_end",
1043 "slideshow": {
1044 "slide_type": "fragment"
1045 }
1046 },
1047 "outputs": [
1048 {
1049 "name": "stdout",
1050 "output_type": "stream",
1051 "text": [
1052 "<type 'netCDF4._netCDF4.Variable'>\n",
1053 "vlen phony_vlen_var(time, lat, lon)\n",
1054 "vlen data type: int64\n",
1055 "path = /model_run2\n",
1056 "unlimited dimensions: time\n",
1057 "current shape = (1, 73, 144)\n",
1058 "\n",
1059 "('data =\\n', array([[[array([0, 4, 0, 9, 2, 2, 2, 4, 2]), array([7, 5, 4, 4, 9, 8, 0]),\n",
1060 " array([3, 6, 6, 8, 2, 7]), ..., array([5, 0, 0, 8, 8, 1, 5, 3]),\n",
1061 " array([4, 2, 7]), array([0])],\n",
1062 " [array([5, 6, 6, 6, 1, 0, 7]), array([7]),\n",
1063 " array([7, 5, 8, 9, 6, 9, 3]), ..., array([0, 6, 5, 4]),\n",
1064 " array([7, 1, 9, 7, 7, 2]), array([1, 4, 0])],\n",
1065 " [array([4, 3, 1]), array([6, 3, 9, 7, 8]), array([8]), ...,\n",
1066 " array([6, 5, 8, 0]), array([0]), array([0, 9, 6, 2, 4])],\n",
1067 " ..., \n",
1068 " [array([8, 4, 4]), array([4, 1, 6]), array([1, 4, 2, 3, 9]), ...,\n",
1069 " array([9, 1]), array([7, 2, 5, 1, 5, 8, 2]),\n",
1070 " array([2, 9, 9, 1, 4, 6, 3, 5, 2])],\n",
1071 " [array([4, 7, 9, 8, 2, 3, 6, 6]),\n",
1072 " array([1, 4, 1, 6, 1, 1, 2, 3, 9]),\n",
1073 " array([9, 5, 6, 2, 4, 3, 8, 2, 9]), ..., array([9, 5, 7]),\n",
1074 " array([3, 9]), array([4, 2, 6, 9])],\n",
1075 " [array([8, 9, 9, 2, 2, 8, 8, 5]), array([3]),\n",
1076 " array([8, 8, 0, 2, 9, 2, 3, 0, 9]), ..., array([7]),\n",
1077 " array([5, 1, 0, 6, 8, 6]), array([8, 6, 3, 6, 9, 8, 4, 2, 5])]]], dtype=object))\n"
1078 ]
1079 }
1080 ],
1081 "source": [
1082 "vlen_data = np.empty((nlats,nlons),object)\n",
1083 "for i in range(nlons):\n",
1084 " for j in range(nlats):\n",
1085 " size = np.random.randint(1,10,size=1) # random length of sequence\n",
1086 " vlen_data[j,i] = np.random.randint(0,10,size=size)# generate random sequence\n",
1087 "vlvar[0] = vlen_data # append along unlimited dimension (time)\n",
1088 "print(vlvar)\n",
1089 "print('data =\\n',vlvar[:])"
1090 ]
1091 },
1092 {
1093 "cell_type": "markdown",
1094 "metadata": {
1095 "internals": {
1096 "frag_helper": "fragment_end",
1097 "frag_number": 39,
1098 "slide_type": "subslide"
1099 },
1100 "slideshow": {
1101 "slide_type": "slide"
1102 }
1103 },
1104 "source": [
1105 "Close the Dataset and examine the contents with ncdump."
1106 ]
1107 },
1108 {
1109 "cell_type": "code",
1110 "execution_count": 44,
1111 "metadata": {
1112 "collapsed": false,
1113 "internals": {
1114 "frag_helper": "fragment_end",
1115 "frag_number": 41,
1116 "slide_helper": "subslide_end"
1117 },
1118 "slide_helper": "slide_end",
1119 "slideshow": {
1120 "slide_type": "fragment"
1121 }
1122 },
1123 "outputs": [
1124 {
1125 "name": "stdout",
1126 "output_type": "stream",
1127 "text": [
1128 "netcdf new2 {\r\n",
1129 "types:\r\n",
1130 " compound complex128 {\r\n",
1131 " double real ;\r\n",
1132 " double imag ;\r\n",
1133 " }; // complex128\r\n",
1134 " int64(*) phony_vlen ;\r\n",
1135 "dimensions:\r\n",
1136 "\tlat = 73 ;\r\n",
1137 "\tlon = 144 ;\r\n",
1138 "\ttime = UNLIMITED ; // (1 currently)\r\n",
1139 "\r\n",
1140 "group: model_run1 {\r\n",
1141 " variables:\r\n",
1142 " \tdouble temp(time, lat, lon) ;\r\n",
1143 " \tcomplex128 cmplx_var(time, lat, lon) ;\r\n",
1144 " } // group model_run1\r\n",
1145 "\r\n",
1146 "group: model_run2 {\r\n",
1147 " variables:\r\n",
1148 " \tdouble temp(time, lat, lon) ;\r\n",
1149 " \tphony_vlen phony_vlen_var(time, lat, lon) ;\r\n",
1150 " } // group model_run2\r\n",
1151 "}\r\n"
1152 ]
1153 }
1154 ],
1155 "source": [
1156 "ncfile.close()\n",
1157 "!ncdump -h data/new2.nc"
1158 ]
1159 },
1160 {
1161 "cell_type": "markdown",
1162 "metadata": {
1163 "internals": {
1164 "frag_helper": "fragment_end",
1165 "frag_number": 41,
1166 "slide_helper": "subslide_end",
1167 "slide_type": "subslide"
1168 },
1169 "slide_helper": "slide_end",
1170 "slideshow": {
1171 "slide_type": "slide"
1172 }
1173 },
1174 "source": [
1175 "##Other interesting and useful projects using netcdf4-python\n",
1176 "\n",
1177 "- [xarray](https://xarray.pydata.org/en/stable/): N-dimensional variant of the core [pandas](https://pandas.pydata.org) data structure that can operate on netcdf variables.\n",
1178 "- [Iris](https://scitools.org.uk/iris/docs/latest/): a data model to create a data abstraction layer which isolates analysis and visualisation code from data format specifics. Uses netcdf4-python to access netcdf data (can also handle GRIB).\n",
1179 "- [Dask](https://dask.org/): Virtual large arrays (from netcdf variables) with lazy evaluation.\n",
1180 "- [cf-python](https://cfpython.bitbucket.io/): Implements the [CF](http://cfconventions.org) data model for the reading, writing and processing of data and metadata. "
1181 ]
1182 }
1183 ],
1184 "metadata": {
1185 "kernelspec": {
1186 "display_name": "Python 2",
1187 "language": "python",
1188 "name": "python2"
1189 },
1190 "language_info": {
1191 "codemirror_mode": {
1192 "name": "ipython",
1193 "version": 2
1194 },
1195 "file_extension": ".py",
1196 "mimetype": "text/x-python",
1197 "name": "python",
1198 "nbconvert_exporter": "python",
1199 "pygments_lexer": "ipython2",
1200 "version": "2.7.9"
1201 }
1202 },
1203 "nbformat": 4,
1204 "nbformat_minor": 0
1205 }
0 # Rename this file to setup.cfg to set build options.
1 # Follow instructions below for editing.
20 [options]
3 # if true, the nc-config script (installed with netcdf 4.1.2 and higher)
4 # will be used to determine the locations of required libraries.
5 # Usually, nothing else is needed.
6 use_ncconfig=True
7 # path to nc-config script (use if not found in unix PATH).
8 #ncconfig=/usr/local/bin/nc-config
1 use_ncconfig = True
2
93 [directories]
10 #
11 # If nc-config doesn't do the trick, you can specify the locations
12 # of the libraries and headers manually below
13 #
14 # uncomment and set to netCDF install location.
15 # Include files should be located in netCDF4_dir/include and
16 # the library should be located in netCDF4_dir/lib.
17 # If the libraries and include files are installed in separate locations,
18 # use netCDF4_libdir and netCDF4_incdir to specify the locations
19 # separately.
20 #netCDF4_dir = /usr/local
21 # uncomment and set to HDF5 install location.
22 # Include files should be located in HDF5_dir/include and
23 # the library should be located in HDF5_dir/lib.
24 # If the libraries and include files are installed in separate locations,
25 # use HDF5_libdir and HDF5_incdir to specify the locations
26 # separately.
27 #HDF5_dir = /usr/local
28 # if HDF5 was built with szip support as a static lib,
29 # uncomment and set to szip lib install location.
30 # If the libraries and include files are installed in separate locations,
31 # use szip_libdir and szip_incdir.
32 #szip_dir = /usr/local
33 # if netcdf lib was build statically with HDF4 support,
34 # uncomment and set to hdf4 lib (libmfhdf and libdf) nstall location.
35 # If the libraries and include files are installed in separate locations,
36 # use hdf4_libdir and hdf4_incdir.
37 #hdf4_dir = /usr/local
38 # if netcdf lib was build statically with HDF4 support,
39 # uncomment and set to jpeg lib install location (hdf4 needs jpeg).
40 # If the libraries and include files are installed in separate locations,
41 # use jpeg_libdir and jpeg_incdir.
42 #jpeg_dir = /usr/local
43 # if netcdf lib was build statically with OpenDAP support,
44 # uncomment and set to curl lib install location.
45 # If the libraries and include files are installed in separate locations,
46 # use curl_libdir and curl_incdir.
47 #curl_dir = /usr/local
48 # location of mpi.h (needed for parallel support)
49 #mpi_incdir=/opt/local/include/mpich-mp
4
505 [check-manifest]
51 ignore =
52 .gitignore
53 README.gh-pages
54 README.release
55 examples/data/*nc
56 examples/*ipynb
6 ignore =
7 .gitignore
8 README.gh-pages
9 README.release
10 examples/data/*nc
11 examples/*ipynb
12
13 [egg_info]
14 tag_build =
15 tag_date = 0
16
0 Metadata-Version: 2.1
1 Name: netCDF4
2 Version: 1.6.1
3 Summary: Provides an object-oriented python interface to the netCDF version 4 library.
4 Home-page: http://github.com/Unidata/netcdf4-python
5 Author: Jeff Whitaker
6 Author-email: jeffrey.s.whitaker@noaa.gov
7 License: License :: OSI Approved :: MIT License
8 Download-URL: http://python.org/pypi/netCDF4
9 Keywords: numpy,netcdf,data,science,network,oceanography,meteorology,climate
10 Platform: any
11 Classifier: Development Status :: 3 - Alpha
12 Classifier: Programming Language :: Python :: 3
13 Classifier: Programming Language :: Python :: 3.6
14 Classifier: Programming Language :: Python :: 3.7
15 Classifier: Programming Language :: Python :: 3.8
16 Classifier: Intended Audience :: Science/Research
17 Classifier: License :: OSI Approved :: MIT License
18 Classifier: Topic :: Software Development :: Libraries :: Python Modules
19 Classifier: Topic :: System :: Archiving :: Compression
20 Classifier: Operating System :: OS Independent
21 License-File: LICENSE
22
23 netCDF version 4 has many features not found in earlier versions of the library, such as hierarchical groups, zlib compression, multiple unlimited dimensions, and new data types. It is implemented on top of HDF5. This module implements most of the new features, and can read and write netCDF files compatible with older versions of the library. The API is modelled after Scientific.IO.NetCDF, and should be familiar to users of that module.
24
25 This project is hosted on a `GitHub repository <https://github.com/Unidata/netcdf4-python>`_ where you may access the most up-to-date source.
26
0 Changelog
1 LICENSE
2 MANIFEST.in
3 README.htmldocs
4 README.md
5 README.release
6 README.wheels.md
7 checkversion.py
8 create_docs.sh
9 setup.cfg
10 setup.py
11 docs/index.html
12 examples/README.md
13 examples/bench.py
14 examples/bench_compress.py
15 examples/bench_compress2.py
16 examples/bench_compress3.py
17 examples/bench_compress4.py
18 examples/bench_diskless.py
19 examples/json_att.py
20 examples/mpi_example.py
21 examples/mpi_example_compressed.py
22 examples/subset.py
23 examples/test_stringarr.py
24 examples/threaded_read.py
25 examples/tutorial.py
26 include/membuf.pyx
27 include/mpi-compat.h
28 include/netCDF4.pxi
29 man/nc3tonc4.1
30 man/nc4tonc3.1
31 man/ncinfo.1
32 src/netCDF4/__init__.py
33 src/netCDF4/_netCDF4.pyx
34 src/netCDF4/utils.py
35 src/netCDF4.egg-info/PKG-INFO
36 src/netCDF4.egg-info/SOURCES.txt
37 src/netCDF4.egg-info/dependency_links.txt
38 src/netCDF4.egg-info/entry_points.txt
39 src/netCDF4.egg-info/requires.txt
40 src/netCDF4.egg-info/top_level.txt
41 src/netCDF4/plugins/empty.txt
42 test/20171025_2056.Cloud_Top_Height.nc
43 test/CRM032_test1.nc
44 test/issue1152.nc
45 test/issue671.nc
46 test/issue672.nc
47 test/netcdf_dummy_file.nc
48 test/run_all.py
49 test/test_gold.nc
50 test/tst_Unsigned.py
51 test/tst_alignment.py
52 test/tst_atts.py
53 test/tst_cdf5.py
54 test/tst_cdl.py
55 test/tst_chunk_cache.py
56 test/tst_compound_alignment.py
57 test/tst_compoundatt.py
58 test/tst_compoundvar.py
59 test/tst_compression.py
60 test/tst_compression_blosc.py
61 test/tst_compression_bzip2.py
62 test/tst_compression_quant.py
63 test/tst_compression_szip.py
64 test/tst_compression_zstd.py
65 test/tst_create_mem.py
66 test/tst_dap.py
67 test/tst_dims.py
68 test/tst_diskless.py
69 test/tst_endian.py
70 test/tst_enum.py
71 test/tst_fancyslicing.py
72 test/tst_filepath.py
73 test/tst_get_variables_by_attributes.py
74 test/tst_grps.py
75 test/tst_grps2.py
76 test/tst_issue908.py
77 test/tst_masked.py
78 test/tst_masked2.py
79 test/tst_masked3.py
80 test/tst_masked4.py
81 test/tst_masked5.py
82 test/tst_masked6.py
83 test/tst_multifile.py
84 test/tst_multifile2.py
85 test/tst_open_mem.py
86 test/tst_refcount.py
87 test/tst_rename.py
88 test/tst_scalarvar.py
89 test/tst_scaled.py
90 test/tst_shape.py
91 test/tst_slicing.py
92 test/tst_stringarr.py
93 test/tst_types.py
94 test/tst_unicode.py
95 test/tst_unicodeatt.py
96 test/tst_unlimdim.py
97 test/tst_utils.py
98 test/tst_vars.py
99 test/tst_vlen.py
100 test/ubyte.nc
0 [console_scripts]
1 nc3tonc4 = netCDF4.utils:nc3tonc4
2 nc4tonc3 = netCDF4.utils:nc4tonc3
3 ncinfo = netCDF4.utils:ncinfo
4
3939
4040 def setUp(self):
4141 self.file = FILE_NAME
42 f = netCDF4.Dataset(self.file,'w')
43 # try to set a dataset attribute with one of the reserved names.
44 f.setncattr('file_format','netcdf4_format')
45 # test attribute renaming
46 f.stratt_tmp = STRATT
47 f.renameAttribute('stratt_tmp','stratt')
48 f.emptystratt = EMPTYSTRATT
49 f.intatt = INTATT
50 f.floatatt = FLOATATT
51 f.seqatt = SEQATT
52 # sequences of strings converted to a single string.
53 f.stringseqatt = STRINGSEQATT
54 f.setncattr_string('stringseqatt_array',STRINGSEQATT) # array of NC_STRING
55 g = f.createGroup(GROUP_NAME)
56 f.createDimension(DIM1_NAME, DIM1_LEN)
57 f.createDimension(DIM2_NAME, DIM2_LEN)
58 f.createDimension(DIM3_NAME, DIM3_LEN)
59 g.createDimension(DIM1_NAME, DIM1_LEN)
60 g.createDimension(DIM2_NAME, DIM2_LEN)
61 g.createDimension(DIM3_NAME, DIM3_LEN)
62 g.stratt_tmp = STRATT
63 g.renameAttribute('stratt_tmp','stratt')
64 g.emptystratt = EMPTYSTRATT
65 g.intatt = INTATT
66 g.floatatt = FLOATATT
67 g.seqatt = SEQATT
68 g.stringseqatt = STRINGSEQATT
69 if netCDF4.__version__ > "1.4.2":
70 with self.assertRaises(ValueError):
71 g.arrayatt = [[1, 2], [3, 4]] # issue #841
72 g.setncattr_string('stringseqatt_array',STRINGSEQATT) # array of NC_STRING
73 v = f.createVariable(VAR_NAME, 'f8',(DIM1_NAME,DIM2_NAME,DIM3_NAME))
74 # try to set a variable attribute with one of the reserved names.
75 v.setncattr('ndim','three')
76 v.setncatts({'foo': 1})
77 v.setncatts(OrderedDict(bar=2))
78 v.stratt_tmp = STRATT
79 v.renameAttribute('stratt_tmp','stratt')
80 v.emptystratt = EMPTYSTRATT
81 v.intatt = INTATT
82 v.floatatt = FLOATATT
83 v.seqatt = SEQATT
84 v.stringseqatt = STRINGSEQATT
85 v.setncattr_string('stringseqatt_array',STRINGSEQATT) # array of NC_STRING
86 v1 = g.createVariable(VAR_NAME, 'f8',(DIM1_NAME,DIM2_NAME,DIM3_NAME))
87 v1.stratt = STRATT
88 v1.emptystratt = EMPTYSTRATT
89 v1.intatt = INTATT
90 v1.floatatt = FLOATATT
91 v1.seqatt = SEQATT
92 v1.stringseqatt = STRINGSEQATT
93 v1.setncattr_string('stringseqatt_array',STRINGSEQATT) # array of NC_STRING
94 # issue #959: should not be able to set _FillValue after var creation
95 try:
96 v1._FillValue(-999.)
97 except AttributeError:
98 pass
99 else:
100 raise ValueError('This test should have failed.')
101 try:
102 v1.setncattr('_FillValue',-999.)
103 except AttributeError:
104 pass
105 else:
106 raise ValueError('This test should have failed.')
107 # issue #485 (triggers segfault in C lib
108 # with version 1.2.1 without pull request #486)
109 f.foo = np.array('bar','S')
110 f.foo = np.array('bar','U')
111 # issue #529 write string attribute as NC_CHAR unless
112 # it can't be decoded to ascii. Add setncattr_string
113 # method to force NC_STRING.
114 f.charatt = 'foo' # will be written as NC_CHAR
115 f.setncattr_string('stringatt','bar') # NC_STRING
116 f.cafe = 'caf\xe9' # NC_STRING
117 f.batt = 'caf\xe9'.encode('utf-8') #NC_CHAR
118 v.setncattr_string('stringatt','bar') # NC_STRING
119 # issue #882 - provide an option to always string attribute
120 # as NC_STRINGs. Testing various approaches to setting text attributes...
121 f.set_ncstring_attrs(True)
122 f.stringatt_ncstr = 'foo' # will now be written as NC_STRING
123 f.setncattr_string('stringatt_ncstr','bar') # NC_STRING anyway
124 f.caf_ncstr = 'caf\xe9' # NC_STRING anyway
125 f.bat_ncstr = 'caf\xe9'.encode('utf-8') # now NC_STRING
126 g.stratt_ncstr = STRATT # now NC_STRING
127 #g.renameAttribute('stratt_tmp','stratt_ncstr')
128 v.setncattr_string('stringatt_ncstr','bar') # NC_STRING anyway
129 v.stratt_ncstr = STRATT
130 v1.emptystratt_ncstr = EMPTYSTRATT
131 f.close()
42 with netCDF4.Dataset(self.file,'w') as f:
43 # try to set a dataset attribute with one of the reserved names.
44 f.setncattr('file_format','netcdf4_format')
45 # test attribute renaming
46 f.stratt_tmp = STRATT
47 f.renameAttribute('stratt_tmp','stratt')
48 f.emptystratt = EMPTYSTRATT
49 f.intatt = INTATT
50 f.floatatt = FLOATATT
51 f.seqatt = SEQATT
52 # sequences of strings converted to a single string.
53 f.stringseqatt = STRINGSEQATT
54 f.setncattr_string('stringseqatt_array',STRINGSEQATT) # array of NC_STRING
55 g = f.createGroup(GROUP_NAME)
56 f.createDimension(DIM1_NAME, DIM1_LEN)
57 f.createDimension(DIM2_NAME, DIM2_LEN)
58 f.createDimension(DIM3_NAME, DIM3_LEN)
59 g.createDimension(DIM1_NAME, DIM1_LEN)
60 g.createDimension(DIM2_NAME, DIM2_LEN)
61 g.createDimension(DIM3_NAME, DIM3_LEN)
62 g.stratt_tmp = STRATT
63 g.renameAttribute('stratt_tmp','stratt')
64 g.emptystratt = EMPTYSTRATT
65 g.intatt = INTATT
66 g.floatatt = FLOATATT
67 g.seqatt = SEQATT
68 g.stringseqatt = STRINGSEQATT
69 if netCDF4.__version__ > "1.4.2":
70 with self.assertRaises(ValueError):
71 g.arrayatt = [[1, 2], [3, 4]] # issue #841
72 g.setncattr_string('stringseqatt_array',STRINGSEQATT) # array of NC_STRING
73 v = f.createVariable(VAR_NAME, 'f8',(DIM1_NAME,DIM2_NAME,DIM3_NAME))
74 # try to set a variable attribute with one of the reserved names.
75 v.setncattr('ndim','three')
76 v.setncatts({'foo': 1})
77 v.setncatts(OrderedDict(bar=2))
78 v.stratt_tmp = STRATT
79 v.renameAttribute('stratt_tmp','stratt')
80 v.emptystratt = EMPTYSTRATT
81 v.intatt = INTATT
82 v.floatatt = FLOATATT
83 v.seqatt = SEQATT
84 v.stringseqatt = STRINGSEQATT
85 v.setncattr_string('stringseqatt_array',STRINGSEQATT) # array of NC_STRING
86 v1 = g.createVariable(VAR_NAME, 'f8',(DIM1_NAME,DIM2_NAME,DIM3_NAME))
87 v1.stratt = STRATT
88 v1.emptystratt = EMPTYSTRATT
89 v1.intatt = INTATT
90 v1.floatatt = FLOATATT
91 v1.seqatt = SEQATT
92 v1.stringseqatt = STRINGSEQATT
93 v1.setncattr_string('stringseqatt_array',STRINGSEQATT) # array of NC_STRING
94 # issue #959: should not be able to set _FillValue after var creation
95 try:
96 v1._FillValue(-999.)
97 except AttributeError:
98 pass
99 else:
100 raise ValueError('This test should have failed.')
101 try:
102 v1.setncattr('_FillValue',-999.)
103 except AttributeError:
104 pass
105 else:
106 raise ValueError('This test should have failed.')
107 # issue #485 (triggers segfault in C lib
108 # with version 1.2.1 without pull request #486)
109 f.foo = np.array('bar','S')
110 f.foo = np.array('bar','U')
111 # issue #529 write string attribute as NC_CHAR unless
112 # it can't be decoded to ascii. Add setncattr_string
113 # method to force NC_STRING.
114 f.charatt = 'foo' # will be written as NC_CHAR
115 f.setncattr_string('stringatt','bar') # NC_STRING
116 f.cafe = 'caf\xe9' # NC_STRING
117 f.batt = 'caf\xe9'.encode('utf-8') #NC_CHAR
118 v.setncattr_string('stringatt','bar') # NC_STRING
119 # issue #882 - provide an option to always string attribute
120 # as NC_STRINGs. Testing various approaches to setting text attributes...
121 f.set_ncstring_attrs(True)
122 f.stringatt_ncstr = 'foo' # will now be written as NC_STRING
123 f.setncattr_string('stringatt_ncstr','bar') # NC_STRING anyway
124 f.caf_ncstr = 'caf\xe9' # NC_STRING anyway
125 f.bat_ncstr = 'caf\xe9'.encode('utf-8') # now NC_STRING
126 g.stratt_ncstr = STRATT # now NC_STRING
127 #g.renameAttribute('stratt_tmp','stratt_ncstr')
128 v.setncattr_string('stringatt_ncstr','bar') # NC_STRING anyway
129 v.stratt_ncstr = STRATT
130 v1.emptystratt_ncstr = EMPTYSTRATT
132131
133132 def tearDown(self):
134133 # Remove the temporary files
137136
138137 def runTest(self):
139138 """testing attributes"""
140 f = netCDF4.Dataset(self.file, 'r')
141 v = f.variables[VAR_NAME]
142 g = f.groups[GROUP_NAME]
143 v1 = g.variables[VAR_NAME]
144 # check attributes in root group.
145 # global attributes.
146 # check __dict__ method for accessing all netCDF attributes.
147 for key,val in ATTDICT.items():
148 if type(val) == np.ndarray:
149 assert f.__dict__[key].tolist() == val.tolist()
150 else:
151 assert f.__dict__[key] == val
152 # check accessing individual attributes.
153 assert f.intatt == INTATT
154 assert f.floatatt == FLOATATT
155 assert f.stratt == STRATT
156 assert f.emptystratt == EMPTYSTRATT
157 assert f.seqatt.tolist() == SEQATT.tolist()
158 #assert f.stringseqatt == ''.join(STRINGSEQATT) # issue 770
159 assert f.stringseqatt == STRINGSEQATT
160 assert f.stringseqatt_array == STRINGSEQATT
161 assert f.getncattr('file_format') == 'netcdf4_format'
162 # variable attributes.
163 # check __dict__ method for accessing all netCDF attributes.
164 for key,val in ATTDICT.items():
165 if type(val) == np.ndarray:
166 assert v.__dict__[key].tolist() == val.tolist()
167 else:
168 assert v.__dict__[key] == val
169 # check accessing individual attributes.
170 assert v.intatt == INTATT
171 assert v.floatatt == FLOATATT
172 assert v.stratt == STRATT
173 assert v.seqatt.tolist() == SEQATT.tolist()
174 #assert v.stringseqatt == ''.join(STRINGSEQATT) # issue 770
175 assert v.stringseqatt == STRINGSEQATT
176 assert v.stringseqatt_array == STRINGSEQATT
177 assert v.getncattr('ndim') == 'three'
178 assert v.getncattr('foo') == 1
179 assert v.getncattr('bar') == 2
180 # check type of attributes using ncdump (issue #529)
181 if not os.getenv('NO_CDL'):
182 ncdump_output = f.tocdl()
183 for line in ncdump_output:
184 line = line.strip('\t\n\r')
185 line = line.strip()# Must be done another time for group variables
186 if "stringatt" in line: assert line.startswith('string')
187 if "charatt" in line: assert line.startswith(':')
188 if "cafe" in line: assert line.startswith('string')
189 if "batt" in line: assert line.startswith(':')
190 if "_ncstr" in line: assert line.startswith('string')
191 # check attributes in subgroup.
192 # global attributes.
193 for key,val in ATTDICT.items():
194 if type(val) == np.ndarray:
195 assert g.__dict__[key].tolist() == val.tolist()
196 else:
197 assert g.__dict__[key] == val
198 assert g.intatt == INTATT
199 assert g.floatatt == FLOATATT
200 assert g.stratt == STRATT
201 assert g.emptystratt == EMPTYSTRATT
202 assert g.seqatt.tolist() == SEQATT.tolist()
203 #assert g.stringseqatt == ''.join(STRINGSEQATT) # issue 770
204 assert g.stringseqatt == STRINGSEQATT
205 assert g.stringseqatt_array == STRINGSEQATT
206 for key,val in ATTDICT.items():
207 if type(val) == np.ndarray:
208 assert v1.__dict__[key].tolist() == val.tolist()
209 else:
210 assert v1.__dict__[key] == val
211 assert v1.intatt == INTATT
212 assert v1.floatatt == FLOATATT
213 assert v1.stratt == STRATT
214 assert v1.emptystratt == EMPTYSTRATT
215 assert v1.seqatt.tolist() == SEQATT.tolist()
216 #assert v1.stringseqatt == ''.join(STRINGSEQATT) # issue 770
217 assert v1.stringseqatt == STRINGSEQATT
218 assert v1.stringseqatt_array == STRINGSEQATT
219 assert getattr(v1,'nonexistantatt',None) == None
220 f.close()
139 with netCDF4.Dataset(self.file, 'r') as f:
140 v = f.variables[VAR_NAME]
141 g = f.groups[GROUP_NAME]
142 v1 = g.variables[VAR_NAME]
143 # check attributes in root group.
144 # global attributes.
145 # check __dict__ method for accessing all netCDF attributes.
146 for key,val in ATTDICT.items():
147 if type(val) == np.ndarray:
148 assert f.__dict__[key].tolist() == val.tolist()
149 else:
150 assert f.__dict__[key] == val
151 # check accessing individual attributes.
152 assert f.intatt == INTATT
153 assert f.floatatt == FLOATATT
154 assert f.stratt == STRATT
155 assert f.emptystratt == EMPTYSTRATT
156 assert f.seqatt.tolist() == SEQATT.tolist()
157 #assert f.stringseqatt == ''.join(STRINGSEQATT) # issue 770
158 assert f.stringseqatt == STRINGSEQATT
159 assert f.stringseqatt_array == STRINGSEQATT
160 assert f.getncattr('file_format') == 'netcdf4_format'
161 # variable attributes.
162 # check __dict__ method for accessing all netCDF attributes.
163 for key,val in ATTDICT.items():
164 if type(val) == np.ndarray:
165 assert v.__dict__[key].tolist() == val.tolist()
166 else:
167 assert v.__dict__[key] == val
168 # check accessing individual attributes.
169 assert v.intatt == INTATT
170 assert v.floatatt == FLOATATT
171 assert v.stratt == STRATT
172 assert v.seqatt.tolist() == SEQATT.tolist()
173 #assert v.stringseqatt == ''.join(STRINGSEQATT) # issue 770
174 assert v.stringseqatt == STRINGSEQATT
175 assert v.stringseqatt_array == STRINGSEQATT
176 assert v.getncattr('ndim') == 'three'
177 assert v.getncattr('foo') == 1
178 assert v.getncattr('bar') == 2
179 # check type of attributes using ncdump (issue #529)
180 if not os.getenv('NO_CDL'):
181 ncdump_output = f.tocdl()
182 for line in ncdump_output:
183 line = line.strip('\t\n\r')
184 line = line.strip()# Must be done another time for group variables
185 if "stringatt" in line: assert line.startswith('string')
186 if "charatt" in line: assert line.startswith(':')
187 if "cafe" in line: assert line.startswith('string')
188 if "batt" in line: assert line.startswith(':')
189 if "_ncstr" in line: assert line.startswith('string')
190 # check attributes in subgroup.
191 # global attributes.
192 for key,val in ATTDICT.items():
193 if type(val) == np.ndarray:
194 assert g.__dict__[key].tolist() == val.tolist()
195 else:
196 assert g.__dict__[key] == val
197 assert g.intatt == INTATT
198 assert g.floatatt == FLOATATT
199 assert g.stratt == STRATT
200 assert g.emptystratt == EMPTYSTRATT
201 assert g.seqatt.tolist() == SEQATT.tolist()
202 #assert g.stringseqatt == ''.join(STRINGSEQATT) # issue 770
203 assert g.stringseqatt == STRINGSEQATT
204 assert g.stringseqatt_array == STRINGSEQATT
205 for key,val in ATTDICT.items():
206 if type(val) == np.ndarray:
207 assert v1.__dict__[key].tolist() == val.tolist()
208 else:
209 assert v1.__dict__[key] == val
210 assert v1.intatt == INTATT
211 assert v1.floatatt == FLOATATT
212 assert v1.stratt == STRATT
213 assert v1.emptystratt == EMPTYSTRATT
214 assert v1.seqatt.tolist() == SEQATT.tolist()
215 #assert v1.stringseqatt == ''.join(STRINGSEQATT) # issue 770
216 assert v1.stringseqatt == STRINGSEQATT
217 assert v1.stringseqatt_array == STRINGSEQATT
218 assert getattr(v1,'nonexistantatt',None) == None
219
221220 # issue 915 empty string attribute (ncdump reports 'NIL')
222221 f = netCDF4.Dataset('test_gold.nc')
223222 assert f['RADIANCE'].VAR_NOTES == ""