New upstream version 0.5.5
Ole Streicher
6 years ago
0 | Maintainer and project lead: | |
1 | ||
2 | - Kolja Glogowski <kolja@pixie.de> | |
3 | ||
4 | ||
5 | Contributors: | |
6 | ||
7 | - Monica Bobra | |
8 | - Arthur Amezcua | |
9 | - David Perez-Suarez |
0 | Copyright (c) 2014-2016 Kolja Glogowski and others. | |
1 | See AUTHORS.txt for a list of contributors. | |
2 | ||
3 | Permission is hereby granted, free of charge, to any person obtaining a copy | |
4 | of this software and associated documentation files (the "Software"), to deal | |
5 | in the Software without restriction, including without limitation the rights | |
6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | |
7 | copies of the Software, and to permit persons to whom the Software is | |
8 | furnished to do so, subject to the following conditions: | |
9 | ||
10 | The above copyright notice and this permission notice shall be included in | |
11 | all copies or substantial portions of the Software. | |
12 | ||
13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |
14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |
15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | |
16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | |
17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | |
18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN | |
19 | THE SOFTWARE. |
0 | include MANIFEST.in | |
1 | include *.txt *.md *.rst | |
2 | include setup.py setup.cfg | |
3 | include tox.ini | |
4 | include versioneer.py | |
5 | recursive-include drms *.py | |
6 | recursive-include examples *.py | |
7 | recursive-include doc * | |
8 | prune doc/build | |
9 | prune doc/generated | |
10 | prune */__pycache__ | |
11 | global-exclude *.pyc *~ *.bak *.swp | |
12 | include drms/_version.py |
0 | Metadata-Version: 1.1 | |
1 | Name: drms | |
2 | Version: 0.5.5 | |
3 | Summary: Access HMI, AIA and MDI data with Python | |
4 | Home-page: https://github.com/kbg/drms | |
5 | Author: Kolja Glogowski | |
6 | Author-email: "Kolja Glogowski" <kolja@pixie.de> | |
7 | License: MIT | |
8 | Description-Content-Type: UNKNOWN | |
9 | Description: ==== | |
10 | drms | |
11 | ==== | |
12 | ||
13 | `Docs <http://drms.readthedocs.io/>`_ | | |
14 | `Tutorial <http://drms.readthedocs.io/en/stable/tutorial.html>`_ | | |
15 | `Github <https://github.com/kbg/drms>`_ | | |
16 | `PyPI <https://pypi.python.org/pypi/drms>`_ | |
17 | ||
18 | The ``drms`` module provides an easy-to-use interface for accessing HMI, | |
19 | AIA and MDI data with Python. It uses the publicly accessible | |
20 | `JSOC <http://jsoc.stanford.edu/>`_ DRMS server by default, but can also | |
21 | be used with local `NetDRMS <http://jsoc.stanford.edu/netdrms/>`_ sites. | |
22 | More information, including a detailed tutorial, is available on | |
23 | `Read the Docs <http://drms.readthedocs.io/>`_. | |
24 | ||
25 | ||
26 | Requirements | |
27 | ------------ | |
28 | ||
29 | The ``drms`` module supports Python 2.7 and Python 3.4 or newer. It | |
30 | requires the following Python packages: | |
31 | ||
32 | - NumPy, version 1.9.0 or newer | |
33 | - Pandas, version 0.14.1 or newer | |
34 | - Six, version 1.8.0 or newer | |
35 | ||
36 | The module might also work with earlier versions, but it has not been | |
37 | tested with any versions older than the ones listed above. | |
38 | ||
39 | ||
40 | Installation | |
41 | ------------ | |
42 | ||
43 | The ``drms`` Python package can be installed from | |
44 | `PyPI <https://pypi.python.org/pypi/drms>`_ using | |
45 | ||
46 | :: | |
47 | ||
48 | pip install drms | |
49 | ||
50 | To upgrade an already existing installation to the latest release, you | |
51 | can write:: | |
52 | ||
53 | pip install -U drms | |
54 | ||
55 | ||
56 | Note: If you do not use a Python distribution, like | |
57 | `Anaconda <https://www.continuum.io/downloads>`_, | |
58 | and did not create an isolated Python environment using | |
59 | `Virtualenv <https://virtualenv.pypa.io/en/stable/>`_, | |
60 | you might need to add ``--user`` to the ``pip`` command:: | |
61 | ||
62 | pip install --user drms | |
63 | ||
64 | ||
65 | Acknowledgements | |
66 | ---------------- | |
67 | ||
68 | The main author of this project has received funding from the European | |
69 | Research Council under the European Union's Seventh Framework Programme | |
70 | (FP/2007-2013) / ERC Grant Agreement no. 307117. | |
71 | ||
72 | See AUTHORS.txt for a list of contributors. | |
73 | ||
74 | Platform: any | |
75 | Classifier: Intended Audience :: Developers | |
76 | Classifier: Intended Audience :: Science/Research | |
77 | Classifier: License :: OSI Approved :: MIT License | |
78 | Classifier: Operating System :: OS Independent | |
79 | Classifier: Programming Language :: Python | |
80 | Classifier: Programming Language :: Python :: 2 | |
81 | Classifier: Programming Language :: Python :: 2.7 | |
82 | Classifier: Programming Language :: Python :: 3 | |
83 | Classifier: Programming Language :: Python :: 3.4 | |
84 | Classifier: Programming Language :: Python :: 3.5 | |
85 | Classifier: Programming Language :: Python :: 3.6 | |
86 | Classifier: Topic :: Scientific/Engineering :: Astronomy |
0 | ==== | |
1 | drms | |
2 | ==== | |
3 | ||
4 | `Docs <http://drms.readthedocs.io/>`_ | | |
5 | `Tutorial <http://drms.readthedocs.io/en/stable/tutorial.html>`_ | | |
6 | `Github <https://github.com/kbg/drms>`_ | | |
7 | `PyPI <https://pypi.python.org/pypi/drms>`_ | |
8 | ||
9 | The ``drms`` module provides an easy-to-use interface for accessing HMI, | |
10 | AIA and MDI data with Python. It uses the publicly accessible | |
11 | `JSOC <http://jsoc.stanford.edu/>`_ DRMS server by default, but can also | |
12 | be used with local `NetDRMS <http://jsoc.stanford.edu/netdrms/>`_ sites. | |
13 | More information, including a detailed tutorial, is available on | |
14 | `Read the Docs <http://drms.readthedocs.io/>`_. | |
15 | ||
16 | ||
17 | Requirements | |
18 | ------------ | |
19 | ||
20 | The ``drms`` module supports Python 2.7 and Python 3.4 or newer. It | |
21 | requires the following Python packages: | |
22 | ||
23 | - NumPy, version 1.9.0 or newer | |
24 | - Pandas, version 0.14.1 or newer | |
25 | - Six, version 1.8.0 or newer | |
26 | ||
27 | The module might also work with earlier versions, but it has not been | |
28 | tested with any versions older than the ones listed above. | |
29 | ||
30 | ||
31 | Installation | |
32 | ------------ | |
33 | ||
34 | The ``drms`` Python package can be installed from | |
35 | `PyPI <https://pypi.python.org/pypi/drms>`_ using | |
36 | ||
37 | :: | |
38 | ||
39 | pip install drms | |
40 | ||
41 | To upgrade an already existing installation to the latest release, you | |
42 | can write:: | |
43 | ||
44 | pip install -U drms | |
45 | ||
46 | ||
47 | Note: If you do not use a Python distribution, like | |
48 | `Anaconda <https://www.continuum.io/downloads>`_, | |
49 | and did not create an isolated Python environment using | |
50 | `Virtualenv <https://virtualenv.pypa.io/en/stable/>`_, | |
51 | you might need to add ``--user`` to the ``pip`` command:: | |
52 | ||
53 | pip install --user drms | |
54 | ||
55 | ||
56 | Acknowledgements | |
57 | ---------------- | |
58 | ||
59 | The main author of this project has received funding from the European | |
60 | Research Council under the European Union's Seventh Framework Programme | |
61 | (FP/2007-2013) / ERC Grant Agreement no. 307117. | |
62 | ||
63 | See AUTHORS.txt for a list of contributors. |
# Makefile for Sphinx documentation
#
# Standard sphinx-quickstart Makefile: every target wraps a single
# $(SPHINXBUILD) invocation with a specific builder (-b <builder>).

# You can set these variables from the command line.
SPHINXOPTS    =
SPHINXBUILD   = sphinx-build
PAPER         =
BUILDDIR      = build

# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don\'t have Sphinx installed, grab it from http://sphinx-doc.org/)
endif

# Internal variables.
PAPEROPT_a4     = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
# -d keeps the pickled doctrees in a shared cache so builders can reuse them.
ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .

.PHONY: help
help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  singlehtml to make a single large HTML file"
	@echo "  pickle     to make pickle files"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  applehelp  to make an Apple Help Book"
	@echo "  devhelp    to make HTML files and a Devhelp project"
	@echo "  epub       to make an epub"
	@echo "  epub3      to make an epub3"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
	@echo "  text       to make text files"
	@echo "  man        to make manual pages"
	@echo "  texinfo    to make Texinfo files"
	@echo "  info       to make Texinfo files and run them through makeinfo"
	@echo "  gettext    to make PO message catalogs"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  xml        to make Docutils-native XML files"
	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
	@echo "  linkcheck  to check all external links for integrity"
	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
	@echo "  coverage   to run coverage check of the documentation (if enabled)"
	@echo "  dummy      to check syntax errors of document sources"

# Remove all build output; "generated" holds the autosummary API stubs
# (see "prune doc/generated" in MANIFEST.in).
.PHONY: clean
clean:
	rm -rf $(BUILDDIR)/* generated

.PHONY: html
html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

.PHONY: dirhtml
dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

.PHONY: singlehtml
singlehtml:
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."

.PHONY: pickle
pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."

.PHONY: json
json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."

.PHONY: htmlhelp
htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	      ".hhp project file in $(BUILDDIR)/htmlhelp."

.PHONY: qthelp
qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/drms.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/drms.qhc"

.PHONY: applehelp
applehelp:
	$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
	@echo
	@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
	@echo "N.B. You won't be able to view it unless you put it in" \
	      "~/Library/Documentation/Help or install it in your application" \
	      "bundle."

.PHONY: devhelp
devhelp:
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/drms"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/drms"
	@echo "# devhelp"

.PHONY: epub
epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."

.PHONY: epub3
epub3:
	$(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
	@echo
	@echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."

.PHONY: latex
latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	      "(use \`make latexpdf' here to do that automatically)."

# latexpdf/latexpdfja build the LaTeX sources, then chain into the
# Makefile that Sphinx generates inside $(BUILDDIR)/latex.
.PHONY: latexpdf
latexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

.PHONY: latexpdfja
latexpdfja:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through platex and dvipdfmx..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

.PHONY: text
text:
	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
	@echo
	@echo "Build finished. The text files are in $(BUILDDIR)/text."

.PHONY: man
man:
	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
	@echo
	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."

.PHONY: texinfo
texinfo:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo
	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
	@echo "Run \`make' in that directory to run these through makeinfo" \
	      "(use \`make info' here to do that automatically)."

.PHONY: info
info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	make -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."

.PHONY: gettext
gettext:
	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
	@echo
	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."

.PHONY: changes
changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."

.PHONY: linkcheck
linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	      "or in $(BUILDDIR)/linkcheck/output.txt."

.PHONY: doctest
doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	      "results in $(BUILDDIR)/doctest/output.txt."

.PHONY: coverage
coverage:
	$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
	@echo "Testing of coverage in the sources finished, look at the " \
	      "results in $(BUILDDIR)/coverage/python.txt."

.PHONY: xml
xml:
	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
	@echo
	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."

.PHONY: pseudoxml
pseudoxml:
	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
	@echo
	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."

.PHONY: dummy
dummy:
	$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
	@echo
	@echo "Build finished. Dummy builder generates no files."
/* Copyright (c) 2016, Python Software Foundation
 * Licensed under the Python Software Foundation License Version 2.
 * This file was copied from the Python 3.5.2 release. */

$(document).ready(function() {
    /* Add a [>>>] button on the top-right corner of code samples to hide
     * the >>> and ... prompts and the output and thus make the code
     * copyable. */
    // NOTE(review): vendored from CPython's doc theme; kept byte-identical
    // to upstream apart from these comments.
    var div = $('.highlight-python .highlight,' +
                '.highlight-python3 .highlight')
    var pre = div.find('pre');

    // get the styles from the current theme
    pre.parent().parent().css('position', 'relative');
    var hide_text = 'Hide the prompts and output';
    var show_text = 'Show the prompts and output';
    var border_width = pre.css('border-top-width');
    var border_style = pre.css('border-top-style');
    var border_color = pre.css('border-top-color');
    var button_styles = {
        'cursor':'pointer', 'position': 'absolute', 'top': '0', 'right': '0',
        'border-color': border_color, 'border-style': border_style,
        'border-width': border_width, 'color': border_color, 'text-size': '75%',
        'font-family': 'monospace', 'padding-left': '0.2em', 'padding-right': '0.2em',
        'border-radius': '0 3px 0 0'
    }

    // create and add the button to all the code blocks that contain >>>
    // (Pygments marks interactive prompts with the "gp" token class)
    div.each(function(index) {
        var jthis = $(this);
        if (jthis.find('.gp').length > 0) {
            var button = $('<span class="copybutton">>>></span>');
            button.css(button_styles)
            button.attr('title', hide_text);
            button.data('hidden', 'false');
            jthis.prepend(button);
        }
        // tracebacks (.gt) contain bare text elements that need to be
        // wrapped in a span to work with .nextUntil() (see later)
        jthis.find('pre:has(.gt)').contents().filter(function() {
            return ((this.nodeType == 3) && (this.data.trim().length > 0));
        }).wrap('<span>');
    });

    // define the behavior of the button when it's clicked:
    // toggle visibility of prompts (.gp), output (.go) and tracebacks (.gt),
    // tracking the current state in the button's "hidden" data attribute
    $('.copybutton').click(function(e){
        e.preventDefault();
        var button = $(this);
        if (button.data('hidden') === 'false') {
            // hide the code output
            button.parent().find('.go, .gp, .gt').hide();
            button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'hidden');
            button.css('text-decoration', 'line-through');
            button.attr('title', show_text);
            button.data('hidden', 'true');
        } else {
            // show the code output
            button.parent().find('.go, .gp, .gt').show();
            button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'visible');
            button.css('text-decoration', 'none');
            button.attr('title', hide_text);
            button.data('hidden', 'false');
        }
    });
});
0 | .. Copyright (c) 2005-2016, NumPy Developers | |
1 | .. Licensed under the BSD License. | |
2 | .. This file was adapted from NumPy 1.11.1. | |
3 | ||
4 | {% extends "!autosummary/class.rst" %} | |
5 | ||
6 | {% block methods %} | |
7 | {% if methods %} | |
8 | .. | |
9 | HACK -- the point here is that we don't want this to appear in the output, | |
10 | but the autosummary should still generate the pages. | |
11 | .. autosummary:: | |
12 | :toctree: | |
13 | {% for item in all_methods %} | |
14 | {%- if not item.startswith('_') or item in ['__call__'] %} | |
15 | {{ name }}.{{ item }} | |
16 | {%- endif -%} | |
17 | {%- endfor %} | |
18 | {% endif %} | |
19 | {% endblock %} | |
20 | ||
21 | {% block attributes %} | |
22 | {% if attributes %} | |
23 | .. | |
24 | HACK -- the point here is that we don't want this to appear in the output, | |
25 | but the autosummary should still generate the pages. | |
26 | .. autosummary:: | |
27 | :toctree: | |
28 | {% for item in all_attributes %} | |
29 | {%- if not item.startswith('_') %} | |
30 | {{ name }}.{{ item }} | |
31 | {%- endif -%} | |
32 | {%- endfor %} | |
33 | {% endif %} | |
34 | {% endblock %} |
0 | {% extends "!layout.html" %} | |
1 | ||
2 | {% block extrahead %} | |
3 | {% if not embedded %} | |
4 | <script type="text/javascript" | |
5 | src="{{ pathto('_static/copybutton.js', 1) }}"> | |
6 | </script> | |
7 | {% endif %} | |
8 | {% endblock %} | |
9 | ||
10 | {%- block relbar2 %} | |
11 | {{ relbar() }} | |
12 | {% include "versions.html" %} | |
13 | {% endblock %} |
0 | {% if READTHEDOCS %} | |
1 | <div class="rst-versions" data-toggle="rst-versions" role="note" aria-label="versions" style="display: none;"> | |
2 | <div class="rst-other-versions" style="display: none;"> | |
3 | </div> | |
4 | </div> | |
5 | {% endif %} |
0 | /* Copyright (c) 2016, Python Software Foundation | |
1 | * Copyright (c) 2016, Kolja Glogowski | |
2 | * Licensed under the Python Software Foundation License Version 2. | |
3 | * This file is based on the theme included in the Python 3.5.2 release. */ | |
4 | ||
5 | @import url("default.css"); | |
6 | ||
7 | body { | |
8 | background-color: white; | |
9 | margin-left: 1em; | |
10 | margin-right: 1em; | |
11 | } | |
12 | ||
13 | div.related { | |
14 | margin-bottom: 1.2em; | |
15 | padding: 0.5em 0; | |
16 | border-top: 1px solid #ccc; | |
17 | margin-top: 0.5em; | |
18 | } | |
19 | ||
20 | div.related a:hover { | |
21 | color: #4079c4; | |
22 | } | |
23 | ||
24 | div.related:first-child { | |
25 | border-top: 0; | |
26 | border-bottom: 1px solid #ccc; | |
27 | } | |
28 | ||
29 | div.sphinxsidebar { | |
30 | /*background-color: #eeeeee;*/ | |
31 | /*background-color: #f5f5f5;*/ | |
32 | background-color: #e7ebf0; | |
33 | border-radius: 5px; | |
34 | line-height: 130%; | |
35 | font-size: smaller; | |
36 | } | |
37 | ||
38 | div.sphinxsidebar h3, div.sphinxsidebar h4 { | |
39 | margin-top: 1.5em; | |
40 | } | |
41 | ||
42 | div.sphinxsidebarwrapper > h3:first-child { | |
43 | margin-top: 0.2em; | |
44 | } | |
45 | ||
46 | div.sphinxsidebarwrapper > ul > li > ul > li { | |
47 | margin-bottom: 0.4em; | |
48 | } | |
49 | ||
50 | div.sphinxsidebar a:hover { | |
51 | color: #4079c4; | |
52 | } | |
53 | ||
54 | div.sphinxsidebar input { | |
55 | border: 1px solid #999999; | |
56 | border-radius: 3px; | |
57 | } | |
58 | ||
59 | div.sphinxsidebar input[type=text] { | |
60 | max-width: 150px; | |
61 | } | |
62 | ||
63 | div.body { | |
64 | padding: 0 0 0 1.2em; | |
65 | } | |
66 | ||
67 | div.body p { | |
68 | line-height: 140%; | |
69 | } | |
70 | ||
71 | div.body h1, div.body h2, div.body h3, div.body h4, div.body h5, div.body h6 { | |
72 | margin: 0; | |
73 | border: 0; | |
74 | padding: 0.3em 0; | |
75 | } | |
76 | ||
77 | div.body hr { | |
78 | border: 0; | |
79 | background-color: #ccc; | |
80 | height: 1px; | |
81 | } | |
82 | ||
83 | div.body pre { | |
84 | border-radius: 3px; | |
85 | border: 1px solid #ac9; | |
86 | } | |
87 | ||
88 | div.body div.admonition, div.body div.impl-detail { | |
89 | border-radius: 3px; | |
90 | } | |
91 | ||
92 | div.body div.impl-detail > p { | |
93 | margin: 0; | |
94 | } | |
95 | ||
96 | div.body div.seealso { | |
97 | border: 1px solid #dddd66; | |
98 | } | |
99 | ||
100 | div.body a { | |
101 | /*color: #4079c4;*/ | |
102 | } | |
103 | ||
104 | div.body a:visited { | |
105 | /*color: #4079c4;*/ | |
106 | } | |
107 | ||
108 | div.body a:hover { | |
109 | /*color: #4079c4;*/ | |
110 | } | |
111 | ||
112 | tt, code, pre { | |
113 | font-family: Consolas, Inconsolata, monospace, sans-serif; | |
114 | font-size: 96.5%; | |
115 | } | |
116 | ||
117 | div.body tt, div.body code { | |
118 | border-radius: 3px; | |
119 | } | |
120 | ||
121 | div.body tt.descname, div.body code.descname { | |
122 | font-size: 120%; | |
123 | } | |
124 | ||
125 | div.body tt.xref, div.body a tt, div.body code.xref, div.body a code { | |
126 | font-weight: normal; | |
127 | } | |
128 | ||
129 | .deprecated { | |
130 | border-radius: 3px; | |
131 | } | |
132 | ||
133 | table.docutils { | |
134 | border: 1px solid #ddd; | |
135 | min-width: 20%; | |
136 | border-radius: 3px; | |
137 | margin-top: 10px; | |
138 | margin-bottom: 10px; | |
139 | } | |
140 | ||
141 | table.docutils td, table.docutils th { | |
142 | border: 1px solid #ddd !important; | |
143 | border-radius: 3px; | |
144 | } | |
145 | ||
146 | table p, table li { | |
147 | text-align: left !important; | |
148 | } | |
149 | ||
150 | table.docutils th { | |
151 | /*background-color: #eee;*/ | |
152 | background-color: #e7ebf0; | |
153 | padding: 0.3em 0.5em; | |
154 | } | |
155 | ||
156 | table.docutils td { | |
157 | background-color: white; | |
158 | padding: 0.3em 0.5em; | |
159 | } | |
160 | ||
161 | table.footnote, table.footnote td { | |
162 | border: 0 !important; | |
163 | } | |
164 | ||
165 | div.footer { | |
166 | line-height: 150%; | |
167 | margin-top: -2em; | |
168 | text-align: right; | |
169 | width: auto; | |
170 | margin-right: 10px; | |
171 | } | |
172 | ||
173 | div.footer a:hover { | |
174 | color: #4079c4; | |
175 | } | |
176 | ||
177 | ||
178 | .refcount { | |
179 | color: #060; | |
180 | } | |
181 | ||
182 | .stableabi { | |
183 | color: #229; | |
184 | } |
0 | [theme] | |
1 | inherit = default | |
2 | stylesheet = drmsdoc.css | |
3 | pygments_style = sphinx | |
4 | ||
5 | [options] | |
6 | bodyfont = 'Nimbus Sans', 'Open Sans', 'Lucida Grande', Verdana, sans-serif | |
7 | headfont = 'Nimbus Sans', 'Open Sans', 'Lucida Grande', Verdana, sans-serif | |
8 | ||
9 | bgcolor = white | |
10 | textcolor = #333333 | |
11 | linkcolor = #4079c4 | |
12 | visitedlinkcolor = #4079c4 | |
13 | ||
14 | headbgcolor = white | |
15 | headtextcolor = #264976 | |
16 | headlinkcolor = #a3b1c1 | |
17 | ||
18 | footerbgcolor = white | |
19 | footertextcolor = #555555 | |
20 | ||
21 | relbarbgcolor = #c3deff | |
22 | relbartextcolor = #3572a5 | |
23 | relbarlinkcolor = #333333 | |
24 | ||
25 | sidebarbgcolor = white | |
26 | sidebartextcolor = #333333 | |
27 | sidebarlinkcolor = #333333 | |
28 | sidebarbtncolor = #d4e1f0 | |
29 | collapsiblesidebar = True |
0 | .. _api: | |
1 | ||
2 | API Reference | |
3 | ============== | |
4 | ||
5 | :Release: |version| | |
6 | :Date: |today| | |
7 | ||
8 | .. module:: drms | |
9 | ||
10 | This reference manual contains detailed information about classes and | |
11 | functions included in the ``drms`` module. For an introduction on how to | |
12 | use the ``drms`` module, see also the :ref:`tutorial <tutorial>`. | |
13 | ||
14 | ||
15 | Classes | |
16 | ------- | |
17 | ||
18 | .. autosummary:: | |
19 | :toctree: generated/ | |
20 | ||
21 | Client | |
22 | SeriesInfo | |
23 | ExportRequest | |
24 | ||
25 | ||
26 | Constants and utility functions | |
27 | ------------------------------- | |
28 | .. autosummary:: | |
29 | :toctree: generated/ | |
30 | ||
31 | const | |
32 | to_datetime | |
33 | ||
34 | ||
35 | Exceptions | |
36 | ---------- | |
37 | ||
38 | .. autosummary:: | |
39 | :toctree: generated/ | |
40 | ||
41 | DrmsError | |
42 | DrmsQueryError | |
43 | DrmsExportError |
0 | #!/usr/bin/env python3 | |
1 | # -*- coding: utf-8 -*- | |
2 | import sys | |
3 | import os | |
4 | ||
5 | # If extensions (or modules to document with autodoc) are in another directory, | |
6 | # add these directories to sys.path here. If the directory is relative to the | |
7 | # documentation root, use os.path.abspath to make it absolute, like shown here. | |
8 | sys.path.insert(0, os.path.abspath('..')) | |
9 | import drms | |
10 | ||
11 | # -- General configuration ------------------------------------------------ | |
12 | ||
13 | # If your documentation needs a minimal Sphinx version, state it here. | |
14 | needs_sphinx = '1.3' | |
15 | ||
16 | # Add any Sphinx extension module names here, as strings. They can be | |
17 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom | |
18 | # ones. | |
19 | extensions = [ | |
20 | 'sphinx.ext.autodoc', | |
21 | 'sphinx.ext.autosummary', | |
22 | 'sphinx.ext.intersphinx', | |
23 | 'sphinx.ext.coverage', | |
24 | 'sphinx.ext.napoleon' | |
25 | ] | |
26 | ||
27 | # Generate API docs. | |
28 | autosummary_generate = ['api.rst'] | |
29 | ||
30 | ||
31 | # The suffix(es) of source filenames. | |
32 | # You can specify multiple suffix as a list of string: | |
33 | # source_suffix = ['.rst', '.md'] | |
34 | source_suffix = '.rst' | |
35 | ||
36 | # The encoding of source files. | |
37 | #source_encoding = 'utf-8-sig' | |
38 | ||
39 | # The master toctree document. | |
40 | master_doc = 'index' | |
41 | ||
42 | # General information about the project. | |
43 | project = 'drms' | |
44 | copyright = '2016, Kolja Glogowski' | |
45 | author = 'Kolja Glogowski' | |
46 | ||
47 | # The version info for the project you're documenting, acts as replacement for | |
48 | # |version| and |release|, also used in various other places throughout the | |
49 | # built documents. | |
50 | # | |
51 | # The short X.Y version. | |
52 | version = drms.__version__ | |
53 | ||
54 | # Read the Docs apparently does not build the documentation in a clean working | |
55 | # directory, which causes versioneer to return a version that ends with | |
56 | # ".dirty". As a workaround, we just remove this substring from the version. | |
57 | if version.endswith('.dirty'): | |
58 | version = version[:-6] | |
59 | ||
60 | # RtD also seems to change some files in the working directory, which causes | |
61 | # versioneer to return a version like '0.5.0+0g.....'. Remove the part after | |
62 | # the '+' if this starts with a '0'. | |
63 | v = version.split('+') | |
64 | if len(v) > 1 and v[1].startswith('0'): | |
65 | version = v[0] | |
66 | ||
67 | # The full version, including alpha/beta/rc tags. | |
68 | release = version | |
69 | ||
# Default highlight language, needed for copybutton.js
highlight_language = 'python'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
117 | ||
118 | ||
# -- Options for HTML output ----------------------------------------------

# Theme selection for HTML and HTML Help pages: prefer the SunPy Sphinx
# theme when it is installed, and fall back to the bundled 'drmsdoc' theme
# otherwise.
try:
    # Pulls in the complete theme configuration (html_theme, paths, ...).
    from sunpy_sphinx_theme.conf import *
except ImportError:
    # Fallback: use the custom theme shipped with this documentation.
    html_theme = 'drmsdoc'
    # Paths that contain templates, relative to this directory.
    templates_path = ['_templates']
    # Paths that contain custom themes, relative to this directory.
    html_theme_path = ['_themes']
    # Paths that contain custom static files (such as style sheets),
    # relative to this directory. They are copied after the builtin static
    # files, so a file named "default.css" overrides the builtin one.
    html_static_path = ['_static']
else:
    # Custom sidebar templates, mapping document names to template names.
    html_sidebars = {'**': ['docsidebar.html']}
141 | ||
#html_theme = 'classic'
#html_theme = 'sphinxdoc'
#html_theme = 'sphinx_rtd_theme'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#html_title = 'drms v0.5.0'

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None


# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []

# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
html_domain_indices = False

# If false, no index is generated.
html_use_index = False

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#html_search_language = 'en'

# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' users can customize the `jieba` dictionary path.
#html_search_options = {'type': 'default'}

# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'

# Output file base name for HTML help builder.
htmlhelp_basename = 'drmsdoc'
231 | ||
# -- Options for LaTeX output ---------------------------------------------

# LaTeX builder tweaks. Everything is left at its Sphinx default; commonly
# customized keys include:
#   'papersize'    -- 'letterpaper' or 'a4paper'
#   'pointsize'    -- '10pt', '11pt' or '12pt'
#   'preamble'     -- additional stuff for the LaTeX preamble
#   'figure_align' -- LaTeX figure (float) alignment, e.g. 'htbp'
latex_elements = {}
247 | ||
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'drms.tex', 'drms Documentation',
     'Kolja Glogowski', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated in the LaTeX output.
latex_domain_indices = False
275 | ||
276 | ||
# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
# Section 1 is the conventional section for user commands.
man_pages = [
    (master_doc, 'drms', 'drms Documentation',
     [author], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False
288 | ||
289 | ||
# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category).
# The description below replaces the leftover Sphinx template placeholder
# ('One line description of project.') with the project's actual one-line
# description, as used on the documentation index page.
texinfo_documents = [
    (master_doc, 'drms', 'drms Documentation',
     author, 'drms',
     'Python module for accessing HMI, AIA and MDI data.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
312 |
0 | ################## | |
1 | drms documentation | |
2 | ################## | |
3 | ||
4 | :Release: |version| | |
5 | :Date: |today| | |
6 | :Github: https://github.com/kbg/drms | |
7 | :PyPI: https://pypi.python.org/pypi/drms | |
8 | ||
9 | Python module for accessing HMI, AIA and MDI data. | |
10 | ||
11 | .. toctree:: | |
12 | :maxdepth: 2 | |
13 | ||
14 | intro | |
15 | tutorial | |
16 | api | |
17 | ||
18 | .. Index and search pages | |
19 | .. ====================== | |
20 | .. | |
21 | .. * :ref:`genindex` | |
22 | .. * :ref:`search` |
0 | Introduction | |
1 | ============ | |
2 | ||
3 | The ``drms`` Python package can be used to access HMI, AIA and MDI data | |
4 | which are stored in a DRMS database system. | |
5 | ||
6 | DRMS stands for *Data Record Management System* and is a system that was | |
7 | developed by the | |
8 | `Joint Science Operation Center <http://jsoc.stanford.edu/>`_ | |
9 | (JSOC), headquartered at Stanford University, to handle the data produced | |
10 | by the AIA and HMI instruments aboard the | |
11 | `Solar Dynamics Observatory <http://sdo.gsfc.nasa.gov/>`_ | |
12 | spacecraft. | |
13 | ||
14 | By default the ``drms`` module uses the HTTP/JSON interface provided by | |
15 | JSOC and has similar functionality to the | |
16 | `JSOC Lookdata <http://jsoc.stanford.edu/ajax/lookdata.html>`_ | |
17 | website. It can be used to query metadata, submit data export requests | |
18 | and download data files. | |
19 | ||
20 | This module also works well for local | |
21 | `NetDRMS <http://jsoc.stanford.edu/netdrms/>`_ | |
22 | sites, as long as the site runs a web server providing the needed CGI | |
23 | programs ``show_series`` and ``jsoc_info`` (for the data export | |
24 | functionality, additional CGIs, like ``jsoc_fetch``, are needed). | |
25 | ||
26 | ||
27 | Requirements | |
28 | ------------ | |
29 | ||
30 | The ``drms`` module supports Python 2.7 and Python 3.4 or newer. It | |
31 | requires the following Python packages: | |
32 | ||
33 | - NumPy, version 1.9.0 or newer | |
34 | - Pandas, version 0.14.1 or newer | |
35 | - Six, version 1.8.0 or newer | |
36 | ||
37 | The module might also work with earlier versions, but it has not been | |
38 | tested with any versions older than the ones listed above. | |
39 | ||
40 | ||
41 | Installation | |
42 | ------------ | |
43 | ||
44 | The ``drms`` Python package can be installed from | |
45 | `PyPI <https://pypi.python.org/pypi/drms>`_ using | |
46 | ||
47 | :: | |
48 | ||
49 | pip install drms | |
50 | ||
51 | To upgrade an already existing installation to the latest release, you | |
52 | can write:: | |
53 | ||
54 | pip install -U drms | |
55 | ||
56 | ||
57 | .. note:: | |
58 | If you do not use a Python distribution, like | |
59 | `Anaconda <https://www.continuum.io/downloads>`_, | |
60 | and did not create an isolated Python environment using | |
61 | `Virtualenv <https://virtualenv.pypa.io/en/stable/>`_, | |
62 | you might need to add ``--user`` to the ``pip`` command:: | |
63 | ||
64 | pip install --user drms | |
65 | ||
66 | ||
67 | Acknowledgements | |
68 | ---------------- | |
69 | ||
70 | The main author of this project has received funding from the European | |
71 | Research Council under the European Union's Seventh Framework Programme | |
72 | (FP/2007-2013) / ERC Grant Agreement no. 307117. | |
73 | ||
74 | See :download:`LICENSE.txt <../LICENSE.txt>` for the license text and | |
75 | :download:`AUTHORS.txt <../AUTHORS.txt>` for a list of contributors. |
0 | .. _tutorial: | |
1 | ||
2 | Tutorial | |
3 | ======== | |
4 | ||
5 | .. currentmodule:: drms | |
6 | ||
7 | This tutorial gives an introduction on how to use the ``drms`` Python | |
8 | module. More detailed information on the different classes and functions | |
9 | can be found in the :ref:`API Reference Manual <api>`. In addition to | |
10 | this tutorial, many example scripts are available in the | |
11 | `source code package <https://github.com/kbg/drms/releases/latest>`_ | |
12 | of the ``drms`` module. | |
13 | ||
14 | .. tip:: | |
15 | Instead of using a plain Python interpreter session, it is highly | |
16 | recommended to use an interactive | |
17 | `IPython <http://ipython.org/>`_ shell or a | |
18 | `Jupyter <https://jupyter.org/>`_ notebook for this tutorial. | |
19 | ||
20 | ||
21 | Basic usage | |
22 | ----------- | |
23 | ||
24 | In this first part, we start with looking at data series that are | |
25 | available from `JSOC <http://jsoc.stanford.edu/>`_ and perform some | |
26 | basic DRMS queries to obtain keyword data (metadata) and segment file | |
27 | (data) locations. This is essentially what you can do on the | |
28 | `JSOC Lookdata <http://jsoc.stanford.edu/ajax/lookdata.html>`_ website. | |
29 | ||
30 | To be able to access the JSOC DRMS from Python, we first need to import | |
31 | the ``drms`` module and create an instance of the :class:`drms.Client` | |
32 | class:: | |
33 | ||
34 | >>> import drms | |
35 | >>> c = drms.Client() | |
36 | ||
37 | All available data series can be now retrieved by calling the | |
38 | :func:`Client.series` method. HMI series names start with ``"hmi."``, | |
39 | AIA series names with ``"aia."`` and the names of MDI series with | |
40 | ``"mdi."``. | |
41 | ||
42 | The first (optional) parameter of this method takes a regular expression | |
43 | that allows you to filter the result. If you, for example, want to obtain | |
a list of HMI series with names that start with the string ``"m_"``,
45 | you can write:: | |
46 | ||
47 | >>> c.series(r'hmi\.m_') | |
48 | ['hmi.M_45s', 'hmi.M_720s', 'hmi.m_720s_mod', 'hmi.m_720s_nrt'] | |
49 | ||
50 | Keep in mind to escape the dot character (``'.'``), like it is shown in | |
51 | the example above, if you want to include it in your filter string. | |
52 | Also note that series names are handled in a case-insensitive way. | |
53 | ||
54 | DRMS records can be selected by creating a query string that contains a | |
55 | series name, followed by one or more fields, which are surrounded by | |
56 | square brackets. Each of those fields corresponds to a specific primekey, | |
57 | that is specified in the series definition. A complete set of primekeys | |
58 | represents a unique identifier for a record in that particular series. | |
59 | For more detailed information on building record set queries, including | |
60 | additional non-primekey fields, see the | |
61 | `JSOC Help <http://jsoc.stanford.edu/ajax/RecordSetHelp.html>`_ page | |
62 | about this topic. | |
63 | ||
64 | With the ``drms`` module you can use the :func:`Client.pkeys` method to | |
65 | obtain a list of all primekeys of a series, e.g.:: | |
66 | ||
67 | >>> c.pkeys('hmi.m_720s') | |
68 | ['T_REC', 'CAMERA'] | |
69 | ||
70 | >>> c.pkeys('hmi.v_sht_modes') | |
71 | ['T_START', 'LMIN', 'LMAX', 'NDT'] | |
72 | ||
73 | A list of all (regular) keywords can be obtained using | |
74 | :func:`Client.keys`. You can also use the method :func:`Client.info` to | |
75 | get more detailed information about a series, e.g.:: | |
76 | ||
77 | >>> si = c.info('hmi.v_avg120') | |
78 | >>> si.segments | |
79 | type units protocol dims note | |
80 | name | |
81 | mean short m/s fits 4096x4096 Doppler mean | |
82 | power short m2/s2 fits 4096x4096 Doppler power | |
83 | valid short NA fits 4096x4096 valid pixel count | |
84 | Log char NA generic run log | |
85 | ||
86 | All table-like structures, returned by routines in the ``drms`` module, are | |
87 | `Pandas DataFrames <http://pandas.pydata.org/pandas-docs/stable/generated/pandas.DataFrame.html>`_. | |
88 | If you are new to `Pandas <http://pandas.pydata.org/>`_, you should have | |
89 | a look at the introduction to | |
90 | `Pandas Data Structures <http://pandas.pydata.org/pandas-docs/stable/dsintro.html>`_. | |
91 | ||
92 | Record set queries, used to obtain keyword data and get the location of | |
93 | data segments, can be performed using the :func:`Client.query` method. | |
94 | To get, for example, the record time and the mean value for some of the | |
95 | HMI Dopplergrams that were recorded on April 1, 2016, together with the | |
96 | spacecraft's radial velocity in respect to the Sun, you can write:: | |
97 | ||
98 | >>> k = c.query('hmi.v_45s[2016.04.01_TAI/1d@6h]', | |
99 | ... key='T_REC, DATAMEAN, OBS_VR') | |
100 | >>> k | |
101 | T_REC DATAMEAN OBS_VR | |
102 | 0 2016.04.01_00:00:00_TAI 3313.104980 3309.268006 | |
103 | 1 2016.04.01_06:00:00_TAI 878.075195 887.864139 | |
104 | 2 2016.04.01_12:00:00_TAI -2289.062500 -2284.690263 | |
105 | 3 2016.04.01_18:00:00_TAI 128.609283 137.836168 | |
106 | ||
107 | JSOC time strings can be converted to a naive ``datetime`` | |
108 | representation using the :func:`drms.to_datetime` utility function:: | |
109 | ||
110 | >>> t = drms.to_datetime(k.T_REC) | |
111 | >>> t | |
112 | 0 2016-04-01 00:00:00 | |
113 | 1 2016-04-01 06:00:00 | |
114 | 2 2016-04-01 12:00:00 | |
115 | 3 2016-04-01 18:00:00 | |
116 | Name: T_REC, dtype: datetime64[ns] | |
117 | ||
118 | For most of the HMI and MDI data sets, the | |
119 | `TAI <https://en.wikipedia.org/wiki/International_Atomic_Time>`_ time | |
120 | standard is used which, in contrast to | |
121 | `UTC <https://en.wikipedia.org/wiki/Coordinated_Universal_Time>`_, does | |
122 | not make use of any leap seconds. The TAI standard is currently not | |
123 | supported by the Python standard libraries. If you need to convert | |
124 | timestamps between TAI and UTC, you can use the | |
125 | `Astropy <http://www.astropy.org/>`_ time module:: | |
126 | ||
127 | >>> from astropy.time import Time | |
128 | >>> ta = Time(t[0], format='datetime', scale='tai') | |
129 | >>> ta | |
130 | <Time object: scale='tai' format='datetime' value=2016-04-01 00:00:00> | |
131 | >>> ta.utc | |
132 | <Time object: scale='utc' format='datetime' value=2016-03-31 23:59:24> | |
133 | ||
134 | The ``"hmi.v_45s"`` series has a data segment with the name | |
135 | ``"Dopplergram"``, which contains Dopplergrams for each record in the | |
136 | series, that are stored as `FITS <http://fits.gsfc.nasa.gov/>`_ files. | |
137 | The location of the FITS files for the record set query in the example | |
138 | above, can be obtained by using the ``seg`` parameter of the | |
139 | :func:`Client.query` method:: | |
140 | ||
141 | >>> s = c.query('hmi.v_45s[2016.04.01_TAI/1d@6h]', seg='Dopplergram') | |
142 | >>> s | |
143 | Dopplergram | |
144 | 0 /SUM58/D803708321/S00008/Dopplergram.fits | |
145 | 1 /SUM41/D803708361/S00008/Dopplergram.fits | |
146 | 2 /SUM71/D803720859/S00008/Dopplergram.fits | |
147 | 3 /SUM70/D803730119/S00008/Dopplergram.fits | |
148 | ||
149 | Note that the ``key`` and ``seg`` parameters can also be used together in | |
150 | one :func:`Client.query` call, i.e.:: | |
151 | ||
152 | >>> k, s = c.query('hmi.v_45s[2016.04.01_TAI/1d@6h]', | |
153 | ... key='T_REC, DATAMEAN, OBS_VR', seg='Dopplergram') | |
154 | ||
155 | The file paths listed above are the storage location on the JSOC server. | |
156 | You can access these files, even if you do not have direct NFS access to | |
the filesystem, by prepending the JSOC URL to the segment file path::
158 | ||
159 | >>> url = 'http://jsoc.stanford.edu' + s.Dopplergram[0] | |
160 | >>> url | |
161 | 'http://jsoc.stanford.edu/SUM58/D803708321/S00008/Dopplergram.fits' | |
162 | ||
163 | >>> from astropy.io import fits | |
164 | >>> a = fits.getdata(url) | |
165 | >>> print(a.shape, a.dtype) | |
166 | (4096, 4096) float32 | |
167 | ||
168 | Note that FITS files which are accessed in this way, do not contain any | |
169 | keyword data in their headers. This is perfectly fine in many cases, | |
170 | because you can just use :func:`Client.query` to obtain the data of all | |
171 | required keywords. If you need FITS files with headers that contain all | |
172 | the keyword data, you need to submit an export request to JSOC, which is | |
173 | described in the next section. | |
174 | ||
175 | Export requests can also be useful, if you want to download more than | |
176 | only one or two files (even without keyword headers), because you can | |
177 | then use the :func:`ExportRequest.download` method, which takes care of | |
178 | creating URLs, downloading the data and (if necessary) generating | |
179 | suitable local filenames. | |
180 | ||
181 | ||
182 | Data export requests | |
183 | -------------------- | |
184 | ||
185 | Data export requests can be interactively built and submitted on the | |
186 | `JSOC Export Data <http://jsoc.stanford.edu/ajax/exportdata.html>`_ | |
187 | webpage, where you can also find more information about the different | |
188 | export options that are available. Note that a registered email address | |
is required for submitting export requests. You can register your
190 | email address on the | |
191 | `JSOC email registration <http://jsoc.stanford.edu/ajax/register_email.html>`_ | |
192 | webpage. | |
193 | ||
194 | It is advisable to have a closer look at the export webpage before | |
195 | submitting export requests using the ``drms`` module. It is also possible | |
196 | to submit an export request on the webpage and then use the Python | |
197 | routines to query the request status and download files. | |
198 | ||
199 | .. So you do not | |
200 | .. need to use the Python routines to submit the export request, in case you | |
201 | .. only want to use the Python module for downloading the data. | |
202 | ||
203 | First, we start again with importing the ``drms`` module and creating a | |
204 | :class:`drms.Client` instance:: | |
205 | ||
206 | >>> import drms | |
207 | >>> c = drms.Client(email='name@example.com', verbose=True) | |
208 | ||
209 | In this case we also provide an email address (which needs to be already | |
210 | registered at JSOC) and turn on status messages by enabling the | |
211 | ``verbose`` flag. | |
212 | ||
213 | We now create a download directory for our downloads, in case it does not | |
214 | exist yet:: | |
215 | ||
216 | >>> import os | |
217 | >>> out_dir = 'downloads' | |
218 | >>> if not os.path.exists(out_dir): | |
219 | ... os.mkdir(out_dir) | |
220 | ||
221 | Data export requests can be submitted using :func:`Client.export`. The | |
222 | most important parameters of this method, besides the export query | |
223 | string, are the parameters ``method`` and ``protocol``. There are many | |
224 | different export methods and protocols available. In the following | |
225 | examples we confine ourselves to the methods ``url_quick`` and ``url`` | |
226 | and the protocols ``as-is`` and ``fits``. You can find more examples | |
227 | (including other export methods and protocols) in the source code package | |
228 | of the ``drms`` module. | |
229 | ||
230 | ||
231 | url_quick / as-is | |
232 | ~~~~~~~~~~~~~~~~~ | |
233 | ||
234 | The most direct and quickest way of downloading files is the combination | |
235 | ``url_quick`` / ``as-is``. This (in most cases) does not create an actual | |
236 | export request, where you would have to wait for it being finished, but | |
237 | rather compiles a list of files from your data export query, which can | |
238 | then be directly downloaded. This also means that this kind of export | |
239 | usually has no ``ExportID`` assigned to it. The only time it is treated | |
240 | as a "real" export request (including an ``ExportID`` and some wait time) | |
241 | is, when the requested data segments are not entirely online, and parts | |
242 | of the requested files need to be restored from tape drives. | |
243 | ||
244 | As an example, we now create an ``url_quick`` / ``as-is`` export request | |
245 | for the same record set that was used in the previous section. For export | |
246 | requests, the segment name is specified using an additional field in the | |
247 | query string, surrounded by curly braces. Note that :func:`Client.export` | |
248 | performs an ``url_quick`` / ``as-is`` export request by default, so you | |
249 | do not need to explicitly use ``method='url_quick'`` and | |
250 | ``protocol='as-is'`` in this case. | |
251 | ||
252 | :: | |
253 | ||
254 | >>> r = c.export('hmi.v_45s[2016.04.01_TAI/1d@6h]{Dopplergram}') | |
255 | >>> r | |
256 | <ExportRequest id=None, status=0> | |
257 | ||
258 | >>> r.data.filename | |
259 | 0 /SUM58/D803708321/S00008/Dopplergram.fits | |
260 | 1 /SUM41/D803708361/S00008/Dopplergram.fits | |
261 | 2 /SUM71/D803720859/S00008/Dopplergram.fits | |
262 | 3 /SUM70/D803730119/S00008/Dopplergram.fits | |
263 | ||
264 | Download URLs can now be generated using the :attr:`ExportRequest.urls` | |
265 | attribute:: | |
266 | ||
267 | >>> r.urls.url[0] | |
268 | 'http://jsoc.stanford.edu/SUM58/D803708321/S00008/Dopplergram.fits' | |
269 | ||
270 | Files can be downloaded using the :func:`ExportRequest.download` method. | |
271 | You can (optionally) select which file(s) you want to download, by using | |
272 | the ``index`` parameter of this method. The following, for example, only | |
273 | downloads the first file of the request:: | |
274 | ||
275 | >>> r.download(out_dir, 0) | |
276 | Downloading file 1 of 1... | |
277 | record: hmi.V_45s[2016.04.01_00:00:00_TAI][2]{Dopplergram} | |
278 | filename: Dopplergram.fits | |
279 | -> "downloads/hmi.v_45s.20160401_000000_TAI.2.Dopplergram.fits" | |
280 | ||
281 | Being a direct ``as-is`` export, there are no keyword data written to any | |
282 | FITS headers. If you need keyword data added to the headers, you have to | |
283 | use the ``fits`` export protocol instead, which is described below. | |
284 | ||
285 | ||
286 | url / fits | |
287 | ~~~~~~~~~~ | |
288 | ||
289 | Using the ``fits`` export protocol, allows you to request FITS files that | |
290 | include all keyword data in their headers. Note that this protocol *does | |
291 | not convert* other file formats into the FITS format. The only purpose of | |
292 | ``protocol='fits'`` is to add keyword data to headers of segment files, | |
293 | that are already stored using the FITS format. | |
294 | ||
295 | In contrast to ``url_quick`` / ``as-is`` exports, described in the | |
296 | previous subsection, ``url`` / ``fits`` exports always create a "real" | |
297 | data export request on the server, which needs to be processed before you | |
can download the requested files. For each request you will get a unique
299 | ``ExportID``, which can be accessed using the :attr:`ExportRequest.id` | |
300 | attribute. In addition you will get an email notification (including the | |
301 | ``ExportID``), which is sent to your registered email address when the | |
302 | requested files are ready for download. | |
303 | ||
304 | In the following example, we use the ``hmi.sharp_720s`` series, which | |
305 | contains | |
306 | `Spaceweather HMI Active Region Patches <http://jsoc.stanford.edu/doc/data/hmi/sharp/sharp.htm>`_ (SHARPs), | |
307 | and download some data files from this series. | |
308 | ||
309 | First we have a look at the content of the series, by using | |
310 | :func:`Client.info` to get a :class:`SeriesInfo` instance for this | |
311 | particular series:: | |
312 | ||
313 | >>> si = c.info('hmi.sharp_720s') | |
314 | ||
315 | >>> si.note | |
316 | 'Spaceweather HMI Active Region Patch (SHARP): CCD coordinates' | |
317 | ||
318 | >>> si.primekeys | |
319 | ['HARPNUM', 'T_REC'] | |
320 | ||
321 | This series contains a total of 31 different data segments:: | |
322 | ||
323 | >>> len(si.segments) | |
324 | 31 | |
325 | ||
326 | >>> si.segments.index.values | |
327 | array(['magnetogram', 'bitmap', 'Dopplergram', 'continuum', 'inclination', | |
328 | 'azimuth', 'field', 'vlos_mag', 'dop_width', 'eta_0', 'damping', | |
329 | 'src_continuum', 'src_grad', 'alpha_mag', 'chisq', 'conv_flag', | |
330 | 'info_map', 'confid_map', 'inclination_err', 'azimuth_err', | |
331 | 'field_err', 'vlos_err', 'alpha_err', 'field_inclination_err', | |
332 | 'field_az_err', 'inclin_azimuth_err', 'field_alpha_err', | |
333 | 'inclination_alpha_err', 'azimuth_alpha_err', 'disambig', | |
334 | 'conf_disambig'], dtype=object) | |
335 | ||
336 | Here, we are only interested in magnetograms and continuum intensity maps | |
337 | ||
338 | :: | |
339 | ||
340 | >>> si.segments.loc[['continuum', 'magnetogram']] | |
341 | type units protocol dims note | |
342 | name | |
343 | continuum int DN/s fits VARxVAR continuum intensity | |
344 | magnetogram int Gauss fits VARxVAR magnetogram | |
345 | ||
346 | which are stored as FITS files with varying dimensions. | |
347 | ||
348 | If we now want to submit an export request for a magnetogram and an | |
349 | intensity map of HARP number 4864, recorded at midnight on November 30, | |
350 | 2014, we can use the following export query string:: | |
351 | ||
352 | >>> ds = 'hmi.sharp_720s[4864][2014.11.30_00:00_TAI]{continuum, magnetogram}' | |
353 | ||
354 | In order to obtain FITS files that include keyword data in their headers, | |
355 | we then need to use ``protocol='fits'`` when submitting the request using | |
356 | :func:`Client.export`:: | |
357 | ||
358 | >>> r = c.export(ds, method='url', protocol='fits') | |
359 | >>> r | |
360 | <ExportRequest id="JSOC_20160921_568", status=2> | |
361 | ||
362 | We now need to wait for the server to prepare the requested files:: | |
363 | ||
364 | >>> r.wait() | |
365 | Export request pending. [id="JSOC_20160921_568", status=2] | |
366 | Waiting for 5 seconds... | |
367 | Export request pending. [id="JSOC_20160921_568", status=1] | |
368 | Waiting for 5 seconds... | |
369 | ||
370 | >>> r.status | |
371 | 0 | |
372 | ||
373 | Note that calling :func:`ExportRequest.wait` is optional. It gives you | |
374 | some control over the waiting process, but it can be usually omitted, in | |
375 | which case :func:`ExportRequest.wait` is called implicitly, when you for | |
376 | example try to download the requested files. | |
377 | ||
378 | After the export request is finished, a unique request URL is created for | |
379 | you, which points to the location where all your requested files are | |
380 | stored. You can use the :attr:`ExportRequest.request_url` attribute to | |
381 | obtain this URL:: | |
382 | ||
383 | >>> r.request_url | |
384 | 'http://jsoc.stanford.edu/SUM80/D857351442/S00000' | |
385 | ||
386 | Note that this location is only temporary and that all files will be | |
387 | deleted after a couple of days. | |
388 | ||
389 | Downloading the data works exactly like in the previous example, by using | |
390 | the :func:`ExportRequest.download` method:: | |
391 | ||
392 | >>> r.download(out_dir) | |
393 | Downloading file 1 of 2... | |
394 | record: hmi.sharp_720s[4864][2014.11.30_00:00:00_TAI] | |
395 | filename: hmi.sharp_720s.4864.20141130_000000_TAI.magnetogram.fits | |
396 | -> "downloads/hmi.sharp_720s.4864.20141130_000000_TAI.magnetogram.fits" | |
397 | Downloading file 2 of 2... | |
398 | record: hmi.sharp_720s[4864][2014.11.30_00:00:00_TAI] | |
399 | filename: hmi.sharp_720s.4864.20141130_000000_TAI.continuum.fits | |
400 | -> "downloads/hmi.sharp_720s.4864.20141130_000000_TAI.continuum.fits" | |
401 | ||
402 | .. tip:: | |
403 | If you want to access an existing export request that you have | |
404 | submitted earlier, or if you submitted an export request using the | |
405 | `JSOC Export Data <http://jsoc.stanford.edu/ajax/exportdata.html>`_ | |
406 | webpage and want to access it from Python, you can use the | |
407 | :func:`Client.export_from_id` method with the corresponding | |
408 | ``ExportID`` to create an :class:`ExportRequest` instance for this | |
409 | particular request. | |
410 | ||
411 | ||
412 | Example scripts | |
413 | --------------- | |
414 | ||
415 | There are many example scripts available in the | |
416 | `examples directory <https://github.com/kbg/drms/tree/master/examples>`_ | |
417 | of the ``drms`` Python package source code. An archive of the latest | |
418 | source code release can be downloaded from the | |
`drms release page <https://github.com/kbg/drms/releases/latest>`_
on GitHub.
421 | ||
422 | .. For more information, use ``help(drms)`` inside the Python interpreter, |
0 | # Copyright (c) 2014-2016 Kolja Glogowski and others. | |
1 | # See AUTHORS.txt for a list of contributors. | |
2 | # | |
3 | # Permission is hereby granted, free of charge, to any person | |
4 | # obtaining a copy of this software and associated documentation | |
5 | # files (the "Software"), to deal in the Software without | |
6 | # restriction, including without limitation the rights to use, | |
7 | # copy, modify, merge, publish, distribute, sublicense, and/or sell | |
8 | # copies of the Software, and to permit persons to whom the | |
9 | # Software is furnished to do so, subject to the following | |
10 | # conditions: | |
11 | # | |
12 | # The above copyright notice and this permission notice shall be | |
13 | # included in all copies or substantial portions of the Software. | |
14 | # | |
15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, | |
16 | # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES | |
17 | # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND | |
18 | # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT | |
19 | # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, | |
20 | # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING | |
21 | # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR | |
22 | # OTHER DEALINGS IN THE SOFTWARE. | |
23 | ||
"""
Access HMI, AIA and MDI data with Python

The latest release is available at https://github.com/kbg/drms .
"""

from __future__ import absolute_import, division, print_function

from . import config, error, json, client, utils
from .error import *
from .client import *
from .utils import *
from .json import const

# Keep the following three lines like this, so that versioneer does not add
# them again when running "python versioneer.py setup".
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions

# Public package API: the 'const' namespace plus everything exported by the
# error, client and utils submodules.
__all__ = ['const']
__all__ += error.__all__
__all__ += client.__all__
__all__ += utils.__all__

# We imported all public classes and functions from submodules. The submodule
# symbols themselves are now removed to keep the package namespace cleaner.
del config
del error
del json
del utils
del client
0 | from __future__ import absolute_import, division, print_function | |
1 | import argparse as _argparse | |
2 | import drms | |
3 | ||
4 | # Handle command line options | |
5 | _arg_parser = _argparse.ArgumentParser('drms') | |
6 | _arg_parser.add_argument( | |
7 | '--debug', action='store_true', help='enable debug output') | |
8 | _arg_parser.add_argument( | |
9 | '--version', action='version', version='drms %s' % drms.__version__, | |
10 | help='show package version and exit') | |
11 | _arg_parser.add_argument( | |
12 | '--email', help='email address for data export requests') | |
13 | _arg_parser.add_argument( | |
14 | '--verbose', action='store_true', | |
15 | help='print export status messages to stdout') | |
16 | _arg_parser.add_argument( | |
17 | 'server', nargs='?', default='jsoc', help='DRMS server, default is JSOC') | |
18 | _args = _arg_parser.parse_args() | |
19 | ||
20 | # Create a Client instance | |
21 | c = drms.Client(_args.server, email=_args.email, verbose=_args.verbose, | |
22 | debug=_args.debug) | |
23 | print('c = %r' % c) |
0 | ||
1 | # This file was generated by 'versioneer.py' (0.18) from | |
2 | # revision-control system data, or from the parent directory name of an | |
3 | # unpacked source archive. Distribution tarballs contain a pre-generated copy | |
4 | # of this file. | |
5 | ||
from __future__ import absolute_import
import json

# Version information recorded by versioneer at release time. The marker
# comment after the string is significant: versioneer looks for it when it
# rewrites this file, so it must not be removed or changed.
version_json = '''
{
 "date": "2018-04-18T17:59:38+0200",
 "dirty": false,
 "error": null,
 "full-revisionid": "37fa7eb2b50d12d7add9dd76e867ffc223800e5d",
 "version": "0.5.5"
}
'''  # END VERSION_JSON


def get_versions():
    """Return the recorded version information as a dictionary."""
    return json.loads(version_json)
0 | from __future__ import absolute_import, division, print_function | |
1 | ||
2 | import os | |
3 | import re | |
4 | import time | |
5 | import warnings | |
6 | from six import string_types | |
7 | from six.moves.urllib.request import urlretrieve | |
8 | from six.moves.urllib.error import HTTPError, URLError | |
9 | from six.moves.urllib.parse import urljoin | |
10 | import pandas as pd | |
11 | import numpy as np | |
12 | ||
13 | from .json import HttpJsonClient | |
14 | from .error import DrmsQueryError, DrmsExportError, DrmsOperationNotSupported | |
15 | from .utils import _pd_to_numeric_coerce, _split_arg, _extract_series_name | |
16 | ||
17 | __all__ = ['SeriesInfo', 'ExportRequest', 'Client'] | |
18 | ||
19 | ||
class SeriesInfo(object):
    """
    DRMS series details.

    Use :func:`Client.info` to create an instance.

    Attributes
    ----------
    name : string
        Series name.
    primekeys : list of strings
        Series primekeys.
    keywords : pandas.DataFrame
        Details about series keywords.
    links : pandas.DataFrame
        Details about series links.
    segments : pandas.DataFrame
        Details about series segments.
    note : string
        Series description.
    dbindex : list of strings
        Series database index.
    retention : int
        Default retention time.
    unitsize : int
        Storage unit size.
    archive : int
        Series archive flag.
    tapegroup : int
        Tape group.
    """
    def __init__(self, d, name=None):
        # d is the series-info dictionary obtained from the server.
        self._d = d
        self.name = name
        self.retention = self._d.get('retention')
        self.unitsize = self._d.get('unitsize')
        self.archive = self._d.get('archive')
        self.tapegroup = self._d.get('tapegroup')
        self.note = self._d.get('note')
        self.primekeys = self._d.get('primekeys')
        self.dbindex = self._d.get('dbindex')
        self.keywords = self._parse_keywords(d['keywords'])
        self.links = self._parse_links(d['links'])
        self.segments = self._parse_segments(d['segments'])

    @staticmethod
    def _to_frame(d, keys):
        """
        Convert a list of dictionaries into a DataFrame indexed by name.

        The resulting DataFrame has one column per entry in keys (missing
        entries become None); the 'name' column is moved into the index.
        Shared helper for the three _parse_* methods below.
        """
        res = [tuple(di.get(k) for k in keys) for di in d]
        if not res:
            res = None  # workaround for older pandas versions
        res = pd.DataFrame(res, columns=keys)
        res.index = res.pop('name')
        return res

    @staticmethod
    def _parse_keywords(d):
        """Create the keywords DataFrame, including type-flag columns."""
        keys = [
            'name', 'type', 'recscope', 'defval', 'units', 'note', 'linkinfo']
        res = SeriesInfo._to_frame(d, keys)
        # Boolean convenience columns classifying each keyword by DRMS type.
        res['is_time'] = (res.type == 'time')
        res['is_integer'] = res.type.isin(['short', 'int', 'longlong'])
        res['is_real'] = res.type.isin(['float', 'double'])
        res['is_numeric'] = (res.is_integer | res.is_real)
        return res

    @staticmethod
    def _parse_links(d):
        """Create the links DataFrame."""
        return SeriesInfo._to_frame(d, ['name', 'target', 'kind', 'note'])

    @staticmethod
    def _parse_segments(d):
        """Create the segments DataFrame."""
        return SeriesInfo._to_frame(
            d, ['name', 'type', 'units', 'protocol', 'dims', 'note'])

    def __repr__(self):
        if self.name is None:
            return '<SeriesInfo>'
        else:
            return '<SeriesInfo "%s">' % self.name
123 | ||
124 | ||
class ExportRequest(object):
    """
    Class for handling data export requests.

    Use :func:`Client.export` or :func:`Client.export_from_id` to
    create an instance.

    Attributes
    ----------
    id : string
        Request ID.
    status : int
        Export request status.
    urls : pandas.DataFrame
        URLs of all downloadable files.
    request_url : string
        URL of the export request.
    method : string
        Data export method.
    protocol : string
        Data export protocol.
    data : pandas.DataFrame
        Records and filenames of the export request.
    dir : string
        Common directory of the requested files on the server.
    tarfile : string
        Filename, if a TAR file was requested.
    keywords : string
        Filename of textfile containing record keywords.
    """
    # Status codes used by the export system. The "not found" code (6) is
    # treated as pending, because a freshly submitted request may take a
    # short time to be registered on the server (see wait()).
    _status_code_ok = 0
    _status_code_notfound = 6
    _status_codes_pending = [1, 2, _status_code_notfound]
    _status_codes_ok_or_pending = [_status_code_ok] + _status_codes_pending

    def __init__(self, d, client):
        # d is the status dictionary returned by the server; client is the
        # Client instance this request belongs to.
        self._client = client
        self._requestid = None
        self._status = None
        self._download_urls_cache = None
        self._update_status(d)

    @classmethod
    def _create_from_id(cls, requestid, client):
        # Build an instance for an already existing export request by
        # querying its current status from the server.
        d = client._json.exp_status(requestid)
        return cls(d, client)

    def __repr__(self):
        idstr = str(None) if self._requestid is None else (
            '"%s"' % self._requestid)
        return '<ExportRequest id=%s, status=%d>' % (idstr, self._status)

    @staticmethod
    def _parse_data(d):
        # Convert the server-supplied list of record/filename dictionaries
        # into a DataFrame with the columns 'record' and 'filename'.
        keys = ['record', 'filename']
        res = None if d is None else [
            (di.get(keys[0]), di.get(keys[1])) for di in d]
        if not res:
            res = None  # workaround for older pandas versions
        res = pd.DataFrame(res, columns=keys)
        return res

    def _update_status(self, d=None):
        # Fetch a new status dictionary from the server, unless one was
        # passed in explicitly (as done by __init__).
        if d is None and self._requestid is not None:
            d = self._client._json.exp_status(self._requestid)
        self._d = d
        # Timestamp of this update; used by wait() to adjust sleep times.
        self._d_time = time.time()
        self._status = int(self._d.get('status', self._status))
        self._requestid = self._d.get('requestid', self._requestid)
        if self._requestid is None:
            # Apparently 'reqid' is used instead of 'requestid' for certain
            # protocols like 'mpg'
            self._requestid = self._d.get('reqid')
        if self._requestid == '':
            # Use None if the requestid is empty (url_quick + as-is)
            self._requestid = None

    def _raise_on_error(self, notfound_ok=True):
        # Raise DrmsExportError if the request failed. A "not found" status
        # only counts as a failure when notfound_ok is False.
        if self._status in self._status_codes_ok_or_pending:
            if self._status != self._status_code_notfound or notfound_ok:
                return  # request has not failed (yet)
        msg = self._d.get('error')
        if msg is None:
            msg = 'DRMS export request failed.'
        msg += ' [status=%d]' % self._status
        raise DrmsExportError(msg)

    def _generate_download_urls(self):
        """Generate download URLs for the current request."""
        res = self.data.copy()
        data_dir = self.dir

        # Clear first record name for movies, as it is not a DRMS record.
        if self.protocol in ['mpg', 'mp4']:
            if res.record[0].startswith('movie'):
                # NOTE(review): chained assignment; may not modify res under
                # newer pandas copy-on-write semantics -- confirm.
                res.record[0] = None

        # tar exports provide only a single TAR file with full path
        if self.tarfile is not None:
            data_dir = None
            res = pd.DataFrame(
                [(None, self.tarfile)], columns=['record', 'filename'])

        # If data_dir is None, the filename column should contain the full
        # path of the file and we need to extract the basename part. If
        # data_dir contains a directory, the filename column should contain
        # only the basename and we need to join it with the directory.
        if data_dir is None:
            res.rename(columns={'filename': 'fpath'}, inplace=True)
            split_fpath = res.fpath.str.split('/')
            res['filename'] = [sfp[-1] for sfp in split_fpath]
        else:
            res['fpath'] = data_dir + '/' + res.filename

        if self.method.startswith('url'):
            baseurl = self._client._server.http_download_baseurl
        elif self.method.startswith('ftp'):
            baseurl = self._client._server.ftp_download_baseurl
        else:
            raise RuntimeError(
                'Download is not supported for export method "%s"' %
                self.method)

        # Generate download URLs.
        urls = []
        for fp in res.fpath:
            # Strip leading slashes so urljoin() appends to the base path
            # instead of replacing it.
            while fp.startswith('/'):
                fp = fp[1:]
            urls.append(urljoin(baseurl, fp))
        res['url'] = urls

        # Remove rows with missing files.
        res = res[res.filename != 'NoDataFile']

        del res['fpath']
        return res

    @staticmethod
    def _next_available_filename(fname):
        """Find next available filename, append a number if necessary."""
        i = 1
        new_fname = fname
        while os.path.exists(new_fname):
            new_fname = '%s.%d' % (fname, i)
            i += 1
        return new_fname

    @property
    def id(self):
        """(string) Request ID."""
        return self._requestid

    @property
    def status(self):
        """(int) Export request status."""
        return self._status

    @property
    def method(self):
        """(string) Export method."""
        return self._d.get('method')

    @property
    def protocol(self):
        """(string) Export protocol."""
        return self._d.get('protocol')

    @property
    def dir(self):
        """(string) Common directory of the requested files on the server."""
        # Accessing this attribute blocks until the request has finished.
        if self.has_finished(skip_update=True):
            self._raise_on_error()
        else:
            self.wait()
        data_dir = self._d.get('dir')
        return data_dir if data_dir else None

    @property
    def data(self):
        """
        (pandas.DataFrame) Records and filenames of the export request.

        Returns a pandas.DataFrame containing the records and filenames
        of the export request (DataFrame columns: 'record', 'filename').
        """
        # Accessing this attribute blocks until the request has finished.
        if self.has_finished(skip_update=True):
            self._raise_on_error()
        else:
            self.wait()
        return self._parse_data(self._d.get('data'))

    @property
    def tarfile(self):
        """(string) Filename, if a TAR file was requested."""
        # Accessing this attribute blocks until the request has finished.
        if self.has_finished(skip_update=True):
            self._raise_on_error()
        else:
            self.wait()
        data_tarfile = self._d.get('tarfile')
        return data_tarfile if data_tarfile else None

    @property
    def keywords(self):
        """(string) Filename of textfile containing record keywords."""
        # Accessing this attribute blocks until the request has finished.
        if self.has_finished(skip_update=True):
            self._raise_on_error()
        else:
            self.wait()
        data_keywords = self._d.get('keywords')
        return data_keywords if data_keywords else None

    @property
    def request_url(self):
        """(string) URL of the export request."""
        data_dir = self.dir
        http_baseurl = self._client._server.http_download_baseurl
        if data_dir is None or http_baseurl is None:
            return None
        if data_dir.startswith('/'):
            # urljoin() needs a relative path to append to the base URL.
            data_dir = data_dir[1:]
        return urljoin(http_baseurl, data_dir)

    @property
    def urls(self):
        """
        (pandas.DataFrame) URLs of all downloadable files.

        Returns a pandas.DataFrame containing the records, filenames
        and URLs of the export request (DataFrame columns: 'record',
        'filename' and 'url').
        """
        # The URL table is generated once and cached afterwards.
        if self._download_urls_cache is None:
            self._download_urls_cache = self._generate_download_urls()
        return self._download_urls_cache

    def has_finished(self, skip_update=False):
        """
        Check if the export request has finished.

        Parameters
        ----------
        skip_update : bool
            If set to True, the export status will not be updated from
            the server, even if it was in pending state after the last
            status update.

        Returns
        -------
        result : bool
            True if the export request has finished or False if the
            request is still pending.
        """
        pending = self._status in self._status_codes_pending
        if not pending:
            return True
        if not skip_update:
            self._update_status()
            pending = self._status in self._status_codes_pending
        return not pending

    def has_succeeded(self, skip_update=False):
        """
        Check if the export request has finished successfully.

        Parameters
        ----------
        skip_update : bool
            If set to True, the export status will not be updated from
            the server, even if it was in pending state after the last
            status update.

        Returns
        -------
        result : bool
            True if the export request has finished successfully or
            False if the request failed or is still pending.
        """
        if not self.has_finished(skip_update):
            return False
        return self._status == self._status_code_ok

    def has_failed(self, skip_update=False):
        """
        Check if the export request has finished unsuccessfully.

        Parameters
        ----------
        skip_update : bool
            If set to True, the export status will not be updated from
            the server, even if it was in pending state after the last
            status update.

        Returns
        -------
        result : bool
            True if the export request has finished unsuccessfully or
            False if the request has succeeded or is still pending.
        """
        if not self.has_finished(skip_update):
            return False
        return self._status not in self._status_codes_ok_or_pending

    def wait(self, timeout=None, sleep=5, retries_notfound=5, verbose=None):
        """
        Wait for the server to process the export request. This method
        continuously updates the request status until the server signals
        that the export request has succeeded or failed.

        Parameters
        ----------
        timeout : number or None
            Maximum number of seconds until this method times out. If
            set to None (the default), the status will be updated
            indefinitely until the request succeeded or failed.
        sleep : number or None
            Time in seconds between status updates (defaults to 5
            seconds). If set to None, a server supplied value is used.
        retries_notfound : int
            Number of retries in case the request was not found on the
            server. Note that it usually takes a short time until a new
            request is registered on the server, so a value too low
            might cause an exception to be raised, even if the request
            is valid and will eventually show up on the server.
        verbose : bool or None
            Set to True if status messages should be printed to stdout.
            If set to None (default), the :attr:`Client.verbose` flag
            of the associated client instance is used instead.

        Returns
        -------
        result : bool
            True if the request succeeded or False if a timeout
            occurred. In case of an error an exception is raised.
        """
        if timeout is not None:
            t_start = time.time()
            timeout = float(timeout)
        if sleep is not None:
            sleep = float(sleep)
        retries_notfound = int(retries_notfound)
        if verbose is None:
            verbose = self._client.verbose

        # We are done, if the request has already finished.
        if self.has_finished(skip_update=True):
            self._raise_on_error()
            return True

        while True:
            if verbose:
                idstr = str(None) if self._requestid is None else (
                    '"%s"' % self._requestid)
                print('Export request pending. [id=%s, status=%d]' % (
                    idstr, self._status))

            # Use the user-provided sleep value or the server's wait value.
            # In case neither is available, wait for 5 seconds.
            wait_secs = self._d.get('wait', 5) if sleep is None else sleep

            # Consider the time that passed since the last status update.
            wait_secs -= (time.time() - self._d_time)
            if wait_secs < 0:
                wait_secs = 0

            if timeout is not None:
                # Return, if we would time out while sleeping.
                if t_start + timeout + wait_secs - time.time() < 0:
                    return False

            if verbose:
                print('Waiting for %d seconds...' % round(wait_secs))
            time.sleep(wait_secs)

            if self.has_finished():
                self._raise_on_error()
                return True
            elif self._status == self._status_code_notfound:
                # Raise exception, if no retries are left.
                if retries_notfound <= 0:
                    self._raise_on_error(notfound_ok=False)
                if verbose:
                    print('Request not found on server, %d retries left.' %
                          retries_notfound)
                retries_notfound -= 1

    def download(self, directory, index=None, fname_from_rec=None,
                 verbose=None):
        """
        Download data files.

        By default, the server-side filenames are used as local
        filenames, except for export method 'url_quick', where the
        local filenames are generated from record names (see parameter
        fname_from_rec). In case a file with the same name already
        exists in the download directory, an ascending number is
        appended to the filename.

        Note: Downloading data segments that are directories, e.g. data
        segments from series like "hmi.rdVflows_fd15_frame", is
        currently not supported. In order to download data from series
        like this, you need to use the export methods 'url-tar' or
        'ftp-tar' when submitting the data export request.

        Parameters
        ----------
        directory : string
            Download directory (must already exist).
        index : int, list of ints or None
            Index (or indices) of the file(s) to be downloaded. If set
            to None (the default), all files of the export request are
            downloaded. Note that this parameter is ignored for export
            methods 'url-tar' and 'ftp-tar', where only a single tar
            file is available for download.
        fname_from_rec : bool or None
            If True, local filenames are generated from record names.
            If set to False, the original filenames are used. If set to
            None (default), local filenames are generated only for
            export method 'url_quick'. Exceptions: For exports with
            methods 'url-tar' and 'ftp-tar', no filename will be
            generated. This also applies to movie files from exports
            with protocols 'mpg' or 'mp4', where the original filename
            is used locally.
        verbose : bool or None
            Set to True if status messages should be printed to stdout.
            If set to None (default), the :attr:`Client.verbose` flag
            of the associated client instance is used instead.

        Returns
        -------
        result : pandas.DataFrame
            DataFrame containing the record string, download URL and
            local location of each downloaded file (DataFrame columns:
            'record', 'url' and 'download').
        """
        out_dir = os.path.abspath(directory)
        if not os.path.isdir(out_dir):
            raise IOError('Download directory "%s" does not exist' % out_dir)

        # Normalize index to a list of ints (or None meaning "all files").
        if np.isscalar(index):
            index = [int(index)]
        elif index is not None:
            index = list(index)

        if verbose is None:
            verbose = self._client.verbose

        # Wait until the export request has finished.
        self.wait(verbose=verbose)

        if fname_from_rec is None:
            # For 'url_quick', generate local filenames from record strings.
            if self.method == 'url_quick':
                fname_from_rec = True

        # self.urls contains the same records as self.data, except for the tar
        # methods, where self.urls only contains one entry, the TAR file.
        data = self.urls
        if index is not None and self.tarfile is None:
            data = data.iloc[index].copy()
        ndata = len(data)

        downloads = []
        for i in range(ndata):
            di = data.iloc[i]
            if fname_from_rec:
                # Generate a local filename from the record string; fall
                # back to the server-side filename if that fails.
                filename = self._client._filename_from_export_record(
                    di.record, old_fname=di.filename)
                if filename is None:
                    filename = di.filename
            else:
                filename = di.filename

            # Download into a temporary '.part' file first and rename it
            # afterwards, so interrupted downloads do not leave a file
            # with the final name behind.
            fpath = os.path.join(out_dir, filename)
            fpath_new = self._next_available_filename(fpath)
            fpath_tmp = self._next_available_filename(fpath_new + '.part')
            if verbose:
                print('Downloading file %d of %d...' % (i + 1, ndata))
                print(' record: %s' % di.record)
                print(' filename: %s' % di.filename)
            try:
                urlretrieve(di.url, fpath_tmp)
            except (HTTPError, URLError):
                fpath_new = None
                if verbose:
                    print(' -> Error: Could not download file')
            else:
                # Recompute the target name, in case a file with the same
                # name was created while the download was running.
                fpath_new = self._next_available_filename(fpath)
                os.rename(fpath_tmp, fpath_new)
                if verbose:
                    print(' -> "%s"' % os.path.relpath(fpath_new))
            downloads.append(fpath_new)

        res = data[['record', 'url']].copy()
        res['download'] = downloads
        return res
620 | ||
621 | ||
622 | class Client(object): | |
623 | """ | |
624 | Client for remote DRMS server access. | |
625 | ||
626 | Parameters | |
627 | ---------- | |
628 | server : string or ServerConfig | |
629 | Registered server ID or ServerConfig instance. | |
630 | Defaults to JSOC. | |
631 | email : string or None | |
632 | Default email address used data export requests. | |
633 | verbose : bool | |
634 | Print export status messages to stdout (disabled by default). | |
635 | debug : bool | |
636 | Print debug output (disabled by default). | |
637 | ||
638 | Attributes | |
639 | ---------- | |
640 | email : string | |
641 | Default email address used for data export requests. | |
642 | verbose : bool | |
643 | Enable/disable export status output. | |
644 | debug : bool | |
645 | Enable/disable debug output. | |
646 | """ | |
    def __init__(self, server='jsoc', email=None, verbose=False, debug=False):
        self._json = HttpJsonClient(server=server, debug=debug)
        # Cache for series information; presumably filled by info() -- the
        # method is defined further below (not visible here), TODO confirm.
        self._info_cache = {}
        self.verbose = verbose  # use property for conversion to bool
        self.email = email  # use property for email validation
652 | ||
653 | def __repr__(self): | |
654 | return '<Client "%s">' % self._server.name | |
655 | ||
    def _convert_numeric_keywords(self, ds, kdf, skip_conversion=None):
        """
        Convert keyword columns of kdf to numeric dtypes, in place.

        Uses the series information of ds to decide which columns of the
        keyword DataFrame kdf contain integer or numeric keywords. Columns
        listed in skip_conversion (string or list of strings) are left
        untouched.
        """
        si = self.info(ds)
        int_keys = list(si.keywords[si.keywords.is_integer].index)
        num_keys = list(si.keywords[si.keywords.is_numeric].index)
        # Pseudo-keywords returned by the server are always numeric.
        num_keys += ['*recnum*', '*sunum*', '*size*']
        if skip_conversion is None:
            skip_conversion = []
        elif isinstance(skip_conversion, string_types):
            skip_conversion = [skip_conversion]
        for k in kdf:
            if k in skip_conversion:
                continue
            # pandas apparently does not support hexadecimal strings, so
            # we need a special treatment for integer strings that start
            # with '0x', like QUALITY. The following to_numeric call is
            # still necessary as the results are still Python objects.
            if k in int_keys and kdf[k].dtype is np.dtype(object):
                idx = kdf[k].str.startswith('0x')
                if idx.any():
                    kdf.loc[idx, k] = kdf.loc[idx, k].map(
                        lambda x: int(x, base=16))
            if k in num_keys:
                kdf[k] = _pd_to_numeric_coerce(kdf[k])
679 | ||
680 | @staticmethod | |
681 | def _raise_query_error(d, status=None): | |
682 | """Raises a DrmsQueryError, using the json error message from d""" | |
683 | if status is None: | |
684 | status = d.get('status') | |
685 | msg = d.get('error') | |
686 | if msg is None: | |
687 | msg = 'DRMS Query failed.' | |
688 | msg += ' [status=%s]' % status | |
689 | raise DrmsQueryError(msg) | |
690 | ||
691 | def _generate_filenamefmt(self, sname): | |
692 | """Generate filename format string for export requests.""" | |
693 | try: | |
694 | si = self.info(sname) | |
695 | except: | |
696 | # Cannot generate filename format for unknown series. | |
697 | return None | |
698 | ||
699 | pkfmt_list = [] | |
700 | for k in si.primekeys: | |
701 | if si.keywords.loc[k].is_time: | |
702 | pkfmt_list.append('{%s:A}' % k) | |
703 | else: | |
704 | pkfmt_list.append('{%s}' % k) | |
705 | ||
706 | if pkfmt_list: | |
707 | return '%s.%s.{segment}' % (si.name, '.'.join(pkfmt_list)) | |
708 | else: | |
709 | return si.name + '.{recnum:%lld}.{segment}' | |
710 | ||
    # Some regular expressions used to parse export request queries.
    # Matches 'series[pkeys...]{segments}', where the bracket and brace
    # parts are optional; groups: (series, '[...]' part, segment list).
    _re_export_recset = re.compile(
        r'^\s*([\w\.]+)\s*(\[.*\])?\s*(?:\{([\w\s\.,]*)\})?\s*$')
    # Extracts the individual '[...]' primekey values.
    # NOTE(review): the class [^\[^\]] also excludes a literal '^' (the
    # second '^' is not a negation) -- presumably unintended, but primekey
    # values should not contain '^' anyway; confirm before changing.
    _re_export_recset_pkeys = re.compile(r'\[([^\[^\]]*)\]')
    # Splits a comma- and/or whitespace-separated list of segment names.
    _re_export_recset_slist = re.compile(r'[\s,]+')
716 | ||
717 | @staticmethod | |
718 | def _parse_export_recset(rs): | |
719 | """Parse export request record set.""" | |
720 | if rs is None: | |
721 | return None, None, None | |
722 | m = Client._re_export_recset.match(rs) | |
723 | if not m: | |
724 | return None, None, None | |
725 | sname, pkeys, segs = m.groups() | |
726 | if pkeys is not None: | |
727 | pkeys = Client._re_export_recset_pkeys.findall(pkeys) | |
728 | if segs is not None: | |
729 | segs = Client._re_export_recset_slist.split(segs) | |
730 | return sname, pkeys, segs | |
731 | ||
732 | def _filename_from_export_record(self, rs, old_fname=None): | |
733 | """Generate a filename from an export request record.""" | |
734 | sname, pkeys, segs = self._parse_export_recset(rs) | |
735 | if sname is None: | |
736 | return None | |
737 | ||
738 | # We need to identify time primekeys and change the time strings to | |
739 | # make them suitable for filenames. | |
740 | try: | |
741 | si = self.info(sname) | |
742 | except: | |
743 | # Cannot generate filename for unknown series. | |
744 | return None | |
745 | ||
746 | if pkeys is not None: | |
747 | n = len(pkeys) | |
748 | if n != len(si.primekeys): | |
749 | # Number of parsed pkeys differs from series definition. | |
750 | return None | |
751 | for i in range(n): | |
752 | # Cleanup time strings. | |
753 | if si.keywords.loc[si.primekeys[i]].is_time: | |
754 | v = pkeys[i] | |
755 | v = v.replace('.', '').replace(':', '').replace('-', '') | |
756 | pkeys[i] = v | |
757 | ||
758 | # Generate filename. | |
759 | fname = si.name | |
760 | if pkeys is not None: | |
761 | pkeys = [k for k in pkeys if k.strip()] | |
762 | pkeys_str = '.'.join(pkeys) | |
763 | if pkeys_str: | |
764 | fname += '.' + pkeys_str | |
765 | if segs is not None: | |
766 | segs = [s for s in segs if s.strip()] | |
767 | segs_str = '.'.join(segs) | |
768 | if segs_str: | |
769 | fname += '.' + segs_str | |
770 | ||
771 | if old_fname is not None: | |
772 | # Try to use the file extension of the original filename. | |
773 | known_fname_extensions = [ | |
774 | '.fits', '.txt', '.jpg', '.mpg', '.mp4', '.tar'] | |
775 | for ext in known_fname_extensions: | |
776 | if old_fname.endswith(ext): | |
777 | return fname + ext | |
778 | return fname | |
779 | ||
    # Export color table names, from (internal) series "jsoc.Color_Tables".
    # Used by _validate_export_protocol_args() for case-insensitive
    # matching of the 'ct'/'CT' protocol argument.
    _export_color_table_names = [
        'HMI_mag.lut',
        'aia_131.lut',
        'aia_1600.lut',
        'aia_1700.lut',
        'aia_171.lut',
        'aia_193.lut',
        'aia_211.lut',
        'aia_304.lut',
        'aia_335.lut',
        'aia_4500.lut',
        'aia_94.lut',
        'aia_mixed',
        'bb.sao',
        'grey.sao',
        'heat.sao']

    # Export scaling types, from (internal) series "jsoc.Color_Tables".
    # Used by _validate_export_protocol_args() for the 'scaling' argument.
    _export_scaling_names = [
        'LOG',
        'MINMAX',
        'MINMAXGIVEN',
        'SQRT',
        'mag']
805 | ||
806 | @staticmethod | |
807 | def _validate_export_protocol_args(protocol_args): | |
808 | """ | |
809 | Validate export protocol arguments. | |
810 | """ | |
811 | if protocol_args is None: | |
812 | return | |
813 | ||
814 | ct_key = 'ct' | |
815 | ct = protocol_args.get(ct_key) | |
816 | if ct is None: | |
817 | ct_key = 'CT' | |
818 | ct = protocol_args.get(ct_key) | |
819 | if ct is not None: | |
820 | ll = [s.lower() for s in Client._export_color_table_names] | |
821 | try: | |
822 | i = ll.index(ct.lower()) | |
823 | except ValueError: | |
824 | msg = "'%s' is not a valid color table, " % ct | |
825 | msg += 'available color tables: %s' % ', '.join( | |
826 | ["'%s'" % s for s in Client._export_color_table_names]) | |
827 | raise ValueError(msg) | |
828 | protocol_args[ct_key] = Client._export_color_table_names[i] | |
829 | ||
830 | scaling = protocol_args.get('scaling') | |
831 | if scaling is not None: | |
832 | ll = [s.lower() for s in Client._export_scaling_names] | |
833 | try: | |
834 | i = ll.index(scaling.lower()) | |
835 | except ValueError: | |
836 | msg = "'%s' is not a valid scaling type, " % scaling | |
837 | msg += 'available scaling types: %s' % ', '.join( | |
838 | ["'%s'" % s for s in Client._export_scaling_names]) | |
839 | raise ValueError(msg) | |
840 | protocol_args['scaling'] = Client._export_scaling_names[i] | |
841 | ||
    @property
    def _server(self):
        """(ServerConfig) Remote server configuration."""
        return self._json.server

    @property
    def debug(self):
        """(bool) Enable/disable debug output."""
        return self._json.debug

    @debug.setter
    def debug(self, value):
        # Forwarded to the underlying HTTP/JSON client.
        self._json.debug = value

    @property
    def email(self):
        """(string) Default email address used for data export requests."""
        return self._email

    @email.setter
    def email(self, value):
        # None clears the default address; any other value must pass the
        # server-side registration check before being accepted.
        if value is not None and not self.check_email(value):
            raise ValueError('Email address is invalid or not registered')
        self._email = value

    @property
    def verbose(self):
        """(bool) Enable/disable export status output."""
        return self._verbose

    @verbose.setter
    def verbose(self, value):
        # Coerce any truthy/falsy value to a plain bool.
        self._verbose = bool(value)
875 | ||
876 | def series(self, regex=None, full=False): | |
877 | """ | |
878 | List available data series. | |
879 | ||
880 | Parameters | |
881 | ---------- | |
882 | regex : string or None | |
883 | Regular expression, used to select a subset of the | |
884 | available series. If set to None, a list of all available | |
885 | series is returned. | |
886 | full : bool | |
887 | If True, return a pandas.DataFrame containing additional | |
888 | series information, like description and primekeys. If | |
889 | False (default), the result is a list containing only the | |
890 | series names. | |
891 | ||
892 | Returns | |
893 | ------- | |
894 | result : list or pandas.DataFrame | |
895 | List of series names or DataFrame containing name, | |
896 | primekeys and a description of the selected series (see | |
897 | parameter ``full``). | |
898 | """ | |
899 | if not self._server.check_supported('series'): | |
900 | raise DrmsOperationNotSupported( | |
901 | 'Server does not support series list access') | |
902 | if self._server.url_show_series_wrapper is None: | |
903 | # No wrapper CGI available, use the regular version. | |
904 | d = self._json.show_series(regex) | |
905 | status = d.get('status') | |
906 | if status != 0: | |
907 | self._raise_query_error(d) | |
908 | if full: | |
909 | keys = ('name', 'primekeys', 'note') | |
910 | if not d['names']: | |
911 | return pd.DataFrame(columns=keys) | |
912 | recs = [(it['name'], _split_arg(it['primekeys']), it['note']) | |
913 | for it in d['names']] | |
914 | return pd.DataFrame(recs, columns=keys) | |
915 | else: | |
916 | if not d['names']: | |
917 | return [] | |
918 | return [it['name'] for it in d['names']] | |
919 | else: | |
920 | # Use show_series_wrapper instead of the regular version. | |
921 | d = self._json.show_series_wrapper(regex, info=full) | |
922 | if full: | |
923 | keys = ('name', 'note') | |
924 | if not d['seriesList']: | |
925 | return pd.DataFrame(columns=keys) | |
926 | recs = [] | |
927 | for it in d['seriesList']: | |
928 | name, info = tuple(it.items())[0] | |
929 | note = info.get('description', '') | |
930 | recs.append((name, note)) | |
931 | return pd.DataFrame(recs, columns=keys) | |
932 | else: | |
933 | return d['seriesList'] | |
934 | ||
935 | def info(self, ds): | |
936 | """ | |
937 | Get information about the content of a data series. | |
938 | ||
939 | Parameters | |
940 | ---------- | |
941 | ds : string | |
942 | Name of the data series. | |
943 | ||
944 | Returns | |
945 | ------- | |
946 | result : :class:`SeriesInfo` | |
947 | SeriesInfo instance containing information about the data | |
948 | series. | |
949 | """ | |
950 | if not self._server.check_supported('info'): | |
951 | raise DrmsOperationNotSupported( | |
952 | 'Server does not support series info access') | |
953 | name = _extract_series_name(ds) | |
954 | if name is not None: | |
955 | name = name.lower() | |
956 | if name in self._info_cache: | |
957 | return self._info_cache[name] | |
958 | d = self._json.series_struct(name) | |
959 | status = d.get('status') | |
960 | if status != 0: | |
961 | self._raise_query_error(d) | |
962 | si = SeriesInfo(d, name=name) | |
963 | if name is not None: | |
964 | self._info_cache[name] = si | |
965 | return si | |
966 | ||
967 | def keys(self, ds): | |
968 | """ | |
969 | Get a list of keywords that are available for a series. Use | |
970 | the :func:`info` method for more details. | |
971 | ||
972 | Parameters | |
973 | ---------- | |
974 | ds : string | |
975 | Name of the data series. | |
976 | ||
977 | Returns | |
978 | ------- | |
979 | result : list | |
980 | List of keywords available for the selected series. | |
981 | """ | |
982 | si = self.info(ds) | |
983 | return list(si.keywords.index) | |
984 | ||
985 | def pkeys(self, ds): | |
986 | """ | |
987 | Get a list of primekeys that are available for a series. Use | |
988 | the :func:`info` method for more details. | |
989 | ||
990 | Parameters | |
991 | ---------- | |
992 | ds : string | |
993 | Name of the data series. | |
994 | ||
995 | Returns | |
996 | ------- | |
997 | result : list | |
998 | List of primekeys available for the selected series. | |
999 | """ | |
1000 | si = self.info(ds) | |
1001 | return list(si.primekeys) | |
1002 | ||
1003 | def get(self, ds, key=None, seg=None, link=None, convert_numeric=True, | |
1004 | skip_conversion=None): | |
1005 | """ | |
1006 | This method is deprecated. Use :func:`query` instead. | |
1007 | """ | |
1008 | warnings.warn( | |
1009 | 'Client.get() is deprecated, use Client.query() instead', | |
1010 | DeprecationWarning) | |
1011 | return self.query( | |
1012 | ds, key=key, seg=seg, link=link, convert_numeric=convert_numeric, | |
1013 | skip_conversion=skip_conversion) | |
1014 | ||
1015 | def query(self, ds, key=None, seg=None, link=None, convert_numeric=True, | |
1016 | skip_conversion=None, pkeys=False, rec_index=False, n=None): | |
1017 | """ | |
1018 | Query keywords, segments and/or links of a record set. At | |
1019 | least one of the parameters key, seg, link or pkeys needs to | |
1020 | be specified. | |
1021 | ||
1022 | Parameters | |
1023 | ---------- | |
1024 | ds : string | |
1025 | Record set query. | |
1026 | key : string, list of strings or None | |
1027 | List of requested keywords, optional. If set to None | |
1028 | (default), no keyword results will be returned, except | |
1029 | when pkeys is True. | |
1030 | seg : string, list of strings or None | |
1031 | List of requested segments, optional. If set to None | |
1032 | (default), no segment results will be returned. | |
1033 | link : string, list of strings or None | |
1034 | List of requested Links, optional. If set to None | |
1035 | (default), no link results will be returned. | |
1036 | convert_numeric : bool | |
1037 | Convert keywords with numeric types from string to | |
1038 | numbers. This may result in NaNs for invalid/missing | |
1039 | values. Default is True. | |
1040 | skip_conversion : list of strings or None | |
1041 | List of keywords names to be skipped when performing a | |
1042 | numeric conversion. Default is None. | |
1043 | pkeys : bool | |
1044 | If True, all primekeys of the series are added to the | |
1045 | ``key`` parameter. | |
1046 | rec_index : bool | |
1047 | If True, record names are used as index for the resulting | |
1048 | DataFrames. | |
1049 | n : int or None | |
1050 | Limits the number of records returned by the query. For | |
1051 | positive | |
1052 | values, the first n records of the record set are | |
1053 | returned, for negative values the last abs(n) records. If | |
1054 | set to None (default), no limit is applied. | |
1055 | ||
1056 | Returns | |
1057 | ------- | |
1058 | res_key : pandas.DataFrame, optional | |
1059 | Keyword query results. This DataFrame is only returned, | |
1060 | if key is not None or pkeys is set to True. | |
1061 | res_seg : pandas.DataFrame, optional | |
1062 | Segment query results. This DataFrame is only returned, | |
1063 | if seg is not None. | |
1064 | res_link : pandas.DataFrame, optional | |
1065 | Link query results. This DataFrame is only returned, | |
1066 | if link is not None. | |
1067 | """ | |
1068 | if not self._server.check_supported('query'): | |
1069 | raise DrmsOperationNotSupported( | |
1070 | 'Server does not support DRMS queries') | |
1071 | if pkeys: | |
1072 | pk = self.pkeys(ds) | |
1073 | key = _split_arg(key) if key is not None else [] | |
1074 | key = [k for k in key if k not in pk] | |
1075 | key = pk + key | |
1076 | ||
1077 | lres = self._json.rs_list( | |
1078 | ds, key, seg, link, recinfo=rec_index, n=n) | |
1079 | status = lres.get('status') | |
1080 | if status != 0: | |
1081 | self._raise_query_error(lres) | |
1082 | ||
1083 | res = [] | |
1084 | if key is not None: | |
1085 | if 'keywords' in lres: | |
1086 | names = [it['name'] for it in lres['keywords']] | |
1087 | values = [it['values'] for it in lres['keywords']] | |
1088 | res_key = pd.DataFrame.from_items(zip(names, values)) | |
1089 | else: | |
1090 | res_key = pd.DataFrame() | |
1091 | if convert_numeric: | |
1092 | self._convert_numeric_keywords(ds, res_key, skip_conversion) | |
1093 | res.append(res_key) | |
1094 | ||
1095 | if seg is not None: | |
1096 | if 'segments' in lres: | |
1097 | names = [it['name'] for it in lres['segments']] | |
1098 | values = [it['values'] for it in lres['segments']] | |
1099 | res_seg = pd.DataFrame.from_items(zip(names, values)) | |
1100 | else: | |
1101 | res_seg = pd.DataFrame() | |
1102 | res.append(res_seg) | |
1103 | ||
1104 | if link is not None: | |
1105 | if 'links' in lres: | |
1106 | names = [it['name'] for it in lres['links']] | |
1107 | values = [it['values'] for it in lres['links']] | |
1108 | res_link = pd.DataFrame.from_items(zip(names, values)) | |
1109 | else: | |
1110 | res_link = pd.DataFrame() | |
1111 | res.append(res_link) | |
1112 | ||
1113 | if rec_index: | |
1114 | index = [it['name'] for it in lres['recinfo']] | |
1115 | for r in res: | |
1116 | r.index = index | |
1117 | ||
1118 | if len(res) == 0: | |
1119 | return None | |
1120 | elif len(res) == 1: | |
1121 | return res[0] | |
1122 | else: | |
1123 | return tuple(res) | |
1124 | ||
1125 | def check_email(self, email): | |
1126 | """ | |
1127 | Check if the email address is registered for data export. | |
1128 | You can register your email for data exports from JSOC on | |
1129 | the `JSOC email registration | |
1130 | <http://jsoc.stanford.edu/ajax/register_email.html>`__ | |
1131 | webpage. | |
1132 | ||
1133 | Parameters | |
1134 | ---------- | |
1135 | email : string | |
1136 | Email address to be checked. | |
1137 | ||
1138 | Returns | |
1139 | ------- | |
1140 | result : bool | |
1141 | True if the email address is valid and registered, False | |
1142 | otherwise. | |
1143 | """ | |
1144 | if not self._server.check_supported('email'): | |
1145 | raise DrmsOperationNotSupported( | |
1146 | 'Server does not support user emails') | |
1147 | res = self._json.check_address(email) | |
1148 | status = res.get('status') | |
1149 | return status is not None and int(status) == 2 | |
1150 | ||
1151 | def export(self, ds, method='url_quick', protocol='as-is', | |
1152 | protocol_args=None, filenamefmt=None, n=None, email=None, | |
1153 | requestor=None): | |
1154 | """ | |
1155 | Submit a data export request. | |
1156 | ||
1157 | A registered email address is required for data exports. You | |
1158 | can register your email address for data exports from JSOC on | |
1159 | the `JSOC email registration | |
1160 | <http://jsoc.stanford.edu/ajax/register_email.html>`__ | |
1161 | webpage. | |
1162 | ||
1163 | An interactive webinterface and additional information is | |
1164 | available on the `JSOC data export | |
1165 | <http://jsoc.stanford.edu/ajax/exportdata.html>`__ webpage. | |
1166 | ||
1167 | Note that export requests that were submitted using the | |
1168 | webinterface can be accessed using the :func:`export_from_id` | |
1169 | method. | |
1170 | ||
1171 | Parameters | |
1172 | ---------- | |
1173 | ds : string | |
1174 | Data export record set query. | |
1175 | method : string | |
1176 | Export method. Supported methods are: 'url_quick', 'url', | |
1177 | 'url-tar', 'ftp' and 'ftp-tar'. Default is 'url_quick'. | |
1178 | protocol : string | |
1179 | Export protocol. Supported protocols are: 'as-is', 'fits', | |
1180 | 'jpg', 'mpg' and 'mp4'. Default is 'as-is'. | |
1181 | protocol_args : dict | |
1182 | Extra protocol arguments for protocols 'jpg', 'mpg' and | |
1183 | 'mp4'. Valid arguments are: 'ct', 'scaling', 'min', 'max' | |
1184 | and 'size'. See the JSOC data export webpage for more | |
1185 | details. | |
1186 | filenamefmt : string, None or False | |
1187 | Custom filename format string for exported files. This is | |
1188 | ignored for 'url_quick'/'as-is' data exports. If set to | |
1189 | None (default), the format string will be generated using | |
1190 | the primekeys of the data series. If set to False, the | |
1191 | filename format string will be omitted in the export | |
1192 | request. | |
1193 | n : int or None | |
1194 | Limits the number of records requested. For positive | |
1195 | values, the first n records of the record set are returned, | |
1196 | for negative values the last abs(n) records. If set to None | |
1197 | (default), no limit is applied. | |
1198 | email : string or None | |
1199 | Registered email address. If email is None (default), the | |
1200 | current default email address is used, which in this case | |
1201 | has to be set before calling export() by using the | |
1202 | :attr:`Client.email` attribute. | |
1203 | requestor : string, None or False | |
1204 | Export user ID. Default is None, in which case the user | |
1205 | name is determined from the email address. If set to False, | |
1206 | the requestor argument will be omitted in the export | |
1207 | request. | |
1208 | ||
1209 | Returns | |
1210 | ------- | |
1211 | result : :class:`ExportRequest` | |
1212 | """ | |
1213 | if not self._server.check_supported('export'): | |
1214 | raise DrmsOperationNotSupported( | |
1215 | 'Server does not support export requests') | |
1216 | if email is None: | |
1217 | if self._email is None: | |
1218 | raise ValueError( | |
1219 | 'The email argument is required, when no default email ' | |
1220 | 'address was set') | |
1221 | email = self._email | |
1222 | ||
1223 | if filenamefmt is None: | |
1224 | sname = _extract_series_name(ds) | |
1225 | filenamefmt = self._generate_filenamefmt(sname) | |
1226 | elif filenamefmt is False: | |
1227 | filenamefmt = None | |
1228 | ||
1229 | if protocol.lower() in ['jpg', 'mpg', 'mp4']: | |
1230 | self._validate_export_protocol_args(protocol_args) | |
1231 | ||
1232 | d = self._json.exp_request( | |
1233 | ds, email, method=method, protocol=protocol, | |
1234 | protocol_args=protocol_args, filenamefmt=filenamefmt, | |
1235 | n=n, requestor=requestor) | |
1236 | return ExportRequest(d, client=self) | |
1237 | ||
1238 | def export_from_id(self, requestid): | |
1239 | """ | |
1240 | Create an :class:`ExportRequest` instance from an existing | |
1241 | requestid. | |
1242 | ||
1243 | Parameters | |
1244 | ---------- | |
1245 | requestid : string | |
1246 | Export request ID. | |
1247 | ||
1248 | Returns | |
1249 | ------- | |
1250 | result : :class:`ExportRequest` | |
1251 | """ | |
1252 | if not self._server.check_supported('export'): | |
1253 | raise DrmsOperationNotSupported( | |
1254 | 'Server does not support export requests') | |
1255 | return ExportRequest._create_from_id(requestid, client=self) | |
1256 | ||
1257 | ||
1258 | def _test_info(c, ds): | |
1259 | sname = c.series(ds) | |
1260 | res = [] | |
1261 | skiplist = [r'jsoc.*'] | |
1262 | for sni in sname: | |
1263 | skipit = False | |
1264 | print(sni) | |
1265 | for spat in skiplist: | |
1266 | if re.match(spat, sni): | |
1267 | print('** skipping series **') | |
1268 | skipit = True | |
1269 | break | |
1270 | if not skipit: | |
1271 | res.append(c.info(sni)) | |
1272 | return res |
0 | from __future__ import absolute_import, division, print_function | |
1 | ||
2 | from six import string_types | |
3 | from six.moves.urllib.parse import urljoin | |
4 | ||
5 | __all__ = ['ServerConfig', 'register_server'] | |
6 | ||
7 | ||
class ServerConfig(object):
    """
    DRMS Server configuration.

    Config entries are exposed as (virtual) instance attributes via the
    ``__getattr__``/``__setattr__`` overrides below; unset entries read
    as None.

    Parameters
    ----------
    name : string
        Server configuration name.
    config : dict
        Dictionary containing configuration entries (see below for a
        list of available entries).

    Additional keyword arguments can be used to add additional entries
    to config. In case a keyword argument already exists in the config
    dictionary, the config entry will be replaced by the kwargs value.

    Available config keys are:
        name
        cgi_baseurl
        cgi_show_series
        cgi_jsoc_info
        cgi_jsoc_fetch
        cgi_check_address
        cgi_show_series_wrapper
        show_series_wrapper_dbhost
        url_show_series
        url_jsoc_info
        url_jsoc_fetch
        url_check_address
        url_show_series_wrapper
        encoding
        http_download_baseurl
        ftp_download_baseurl
    """
    # Keys that may appear in a server configuration. Attribute access
    # and assignment for exactly these names is redirected to self._d.
    _valid_keys = [
        'name',
        'cgi_baseurl',
        'cgi_show_series',
        'cgi_jsoc_info',
        'cgi_jsoc_fetch',
        'cgi_check_address',
        'cgi_show_series_wrapper',
        'show_series_wrapper_dbhost',
        'url_show_series',
        'url_jsoc_info',
        'url_jsoc_fetch',
        'url_check_address',
        'url_show_series_wrapper',
        'encoding',
        'http_download_baseurl',
        'ftp_download_baseurl'
    ]
    # Helper snippet used to regenerate the docstring key list:
    # print(('\n' + 12*' ').join(ServerConfig._valid_keys))

    def __init__(self, config=None, **kwargs):
        # Note: this assignment goes through __setattr__ and falls back to
        # object.__setattr__, because '_d' is not in _valid_keys.
        self._d = d = config.copy() if config is not None else {}
        d.update(kwargs)

        for k in d:
            if k not in self._valid_keys:
                raise ValueError('Invalid server config key: "%s"' % k)

        if 'name' not in d:
            raise ValueError('Server config entry "name" is missing')

        # encoding defaults to latin1
        if 'encoding' not in d:
            d['encoding'] = 'latin1'

        # Generate URL entries from CGI entries, if cgi_baseurl exists and
        # the specific URL entry is not already set.
        if 'cgi_baseurl' in d:
            cgi_baseurl = d['cgi_baseurl']
            cgi_keys = [k for k in self._valid_keys
                        if k.startswith('cgi') and k != 'cgi_baseurl']
            for k in cgi_keys:
                # e.g. 'cgi_jsoc_info' -> 'url_jsoc_info'
                url_key = 'url' + k[3:]
                cgi_value = d.get(k)
                if d.get(url_key) is None and cgi_value is not None:
                    d[url_key] = urljoin(cgi_baseurl, cgi_value)

    def __repr__(self):
        return '<ServerConfig "%s">' % self._d.get('name')

    def __dir__(self):
        # Include the virtual config attributes, e.g. for tab completion.
        return dir(type(self)) + list(self.__dict__.keys()) + self._valid_keys

    def __getattr__(self, name):
        # Called only when normal attribute lookup fails: resolve config
        # keys from self._d (missing entries read as None).
        if name in self._valid_keys:
            return self._d.get(name)
        else:
            return object.__getattribute__(self, name)

    def __setattr__(self, name, value):
        # Config keys are validated and stored in self._d; everything
        # else (like '_d' itself) is a regular instance attribute.
        if name in self._valid_keys:
            if not isinstance(value, string_types):
                raise ValueError('"%s" config value must be a string' % name)
            self._d[name] = value
        else:
            object.__setattr__(self, name, value)

    def copy(self):
        """Return an independent copy of this server configuration."""
        return ServerConfig(self._d)

    def to_dict(self):
        """Return the underlying config dict (not a copy)."""
        return self._d

    def check_supported(self, op):
        """Check if an operation is supported by the server."""
        if op == 'series':
            return ((self.cgi_show_series is not None) or
                    (self.cgi_show_series_wrapper is not None))
        elif op == 'info':
            return self.cgi_jsoc_info is not None
        elif op == 'query':
            return self.cgi_jsoc_info is not None
        elif op == 'email':
            return self.cgi_check_address is not None
        elif op == 'export':
            return ((self.cgi_jsoc_info is not None) and
                    (self.cgi_jsoc_fetch is not None))
        else:
            raise ValueError('Unknown operation: %r' % op)
131 | ||
132 | ||
def register_server(config):
    """Register a server configuration under its lower-cased name."""
    global _server_configs
    key = config.name.lower()
    if key in _server_configs:
        raise RuntimeError('ServerConfig "%s" already registered' % key)
    _server_configs[key] = config


# Registered servers, keyed by lower-cased configuration name.
_server_configs = {}
144 | ||
# Register public JSOC DRMS server. URL entries are derived from
# cgi_baseurl automatically by ServerConfig.
register_server(ServerConfig(
    name='JSOC',
    cgi_baseurl='http://jsoc.stanford.edu/cgi-bin/ajax/',
    cgi_show_series='show_series',
    cgi_jsoc_info='jsoc_info',
    cgi_jsoc_fetch='jsoc_fetch',
    cgi_check_address='checkAddress.sh',
    cgi_show_series_wrapper='showextseries',
    show_series_wrapper_dbhost='hmidb2',
    http_download_baseurl='http://jsoc.stanford.edu/',
    ftp_download_baseurl='ftp://pail.stanford.edu/export/'))

# Register KIS DRMS server. No jsoc_fetch/check_address CGIs here, so
# email and export operations are unsupported for this server (see
# ServerConfig.check_supported()).
register_server(ServerConfig(
    name='KIS',
    cgi_baseurl='http://drms.leibniz-kis.de/cgi-bin/',
    cgi_show_series='show_series',
    cgi_jsoc_info='jsoc_info'))
0 | from __future__ import absolute_import, division, print_function | |
1 | ||
2 | __all__ = ['DrmsError', 'DrmsQueryError', 'DrmsExportError', | |
3 | 'DrmsOperationNotSupported'] | |
4 | ||
5 | ||
class DrmsError(RuntimeError):
    """Unspecified DRMS run-time error; base class of all drms exceptions."""


class DrmsQueryError(DrmsError):
    """DRMS query error."""


class DrmsExportError(DrmsError):
    """DRMS data export error."""


class DrmsOperationNotSupported(DrmsError):
    """Operation is not supported by DRMS server."""
0 | from __future__ import absolute_import, division, print_function | |
1 | ||
2 | import json as _json | |
3 | from six.moves.urllib.request import urlopen | |
4 | from six.moves.urllib.parse import urlencode, quote_plus | |
5 | ||
6 | from .config import ServerConfig, _server_configs | |
7 | from .utils import _split_arg | |
8 | ||
9 | __all__ = ['const', 'HttpJsonRequest', 'HttpJsonClient'] | |
10 | ||
11 | ||
# Constants for jsoc_info calls
class JsocInfoConstants(object):
    # Inherit from object explicitly: this file supports Python 2 (six,
    # __future__ imports), where a bare class would be old-style -- every
    # other class in the module already derives from object.
    """
    Constants for DRMS queries.

    Attributes
    ----------
    all
        = ``'**ALL**'``
    none
        = ``'**NONE**'``
    recdir
        = ``'*recdir*'``
    dirmtime
        = ``'*dirmtime*'``
    logdir
        = ``'*logdir*'``
    recnum
        = ``'*recnum*'``
    sunum
        = ``'*sunum*'``
    size
        = ``'*size*'``
    online
        = ``'*online*'``
    retain
        = ``'*retain*'``
    archive
        = ``'*archive*'``
    """
    all = '**ALL**'
    none = '**NONE**'
    recdir = '*recdir*'
    dirmtime = '*dirmtime*'
    logdir = '*logdir*'
    recnum = '*recnum*'
    sunum = '*sunum*'
    size = '*size*'
    online = '*online*'
    retain = '*retain*'
    archive = '*archive*'
# Singleton instance exposed to client code (``drms.const``).
const = JsocInfoConstants()
54 | ||
55 | ||
class HttpJsonRequest(object):
    """
    Class for handling HTTP/JSON requests.

    The response body and its parsed JSON representation are fetched
    lazily and cached on first access.

    Use :class:`HttpJsonClient` to create an instance.
    """
    def __init__(self, url, encoding):
        self._data_str = None
        self._data = None
        self._encoding = encoding
        self._http = urlopen(url)

    def __repr__(self):
        return '<HttpJsonRequest "%s">' % self.url

    @property
    def url(self):
        """URL of the underlying HTTP response."""
        return self._http.url

    @property
    def raw_data(self):
        """Raw response body (bytes); read once and cached."""
        cached = self._data_str
        if cached is None:
            cached = self._http.read()
            self._data_str = cached
        return cached

    @property
    def data(self):
        """Response body decoded and parsed as JSON; cached."""
        parsed = self._data
        if parsed is None:
            parsed = _json.loads(self.raw_data.decode(self._encoding))
            self._data = parsed
        return parsed
86 | ||
87 | ||
88 | class HttpJsonClient(object): | |
89 | """ | |
90 | HTTP/JSON communication with the DRMS server CGIs. | |
91 | ||
92 | Parameters | |
93 | ---------- | |
94 | server : string or drms.config.ServerConfig | |
95 | Registered server ID or ServerConfig instance. | |
96 | Defaults to JSOC. | |
97 | debug : bool | |
98 | Enable or disable debug mode (default is disabled). | |
99 | ||
100 | Attributes | |
101 | ---------- | |
102 | server : drms.config.ServerConfig | |
103 | Remote server configuration. | |
104 | debug : bool | |
105 | Enable/disable debug output. | |
106 | """ | |
107 | def __init__(self, server='jsoc', debug=False): | |
108 | if isinstance(server, ServerConfig): | |
109 | self._server = server | |
110 | else: | |
111 | self._server = _server_configs[server.lower()] | |
112 | self.debug = debug | |
113 | ||
114 | def __repr__(self): | |
115 | return '<HttpJsonClient "%s">' % self._server.name | |
116 | ||
    def _json_request(self, url):
        """Create an HttpJsonRequest for *url*; echoes the URL in debug mode."""
        if self.debug:
            print(url)
        return HttpJsonRequest(url, self._server.encoding)
121 | ||
    @property
    def server(self):
        """(ServerConfig) Remote server configuration."""
        return self._server
125 | ||
126 | @property | |
127 | def debug(self): | |
128 | return self._debug | |
129 | ||
130 | @debug.setter | |
131 | def debug(self, value): | |
132 | self._debug = True if value else False | |
133 | ||
134 | def show_series(self, ds_filter=None): | |
135 | """ | |
136 | List available data series. | |
137 | ||
138 | Parameters | |
139 | ---------- | |
140 | ds_filter : string | |
141 | Name filter regexp. | |
142 | ||
143 | Returns | |
144 | ------- | |
145 | result : dict | |
146 | """ | |
147 | query = '?' if ds_filter is not None else '' | |
148 | if ds_filter is not None: | |
149 | query += urlencode({'filter': ds_filter}) | |
150 | req = self._json_request(self._server.url_show_series + query) | |
151 | return req.data | |
152 | ||
153 | def show_series_wrapper(self, ds_filter=None, info=False): | |
154 | """ | |
155 | List available data series. | |
156 | ||
157 | This is an alternative to show_series, which needs to be used | |
158 | to get a list of all available series provided by JSOC. There | |
159 | is currently no support for retrieving primekeys using this | |
160 | CGI. | |
161 | ||
162 | Parameters | |
163 | ---------- | |
164 | ds_filter : string | |
165 | Name filter regexp. | |
166 | info : bool | |
167 | If False (default), the result only contains series names. | |
168 | If set to True, the result includes a description for each | |
169 | series. | |
170 | ||
171 | Returns | |
172 | ------- | |
173 | result : dict | |
174 | """ | |
175 | query_args = {'dbhost': self._server.show_series_wrapper_dbhost} | |
176 | if ds_filter is not None: | |
177 | query_args['filter'] = ds_filter | |
178 | if info: | |
179 | query_args['info'] = '1' | |
180 | query = '?' + urlencode(query_args) | |
181 | req = self._json_request(self._server.url_show_series_wrapper + query) | |
182 | return req.data | |
183 | ||
184 | def series_struct(self, ds): | |
185 | """ | |
186 | Get information about the content of a data series. | |
187 | ||
188 | Parameters | |
189 | ---------- | |
190 | ds : string | |
191 | Name of the data series. | |
192 | ||
193 | Returns | |
194 | ------- | |
195 | result : dict | |
196 | Dictionary containing information about the data series. | |
197 | """ | |
198 | query = '?' + urlencode({'op': 'series_struct', 'ds': ds}) | |
199 | req = self._json_request(self._server.url_jsoc_info + query) | |
200 | return req.data | |
201 | ||
202 | def rs_summary(self, ds): | |
203 | """ | |
204 | Get summary (i.e. count) of a given record set. | |
205 | ||
206 | Parameters | |
207 | ---------- | |
208 | ds : string | |
209 | Record set query (only one series). | |
210 | ||
211 | Returns | |
212 | ------- | |
213 | result : dict | |
214 | Dictionary containg 'count', 'status' and 'runtime'. | |
215 | """ | |
216 | query = '?' + urlencode({'op': 'rs_summary', 'ds': ds}) | |
217 | req = self._json_request(self._server.url_jsoc_info + query) | |
218 | return req.data | |
219 | ||
220 | def rs_list(self, ds, key=None, seg=None, link=None, recinfo=False, | |
221 | n=None, uid=None): | |
222 | """ | |
223 | Get detailed information about a record set. | |
224 | ||
225 | Parameters | |
226 | ---------- | |
227 | ds : string | |
228 | Record set query. | |
229 | key : string, list or None | |
230 | List of requested keywords, optional. | |
231 | seg : string, list or None | |
232 | List of requested segments, optional. | |
233 | link : string or None | |
234 | List of requested Links, optional. | |
235 | recinfo : bool | |
236 | Request record info for each record in the record set. | |
237 | n : int or None | |
238 | Record set limit. For positive values, the first n records | |
239 | of the record set are returned, for negative values the | |
240 | last abs(n) records. If set to None (default), no limit is | |
241 | applied. | |
242 | uid : string or None | |
243 | Session ID used when calling rs_list CGI, optional. | |
244 | ||
245 | Returns | |
246 | ------- | |
247 | result : dict | |
248 | Dictionary containing the requested record set information. | |
249 | """ | |
250 | if key is None and seg is None and link is None: | |
251 | raise ValueError('At least one key, seg or link must be specified') | |
252 | d = {'op': 'rs_list', 'ds': ds} | |
253 | if key is not None: | |
254 | d['key'] = ','.join(_split_arg(key)) | |
255 | if seg is not None: | |
256 | d['seg'] = ','.join(_split_arg(seg)) | |
257 | if link is not None: | |
258 | d['link'] = ','.join(_split_arg(link)) | |
259 | if recinfo: | |
260 | d['R'] = '1' | |
261 | if n is not None: | |
262 | d['n'] = '%d' % int(n) | |
263 | if uid is not None: | |
264 | d['userhandle'] = uid | |
265 | query = '?' + urlencode(d) | |
266 | req = self._json_request(self._server.url_jsoc_info + query) | |
267 | return req.data | |
268 | ||
    def check_address(self, email):
        """
        Check if an email address is registered for export data
        requests.

        Parameters
        ----------
        email : string
            Email address to be verified.

        Returns
        -------
        result : dict
            Dictionary containing 'status' and 'msg'. Some status
            codes are:
                2: Email address is valid and registered
                4: Email address has neither been validated nor
                   registered
               -2: Not a valid email address
        """
        # NOTE(review): the address is percent-encoded twice here --
        # quote_plus() first, then urlencode() escapes the result again.
        # Presumably the checkAddress CGI expects exactly this; confirm
        # against the server before changing.
        query = '?' + urlencode({
            'address': quote_plus(email), 'checkonly': '1'})
        req = self._json_request(self._server.url_check_address + query)
        return req.data
293 | ||
    def exp_request(self, ds, notify, method='url_quick', protocol='as-is',
                    protocol_args=None, filenamefmt=None, n=None,
                    requestor=None):
        """
        Request data export.

        Parameters
        ----------
        ds : string
            Data export record set query.
        notify : string
            Registered email address.
        method : string
            Export method. Supported methods are: 'url_quick', 'url',
            'url-tar', 'ftp' and 'ftp-tar'. Default is 'url_quick'.
        protocol : string
            Export protocol. Supported protocols are: 'as-is', 'fits',
            'jpg', 'mpg' and 'mp4'. Default is 'as-is'.
        protocol_args : dict or None
            Extra protocol arguments for protocols 'jpg', 'mpg' and
            'mp4'. Valid arguments are: 'ct', 'scaling', 'min', 'max'
            and 'size'.
        filenamefmt : string, None
            Custom filename format string for exported files. This is
            ignored for 'url_quick'/'as-is' data exports.
        n : int or None
            Limits the number of records requested. For positive
            values, the first n records of the record set are returned,
            for negative values the last abs(n) records. If set to None
            (default), no limit is applied.
        requestor : string, None or False
            Export user ID. Default is None, in which case the user
            name is determined from the email address. If set to False,
            the requestor argument will be omitted in the export
            request.

        Returns
        -------
        result : dict
            Dictionary containing the server response to the export
            request.
        """
        # Validate the export method before building the request.
        method = method.lower()
        method_list = ['url_quick', 'url', 'url-tar', 'ftp', 'ftp-tar']
        if method not in method_list:
            raise ValueError(
                "Method '%s' is not supported, valid methods are: %s" %
                (method, ', '.join("'%s'" % s for s in method_list)))

        # Validate the export protocol.
        protocol = protocol.lower()
        img_protocol_list = ['jpg', 'mpg', 'mp4']
        protocol_list = ['as-is', 'fits'] + img_protocol_list
        if protocol not in protocol_list:
            raise ValueError(
                "Protocol '%s' is not supported, valid protocols are: %s" %
                (protocol, ', '.join("'%s'" % s for s in protocol_list)))

        # method "url_quick" is meant to be used with "as-is", change method
        # to "url" if protocol is not "as-is"
        if method == 'url_quick' and protocol != 'as-is':
            method = 'url'

        if protocol in img_protocol_list:
            # Image protocols carry extra options appended to the protocol
            # string; start from these defaults and override from
            # protocol_args.
            # NOTE(review): only 'ct' is matched case-insensitively here;
            # 'scaling', 'size', 'min' and 'max' are case-sensitive --
            # confirm this asymmetry is intended.
            d = {'ct': 'grey.sao', 'scaling': 'MINMAX', 'size': 1}
            if protocol_args is not None:
                for k, v in protocol_args.items():
                    if k.lower() == 'ct':
                        d['ct'] = v
                    elif k == 'scaling':
                        d[k] = v
                    elif k == 'size':
                        d[k] = int(v)
                    elif k in ['min', 'max']:
                        d[k] = float(v)
                    else:
                        raise ValueError("Unknown protocol argument: '%s'" % k)
            protocol += ',CT={ct},scaling={scaling},size={size}'.format(**d)
            if 'min' in d:
                protocol += ',min=%g' % d['min']
            if 'max' in d:
                protocol += ',max=%g' % d['max']
        else:
            if protocol_args is not None:
                raise ValueError(
                    "protocol_args not supported for protocol '%s'" % protocol)

        # d is reused from here on for the CGI query parameters.
        d = {'op': 'exp_request', 'format': 'json', 'ds': ds,
             'notify': notify, 'method': method, 'protocol': protocol}

        if filenamefmt is not None:
            d['filenamefmt'] = filenamefmt

        if n is not None:
            n = int(n)
            # NOTE(review): the literal key 'process=n' has its '='
            # percent-encoded by urlencode(); presumably jsoc_fetch
            # expects exactly this form -- confirm before changing.
            d['process=n'] = '%d' % n

        if requestor is None:
            # Default requestor ID: local part of the notification address.
            d['requestor'] = notify.split('@')[0]
        elif requestor is not False:
            d['requestor'] = requestor

        query = '?' + urlencode(d)
        req = self._json_request(self._server.url_jsoc_fetch + query)
        return req.data
398 | ||
399 | def exp_status(self, requestid): | |
400 | """ | |
401 | Query data export status. | |
402 | ||
403 | Parameters | |
404 | ---------- | |
405 | requestid : string | |
406 | Request identifier returned by exp_request. | |
407 | ||
408 | Returns | |
409 | ------- | |
410 | result : dict | |
411 | Dictionary containing the export request status. | |
412 | """ | |
413 | query = '?' + urlencode({'op': 'exp_status', 'requestid': requestid}) | |
414 | req = self._json_request(self._server.url_jsoc_fetch + query) | |
415 | return req.data |
from __future__ import absolute_import, division, print_function

import sys
from ._runner import run_tests

# Entry point for "python -m drms.tests": forward any command line
# arguments to pytest and exit with its return code.
sys.exit(run_tests(sys.argv[1:]))
0 | from __future__ import absolute_import, division, print_function | |
1 | ||
2 | import os | |
3 | ||
4 | ||
# Package directory, i.e. the parent directory in respect to this file;
# passed to pytest so it collects the whole drms test suite.
pkg_dir = os.path.dirname(os.path.dirname(__file__))
7 | ||
8 | ||
try:
    import pytest
except ImportError:
    # Fallback stub: keeps run_tests importable and reports the missing
    # dependency only when the function is actually called.
    def run_tests(extra_args=None):
        """Raise ImportError because pytest is not installed."""
        raise ImportError('pytest is needed to run tests')
else:
    def run_tests(extra_args=None):
        """Run the drms test suite with pytest.

        Parameters
        ----------
        extra_args : list of strings or None
            Extra command line arguments forwarded to pytest.

        Returns
        -------
        Exit code from pytest.main().
        """
        args = []
        if extra_args is not None:
            args += extra_args
        args += [pkg_dir]
        print('running: pytest ' + ' '.join(args))
        return pytest.main(args)
0 | from __future__ import absolute_import, division, print_function | |
1 | ||
2 | import pytest | |
3 | from six.moves.urllib.request import urlopen | |
4 | from six.moves.urllib.error import URLError, HTTPError | |
5 | import drms | |
6 | ||
7 | ||
# Directory containing all online tests (tests that talk to a server)
online_tests_dir = 'online'

# Filename prefixes identifying JSOC and KIS online test modules
jsoc_tests_prefix = 'test_jsoc'
kis_tests_prefix = 'test_kis'

# Test URLs, used to check if an online site is reachable
jsoc_testurl = 'http://jsoc.stanford.edu/'
kis_testurl = 'http://drms.leibniz-kis.de/'
18 | ||
19 | ||
def pytest_configure(config):
    """Register the custom markers used by the drms online tests.

    Registering them here avoids shipping a pytest.ini file with the
    drms.tests subpackage.
    """
    for marker in ('jsoc: mark online tests for JSOC',
                   'kis: mark online tests for KIS',
                   'export: mark online tests that perform data exports'):
        config.addinivalue_line('markers', marker)
29 | ||
30 | ||
def pytest_addoption(parser):
    """Add the command line options that enable the online test groups."""
    for flag, helptext in (('--run-jsoc', 'Run online tests for JSOC'),
                           ('--run-kis', 'Run online tests for KIS')):
        parser.addoption(flag, action='store_true', help=helptext)
    parser.addoption('--email', help='Export email address')
38 | ||
39 | ||
def pytest_ignore_collect(path, config):
    """Skip site-specific online test modules unless enabled via CLI."""
    # Only Python files inside the online tests directory are candidates.
    if not (path.ext == '.py' and path.dirname.endswith(online_tests_dir)):
        return False
    # JSOC modules collect only with --run-jsoc ...
    if (path.basename.startswith(jsoc_tests_prefix)
            and not config.getoption('run_jsoc')):
        return True
    # ... and KIS modules only with --run-kis.
    if (path.basename.startswith(kis_tests_prefix)
            and not config.getoption('run_kis')):
        return True
    return False
54 | ||
55 | ||
class lazily_cached(object):
    """Defer a function call and cache its result on first use."""

    def __init__(self, f, *args, **kwargs):
        # Freeze the call; nothing is evaluated until __call__().
        self.func = lambda: f(*args, **kwargs)

    def __call__(self):
        # EAFP: the cached value exists after the first invocation.
        try:
            return self.result
        except AttributeError:
            self.result = self.func()
            return self.result
65 | ||
66 | ||
def site_reachable(url, timeout=3):
    """Return True if the given URL can be opened within *timeout* seconds."""
    try:
        urlopen(url, timeout=timeout)
        return True
    except (URLError, HTTPError):
        return False
74 | ||
75 | ||
# Create lazily evaluated, cached site checks for JSOC and KIS; the
# actual network access happens at most once, on first call.
jsoc_reachable = lazily_cached(site_reachable, jsoc_testurl)
kis_reachable = lazily_cached(site_reachable, kis_testurl)
79 | ||
80 | ||
def pytest_runtest_setup(item):
    """Skip online/export tests whose prerequisites are not met."""
    # Site-marked tests are skipped when the respective site is down.
    if item.get_marker('jsoc') is not None and not jsoc_reachable():
        pytest.skip('JSOC is not reachable')
    if item.get_marker('kis') is not None and not kis_reachable():
        pytest.skip('KIS is not reachable')

    # Export tests additionally need a registered email address.
    if item.get_marker('export') is not None:
        if item.config.getoption('email') is None:
            pytest.skip('No email address specified; use the --email '
                        'option to enable export tests')
98 | ||
99 | ||
@pytest.fixture
def email(request):
    """Email address passed via the --email command line option (or None)."""
    option_value = request.config.getoption('--email')
    return option_value
104 | ||
105 | ||
@pytest.fixture
def jsoc_client():
    """JSOC client fixture for online tests that need no email address."""
    client = drms.Client('jsoc')
    return client
110 | ||
111 | ||
@pytest.fixture
def jsoc_client_export(email):
    """JSOC client fixture for export tests; uses --email when given."""
    client = drms.Client('jsoc', email=email)
    return client
116 | ||
117 | ||
@pytest.fixture
def kis_client():
    """KIS client fixture for online tests."""
    client = drms.Client('kis')
    return client
0 | from __future__ import absolute_import, division, print_function | |
1 | ||
2 | import pytest | |
3 | import drms | |
4 | ||
5 | ||
@pytest.mark.jsoc
def test_series_list_all(jsoc_client):
    """The full series listing contains the well-known JSOC series."""
    slist = jsoc_client.series()
    assert isinstance(slist, list)
    lower_names = [s.lower() for s in slist]
    for expected in ('hmi.v_45s', 'hmi.m_720s', 'hmi.ic_720s',
                     'aia.lev1', 'aia.lev1_euv_12s', 'mdi.fd_v'):
        assert expected in lower_names
16 | ||
17 | ||
@pytest.mark.jsoc
@pytest.mark.parametrize('schema', ['aia', 'hmi', 'mdi'])
def test_series_list_schemata(jsoc_client, schema):
    """Filtered listings are non-empty and respect the schema prefix."""
    slist = jsoc_client.series(r'%s\.' % schema)
    assert len(slist) > 0
    prefix = schema + '.'
    for sname in slist:
        assert sname.startswith(prefix)
0 | from __future__ import absolute_import, division, print_function | |
1 | ||
2 | import pytest | |
3 | import drms | |
4 | ||
5 | ||
# Invalid email addresses used for testing: syntactically valid but
# unregistered, malformed, and empty.
invalid_emails = [
    'notregistered@example.com',
    'not-valid',
    '',
]
12 | ||
13 | ||
@pytest.mark.jsoc
@pytest.mark.parametrize('email', invalid_emails)
def test_email_invalid_check(email):
    """check_email() rejects unregistered or malformed addresses."""
    client = drms.Client('jsoc')
    assert not client.check_email(email)
19 | ||
20 | ||
@pytest.mark.jsoc
@pytest.mark.parametrize('email', invalid_emails)
def test_email_invalid_set(email):
    """Assigning an invalid address to Client.email raises ValueError."""
    client = drms.Client('jsoc')
    with pytest.raises(ValueError):
        client.email = email
27 | ||
28 | ||
@pytest.mark.jsoc
@pytest.mark.parametrize('email', invalid_emails)
def test_email_invalid_init(email):
    """Constructing a client with an invalid email raises ValueError."""
    with pytest.raises(ValueError):
        drms.Client('jsoc', email=email)
34 | ||
35 | ||
@pytest.mark.jsoc
def test_email_cmdopt_check(email):
    """check_email() accepts the address given via --email."""
    # Guard clause: the fixture yields None when --email was not given.
    if email is None:
        pytest.skip('No email was specified using --email')
    client = drms.Client('jsoc')
    assert client.check_email(email)
43 | ||
44 | ||
@pytest.mark.jsoc
def test_email_cmdopt_set(email):
    """Client.email accepts the address given via --email."""
    if email is None:
        pytest.skip('No email was specified using --email')
    client = drms.Client('jsoc')
    client.email = email
    assert client.email == email
53 | ||
54 | ||
@pytest.mark.jsoc
def test_email_cmdopt_init(email):
    """Client(..., email=...) accepts the address given via --email."""
    if email is None:
        pytest.skip('No email was specified using --email')
    client = drms.Client('jsoc', email=email)
    assert client.email == email
0 | from __future__ import absolute_import, division, print_function | |
1 | ||
2 | import pytest | |
3 | import drms | |
4 | ||
5 | ||
@pytest.mark.jsoc
@pytest.mark.export
@pytest.mark.parametrize('method', ['url_quick', 'url'])
def test_export_asis_basic(jsoc_client_export, method):
    """A basic as-is export of two segments completes with 12 URLs."""
    req = jsoc_client_export.export(
        'hmi.v_avg120[2150]{mean,power}', protocol='as-is', method=method,
        requestor=False)

    assert isinstance(req, drms.ExportRequest)
    assert req.wait(timeout=60)
    assert req.has_succeeded()
    assert req.protocol == 'as-is'
    assert len(req.urls) == 12  # 6 files per segment

    for record in req.urls.record:
        rec = record.lower()
        assert rec.startswith('hmi.v_avg120[2150]')
        assert rec.endswith(('{mean}', '{power}'))

    for filename in req.urls.filename:
        assert filename.endswith(('mean.fits', 'power.fits'))

    for url in req.urls.url:
        assert url.endswith(('mean.fits', 'power.fits'))
0 | from __future__ import absolute_import, division, print_function | |
1 | ||
2 | import pytest | |
3 | import drms | |
4 | ||
5 | ||
@pytest.mark.jsoc
@pytest.mark.parametrize('series, pkeys, segments', [
    ('hmi.v_45s', ['T_REC', 'CAMERA'], ['Dopplergram']),
    ('hmi.m_720s', ['T_REC', 'CAMERA'], ['magnetogram']),
    ('hmi.v_avg120', ['CarrRot', 'CMLon'], ['mean', 'power', 'valid', 'Log']),
])
def test_series_info_basic(jsoc_client, series, pkeys, segments):
    """info() reports the expected primekeys and segments for a series."""
    info = jsoc_client.info(series)
    assert info.name.lower() == series
    for pkey in pkeys:
        assert pkey in info.primekeys
        assert pkey in info.keywords.index
    for segment in segments:
        assert segment in info.segments.index
19 | assert s in si.segments.index |
0 | from __future__ import absolute_import, division, print_function | |
1 | ||
2 | import pytest | |
3 | import drms | |
4 | ||
5 | ||
@pytest.mark.jsoc
def test_query_basic(jsoc_client):
    """A 3-minute hmi.v_45s query yields 4 records of keys and segments."""
    keys, segs = jsoc_client.query(
        'hmi.v_45s[2013.07.03_08:42_TAI/3m]',
        key='T_REC, CRLT_OBS', seg='Dopplergram')
    assert len(keys) == 4
    for colname in ('T_REC', 'CRLT_OBS'):
        assert colname in keys.columns
    assert len(segs) == 4
    assert 'Dopplergram' in segs.columns
    # Expected CRLT_OBS value for these records (taken from JSOC data).
    assert ((keys.CRLT_OBS - 3.14159).abs() < 0.0001).all()
16 | assert ((keys.CRLT_OBS - 3.14159).abs() < 0.0001).all() |
0 | from __future__ import absolute_import, division, print_function | |
1 | ||
2 | import pytest | |
3 | import drms | |
4 | ||
5 | ||
@pytest.mark.kis
def test_series_list_all(kis_client):
    """The KIS series listing contains the well-known mirrored series."""
    slist = kis_client.series()
    assert isinstance(slist, list)
    lower_names = [s.lower() for s in slist]
    for expected in ('hmi.v_45s', 'hmi.m_720s', 'hmi.ic_720s', 'mdi.fd_v'):
        assert expected in lower_names
14 | ||
15 | ||
@pytest.mark.kis
@pytest.mark.parametrize('schema', ['hmi', 'mdi'])
def test_series_list_schemata(kis_client, schema):
    """Filtered KIS listings are non-empty and respect the schema prefix."""
    slist = kis_client.series(r'%s\.' % schema)
    assert len(slist) > 0
    prefix = schema + '.'
    for sname in slist:
        assert sname.startswith(prefix)
24 | ||
25 | ||
@pytest.mark.kis
@pytest.mark.parametrize('series, pkeys, segments', [
    ('hmi.v_45s', ['T_REC', 'CAMERA'], ['Dopplergram']),
    ('hmi.m_720s', ['T_REC', 'CAMERA'], ['magnetogram']),
    ('hmi.v_avg120', ['CarrRot', 'CMLon'], ['mean', 'power', 'valid', 'Log']),
])
def test_series_info_basic(kis_client, series, pkeys, segments):
    """info() on KIS reports the expected primekeys and segments."""
    info = kis_client.info(series)
    assert info.name.lower() == series
    for pkey in pkeys:
        assert pkey in info.primekeys
        assert pkey in info.keywords.index
    for segment in segments:
        assert segment in info.segments.index
40 | ||
41 | ||
@pytest.mark.kis
def test_query_basic(kis_client):
    """A 3-minute hmi.v_45s query on KIS yields 4 records."""
    keys, segs = kis_client.query(
        'hmi.v_45s[2013.07.03_08:42_TAI/3m]',
        key='T_REC, CRLT_OBS', seg='Dopplergram')
    assert len(keys) == 4
    for colname in ('T_REC', 'CRLT_OBS'):
        assert colname in keys.columns
    assert len(segs) == 4
    assert 'Dopplergram' in segs.columns
    # Expected CRLT_OBS value for these records (taken from JSOC data).
    assert ((keys.CRLT_OBS - 3.14159).abs() < 0.0001).all()
53 | ||
54 | ||
@pytest.mark.kis
def test_not_supported_email(kis_client):
    """Setting an email on the KIS client raises DrmsOperationNotSupported."""
    with pytest.raises(drms.DrmsOperationNotSupported):
        kis_client.email = 'name@example.com'
59 | ||
60 | ||
@pytest.mark.kis
def test_not_supported_export(kis_client):
    """Export operations on the KIS server raise DrmsOperationNotSupported.

    Both export() and export_from_id() must fail, since the KIS server
    configuration defines no export CGI.
    """
    # Fix: the return values were assigned to an unused local (F841);
    # the raised exception means they are never produced anyway.
    with pytest.raises(drms.DrmsOperationNotSupported):
        kis_client.export('hmi.v_45s[2010.05.01_TAI]')
    with pytest.raises(drms.DrmsOperationNotSupported):
        kis_client.export_from_id('KIS_20120101_123')
0 | from __future__ import absolute_import, division, print_function | |
1 | ||
2 | import pytest | |
3 | import drms | |
4 | from drms.config import ServerConfig | |
5 | ||
6 | ||
def test_client_init_defaults():
    """A default Client targets JSOC with no email and quiet output."""
    client = drms.Client()
    assert isinstance(client._server, ServerConfig)
    assert client._server.name.lower() == 'jsoc'
    assert client.email is None
    assert client.verbose is False
    assert client.debug is False
14 | ||
15 | ||
@pytest.mark.parametrize('value', [True, False])
def test_client_init_verbose(value):
    """The verbose flag is stored as given while debug stays off."""
    client = drms.Client(verbose=value)
    assert client.verbose is value
    assert client.debug is False
21 | ||
22 | ||
@pytest.mark.parametrize('value', [True, False])
def test_client_init_debug(value):
    """The debug flag is stored as given while verbose stays off."""
    client = drms.Client(debug=value)
    assert client.verbose is False
    assert client.debug is value
28 | ||
29 | ||
@pytest.mark.parametrize('server_name', ['jsoc', 'kis'])
def test_client_registered_servers(server_name):
    """Clients can be created from registered server name strings."""
    client = drms.Client(server_name)
    assert isinstance(client._server, ServerConfig)
    assert client._server.name.lower() == server_name
    assert client.email is None
    assert client.verbose is False
    assert client.debug is False
38 | ||
39 | ||
def test_client_custom_config():
    """A ServerConfig instance may be passed directly to Client."""
    custom = ServerConfig(name='TEST')
    client = drms.Client(server=custom)
    assert isinstance(client._server, ServerConfig)
    assert client._server.name == 'TEST'
0 | from __future__ import absolute_import, division, print_function | |
1 | ||
2 | import pytest | |
3 | import six | |
4 | import drms | |
5 | from drms.config import ServerConfig, register_server, _server_configs | |
6 | ||
7 | ||
def test_create_config_basic():
    """A fresh config has its name set, latin1 encoding and None elsewhere."""
    cfg = ServerConfig(name='TEST')
    valid_keys = ServerConfig._valid_keys
    assert 'name' in valid_keys
    assert 'encoding' in valid_keys
    for key in valid_keys:
        value = getattr(cfg, key)
        if key == 'name':
            assert value == 'TEST'
        elif key == 'encoding':
            assert value == 'latin1'
        else:
            assert value is None
21 | ||
22 | ||
def test_create_config_missing_name():
    """Omitting the mandatory name argument raises ValueError."""
    # Fix: the instance was assigned to an unused local (F841); the
    # constructor raises, so no value is ever produced.
    with pytest.raises(ValueError):
        ServerConfig()
26 | ||
27 | ||
def test_copy_config():
    """copy() yields an independent ServerConfig instance."""
    original = ServerConfig(name='TEST')
    assert original.name == 'TEST'

    duplicate = original.copy()
    assert duplicate is not original
    assert duplicate.name == 'TEST'

    # Mutating the original must not affect the copy.
    original.name = 'MUH'
    assert original.name != duplicate.name
38 | ||
39 | ||
def test_register_server():
    """register_server() adds a config under its lowercased name."""
    cfg = ServerConfig(name='TEST')

    assert 'test' not in _server_configs
    register_server(cfg)
    assert 'test' in _server_configs

    # Clean up so other tests see an unmodified registry.
    del _server_configs['test']
    assert 'test' not in _server_configs
49 | ||
50 | ||
def test_register_server_existing():
    """Registering an existing name raises and leaves the registry intact."""
    assert 'jsoc' in _server_configs
    duplicate = ServerConfig(name='jsoc')
    with pytest.raises(RuntimeError):
        register_server(duplicate)
    assert 'jsoc' in _server_configs
57 | ||
58 | ||
def test_config_jsoc():
    """The built-in JSOC config defines all CGIs and download base URLs."""
    assert 'jsoc' in _server_configs
    cfg = _server_configs['jsoc']

    assert cfg.name.lower() == 'jsoc'
    assert isinstance(cfg.encoding, six.string_types)

    # All CGI entry points and the dbhost must be configured as strings.
    for attr in ('cgi_show_series', 'cgi_jsoc_info', 'cgi_jsoc_fetch',
                 'cgi_check_address', 'cgi_show_series_wrapper',
                 'show_series_wrapper_dbhost'):
        assert isinstance(getattr(cfg, attr), six.string_types)
    assert cfg.http_download_baseurl.startswith('http://')
    assert cfg.ftp_download_baseurl.startswith('ftp://')

    # Every derived URL must hang off the CGI base URL.
    baseurl = cfg.cgi_baseurl
    assert baseurl.startswith('http://')
    for attr in ('url_show_series', 'url_jsoc_info', 'url_jsoc_fetch',
                 'url_check_address', 'url_show_series_wrapper'):
        assert getattr(cfg, attr).startswith(baseurl)
82 | ||
83 | ||
def test_config_kis():
    """The built-in KIS config has info CGIs only; export features are None."""
    assert 'kis' in _server_configs
    cfg = _server_configs['kis']

    assert cfg.name.lower() == 'kis'
    assert isinstance(cfg.encoding, six.string_types)

    assert isinstance(cfg.cgi_show_series, six.string_types)
    assert isinstance(cfg.cgi_jsoc_info, six.string_types)
    # No export/email support on the KIS mirror.
    for attr in ('cgi_jsoc_fetch', 'cgi_check_address',
                 'cgi_show_series_wrapper', 'show_series_wrapper_dbhost',
                 'http_download_baseurl', 'ftp_download_baseurl'):
        assert getattr(cfg, attr) is None

    baseurl = cfg.cgi_baseurl
    assert baseurl.startswith('http://')
    assert cfg.url_show_series.startswith(baseurl)
    assert cfg.url_jsoc_info.startswith(baseurl)
    for attr in ('url_jsoc_fetch', 'url_check_address',
                 'url_show_series_wrapper'):
        assert getattr(cfg, attr) is None
107 | ||
108 | ||
@pytest.mark.parametrize('server_name, operation, expected', [
    ('jsoc', 'series', True),
    ('jsoc', 'info', True),
    ('jsoc', 'query', True),
    ('jsoc', 'email', True),
    ('jsoc', 'export', True),
    ('kis', 'series', True),
    ('kis', 'info', True),
    ('kis', 'query', True),
    ('kis', 'email', False),
    ('kis', 'export', False),
])
def test_supported(server_name, operation, expected):
    """check_supported() reflects each server's capabilities."""
    server_cfg = _server_configs[server_name]
    assert server_cfg.check_supported(operation) == expected
124 | ||
125 | ||
@pytest.mark.parametrize('server_name, operation', [
    ('jsoc', 'bar'),
    ('kis', 'foo'),
])
def test_supported_invalid_operation(server_name, operation):
    """Unknown operation names raise ValueError."""
    server_cfg = _server_configs[server_name]
    with pytest.raises(ValueError):
        server_cfg.check_supported(operation)
0 | from __future__ import absolute_import, division, print_function | |
1 | ||
2 | import pytest | |
3 | import drms | |
4 | ||
5 | ||
@pytest.mark.parametrize('exception_class', [
    drms.DrmsError, drms.DrmsQueryError, drms.DrmsExportError,
    drms.DrmsOperationNotSupported])
def test_exception_class(exception_class):
    """Every drms exception is catchable as RuntimeError and DrmsError."""
    for base in (RuntimeError, drms.DrmsError):
        with pytest.raises(base):
            raise exception_class()
12 | with pytest.raises(drms.DrmsError): | |
13 | raise exception_class() |
0 | from __future__ import absolute_import, division, print_function | |
1 | ||
2 | import pytest | |
3 | import six | |
4 | import re | |
5 | import drms | |
6 | ||
7 | ||
@pytest.mark.parametrize('symbol', [
    'DrmsError', 'DrmsQueryError', 'DrmsExportError',
    'DrmsOperationNotSupported',
    'SeriesInfo', 'ExportRequest', 'Client',
    'const', 'to_datetime',
])
def test_symbols(symbol):
    """All public API names are exposed by the drms package."""
    exported = dir(drms)
    assert symbol in exported
def test_version():
    """__version__ is a 'major.minor.patch' string (local suffix ignored)."""
    assert isinstance(drms.__version__, six.string_types)
    # Strip any local version suffix (e.g. '0.5.5+dirty') before matching.
    # Fix: vstr was computed but never used -- the regex was applied to
    # the unstripped version string instead.
    vstr = drms.__version__.split('+')[0]
    m = re.match(r'(\d+)\.(\d+)\.(\d+).*', vstr)
    assert m is not None
22 | assert m is not None |
0 | from __future__ import absolute_import, division, print_function | |
1 | ||
2 | import pytest | |
3 | import pandas as pd | |
4 | import drms | |
5 | ||
6 | ||
@pytest.mark.parametrize('time_string, expected', [
    ('2010.05.01_TAI', pd.Timestamp('2010-05-01 00:00:00')),
    ('2010.05.01_00:00_TAI', pd.Timestamp('2010-05-01 00:00:00')),
    ('2010.05.01_00:00:00_TAI', pd.Timestamp('2010-05-01 00:00:00')),
    ('2010.05.01_01:23:45_TAI', pd.Timestamp('2010-05-01 01:23:45')),
    ('2013.12.21_23:32_TAI', pd.Timestamp('2013-12-21 23:32:00')),
    ('2013.12.21_23:32:34_TAI', pd.Timestamp('2013-12-21 23:32:34')),
])
def test_tai_string(time_string, expected):
    """TAI time strings parse to the expected pandas timestamps."""
    parsed = drms.to_datetime(time_string)
    assert parsed == expected
17 | ||
18 | ||
@pytest.mark.parametrize('time_string, expected', [
    ('2010-05-01T00:00Z', pd.Timestamp('2010-05-01 00:00:00')),
    ('2010-05-01T00:00:00Z', pd.Timestamp('2010-05-01 00:00:00')),
    ('2010-05-01T01:23:45Z', pd.Timestamp('2010-05-01 01:23:45')),
    ('2013-12-21T23:32Z', pd.Timestamp('2013-12-21 23:32:00')),
    ('2013-12-21T23:32:34Z', pd.Timestamp('2013-12-21 23:32:34')),
    ('2010-05-01 00:00Z', pd.Timestamp('2010-05-01 00:00:00')),
    ('2010-05-01 00:00:00Z', pd.Timestamp('2010-05-01 00:00:00')),
    ('2010-05-01 01:23:45Z', pd.Timestamp('2010-05-01 01:23:45')),
    ('2013-12-21 23:32Z', pd.Timestamp('2013-12-21 23:32:00')),
    ('2013-12-21 23:32:34Z', pd.Timestamp('2013-12-21 23:32:34')),
])
def test_z_string(time_string, expected):
    """Zulu ('Z'-suffixed) time strings parse to the expected timestamps."""
    parsed = drms.to_datetime(time_string)
    assert parsed == expected
33 | ||
34 | ||
@pytest.mark.skip('pandas does not support leap seconds')
@pytest.mark.parametrize('time_string, expected', [
    ('2012-06-30T23:59:60Z', '2012-06-30 23:59:60'),
    ('2015-06-30T23:59:60Z', '2015-06-30 23:59:60'),
    ('2016-12-31T23:59:60Z', '2016-12-31 23:59:60'),
])
def test_z_leap_string(time_string, expected):
    """Leap-second time strings (skipped: unsupported by pandas)."""
    parsed = drms.to_datetime(time_string)
    assert parsed == expected
43 | ||
44 | ||
@pytest.mark.parametrize('time_string, expected', [
    ('2013.12.21_23:32:34_TAI', pd.Timestamp('2013-12-21 23:32:34')),
    ('2013.12.21_23:32:34_UTC', pd.Timestamp('2013-12-21 23:32:34')),
    ('2013.12.21_23:32:34Z', pd.Timestamp('2013-12-21 23:32:34')),
])
def test_force_string(time_string, expected):
    """With force=True the TAI-style rewriting is applied even to strings
    that do not end with '_TAI' (e.g. '_UTC' or 'Z' suffixes)."""
    assert drms.to_datetime(time_string, force=True) == expected
52 | ||
53 | ||
54 | # test_xx_list | |
55 | # test_xx_ndarray | |
56 | # test_xx_pandas_series |
0 | from __future__ import absolute_import, division, print_function | |
1 | ||
2 | import pytest | |
3 | from drms.utils import ( | |
4 | _pd_to_datetime_coerce, _pd_to_numeric_coerce, | |
5 | _split_arg, _extract_series_name) | |
6 | ||
7 | ||
8 | # test_pd_to_datetime | |
9 | # test_pd_to_numeric | |
10 | ||
11 | ||
@pytest.mark.parametrize('in_obj, expected', [
    ('', []),
    ('asd', ['asd']),
    ('aa,bb,cc', ['aa', 'bb', 'cc']),
    ('aa, bb, cc', ['aa', 'bb', 'cc']),
    (' aa,bb, cc, dd', ['aa', 'bb', 'cc', 'dd']),
    ('aa,\tbb,cc, dd ', ['aa', 'bb', 'cc', 'dd']),
    (u'aa,\tbb,cc, dd ', [u'aa', u'bb', u'cc', u'dd']),
    ([], []),
    (['a', 'b', 'c'], ['a', 'b', 'c']),
    (('a', 'b', 'c'), ['a', 'b', 'c']),
])
def test_split_arg(in_obj, expected):
    """_split_arg() splits strings on commas/whitespace and passes
    non-string sequences through unchanged."""
    res = _split_arg(in_obj)
    # Compare element-wise instead of `res == expected`: non-string input
    # is returned as-is, so res may be a tuple while expected is a list.
    assert len(res) == len(expected)
    for res_item, expected_item in zip(res, expected):
        assert res_item == expected_item
29 | ||
30 | ||
@pytest.mark.parametrize('ds_string, expected', [
    ('hmi.v_45s', 'hmi.v_45s'),
    ('hmi.v_45s[2010.05.01_TAI]', 'hmi.v_45s'),
    ('hmi.v_45s[2010.05.01_TAI/365d@1d]', 'hmi.v_45s'),
    ('hmi.v_45s[2010.05.01_TAI/365d@1d][?QUALITY>=0?]', 'hmi.v_45s'),
    ('hmi.v_45s[2010.05.01_TAI/1d@6h]{Dopplergram}', 'hmi.v_45s'),
])
def test_extract_series(ds_string, expected):
    """The series name is the leading word/dot portion of a record-set
    string; trailing [...] filters and {...} segment lists are ignored."""
    assert _extract_series_name(ds_string) == expected
0 | from __future__ import absolute_import, division, print_function | |
1 | ||
2 | import re | |
3 | import six | |
4 | import pandas as pd | |
5 | import numpy as np | |
6 | ||
# Public name exported via ``from drms.utils import *``; the
# underscore-prefixed helpers below are internal.
__all__ = ['to_datetime']


# Compatibility functions for older pandas versions.
# pandas < 0.17 accepted ``coerce=True`` instead of ``errors='coerce'``
# and had no ``pd.to_numeric()``, so numeric conversion is emulated with
# ``Series.convert_objects()``.
# NOTE(review): only the first two components of ``pd.__version__`` are
# compared, assuming they are plain integers.
if tuple(map(int, pd.__version__.split('.')[:2])) < (0, 17):
    def _pd_to_datetime_coerce(arg):
        # Unparsable values are coerced to NaT.
        return pd.to_datetime(arg, coerce=True)

    def _pd_to_numeric_coerce(arg):
        # Emulate pd.to_numeric(): convert numbers only, leave dates and
        # timedeltas untouched.
        if not isinstance(arg, pd.Series):
            arg = pd.Series(arg)
        return arg.convert_objects(
            convert_dates=False, convert_numeric=True,
            convert_timedeltas=False)
else:
    def _pd_to_datetime_coerce(arg):
        # Unparsable values are coerced to NaT.
        return pd.to_datetime(arg, errors='coerce')

    def _pd_to_numeric_coerce(arg):
        # Unparsable values are coerced to NaN.
        return pd.to_numeric(arg, errors='coerce')
27 | ||
28 | ||
def _split_arg(arg):
    """Split a comma- or whitespace-separated string into a list.

    Non-string arguments are returned unchanged.
    """
    if not isinstance(arg, six.string_types):
        return arg
    tokens = re.split(r'[\s,]+', arg)
    return [token for token in tokens if token]
34 | ||
35 | ||
def _extract_series_name(ds):
    """Return the series name at the start of a record-set string.

    Leading whitespace is skipped; returns None when *ds* does not start
    with a word/dot sequence.
    """
    match = re.match(r'^\s*([\w\.]+).*$', ds)
    if match is None:
        return None
    return match.group(1)
40 | ||
41 | ||
def to_datetime(tstr, force=False):
    """
    Parse JSOC time strings.

    In general, this is quite complicated, because of the many
    different (non-standard) time strings supported by the DRMS. For
    more (much more!) details on this matter, see
    `Rick Bogart's notes <http://jsoc.stanford.edu/doc/timerep.html>`__.

    The current implementation only tries to convert typical HMI time
    strings, with a format like "%Y.%m.%d_%H:%M:%S_TAI", to an ISO time
    string, that is then parsed by pandas. Note that "_TAI", as well as
    other timezone identifiers like "Z", will not be taken into
    account, so the result will be a naive timestamp without any
    associated timezone.

    If you know the time string format, it might be better calling
    pandas.to_datetime() directly. For handling TAI timestamps, e.g.
    converting between TAI and UTC, the astropy.time package can be
    used.

    Parameters
    ----------
    tstr : string or list/Series of strings
        DateTime strings.
    force : bool
        Set to True to omit the endswith('_TAI') check.

    Returns
    -------
    result : pandas.Series or pandas.Timestamp
        Pandas series or a single Timestamp object.
    """
    s = pd.Series(tstr).astype(str)
    if force or s.str.endswith('_TAI').any():
        # Rewrite "2010.05.01_01:23:45_TAI" into "2010-05-01 01:23:45".
        # Use plain str.replace() (always literal) instead of the pandas
        # Series.str.replace() accessor, whose pattern argument has been
        # interpreted as a regular expression by default in several pandas
        # versions; a regex '.' would match any character.
        def _convert(value):
            value = value.replace('_TAI', '')
            value = value.replace('_', ' ')
            # Only the first two dots are date separators.
            return value.replace('.', '-', 2)
        s = s.map(_convert)
    res = _pd_to_datetime_coerce(s)
    # For scalar input return a scalar Timestamp instead of a one-element
    # Series; unparsable strings were coerced to NaT above.
    return res.iloc[0] if (len(res) == 1) and np.isscalar(tstr) else res
0 | Metadata-Version: 1.1 | |
1 | Name: drms | |
2 | Version: 0.5.5 | |
3 | Summary: Access HMI, AIA and MDI data with Python | |
4 | Home-page: https://github.com/kbg/drms | |
5 | Author: Kolja Glogowski | |
6 | Author-email: "Kolja Glogowski" <kolja@pixie.de> | |
7 | License: MIT | |
8 | Description-Content-Type: UNKNOWN | |
9 | Description: ==== | |
10 | drms | |
11 | ==== | |
12 | ||
13 | `Docs <http://drms.readthedocs.io/>`_ | | |
14 | `Tutorial <http://drms.readthedocs.io/en/stable/tutorial.html>`_ | | |
15 | `Github <https://github.com/kbg/drms>`_ | | |
16 | `PyPI <https://pypi.python.org/pypi/drms>`_ | |
17 | ||
18 | The ``drms`` module provides an easy-to-use interface for accessing HMI, | |
19 | AIA and MDI data with Python. It uses the publicly accessible | |
20 | `JSOC <http://jsoc.stanford.edu/>`_ DRMS server by default, but can also | |
21 | be used with local `NetDRMS <http://jsoc.stanford.edu/netdrms/>`_ sites. | |
22 | More information, including a detailed tutorial is available on | |
23 | `Read the Docs <http://drms.readthedocs.io/>`_. | |
24 | ||
25 | ||
26 | Requirements | |
27 | ------------ | |
28 | ||
29 | The ``drms`` module supports Python 2.7 and Python 3.4 or newer. It | |
30 | requires the following Python packages: | |
31 | ||
32 | - NumPy, version 1.9.0 or newer | |
33 | - Pandas, version 0.14.1 or newer | |
34 | - Six, version 1.8.0 or newer | |
35 | ||
36 | The module might also work with earlier versions, but it has not been | |
37 | tested with any versions older than the ones listed above. | |
38 | ||
39 | ||
40 | Installation | |
41 | ------------ | |
42 | ||
43 | The ``drms`` Python package can be installed from | |
44 | `PyPI <https://pypi.python.org/pypi/drms>`_ using | |
45 | ||
46 | :: | |
47 | ||
48 | pip install drms | |
49 | ||
50 | To upgrade an already existing installation to the latest release, you | |
51 | can write:: | |
52 | ||
53 | pip install -U drms | |
54 | ||
55 | ||
56 | Note: If you do not use a Python distribution, like | |
57 | `Anaconda <https://www.continuum.io/downloads>`_, | |
58 | and did not create an isolated Python environment using | |
59 | `Virtualenv <https://virtualenv.pypa.io/en/stable/>`_, | |
60 | you might need to add ``--user`` to the ``pip`` command:: | |
61 | ||
62 | pip install --user drms | |
63 | ||
64 | ||
65 | Acknowledgements | |
66 | ---------------- | |
67 | ||
68 | The main author of this project has received funding from the European | |
69 | Research Council under the European Union's Seventh Framework Programme | |
70 | (FP/2007-2013) / ERC Grant Agreement no. 307117. | |
71 | ||
72 | See AUTHORS.txt for a list of contributors. | |
73 | ||
74 | Platform: any | |
75 | Classifier: Intended Audience :: Developers | |
76 | Classifier: Intended Audience :: Science/Research | |
77 | Classifier: License :: OSI Approved :: MIT License | |
78 | Classifier: Operating System :: OS Independent | |
79 | Classifier: Programming Language :: Python | |
80 | Classifier: Programming Language :: Python :: 2 | |
81 | Classifier: Programming Language :: Python :: 2.7 | |
82 | Classifier: Programming Language :: Python :: 3 | |
83 | Classifier: Programming Language :: Python :: 3.4 | |
84 | Classifier: Programming Language :: Python :: 3.5 | |
85 | Classifier: Programming Language :: Python :: 3.6 | |
86 | Classifier: Topic :: Scientific/Engineering :: Astronomy |
0 | .rtd-requirements.txt | |
1 | AUTHORS.txt | |
2 | LICENSE.txt | |
3 | MANIFEST.in | |
4 | README.rst | |
5 | setup.cfg | |
6 | setup.py | |
7 | tox.ini | |
8 | versioneer.py | |
9 | doc/Makefile | |
10 | doc/api.rst | |
11 | doc/conf.py | |
12 | doc/index.rst | |
13 | doc/intro.rst | |
14 | doc/requirements.txt | |
15 | doc/tutorial.rst | |
16 | doc/_static/copybutton.js | |
17 | doc/_templates/layout.html | |
18 | doc/_templates/versions.html | |
19 | doc/_templates/autosummary/class.rst | |
20 | doc/_themes/drmsdoc/theme.conf | |
21 | doc/_themes/drmsdoc/static/drmsdoc.css | |
22 | drms/__init__.py | |
23 | drms/__main__.py | |
24 | drms/_version.py | |
25 | drms/client.py | |
26 | drms/config.py | |
27 | drms/error.py | |
28 | drms/json.py | |
29 | drms/utils.py | |
30 | drms.egg-info/PKG-INFO | |
31 | drms.egg-info/SOURCES.txt | |
32 | drms.egg-info/dependency_links.txt | |
33 | drms.egg-info/requires.txt | |
34 | drms.egg-info/top_level.txt | |
35 | drms/tests/__init__.py | |
36 | drms/tests/__main__.py | |
37 | drms/tests/_runner.py | |
38 | drms/tests/conftest.py | |
39 | drms/tests/test_client.py | |
40 | drms/tests/test_config.py | |
41 | drms/tests/test_exceptions.py | |
42 | drms/tests/test_init.py | |
43 | drms/tests/test_to_datetime.py | |
44 | drms/tests/test_utils.py | |
45 | drms/tests/online/test_jsoc_basic.py | |
46 | drms/tests/online/test_jsoc_email.py | |
47 | drms/tests/online/test_jsoc_export.py | |
48 | drms/tests/online/test_jsoc_info.py | |
49 | drms/tests/online/test_jsoc_query.py | |
50 | drms/tests/online/test_kis_basic.py | |
51 | examples/example_helpers.py | |
52 | examples/export_as_is.py | |
53 | examples/export_fits.py | |
54 | examples/export_from_id.py | |
55 | examples/export_jpg.py | |
56 | examples/export_movie.py | |
57 | examples/export_print_urls.py | |
58 | examples/export_tar.py | |
59 | examples/list_hmi_series.py | |
60 | examples/list_keywords.py | |
61 | examples/plot_aia_ligthcurve.py | |
62 | examples/plot_hmi_lightcurve.py | |
63 | examples/plot_hmi_modes.py | |
64 | examples/plot_polarfield.py | |
65 | examples/plot_synoptic_mr.py | |
66 | examples/print_versions.py⏎ |
0 | drms |
0 | from __future__ import absolute_import, division, print_function | |
1 | import os | |
2 | import os.path as op | |
3 | import sys | |
4 | import six | |
5 | ||
6 | ||
def ask_for_export_email():
    """Ask for a registered email address."""
    for msg_line in (
            'You have not set the email variable at the top of this script.',
            'Please set this variable in the script, or enter it below. Note',
            'that you need to register your email at JSOC first. You can do',
            'this at: http://jsoc.stanford.edu/ajax/register_email.html'):
        print(msg_line)
    try:
        address = six.moves.input('\nPlease enter a REGISTERED email address: ')
    except EOFError:
        # Non-interactive session (e.g. closed stdin): behave as if
        # nothing was entered.
        address = ''
    print()
    return address
19 | ||
20 | ||
def python_path_prepend(reldir):
    """Prepend relative path to the Python import path list."""
    here = op.dirname(__file__)
    target = op.abspath(op.join(here, reldir))
    sys.path.insert(0, target)
25 | ||
26 | ||
def is_drms_package_directory(path):
    """Check if the given path is a directory containing the drms package.

    Returns True only if *path* is a directory containing '__init__.py'
    and a 'client.py' that defines the Client class and its main methods.
    """
    if not op.isdir(path):
        return False

    init_fpath = op.join(path, '__init__.py')
    if not op.isfile(init_fpath):
        return False

    client_fpath = op.join(path, 'client.py')
    if not op.isfile(client_fpath):
        return False

    try:
        # Use a context manager so the file handle is closed
        # deterministically; the original code left it to be collected
        # by the garbage collector.
        with open(client_fpath) as f:
            code = f.read()
    except IOError:
        return False

    # Heuristic: the file must contain these definitions to be the real
    # drms client module.
    for s in ['class Client', 'def series', 'def query', 'def export']:
        if s not in code:
            return False

    return True
50 | ||
51 | ||
# If the parent directory contains the drms package, then we assume that we
# are in the drms source directory and add the parent directory to the top
# of the Python import path to make sure that this version of the drms package
# is imported instead of any other installed version.
# This runs at import time, i.e. whenever an example script does
# ``import example_helpers``.
if is_drms_package_directory(op.join(op.dirname(__file__), '..', 'drms')):
    python_path_prepend('..')
0 | """ | |
1 | This example shows how to submit an 'url_quick' / 'as-is' data export | |
2 | request to JSOC and how to download the requested files. The export protocol | |
3 | 'as-is', should be preferred over other protocols, because it minimizes the | |
4 | server load. The only reason to use protocol='fits' is, when keywords in the | |
5 | FITS header are really needed. | |
6 | """ | |
7 | from __future__ import absolute_import, division, print_function | |
8 | import os | |
9 | import example_helpers | |
10 | import drms | |
11 | ||
# Print the doc string of this example.
print(__doc__)


# If you don't want to enter your email address during program execution, you
# can set this variable to the email address you have registered for JSOC data
# exports. If you have not registered your email yet, you can do this on the
# JSOC website at: http://jsoc.stanford.edu/ajax/register_email.html
email = ''

# Series, harpnum, timespan and segment selection
# (tsel covers 3 days, sampled every 12 hours)
series = 'hmi.sharp_720s'
harpnum = 4864
tsel = '2014.11.29_00:00:00_TAI/3d@12h'
segments = ['continuum', 'magnetogram', 'field']

# Download directory
out_dir = os.path.join('downloads', 'sharp_%d_as_is' % harpnum)

# Create download directory if it does not exist yet.
if not os.path.exists(out_dir):
    os.makedirs(out_dir)

# Create DRMS client, use debug=True to see the query URLs.
c = drms.Client(verbose=True)

# Check if the email address was set at the top of this script. If not, ask for
# a registered email address.
if not email:
    email = example_helpers.ask_for_export_email()
if not email or not c.check_email(email):
    raise RuntimeError('Email address is not valid or not registered.')

# Data export query string; the segment list is joined with commas inside
# the {...} part.
qstr = '%s[%d][%s]{%s}' % (series, harpnum, tsel, ','.join(segments))
print('Data export query:\n %s\n' % qstr)

# Submit export request, defaults to method='url_quick' and protocol='as-is'
print('Submitting export request...')
r = c.export(qstr, email=email)
print('%d file(s) available for download.\n' % len(r.urls))

# Download selected files.
r.download(out_dir)
print('Download finished.')
print('\nDownload directory:\n "%s"\n' % os.path.abspath(out_dir))
0 | """ | |
1 | This example shows how to submit a data export request using the 'fits' | |
2 | protocol and how to download the requested files. Note that the 'as-is' | |
3 | protocol should be used instead of 'fits', if record keywords in the FITS | |
4 | headers are not needed, as it greatly reduces the server load. | |
5 | """ | |
6 | from __future__ import absolute_import, division, print_function | |
7 | import os | |
8 | import example_helpers | |
9 | import drms | |
10 | ||
# Print the doc string of this example.
print(__doc__)


# If you don't want to enter your email address during program execution, you
# can set this variable to the email address you have registered for JSOC data
# exports. If you have not registered your email yet, you can do this on the
# JSOC website at: http://jsoc.stanford.edu/ajax/register_email.html
email = ''

# Use 'as-is' instead of 'fits', if record keywords are not needed in the
# FITS header. This greatly reduces the server load!
export_protocol = 'fits'
#export_protocol = 'as-is'

# Series, harpnum, timespan and segment selection
# (tsel covers 1 day, sampled every 8 hours)
series = 'hmi.sharp_720s'
harpnum = 4864
tsel = '2014.11.30_00:00:00_TAI/1d@8h'
segments = ['continuum', 'magnetogram', 'field']

# Download directory
out_dir = os.path.join('downloads', 'sharp_%d' % harpnum)

# Create download directory if it does not exist yet.
if not os.path.exists(out_dir):
    os.makedirs(out_dir)

# Create DRMS client, use debug=True to see the query URLs.
c = drms.Client(verbose=True)

# Check if the email address was set at the top of this script. If not, ask for
# a registered email address.
if not email:
    email = example_helpers.ask_for_export_email()
if not email or not c.check_email(email):
    raise RuntimeError('Email address is not valid or not registered.')

# Data export query string; the segment list is joined with commas inside
# the {...} part.
qstr = '%s[%d][%s]{%s}' % (series, harpnum, tsel, ','.join(segments))
print('Data export query:\n %s\n' % qstr)

# Submit export request using the 'fits' protocol
print('Submitting export request...')
r = c.export(qstr, method='url', protocol=export_protocol, email=email)

# Print request URL.
print('\nRequest URL: %s' % r.request_url)
print('%d file(s) available for download.\n' % len(r.urls))

# Download selected files.
r.download(out_dir)
print('Download finished.')
print('\nDownload directory:\n "%s"\n' % os.path.abspath(out_dir))
0 | """ | |
1 | This example takes a RequestID of an already existing export request, prints | |
2 | the corresponding "Request URL" and offers to download the available files. | |
3 | Note that you can also use RequestIDs from export requests, that were | |
4 | submitted using the JSOC website. | |
5 | """ | |
6 | from __future__ import absolute_import, division, print_function | |
7 | import os | |
8 | from six.moves import input | |
9 | import example_helpers | |
10 | import drms | |
11 | ||
# Print the doc string of this example.
print(__doc__)


# Export request ID; leave empty to be prompted below.
request_id = ''

# Create DRMS client, use debug=True to see the query URLs.
c = drms.Client(verbose=True)

# Ask for a RequestID, if it is not set yet.
if not request_id:
    request_id = input('Please enter a RequestID: ')
    print()

# Querying the server using the entered RequestID.
print('Looking up export request "%s"...' % request_id)
r = c.export_from_id(request_id)

# Print request URL and number of available files.
print('\nRequest URL: %s' % r.request_url)
print('%d file(s) available for download.\n' % len(r.urls))

# Ask if the files should be downloaded.
# (Anything other than 'y' / 'yes' skips the download.)
do_download = input('Retrieve all files [y/N]? ')
print()

if do_download.lower() in ['y', 'yes']:
    # Create download directory if it does not exist yet.
    out_dir = os.path.join('downloads', request_id)
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    # Download all available files.
    r.download(out_dir)
    print('Download finished.')
    print('\nDownload directory:\n "%s"\n' % os.path.abspath(out_dir))
0 | """ | |
1 | This example shows how to export image data as JPEG file, using the 'jpg' | |
2 | protocol. The 'jpg' protocol accepts additional protocol arguments, like | |
3 | color table, color scaling or pixel binning. For a list of available color | |
4 | tables, see http://jsoc.stanford.edu/ajax/exportdata.html and select the | |
5 | JPEG protocol. | |
6 | """ | |
7 | from __future__ import absolute_import, division, print_function | |
8 | import os | |
9 | import example_helpers | |
10 | import drms | |
11 | ||
# Print the doc string of this example.
print(__doc__)


# If you don't want to enter your email address during program execution, you
# can set this variable to the email address you have registered for JSOC data
# exports. If you have not registered your email yet, you can do this on the
# JSOC website at: http://jsoc.stanford.edu/ajax/register_email.html
email = ''

# Series, timespan, wavelength and segment
series = 'aia.lev1_euv_12s'
tsel = '2012-08-31T19:48:01Z'
wavelen = 304
segment = 'image'

# Further arguments for 'jpg' protocol; passed to the server via the
# protocol_args keyword of Client.export() below.
jpg_args = {
    'ct': 'aia_304.lut',  # color table
    'min': 4,             # min value
    'max': 800,           # max value
    'scaling': 'log',     # color scaling
    'size': 2             # binning (1 -> 4k, 2 -> 2k, 4 -> 1k)
}

# Download directory
out_dir = 'downloads'

# Create download directory if it does not exist yet.
if not os.path.exists(out_dir):
    os.makedirs(out_dir)

# Create DRMS client, use debug=True to see the query URLs.
c = drms.Client(verbose=True)

# Check if the email address was set at the top of this script. If not, ask for
# a registered email address.
if not email:
    email = example_helpers.ask_for_export_email()
if not email or not c.check_email(email):
    raise RuntimeError('Email address is not valid or not registered.')

# Data export query string
qstr = '%s[%s][%d]{%s}' % (series, tsel, wavelen, segment)
print('Data export query:\n %s\n' % qstr)

# Submit export request using the 'jpg' protocol with custom protocol_args
print('Submitting export request...')
r = c.export(qstr, protocol='jpg', protocol_args=jpg_args, email=email)

# Print request URL.
print('\nRequest URL: %s' % r.request_url)
print('%d file(s) available for download.\n' % len(r.urls))

# Download selected files.
r.download(out_dir)
print('Download finished.')
print('\nDownload directory:\n "%s"\n' % os.path.abspath(out_dir))
0 | """ | |
1 | This example shows how to export movies from image data, using the 'mp4' | |
2 | protocol. The 'mp4' protocol accepts additional protocol arguments, like | |
3 | color table, color scaling or pixel binning. For a list of available color | |
4 | tables, see http://jsoc.stanford.edu/ajax/exportdata.html and select the | |
5 | MP4 protocol. | |
6 | """ | |
7 | from __future__ import absolute_import, division, print_function | |
8 | import os | |
9 | import example_helpers | |
10 | import drms | |
11 | ||
# Print the doc string of this example.
print(__doc__)


# If you don't want to enter your email address during program execution, you
# can set this variable to the email address you have registered for JSOC data
# exports. If you have not registered your email yet, you can do this on the
# JSOC website at: http://jsoc.stanford.edu/ajax/register_email.html
email = ''

# Series, timespan and segment
# (tsel covers 5 days, sampled every hour)
series = 'hmi.m_720s'
tsel = '2014.11.28_00:00:00_TAI/5d@1h'
segment = 'magnetogram'

# Further arguments for 'mp4' protocol; passed to the server via the
# protocol_args keyword of Client.export() below.
mp4_args = {
    'ct': 'grey.sao',  # color table
    'min': -1500,      # min value
    'max': 1500,       # max value
    'scaling': 'mag',  # color scaling
    'size': 8          # binning (1 -> 4k, 2 -> 2k, 4 -> 1k, 8 -> 512)
}

# Download directory
out_dir = 'downloads'

# Create download directory if it does not exist yet.
if not os.path.exists(out_dir):
    os.makedirs(out_dir)

# Create DRMS client, use debug=True to see the query URLs.
c = drms.Client(verbose=True)

# Check if the email address was set at the top of this script. If not, ask for
# a registered email address.
if not email:
    email = example_helpers.ask_for_export_email()
if not email or not c.check_email(email):
    raise RuntimeError('Email address is not valid or not registered.')

# Data export query string
qstr = '%s[%s]{%s}' % (series, tsel, segment)
print('Data export query:\n %s\n' % qstr)

# Submit export request using the 'mp4' protocol with custom protocol_args
print('Submitting export request...')
r = c.export(qstr, protocol='mp4', protocol_args=mp4_args, email=email)
# NOTE(review): wait() appears to block until the request is processed,
# with sleep=10 presumably the polling interval in seconds -- confirm in
# the drms export-request documentation.
r.wait(sleep=10)

# Print request URL.
print('\nRequest URL: %s' % r.request_url)
print('%d file(s) available for download.\n' % len(r.urls))

# Download movie file only: index=0
r.download(out_dir, 0)
print('Download finished.')
print('\nDownload directory:\n "%s"\n' % os.path.abspath(out_dir))
0 | """ | |
1 | This example prints the download URLs for files returned from an 'as-is' data | |
2 | export request. Note that there is no "Request URL" for method 'url_quick'. | |
3 | """ | |
4 | from __future__ import absolute_import, division, print_function | |
5 | import example_helpers | |
6 | import drms | |
7 | ||
# Print the doc string of this example.
print(__doc__)


# If you don't want to enter your email address during program execution, you
# can set this variable to the email address you have registered for JSOC data
# exports. If you have not registered your email yet, you can do this on the
# JSOC website at: http://jsoc.stanford.edu/ajax/register_email.html
email = ''

# Data export query string
qstr = 'hmi.ic_720s[2015.01.01_00:00:00_TAI/10d@1d]{continuum}'

# Create DRMS client, use debug=True to see the query URLs.
c = drms.Client(verbose=True)

# Check if the email address was set at the top of this script. If not, ask for
# a registered email address.
if not email:
    email = example_helpers.ask_for_export_email()
if not email or not c.check_email(email):
    raise RuntimeError('Email address is not valid or not registered.')

# Submit export request, defaults to method='url_quick' and protocol='as-is'
print('Data export query:\n %s\n' % qstr)
print('Submitting export request...')
r = c.export(qstr, email=email)
print('%d file(s) available for download.\n' % len(r.urls))

# Print download URLs. r.urls is table-like with 'record' and 'url'
# columns; iterate over its rows.
for i, row in r.urls[['record', 'url']].iterrows():
    print('REC: %s' % row.record)
    print('URL: %s\n' % row.url)
0 | """ | |
1 | This example shows how to submit a data export request using the 'url-tar' | |
2 | method, which provides a single TAR archive containing all requested files. | |
3 | Here we use this method to download data from the 'hmi.rdvflows_fd15_frame' | |
4 | series, which stores directories of text files for each record. This is | |
5 | currently the only way to download directory data segments using the Python | |
6 | DRMS client. The export protocol in this case is 'as-is'. You might change the | |
7 | protocol to 'fits', if you are downloading FITS files instead of text files. | |
8 | """ | |
9 | from __future__ import absolute_import, division, print_function | |
10 | import os | |
11 | import example_helpers | |
12 | import drms | |
13 | ||
# Print the doc string of this example.
print(__doc__)


# If you don't want to enter your email address during program execution, you
# can set this variable to the email address you have registered for JSOC data
# exports. If you have not registered your email yet, you can do this on the
# JSOC website at: http://jsoc.stanford.edu/ajax/register_email.html
email = ''

# Series, Carrington rotation, Carrington longitude and data segments
series = 'hmi.rdvflows_fd15_frame'
cr = 2150
cmlon = 360
segments = ['Ux', 'Uy']

# Download directory
out_dir = 'downloads'

# Create download directory if it does not exist yet.
if not os.path.exists(out_dir):
    os.makedirs(out_dir)

# Create DRMS client, use debug=True to see the query URLs.
c = drms.Client(verbose=True)

# Check if the email address was set at the top of this script. If not, ask for
# a registered email address.
if not email:
    email = example_helpers.ask_for_export_email()
if not email or not c.check_email(email):
    raise RuntimeError('Email address is not valid or not registered.')

# Data export query string; the segment list is joined with commas inside
# the {...} part.
qstr = '%s[%d][%d]{%s}' % (series, cr, cmlon, ','.join(segments))
print('Data export query:\n %s\n' % qstr)

# Submit export request using the 'url-tar' method, protocol default: 'as-is'
print('Submitting export request...')
r = c.export(qstr, method='url-tar', email=email)

# Print request URL.
print('\nRequest URL: %s' % r.request_url)
print('%d file(s) available for download.\n' % len(r.urls))

# Download selected files. With 'url-tar' this is a single TAR archive;
# dr.download holds the local path(s) of the downloaded file(s).
dr = r.download(out_dir)
print('Download finished.')
print('\nDownloaded file:\n "%s"\n' % dr.download[0])
0 | from __future__ import absolute_import, division, print_function | |
1 | import textwrap | |
2 | import example_helpers | |
3 | import drms | |
4 | ||
5 | ||
# Set to True, to list additional information
full_output = False

# Create DRMS JSON client, use debug=True to see the query URLs
c = drms.Client()

# Get all available HMI series; the pattern is a regex, hence the
# escaped dot in "hmi\.".
s = c.series(r'hmi\.', full=full_output)

if not full_output:
    # Print only the series names (s is a plain sequence of names here)
    for name in s:
        print(name)
else:
    # Print series names, pkeys and notes; with full=True, s is
    # table-like with name/primekeys/note columns.
    for i in s.index:
        print('Series:', s.name[i])
        if c.server.url_show_series_wrapper is None:
            # JSOC's show_series wrapper currently does not support primekeys
            print(' Pkeys:', ', '.join(s.primekeys[i]))
        print(' Notes:', ('\n' + 8*' ').join(textwrap.wrap(s.note[i])))
        print()
0 | from __future__ import absolute_import, division, print_function | |
1 | import example_helpers | |
2 | import drms | |
3 | ||
4 | ||
# DRMS series name
series = 'hmi.v_45s'

# Create DRMS JSON client, use debug=True to see the query URLs
c = drms.Client()

# Query series info
si = c.info(series)

# Print keyword info; si.keywords is table-like and indexed by the
# keyword names.
print('Listing keywords for "%s":\n' % si.name)
for k in sorted(si.keywords.index):
    ki = si.keywords.loc[k]
    print(k)
    print(' type ....... %s ' % ki.type)
    print(' recscope ... %s ' % ki.recscope)
    print(' defval ..... %s ' % ki.defval)
    print(' units ...... %s ' % ki.units)
    print(' note ....... %s ' % ki.note)
    print()
"""Example: query per-image statistics keywords for one year of AIA 335 A
images and plot them as daily time series."""
from __future__ import absolute_import, division, print_function
import matplotlib.pyplot as plt
import example_helpers
import drms


# Series name, timespan and wavelength
series = 'aia.lev1_euv_12s'
series_lev1 = 'aia.lev1'
wavelen = 335
# Alternative timespans (shorter ranges / denser cadences), kept for
# experimentation:
#tsel = '2015-01-01T00:00:01Z/1h'
#tsel = '2015-01-01T00:00:01Z/1d'
#tsel = '2015-01-01T00:00:01Z/1d@60s'
#tsel = '2015-01-01T00:00:01Z/7d@1h'
#tsel = '2015-01-01T00:00:01Z/30d@6h'
#tsel = '2015-01-01T00:00:01Z/100d@1d'
tsel = '2014-01-01T00:00:01Z/365d@1d'

# DRMS query string: series[timespan][wavelength]
qstr = '%s[%s][%d]' % (series, tsel, wavelen)

# Some keywords we are interested in; you can use c.keys(series) to get a
# list of all available keywords of a series.
keys = ['T_REC', 'T_OBS', 'DATAMIN', 'DATAMAX', 'DATAMEAN', 'DATARMS',
        'DATASKEW', 'DATAKURT', 'QUALITY']


# Create DRMS client, uses JSOC baseurl by default, set debug=True to see the
# DRMS query URLs.
c = drms.Client(debug=False)

# Get detailed information about the series. Some keywords from
# aia.lev1_euv_12s are links to keywords in aia.lev1 and unfortunately some
# entries (like note) are missing for linked keywords, so we are using the
# entries from aia.lev1 in this case.
print('Querying series info...')
si = c.info(series)
si_lev1 = c.info(series_lev1)
for k in keys:
    # linkinfo marks keywords that are links into another series; for
    # 'lev1->...' links, take the note text from the aia.lev1 metadata.
    linkinfo = si.keywords.loc[k].linkinfo
    if linkinfo is not None and linkinfo.startswith('lev1->'):
        note_str = si_lev1.keywords.loc[k].note
    else:
        note_str = si.keywords.loc[k].note
    print('%10s : %s' % (k, note_str))

# Get keyword values for the selected timespan and wavelength
print('Querying keyword data...\n -> %s' % qstr)
res = c.query(qstr, key=keys)
print(' -> %d lines retrieved.' % len(res))

# Only use entries with QUALITY==0
res = res[res.QUALITY == 0]
print(' -> %d lines after QUALITY selection.' % len(res))

# Convert T_REC strings to datetime and use it as index for the series
res.index = drms.to_datetime(res.T_REC)

# Create some simple plots: one stacked subplot per statistics keyword.
ax = res[['DATAMIN', 'DATAMAX', 'DATAMEAN', 'DATARMS', 'DATASKEW']].plot(
    figsize=(8, 10), subplots=True)
ax[0].set_title(qstr, fontsize='medium')
plt.tight_layout()
plt.show()
from __future__ import absolute_import, division, print_function
import matplotlib.pyplot as plt
import example_helpers
import drms


# Series name and timespan
series = 'hmi.ic_720s'
#tsel = '2014.01.01_TAI/365d@1h'
tsel = '2010.05.01_TAI-2016.04.01_TAI@6h'

# Record-set query: all records of the series within the timespan.
qstr = '%s[%s]' % (series, tsel)


# DRMS JSON client; pass debug=True to print the generated query URLs.
c = drms.Client()

# Fetch the record time and the disk-statistics keywords.
print('Querying keyword data...\n -> %s' % qstr)
res = c.query(qstr, key=['T_REC', 'DATAMEAN', 'DATARMS'])
print(' -> %d lines retrieved.' % len(res))

# Use the T_REC timestamps (converted to datetime) as the index.
res.index = drms.to_datetime(res.pop('T_REC'))

# Note: DATARMS contains the standard deviation, not the RMS!
times = res.index
mean_kdn = res.DATAMEAN/1e3
sdev_kdn = res.DATARMS/1e3

# Plot the mean intensity with a shaded +/- one-sigma band around it.
fig, ax = plt.subplots(1, 1, figsize=(15, 7))
ax.set_title(qstr, fontsize='medium')
ax.fill_between(times, mean_kdn + sdev_kdn, mean_kdn - sdev_kdn,
                edgecolor='none', facecolor='b', alpha=0.3, interpolate=True)
ax.plot(times, mean_kdn, color='b')
ax.set_xlabel('Time')
ax.set_ylabel('Disk-averaged continuum intensity [kDN/s]')
fig.tight_layout()

plt.show()
0 | from __future__ import absolute_import, division, print_function | |
1 | import matplotlib.pyplot as plt | |
2 | import numpy as np | |
3 | import example_helpers | |
4 | import drms | |
5 | ||
6 | ||
7 | # Series name, start time and data segment | |
8 | series = 'hmi.v_sht_modes' | |
9 | tstart = '2014.06.20_00:00:00_TAI' | |
10 | segname = 'm6' # 'm6', 'm18' or 'm36' | |
11 | ||
12 | # DRMS-Server URL (or shortcut) and data url (if any) for the data segment | |
13 | drms_url, data_url = 'jsoc', 'http://jsoc.stanford.edu' | |
14 | #drms_url, data_url = 'kis', '' | |
15 | ||
16 | # DRMS query string | |
17 | qstr = '%s[%s]' % (series, tstart) | |
18 | ||
19 | ||
20 | # Create DRMS JSON client, use debug=True to see the query URLs | |
21 | c = drms.Client(drms_url) | |
22 | ||
23 | # Send request to the DRMS server | |
24 | print('Querying keyword data...\n -> %s' % qstr) | |
25 | k, s = c.query( | |
26 | qstr, key=['T_START', 'T_STOP', 'LMIN', 'LMAX', 'NDT'], seg=segname) | |
27 | print(' -> %d lines retrieved.' % len(k)) | |
28 | ||
29 | # Use only the first line of the query result | |
30 | k = k.iloc[0] | |
31 | fname = data_url + s[segname][0] | |
32 | ||
33 | # Read the data segment | |
34 | print('Reading data from %r...' % fname) | |
35 | a = np.genfromtxt(fname) | |
36 | ||
37 | # For column names, see appendix of Larson & Schou (2015SoPh..290.3221L) | |
38 | l = a[:, 0].astype(int) | |
39 | n = a[:, 1].astype(int) | |
40 | nu = a[:, 2]/1e3 | |
41 | if a.shape[1] in [24, 48, 84]: | |
42 | # tan(gamma) present | |
43 | sig_offs = 5 | |
44 | elif a.shape[1] in [26, 50, 86]: | |
45 | # tan(gamma) not present | |
46 | sig_offs = 6 | |
47 | snu = a[:, sig_offs + 2]/1e3 | |
48 | ||
49 | # Plot: zoomed in on lower l | |
50 | fig, ax = plt.subplots(1, 1, figsize=(11, 7)) | |
51 | ax.set_title('Time = %s ... %s, L = %d ... %d, NDT = %d' % ( | |
52 | k.T_START, k.T_STOP, k.LMIN, k.LMAX, k.NDT), fontsize='medium') | |
53 | for ni in np.unique(n): | |
54 | idx = (n == ni) | |
55 | ax.plot(l[idx], nu[idx], 'b.-') | |
56 | ax.set_xlim(0, 120) | |
57 | ax.set_ylim(0.8, 4.5) | |
58 | ax.set_xlabel('Harmonic degree') | |
59 | ax.set_ylabel('Frequency [mHz]') | |
60 | fig.tight_layout() | |
61 | ||
62 | # Plot: higher l, n <= 20, with errors | |
63 | fig, ax = plt.subplots(1, 1, figsize=(11, 7)) | |
64 | ax.set_title('Time = %s ... %s, L = %d ... %d, NDT = %d' % ( | |
65 | k.T_START, k.T_STOP, k.LMIN, k.LMAX, k.NDT), fontsize='medium') | |
66 | for ni in np.unique(n): | |
67 | if ni <= 20: | |
68 | idx = (n == ni) | |
69 | ax.plot(l[idx], nu[idx], 'b.', ms=3) | |
70 | if ni < 10: | |
71 | ax.plot(l[idx], nu[idx] + 1000*snu[idx], 'g') | |
72 | ax.plot(l[idx], nu[idx] - 1000*snu[idx], 'g') | |
73 | else: | |
74 | ax.plot(l[idx], nu[idx] + 500*snu[idx], 'r') | |
75 | ax.plot(l[idx], nu[idx] - 500*snu[idx], 'r') | |
76 | ax.legend(loc='upper right', handles=[ | |
77 | plt.Line2D([0], [0], color='r', label='500 sigma'), | |
78 | plt.Line2D([0], [0], color='g', label='1000 sigma')]) | |
79 | ax.set_xlim(-5, 305) | |
80 | ax.set_ylim(0.8, 4.5) | |
81 | ax.set_xlabel('Harmonic degree') | |
82 | ax.set_ylabel('Frequency [mHz]') | |
83 | fig.tight_layout() | |
84 | ||
85 | plt.show() |
"""Example: plot mean polar field strength (hmi.meanpf_720s) for both poles,
with a 30-day moving average and standard-deviation band."""
from __future__ import absolute_import, division, print_function
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import example_helpers
import drms


# Series name, time range and time steps
series = 'hmi.meanpf_720s'
tsel = '2010.05.01_TAI-2016.04.01_TAI@12h'

# DRMS query string
qstr = '%s[%s]' % (series, tsel)


# Create DRMS JSON client, use debug=True to see the query URLs
c = drms.Client()

# Send request to the DRMS server
# CAPN2/CAPS2: mean radial field keywords for the north/south polar caps.
print('Querying keyword data...\n -> %s' % qstr)
res = c.query(qstr, key=['T_REC', 'CAPN2', 'CAPS2'])
print(' -> %d lines retrieved.' % len(res))

# Convert T_REC strings to datetime and use it as index for the series
res.index = drms.to_datetime(res.pop('T_REC'))

# Determine smallest timestep
dt = np.diff(res.index.to_pydatetime()).min()

# Make sure the time series contains all time steps (fills gaps with NaNs)
# Note: This does not seem to work with old pandas versions (e.g. v0.14.1)
a = res.asfreq(dt)

# Compute 30d moving average and standard deviation using a boxcar window
# win_size: number of samples per 30 days at the smallest timestep found.
win_size = int(30*24*3600/dt.total_seconds())
# Use the modern rolling() API on pandas >= 0.18; fall back to the removed
# pd.rolling_* functions on older versions (version parsed as (major, minor)).
if tuple(map(int, pd.__version__.split('.')[:2])) >= (0, 18):
    a_avg = a.rolling(win_size, min_periods=1, center=True).mean()
    a_std = a.rolling(win_size, min_periods=1, center=True).std()
else:
    # this is deprecated since pandas v0.18.0
    a_avg = pd.rolling_mean(a, win_size, min_periods=1, center=True)
    a_std = pd.rolling_std(a, win_size, min_periods=1, center=True)

# Plot results
t = a.index.to_pydatetime()
# n/mn/sn: north-cap value, moving average, moving std; s/ms/ss: south cap.
n, mn, sn = a.CAPN2, a_avg.CAPN2, a_std.CAPN2
s, ms, ss = a.CAPS2, a_avg.CAPS2, a_std.CAPS2

# First figure: raw values with moving averages overplotted.
fig, ax = plt.subplots(1, 1, figsize=(15, 7))
ax.set_title(qstr, fontsize='medium')
ax.plot(t, n, 'b', alpha=0.5, label='North pole')
ax.plot(t, s, 'g', alpha=0.5, label='South pole')
ax.plot(t, mn, 'r', label='Moving average')
ax.plot(t, ms, 'r', label='')
ax.set_xlabel('Time')
ax.set_ylabel('Mean radial field strength [G]')
ax.legend()
fig.tight_layout()

# Second figure: moving averages with +/- one-sigma shaded bands.
fig, ax = plt.subplots(1, 1, figsize=(15, 7))
ax.set_title(qstr, fontsize='medium')
ax.fill_between(
    t, mn-sn, mn+sn, edgecolor='none', facecolor='b', alpha=0.3,
    interpolate=True)
ax.fill_between(
    t, ms-ss, ms+ss, edgecolor='none', facecolor='g', alpha=0.3,
    interpolate=True)
ax.plot(t, mn, 'b', label='North pole')
ax.plot(t, ms, 'g', label='South pole')
ax.set_xlabel('Time')
ax.set_ylabel('Mean radial field strength [G]')
ax.legend()
fig.tight_layout()

plt.show()
"""Example: download an HMI synoptic radial-field map for one Carrington
rotation and display it in Carrington longitude / sine latitude coordinates."""
from __future__ import absolute_import, division, print_function
import matplotlib.pyplot as plt
from astropy.io import fits
import example_helpers
import drms


# Series name, carrington rotation and data segment
series = 'hmi.synoptic_mr_720s'
cr = 2150
segname = 'synopMr'

# DRMS-Server URL (or shortcut) and data url (if any) for the data segment
drms_url, data_url = 'jsoc', 'http://jsoc.stanford.edu'
#drms_url, data_url = 'kis', ''

# DRMS query string
qstr = '%s[%s]' % (series, cr)


# Create DRMS JSON client, use debug=True to see the query URLs
c = drms.Client(drms_url)

# Send request to the DRMS server; fetch all keywords plus the segment path.
print('Querying keyword data...\n -> %s' % qstr)
k, s = c.query(qstr, key=drms.const.all, seg=segname)
print(' -> %d lines retrieved.' % len(k))

# Use only the first line of the query result
k = k.iloc[0]
fname = data_url + s[segname][0]

# Read the data segment
# Note: HTTP downloads get cached in ~/.astropy/cache/downloads
print('Reading data from %r...' % fname)
a = fits.getdata(fname)
ny, nx = a.shape

# Convert pixel to world coordinates using WCS keywords
# (linear transform; FITS pixel indices are 1-based, hence the '1 - CRPIX').
xmin = (1 - k.CRPIX1)*k.CDELT1 + k.CRVAL1
xmax = (nx - k.CRPIX1)*k.CDELT1 + k.CRVAL1
ymin = (1 - k.CRPIX2)*k.CDELT2 + k.CRVAL2
ymax = (ny - k.CRPIX2)*k.CDELT2 + k.CRVAL2

# Convert to Carrington longitude
xmin = k.LON_LAST - xmin
xmax = k.LON_LAST - xmax

# Compute the plot extent used with imshow
# (extend by half a pixel on each side so pixel centers map to coordinates).
extent = (xmin - abs(k.CDELT1)/2, xmax + abs(k.CDELT1)/2,
          ymin - abs(k.CDELT2)/2, ymax + abs(k.CDELT2)/2)

# Aspect ratio for imshow in respect to the extent computed above
aspect = abs((xmax - xmin)/nx * ny/(ymax - ymin))

# Create plot
fig, ax = plt.subplots(1, 1, figsize=(13.5, 6))
ax.set_title('%s, Time: %s ... %s' % (qstr, k.T_START, k.T_STOP),
             fontsize='medium')
ax.imshow(a, vmin=-300, vmax=300, origin='lower', interpolation='nearest',
          cmap='gray', extent=extent, aspect=aspect)
ax.invert_xaxis()
ax.set_xlabel('Carrington longitude')
ax.set_ylabel('Sine latitude')
fig.tight_layout()

plt.show()
"""Print the versions of Python and of the packages used by the examples.

Run with -v/--verbose to also print the location of each module.  The
imports are deliberately interleaved with the print statements: if one
package is missing, the versions of everything printed before it have
already been reported, which makes the failing dependency obvious.
"""
from __future__ import absolute_import, division, print_function
import sys
import argparse
import example_helpers

parser = argparse.ArgumentParser()
parser.add_argument('--verbose', '-v', action='store_true')
args = parser.parse_args()

print('  python: %d.%d.%d' % sys.version_info[:3])
if args.verbose:
    print('    ->', sys.executable)

import six
print('     six:', six.__version__)
if args.verbose:
    print('    ->', six.__file__)

import numpy
print('   numpy:', numpy.__version__)
if args.verbose:
    print('    ->', numpy.__file__)

import pandas
print('  pandas:', pandas.__version__)
if args.verbose:
    print('    ->', pandas.__file__)

import drms
print('    drms:', drms.__version__)
if args.verbose:
    print('    ->', drms.__file__)
0 | [bdist_wheel] | |
1 | universal = 1 | |
2 | ||
3 | [versioneer] | |
4 | vcs = git | |
5 | style = pep440 | |
6 | versionfile_source = drms/_version.py | |
7 | versionfile_build = drms/_version.py | |
8 | tag_prefix = v | |
9 | parentdir_prefix = drms- | |
10 | ||
11 | [egg_info] | |
12 | tag_build = | |
13 | tag_date = 0 | |
14 |
#!/usr/bin/env python
"""Setup script for the drms package (version managed by versioneer)."""
from setuptools import setup
import versioneer

NAME = 'drms'
DESCRIPTION = 'Access HMI, AIA and MDI data with Python'
# Read the long description with a context manager so the file handle is
# closed deterministically (the previous bare open().read() leaked it and
# relied on garbage collection, which triggers ResourceWarning on py3).
with open('README.rst') as f:
    LONG_DESCRIPTION = f.read()
AUTHOR = 'Kolja Glogowski'
AUTHOR_EMAIL = '"Kolja Glogowski" <kolja@pixie.de>'
URL = 'https://github.com/kbg/drms'
LICENSE = 'MIT'

setup(name=NAME,
      version=versioneer.get_version(),
      cmdclass=versioneer.get_cmdclass(),
      description=DESCRIPTION,
      long_description=LONG_DESCRIPTION,
      author=AUTHOR,
      author_email=AUTHOR_EMAIL,
      url=URL,
      license=LICENSE,
      packages=['drms', 'drms.tests'],
      install_requires=[
          'numpy>=1.9.0',
          'pandas>=0.14.1',
          'six>=1.8.0'],
      classifiers=[
          'Intended Audience :: Developers',
          'Intended Audience :: Science/Research',
          'License :: OSI Approved :: MIT License',
          'Operating System :: OS Independent',
          'Programming Language :: Python',
          'Programming Language :: Python :: 2',
          'Programming Language :: Python :: 2.7',
          'Programming Language :: Python :: 3',
          'Programming Language :: Python :: 3.4',
          'Programming Language :: Python :: 3.5',
          'Programming Language :: Python :: 3.6',
          'Topic :: Scientific/Engineering :: Astronomy'],
      platforms='any')
0 | [tox] | |
1 | envlist = py27, py34, py35, py36 | |
2 | ||
3 | [testenv] | |
4 | deps = | |
5 | six | |
6 | numpy | |
7 | pandas | |
8 | pytest | |
9 | changedir = {envtmpdir} | |
10 | commands = | |
11 | python --version | |
12 | python -m drms --version | |
13 | python -m drms.tests |
0 | ||
1 | # Version: 0.18 | |
2 | ||
3 | """The Versioneer - like a rocketeer, but for versions. | |
4 | ||
5 | The Versioneer | |
6 | ============== | |
7 | ||
8 | * like a rocketeer, but for versions! | |
9 | * https://github.com/warner/python-versioneer | |
10 | * Brian Warner | |
11 | * License: Public Domain | |
12 | * Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy | |
13 | * [![Latest Version] | |
14 | (https://pypip.in/version/versioneer/badge.svg?style=flat) | |
15 | ](https://pypi.python.org/pypi/versioneer/) | |
16 | * [![Build Status] | |
17 | (https://travis-ci.org/warner/python-versioneer.png?branch=master) | |
18 | ](https://travis-ci.org/warner/python-versioneer) | |
19 | ||
20 | This is a tool for managing a recorded version number in distutils-based | |
21 | python projects. The goal is to remove the tedious and error-prone "update | |
22 | the embedded version string" step from your release process. Making a new | |
23 | release should be as easy as recording a new tag in your version-control | |
24 | system, and maybe making new tarballs. | |
25 | ||
26 | ||
27 | ## Quick Install | |
28 | ||
* `pip install versioneer` to somewhere in your $PATH
30 | * add a `[versioneer]` section to your setup.cfg (see below) | |
31 | * run `versioneer install` in your source tree, commit the results | |
32 | ||
33 | ## Version Identifiers | |
34 | ||
35 | Source trees come from a variety of places: | |
36 | ||
37 | * a version-control system checkout (mostly used by developers) | |
38 | * a nightly tarball, produced by build automation | |
39 | * a snapshot tarball, produced by a web-based VCS browser, like github's | |
40 | "tarball from tag" feature | |
41 | * a release tarball, produced by "setup.py sdist", distributed through PyPI | |
42 | ||
43 | Within each source tree, the version identifier (either a string or a number, | |
44 | this tool is format-agnostic) can come from a variety of places: | |
45 | ||
46 | * ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows | |
47 | about recent "tags" and an absolute revision-id | |
48 | * the name of the directory into which the tarball was unpacked | |
49 | * an expanded VCS keyword ($Id$, etc) | |
50 | * a `_version.py` created by some earlier build step | |
51 | ||
52 | For released software, the version identifier is closely related to a VCS | |
53 | tag. Some projects use tag names that include more than just the version | |
54 | string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool | |
55 | needs to strip the tag prefix to extract the version identifier. For | |
56 | unreleased software (between tags), the version identifier should provide | |
57 | enough information to help developers recreate the same tree, while also | |
58 | giving them an idea of roughly how old the tree is (after version 1.2, before | |
59 | version 1.3). Many VCS systems can report a description that captures this, | |
60 | for example `git describe --tags --dirty --always` reports things like | |
61 | "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the | |
62 | 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has | |
uncommitted changes).
64 | ||
65 | The version identifier is used for multiple purposes: | |
66 | ||
67 | * to allow the module to self-identify its version: `myproject.__version__` | |
68 | * to choose a name and prefix for a 'setup.py sdist' tarball | |
69 | ||
70 | ## Theory of Operation | |
71 | ||
72 | Versioneer works by adding a special `_version.py` file into your source | |
73 | tree, where your `__init__.py` can import it. This `_version.py` knows how to | |
74 | dynamically ask the VCS tool for version information at import time. | |
75 | ||
76 | `_version.py` also contains `$Revision$` markers, and the installation | |
77 | process marks `_version.py` to have this marker rewritten with a tag name | |
78 | during the `git archive` command. As a result, generated tarballs will | |
79 | contain enough information to get the proper version. | |
80 | ||
81 | To allow `setup.py` to compute a version too, a `versioneer.py` is added to | |
82 | the top level of your source tree, next to `setup.py` and the `setup.cfg` | |
83 | that configures it. This overrides several distutils/setuptools commands to | |
84 | compute the version when invoked, and changes `setup.py build` and `setup.py | |
85 | sdist` to replace `_version.py` with a small static file that contains just | |
86 | the generated version data. | |
87 | ||
88 | ## Installation | |
89 | ||
90 | See [INSTALL.md](./INSTALL.md) for detailed installation instructions. | |
91 | ||
92 | ## Version-String Flavors | |
93 | ||
94 | Code which uses Versioneer can learn about its version string at runtime by | |
95 | importing `_version` from your main `__init__.py` file and running the | |
96 | `get_versions()` function. From the "outside" (e.g. in `setup.py`), you can | |
97 | import the top-level `versioneer.py` and run `get_versions()`. | |
98 | ||
99 | Both functions return a dictionary with different flavors of version | |
100 | information: | |
101 | ||
102 | * `['version']`: A condensed version string, rendered using the selected | |
103 | style. This is the most commonly used value for the project's version | |
104 | string. The default "pep440" style yields strings like `0.11`, | |
105 | `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section | |
106 | below for alternative styles. | |
107 | ||
108 | * `['full-revisionid']`: detailed revision identifier. For Git, this is the | |
109 | full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". | |
110 | ||
111 | * `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the | |
112 | commit date in ISO 8601 format. This will be None if the date is not | |
113 | available. | |
114 | ||
115 | * `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that | |
116 | this is only accurate if run in a VCS checkout, otherwise it is likely to | |
117 | be False or None | |
118 | ||
119 | * `['error']`: if the version string could not be computed, this will be set | |
120 | to a string describing the problem, otherwise it will be None. It may be | |
121 | useful to throw an exception in setup.py if this is set, to avoid e.g. | |
122 | creating tarballs with a version string of "unknown". | |
123 | ||
124 | Some variants are more useful than others. Including `full-revisionid` in a | |
125 | bug report should allow developers to reconstruct the exact code being tested | |
126 | (or indicate the presence of local changes that should be shared with the | |
127 | developers). `version` is suitable for display in an "about" box or a CLI | |
128 | `--version` output: it can be easily compared against release notes and lists | |
129 | of bugs fixed in various releases. | |
130 | ||
131 | The installer adds the following text to your `__init__.py` to place a basic | |
132 | version in `YOURPROJECT.__version__`: | |
133 | ||
134 | from ._version import get_versions | |
135 | __version__ = get_versions()['version'] | |
136 | del get_versions | |
137 | ||
138 | ## Styles | |
139 | ||
140 | The setup.cfg `style=` configuration controls how the VCS information is | |
141 | rendered into a version string. | |
142 | ||
143 | The default style, "pep440", produces a PEP440-compliant string, equal to the | |
144 | un-prefixed tag name for actual releases, and containing an additional "local | |
145 | version" section with more detail for in-between builds. For Git, this is | |
146 | TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags | |
147 | --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the | |
148 | tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and | |
149 | that this commit is two revisions ("+2") beyond the "0.11" tag. For released | |
150 | software (exactly equal to a known tag), the identifier will only contain the | |
151 | stripped tag, e.g. "0.11". | |
152 | ||
153 | Other styles are available. See [details.md](details.md) in the Versioneer | |
154 | source tree for descriptions. | |
155 | ||
156 | ## Debugging | |
157 | ||
158 | Versioneer tries to avoid fatal errors: if something goes wrong, it will tend | |
159 | to return a version of "0+unknown". To investigate the problem, run `setup.py | |
160 | version`, which will run the version-lookup code in a verbose mode, and will | |
161 | display the full contents of `get_versions()` (including the `error` string, | |
162 | which may help identify what went wrong). | |
163 | ||
164 | ## Known Limitations | |
165 | ||
166 | Some situations are known to cause problems for Versioneer. This details the | |
167 | most significant ones. More can be found on Github | |
168 | [issues page](https://github.com/warner/python-versioneer/issues). | |
169 | ||
170 | ### Subprojects | |
171 | ||
172 | Versioneer has limited support for source trees in which `setup.py` is not in | |
the root directory (e.g. `setup.py` and `.git/` are *not* siblings). There are
174 | two common reasons why `setup.py` might not be in the root: | |
175 | ||
176 | * Source trees which contain multiple subprojects, such as | |
177 | [Buildbot](https://github.com/buildbot/buildbot), which contains both | |
178 | "master" and "slave" subprojects, each with their own `setup.py`, | |
179 | `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI | |
180 | distributions (and upload multiple independently-installable tarballs). | |
181 | * Source trees whose main purpose is to contain a C library, but which also | |
  provide bindings to Python (and perhaps other languages) in subdirectories.
183 | ||
184 | Versioneer will look for `.git` in parent directories, and most operations | |
185 | should get the right version string. However `pip` and `setuptools` have bugs | |
186 | and implementation details which frequently cause `pip install .` from a | |
187 | subproject directory to fail to find a correct version string (so it usually | |
188 | defaults to `0+unknown`). | |
189 | ||
190 | `pip install --editable .` should work correctly. `setup.py install` might | |
191 | work too. | |
192 | ||
193 | Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in | |
194 | some later version. | |
195 | ||
196 | [Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking | |
197 | this issue. The discussion in | |
198 | [PR #61](https://github.com/warner/python-versioneer/pull/61) describes the | |
199 | issue from the Versioneer side in more detail. | |
200 | [pip PR#3176](https://github.com/pypa/pip/pull/3176) and | |
201 | [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve | |
202 | pip to let Versioneer work correctly. | |
203 | ||
204 | Versioneer-0.16 and earlier only looked for a `.git` directory next to the | |
205 | `setup.cfg`, so subprojects were completely unsupported with those releases. | |
206 | ||
207 | ### Editable installs with setuptools <= 18.5 | |
208 | ||
209 | `setup.py develop` and `pip install --editable .` allow you to install a | |
210 | project into a virtualenv once, then continue editing the source code (and | |
211 | test) without re-installing after every change. | |
212 | ||
213 | "Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a | |
214 | convenient way to specify executable scripts that should be installed along | |
215 | with the python package. | |
216 | ||
217 | These both work as expected when using modern setuptools. When using | |
218 | setuptools-18.5 or earlier, however, certain operations will cause | |
219 | `pkg_resources.DistributionNotFound` errors when running the entrypoint | |
220 | script, which must be resolved by re-installing the package. This happens | |
221 | when the install happens with one version, then the egg_info data is | |
222 | regenerated while a different version is checked out. Many setup.py commands | |
223 | cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into | |
224 | a different virtualenv), so this can be surprising. | |
225 | ||
226 | [Bug #83](https://github.com/warner/python-versioneer/issues/83) describes | |
227 | this one, but upgrading to a newer version of setuptools should probably | |
228 | resolve it. | |
229 | ||
230 | ### Unicode version strings | |
231 | ||
232 | While Versioneer works (and is continually tested) with both Python 2 and | |
233 | Python 3, it is not entirely consistent with bytes-vs-unicode distinctions. | |
234 | Newer releases probably generate unicode version strings on py2. It's not | |
235 | clear that this is wrong, but it may be surprising for applications when then | |
236 | write these strings to a network connection or include them in bytes-oriented | |
237 | APIs like cryptographic checksums. | |
238 | ||
239 | [Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates | |
240 | this question. | |
241 | ||
242 | ||
243 | ## Updating Versioneer | |
244 | ||
245 | To upgrade your project to a new release of Versioneer, do the following: | |
246 | ||
247 | * install the new Versioneer (`pip install -U versioneer` or equivalent) | |
248 | * edit `setup.cfg`, if necessary, to include any new configuration settings | |
249 | indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. | |
250 | * re-run `versioneer install` in your source tree, to replace | |
251 | `SRC/_version.py` | |
252 | * commit any changed files | |
253 | ||
254 | ## Future Directions | |
255 | ||
256 | This tool is designed to make it easily extended to other version-control | |
257 | systems: all VCS-specific components are in separate directories like | |
258 | src/git/ . The top-level `versioneer.py` script is assembled from these | |
259 | components by running make-versioneer.py . In the future, make-versioneer.py | |
260 | will take a VCS name as an argument, and will construct a version of | |
261 | `versioneer.py` that is specific to the given VCS. It might also take the | |
262 | configuration arguments that are currently provided manually during | |
263 | installation by editing setup.py . Alternatively, it might go the other | |
264 | direction and include code from all supported VCS systems, reducing the | |
265 | number of intermediate scripts. | |
266 | ||
267 | ||
268 | ## License | |
269 | ||
270 | To make Versioneer easier to embed, all its code is dedicated to the public | |
271 | domain. The `_version.py` that it creates is also in the public domain. | |
272 | Specifically, both are released under the Creative Commons "Public Domain | |
273 | Dedication" license (CC0-1.0), as described in | |
274 | https://creativecommons.org/publicdomain/zero/1.0/ . | |
275 | ||
276 | """ | |
277 | ||
278 | from __future__ import print_function, absolute_import | |
279 | try: | |
280 | import configparser | |
281 | except ImportError: | |
282 | import ConfigParser as configparser | |
283 | import errno | |
284 | import json | |
285 | import os | |
286 | import re | |
287 | import subprocess | |
288 | import sys | |
289 | ||
290 | ||
class VersioneerConfig:
    """Container for Versioneer configuration parameters.

    NOTE(review): instances appear to receive their attributes dynamically
    (presumably from the [versioneer] section of setup.cfg, via
    get_config_from_root) rather than being declared here -- confirm.
    """
293 | ||
294 | ||
def get_root():
    """Get the project root directory.

    We require that all commands are run from the project root, i.e. the
    directory that contains setup.py, setup.cfg, and versioneer.py .

    Returns the absolute path of the root.  Raises VersioneerBadRootError
    if neither the current working directory nor the directory containing
    sys.argv[0] holds a setup.py or versioneer.py file.
    """
    root = os.path.realpath(os.path.abspath(os.getcwd()))
    setup_py = os.path.join(root, "setup.py")
    versioneer_py = os.path.join(root, "versioneer.py")
    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
        # allow 'python path/to/setup.py COMMAND'
        root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
        setup_py = os.path.join(root, "setup.py")
        versioneer_py = os.path.join(root, "versioneer.py")
    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
        # Fix: message previously read "unable to run the project root
        # directory", a typo for "find".
        err = ("Versioneer was unable to find the project root directory. "
               "Versioneer requires setup.py to be executed from "
               "its immediate directory (like 'python setup.py COMMAND'), "
               "or in a way that lets it use sys.argv[0] to find the root "
               "(like 'python path/to/setup.py COMMAND').")
        raise VersioneerBadRootError(err)
    try:
        # Certain runtime workflows (setup.py install/develop in a setuptools
        # tree) execute all dependencies in a single python process, so
        # "versioneer" may be imported multiple times, and python's shared
        # module-import table will cache the first one. So we can't use
        # os.path.dirname(__file__), as that will find whichever
        # versioneer.py was first imported, even in later projects.
        me = os.path.realpath(os.path.abspath(__file__))
        me_dir = os.path.normcase(os.path.splitext(me)[0])
        vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
        if me_dir != vsr_dir:
            print("Warning: build in %s is using versioneer.py from %s"
                  % (os.path.dirname(me), versioneer_py))
    except NameError:
        # __file__ may be undefined (e.g. when run via exec); skip the check.
        pass
    return root
332 | ||
333 | ||
def get_config_from_root(root):
    """Read the project setup.cfg file to determine Versioneer config.

    Returns a populated VersioneerConfig.  This might raise
    EnvironmentError (if setup.cfg is missing), or
    configparser.NoSectionError (if it lacks a [versioneer] section), or
    configparser.NoOptionError (if it lacks "VCS="). See the docstring at
    the top of versioneer.py for instructions on writing your setup.cfg .
    """
    setup_cfg = os.path.join(root, "setup.cfg")
    # FIX: SafeConfigParser and readfp() are deprecated aliases that were
    # removed in Python 3.12.  On Python 3 use the modern names; on Python 2
    # keep SafeConfigParser (its interpolation-safe variant) for identical
    # behavior.
    if sys.version_info[0] >= 3:
        parser = configparser.ConfigParser()
    else:
        parser = configparser.SafeConfigParser()
    with open(setup_cfg, "r") as f:
        if hasattr(parser, "read_file"):
            parser.read_file(f)
        else:
            parser.readfp(f)
    VCS = parser.get("versioneer", "VCS")  # mandatory

    def get(parser, name):
        # optional keys: return None when absent instead of raising
        if parser.has_option("versioneer", name):
            return parser.get("versioneer", name)
        return None
    cfg = VersioneerConfig()
    cfg.VCS = VCS
    cfg.style = get(parser, "style") or ""
    cfg.versionfile_source = get(parser, "versionfile_source")
    cfg.versionfile_build = get(parser, "versionfile_build")
    cfg.tag_prefix = get(parser, "tag_prefix")
    # an explicitly-quoted empty prefix in setup.cfg means "no prefix"
    if cfg.tag_prefix in ("''", '""'):
        cfg.tag_prefix = ""
    cfg.parentdir_prefix = get(parser, "parentdir_prefix")
    cfg.verbose = get(parser, "verbose")
    return cfg
361 | ||
362 | ||
class NotThisMethod(Exception):
    """Raised when one version-discovery strategy does not apply.

    Callers try the next strategy when they catch this.
    """
365 | ||
366 | ||
# these dictionaries contain VCS-specific tools
# LONG_VERSION_PY maps a VCS name to the _version.py template text for it;
# HANDLERS maps VCS name -> method name -> handler function, and is
# populated by the register_vcs_handler decorator below.
LONG_VERSION_PY = {}
HANDLERS = {}
370 | ||
371 | ||
def register_vcs_handler(vcs, method):  # decorator
    """Decorator factory: register a function as HANDLERS[vcs][method]."""
    def decorate(f):
        """Record f in the module-level HANDLERS table and return it."""
        HANDLERS.setdefault(vcs, {})[method] = f
        return f
    return decorate
381 | ||
382 | ||
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
                env=None):
    """Try each executable name in *commands* until one launches with *args*.

    Returns (stdout, returncode) on a clean exit, (None, returncode) when
    the process exits non-zero, and (None, None) when no candidate
    executable could be started at all.
    """
    assert isinstance(commands, list)
    process = None
    for candidate in commands:
        dispcmd = str([candidate] + args)
        try:
            # shell=False throughout, so on Windows callers pass git.cmd,
            # not just git
            process = subprocess.Popen([candidate] + args, cwd=cwd, env=env,
                                       stdout=subprocess.PIPE,
                                       stderr=(subprocess.PIPE if hide_stderr
                                               else None))
        except EnvironmentError:
            err = sys.exc_info()[1]
            if err.errno == errno.ENOENT:
                # this candidate is not installed; try the next one
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(err)
            return None, None
        break
    else:
        # loop exhausted without a successful Popen
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    stdout = process.communicate()[0].strip()
    if sys.version_info[0] >= 3:
        stdout = stdout.decode()
    if process.returncode == 0:
        return stdout, process.returncode
    if verbose:
        print("unable to run %s (error)" % dispcmd)
        print("stdout was %s" % stdout)
    return None, process.returncode
418 | ||
419 | ||
# LONG_VERSION_PY['git'] is the source-code TEMPLATE that versioneer writes
# verbatim into the project's _version.py.  It is rendered with %-style
# formatting at install time, so literal percent signs appear doubled (%%)
# and the keys %(DOLLAR)s, %(STYLE)s, %(TAG_PREFIX)s, %(PARENTDIR_PREFIX)s
# and %(VERSIONFILE_SOURCE)s are substituted then.  The string content is
# runtime data and must not be edited casually.
LONG_VERSION_PY['git'] = '''
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.

# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)

"""Git implementation of _version.py."""

from __future__ import absolute_import
import errno
import os
import re
import subprocess
import sys


def get_keywords():
    """Get the keywords needed to look up the version information."""
    # these strings will be replaced by git during git-archive.
    # setup.py/versioneer.py will grep for the variable names, so they must
    # each be defined on a line of their own. _version.py will just call
    # get_keywords().
    git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
    git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
    git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s"
    keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
    return keywords


class VersioneerConfig:
    """Container for Versioneer configuration parameters."""


def get_config():
    """Create, populate and return the VersioneerConfig() object."""
    # these strings are filled in when 'setup.py versioneer' creates
    # _version.py
    cfg = VersioneerConfig()
    cfg.VCS = "git"
    cfg.style = "%(STYLE)s"
    cfg.tag_prefix = "%(TAG_PREFIX)s"
    cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s"
    cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s"
    cfg.verbose = False
    return cfg


class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario."""


LONG_VERSION_PY = {}
HANDLERS = {}


def register_vcs_handler(vcs, method):  # decorator
    """Decorator to mark a method as the handler for a particular VCS."""
    def decorate(f):
        """Store f in HANDLERS[vcs][method]."""
        if vcs not in HANDLERS:
            HANDLERS[vcs] = {}
        HANDLERS[vcs][method] = f
        return f
    return decorate


def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
                env=None):
    """Call the given command(s)."""
    assert isinstance(commands, list)
    p = None
    for c in commands:
        try:
            dispcmd = str([c] + args)
            # remember shell=False, so use git.cmd on windows, not just git
            p = subprocess.Popen([c] + args, cwd=cwd, env=env,
                                 stdout=subprocess.PIPE,
                                 stderr=(subprocess.PIPE if hide_stderr
                                         else None))
            break
        except EnvironmentError:
            e = sys.exc_info()[1]
            if e.errno == errno.ENOENT:
                continue
            if verbose:
                print("unable to run %%s" %% dispcmd)
                print(e)
            return None, None
    else:
        if verbose:
            print("unable to find command, tried %%s" %% (commands,))
        return None, None
    stdout = p.communicate()[0].strip()
    if sys.version_info[0] >= 3:
        stdout = stdout.decode()
    if p.returncode != 0:
        if verbose:
            print("unable to run %%s (error)" %% dispcmd)
            print("stdout was %%s" %% stdout)
        return None, p.returncode
    return stdout, p.returncode


def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes both
    the project name and a version string. We will also support searching up
    two directory levels for an appropriately named parent directory
    """
    rootdirs = []

    for i in range(3):
        dirname = os.path.basename(root)
        if dirname.startswith(parentdir_prefix):
            return {"version": dirname[len(parentdir_prefix):],
                    "full-revisionid": None,
                    "dirty": False, "error": None, "date": None}
        else:
            rootdirs.append(root)
            root = os.path.dirname(root)  # up a level

    if verbose:
        print("Tried directories %%s but none started with prefix %%s" %%
              (str(rootdirs), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")


@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract version information from the given file."""
    # the code embedded in _version.py can just fetch the value of these
    # keywords. When used from setup.py, we don't want to import _version.py,
    # so we do it with a regexp instead. This function is not used from
    # _version.py.
    keywords = {}
    try:
        f = open(versionfile_abs, "r")
        for line in f.readlines():
            if line.strip().startswith("git_refnames ="):
                mo = re.search(r'=\s*"(.*)"', line)
                if mo:
                    keywords["refnames"] = mo.group(1)
            if line.strip().startswith("git_full ="):
                mo = re.search(r'=\s*"(.*)"', line)
                if mo:
                    keywords["full"] = mo.group(1)
            if line.strip().startswith("git_date ="):
                mo = re.search(r'=\s*"(.*)"', line)
                if mo:
                    keywords["date"] = mo.group(1)
        f.close()
    except EnvironmentError:
        pass
    return keywords


@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Get version information from git keywords."""
    if not keywords:
        raise NotThisMethod("no keywords at all, weird")
    date = keywords.get("date")
    if date is not None:
        # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant
        # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601
        # -like" string, which we must then edit to make compliant), because
        # it's been around since git-1.5.3, and it's too difficult to
        # discover which version we're using, or to work around using an
        # older one.
        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = set([r.strip() for r in refnames.strip("()").split(",")])
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %%d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = set([r for r in refs if re.search(r'\d', r)])
        if verbose:
            print("discarding '%%s', no digits" %% ",".join(refs - tags))
    if verbose:
        print("likely tags: %%s" %% ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            r = ref[len(tag_prefix):]
            if verbose:
                print("picking %%s" %% r)
            return {"version": r,
                    "full-revisionid": keywords["full"].strip(),
                    "dirty": False, "error": None,
                    "date": date}
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags", "date": None}


@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    """Get version from 'git describe' in the root of the source tree.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]

    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
                          hide_stderr=True)
    if rc != 0:
        if verbose:
            print("Directory %%s not under git control" %% root)
        raise NotThisMethod("'git rev-parse --git-dir' returned error")

    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
                                          "--always", "--long",
                                          "--match", "%%s*" %% tag_prefix],
                                   cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()

    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None

    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out

    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]

    # now we have TAG-NUM-gHEX or HEX

    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparseable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%%s'"
                               %% describe_out)
            return pieces

        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%%s' doesn't start with prefix '%%s'"
                print(fmt %% (full_tag, tag_prefix))
            pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
                               %% (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]

        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))

        # commit: short hex revision ID
        pieces["short"] = mo.group(3)

    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
                                    cwd=root)
        pieces["distance"] = int(count_out)  # total number of commits

    # commit date: see ISO-8601 comment in git_versions_from_keywords()
    date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"],
                       cwd=root)[0].strip()
    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)

    return pieces


def plus_or_dot(pieces):
    """Return a + if we don't already have one, else return a ."""
    if "+" in pieces.get("closest-tag", ""):
        return "."
    return "+"


def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += plus_or_dot(pieces)
            rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
            if pieces["dirty"]:
                rendered += ".dirty"
    else:
        # exception #1
        rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"],
                                          pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered


def render_pep440_pre(pieces):
    """TAG[.post.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post.devDISTANCE
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += ".post.dev%%d" %% pieces["distance"]
    else:
        # exception #1
        rendered = "0.post.dev%%d" %% pieces["distance"]
    return rendered


def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%%d" %% pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
            rendered += plus_or_dot(pieces)
            rendered += "g%%s" %% pieces["short"]
    else:
        # exception #1
        rendered = "0.post%%d" %% pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        rendered += "+g%%s" %% pieces["short"]
    return rendered


def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Eexceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%%d" %% pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
    else:
        # exception #1
        rendered = "0.post%%d" %% pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
    return rendered


def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"]:
            rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered


def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
    else:
        # exception #1
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered


def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"],
                "date": None}

    if not style or style == "default":
        style = "pep440"  # the default

    if style == "pep440":
        rendered = render_pep440(pieces)
    elif style == "pep440-pre":
        rendered = render_pep440_pre(pieces)
    elif style == "pep440-post":
        rendered = render_pep440_post(pieces)
    elif style == "pep440-old":
        rendered = render_pep440_old(pieces)
    elif style == "git-describe":
        rendered = render_git_describe(pieces)
    elif style == "git-describe-long":
        rendered = render_git_describe_long(pieces)
    else:
        raise ValueError("unknown style '%%s'" %% style)

    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None,
            "date": pieces.get("date")}


def get_versions():
    """Get version information or return default if unable to do so."""
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.

    cfg = get_config()
    verbose = cfg.verbose

    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
                                          verbose)
    except NotThisMethod:
        pass

    try:
        root = os.path.realpath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file. Invert
        # this to find the root from __file__.
        for i in cfg.versionfile_source.split('/'):
            root = os.path.dirname(root)
    except NameError:
        return {"version": "0+unknown", "full-revisionid": None,
                "dirty": None,
                "error": "unable to find root of source tree",
                "date": None}

    try:
        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
        return render(pieces, cfg.style)
    except NotThisMethod:
        pass

    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
    except NotThisMethod:
        pass

    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None,
            "error": "unable to compute version", "date": None}
'''
942 | ||
943 | ||
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract version information from the given file.

    Scans *versionfile_abs* for the git_refnames/git_full/git_date
    assignment lines and returns a dict with any of the keys "refnames",
    "full" and "date" that were found.  Returns an empty dict when the
    file cannot be read.
    """
    # the code embedded in _version.py can just fetch the value of these
    # keywords. When used from setup.py, we don't want to import _version.py,
    # so we do it with a regexp instead. This function is not used from
    # _version.py.
    keywords = {}
    try:
        # FIX: use a context manager so the file handle is closed even when
        # an exception interrupts the scan (the original leaked it).
        with open(versionfile_abs, "r") as f:
            for line in f.readlines():
                if line.strip().startswith("git_refnames ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["refnames"] = mo.group(1)
                if line.strip().startswith("git_full ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["full"] = mo.group(1)
                if line.strip().startswith("git_date ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["date"] = mo.group(1)
    except EnvironmentError:
        # missing/unreadable file: fall through and return whatever we have
        pass
    return keywords
971 | ||
972 | ||
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Get version information from git keywords."""
    if not keywords:
        raise NotThisMethod("no keywords at all, weird")
    date = keywords.get("date")
    if date is not None:
        # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
        # datestamp, but "%ci" (an "ISO-8601 -like" string we must edit to
        # make compliant) has been around since git-1.5.3 and it is too hard
        # to discover which git produced the archive, so normalize "%ci".
        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        # git-archive never ran, so the substitution markers are still here
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = set(name.strip() for name in refnames.strip("()").split(","))
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = set(name[len(TAG):] for name in refs if name.startswith(TAG))
    if not tags:
        # Either git < 1.8.3, or there really are no tags. Heuristic: assume
        # every version tag contains a digit; this discards common branch
        # names ("release", "stabilization") plus "HEAD" and "master", since
        # the old git %d expansion strips the refs/heads/ and refs/tags/
        # prefixes that would otherwise distinguish branches from tags.
        tags = set(name for name in refs if re.search(r'\d', name))
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if not ref.startswith(tag_prefix):
            continue
        version = ref[len(tag_prefix):]
        if verbose:
            print("picking %s" % version)
        return {"version": version,
                "full-revisionid": keywords["full"].strip(),
                "dirty": False, "error": None,
                "date": date}
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags", "date": None}
1026 | ||
1027 | ||
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    """Get version from 'git describe' in the root of the source tree.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.

    Returns a "pieces" dict with keys "long", "short", "dirty", "error",
    "closest-tag", "distance" and "date"; on a parse failure "error" is
    set and the partial dict is returned instead of raising.  Raises
    NotThisMethod when *root* is not a git checkout or git fails.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        # shell=False on Windows needs the full command file names
        GITS = ["git.cmd", "git.exe"]

    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
                          hide_stderr=True)
    if rc != 0:
        if verbose:
            print("Directory %s not under git control" % root)
        raise NotThisMethod("'git rev-parse --git-dir' returned error")

    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
                                          "--always", "--long",
                                          "--match", "%s*" % tag_prefix],
                                   cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()

    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None

    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out

    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        # drop the suffix so the TAG-NUM-gHEX parse below is unaffected
        git_describe = git_describe[:git_describe.rindex("-dirty")]

    # now we have TAG-NUM-gHEX or HEX

    if "-" in git_describe:
        # TAG-NUM-gHEX
        # greedy (.+) lets TAG itself contain hyphens; NUM and HEX anchor it
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparseable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%s'"
                               % describe_out)
            return pieces

        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
                               % (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]

        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))

        # commit: short hex revision ID
        pieces["short"] = mo.group(3)

    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
                                    cwd=root)
        pieces["distance"] = int(count_out)  # total number of commits

    # commit date: see ISO-8601 comment in git_versions_from_keywords()
    date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
                       cwd=root)[0].strip()
    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)

    return pieces
1118 | ||
1119 | ||
def do_vcs_install(manifest_in, versionfile_source, ipy):
    """Git-specific installation logic for Versioneer.

    For Git, this means creating/changing .gitattributes to mark _version.py
    for export-subst keyword substitution, then staging the touched files
    with 'git add'.

    Parameters:
        manifest_in: path to the project's MANIFEST.in.
        versionfile_source: project-relative path of the generated
            _version.py.
        ipy: path of the package __init__.py that was modified, or None.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        # Plain "git" may not resolve on Windows; try the usual wrappers.
        GITS = ["git.cmd", "git.exe"]
    files = [manifest_in, versionfile_source]
    if ipy:
        files.append(ipy)
    try:
        me = __file__
        if me.endswith(".pyc") or me.endswith(".pyo"):
            # Stage the source file, not the compiled bytecode.
            me = os.path.splitext(me)[0] + ".py"
        versioneer_file = os.path.relpath(me)
    except NameError:
        # __file__ is undefined (e.g. interactive use); fall back to the
        # conventional filename.
        versioneer_file = "versioneer.py"
    files.append(versioneer_file)
    present = False
    try:
        # Use a context manager so the handle is closed even if the scan
        # raises mid-file (the original leaked the handle in that case).
        with open(".gitattributes", "r") as f:
            for line in f:
                if line.strip().startswith(versionfile_source):
                    if "export-subst" in line.strip().split()[1:]:
                        present = True
    except EnvironmentError:
        # .gitattributes doesn't exist (or is unreadable); we'll create it.
        pass
    if not present:
        with open(".gitattributes", "a+") as f:
            f.write("%s export-subst\n" % versionfile_source)
        files.append(".gitattributes")
    run_command(GITS, ["add", "--"] + files)
1156 | ||
1157 | ||
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes both
    the project name and a version string. We will also support searching up
    two directory levels for an appropriately named parent directory
    """
    tried = []
    level = 0
    while level < 3:
        candidate = os.path.basename(root)
        if candidate.startswith(parentdir_prefix):
            # Whatever follows the prefix is taken to be the version.
            return {"version": candidate[len(parentdir_prefix):],
                    "full-revisionid": None,
                    "dirty": False, "error": None, "date": None}
        tried.append(root)
        root = os.path.dirname(root)  # go up one directory level
        level += 1

    if verbose:
        print("Tried directories %s but none started with prefix %s" %
              (str(tried), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
1181 | ||
1182 | ||
# Template for the short _version.py written into sdist tarballs and build
# trees by write_to_version_file(): the JSON-encoded version dict is
# substituted for the %s placeholder, and the generated get_versions()
# simply decodes it at import time.
SHORT_VERSION_PY = """
# This file was generated by 'versioneer.py' (0.18) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.

from __future__ import absolute_import
import json

version_json = '''
%s
''' # END VERSION_JSON


def get_versions():
    return json.loads(version_json)
"""
1200 | ||
1201 | ||
def versions_from_file(filename):
    """Try to determine the version from _version.py if present."""
    try:
        with open(filename) as f:
            contents = f.read()
    except EnvironmentError:
        raise NotThisMethod("unable to read _version.py")
    # The rewritten _version.py embeds the version dict as JSON between the
    # version_json assignment and an END VERSION_JSON marker. Accept both
    # Unix and Windows line endings after the opening triple-quote.
    for newline in ("\n", "\r\n"):
        pattern = (r"version_json = '''" + newline +
                   r"(.*)''' # END VERSION_JSON")
        mo = re.search(pattern, contents, re.M | re.S)
        if mo:
            return json.loads(mo.group(1))
    raise NotThisMethod("no version_json in _version.py")
1217 | ||
1218 | ||
def write_to_version_file(filename, versions):
    """Write the given version number to the given _version.py file."""
    # Unlink first: the target may be a hardlink into the source tree
    # (see cmd_sdist.make_release_tree), and rewriting through the link
    # would modify the original.
    os.unlink(filename)
    payload = json.dumps(versions, sort_keys=True,
                         indent=1, separators=(",", ": "))
    with open(filename, "w") as f:
        f.write(SHORT_VERSION_PY % payload)

    print("set %s to '%s'" % (filename, versions["version"]))
1228 | ||
1229 | ||
def plus_or_dot(pieces):
    """Return a + if we don't already have one, else return a ."""
    closest = pieces.get("closest-tag", "")
    return "." if "+" in closest else "+"
1235 | ||
1236 | ||
def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if tag:
        version = tag
        if pieces["distance"] or pieces["dirty"]:
            # Inlined plus_or_dot(): use "." when the tag already
            # carries a "+" local-version separator.
            sep = "." if "+" in tag else "+"
            version += "%s%d.g%s" % (sep, pieces["distance"],
                                     pieces["short"])
            if pieces["dirty"]:
                version += ".dirty"
        return version
    # exception #1: no tag anywhere in history
    version = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
    if pieces["dirty"]:
        version += ".dirty"
    return version
1260 | ||
1261 | ||
def render_pep440_pre(pieces):
    """TAG[.post.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post.devDISTANCE
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: nothing tagged yet
        return "0.post.dev%d" % pieces["distance"]
    suffix = ".post.dev%d" % pieces["distance"] if pieces["distance"] else ""
    return tag + suffix
1276 | ||
1277 | ||
def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if tag:
        out = tag
        if pieces["distance"] or pieces["dirty"]:
            out += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                out += ".dev0"
            # Inlined plus_or_dot(): "." when the tag already has a "+".
            sep = "." if "+" in tag else "+"
            out += "%sg%s" % (sep, pieces["short"])
        return out
    # exception #1: no tag anywhere in history
    out = "0.post%d" % pieces["distance"]
    if pieces["dirty"]:
        out += ".dev0"
    out += "+g%s" % pieces["short"]
    return out
1303 | ||
1304 | ||
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    dev = ".dev0" if pieces["dirty"] else ""
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: no tag anywhere in history
        return "0.post%d%s" % (pieces["distance"], dev)
    if pieces["distance"] or pieces["dirty"]:
        return "%s.post%d%s" % (tag, pieces["distance"], dev)
    return tag
1325 | ||
1326 | ||
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        parts = [tag]
        if pieces["distance"]:
            parts.append("-%d-g%s" % (pieces["distance"], pieces["short"]))
    else:
        # exception #1: fall back to the bare short hash
        parts = [pieces["short"]]
    if pieces["dirty"]:
        parts.append("-dirty")
    return "".join(parts)
1345 | ||
1346 | ||
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        base = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        # exception #1: fall back to the bare short hash
        base = pieces["short"]
    suffix = "-dirty" if pieces["dirty"] else ""
    return base + suffix
1365 | ||
1366 | ||
def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        # Extraction failed upstream; report the error instead of a version.
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"],
                "date": None}

    if not style or style == "default":
        style = "pep440"  # the default

    # Dispatch table replaces the original if/elif chain.
    renderers = {
        "pep440": render_pep440,
        "pep440-pre": render_pep440_pre,
        "pep440-post": render_pep440_post,
        "pep440-old": render_pep440_old,
        "git-describe": render_git_describe,
        "git-describe-long": render_git_describe_long,
    }
    if style not in renderers:
        raise ValueError("unknown style '%s'" % style)
    rendered = renderers[style](pieces)

    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None,
            "date": pieces.get("date")}
1397 | ||
1398 | ||
class VersioneerBadRootError(Exception):
    """Raised when the project root is unknown or lacks the key files."""
1401 | ||
1402 | ||
def get_versions(verbose=False):
    """Get the project version from whatever source is available.

    Returns dict with two keys: 'version' and 'full'.
    """
    if "versioneer" in sys.modules:
        # see the discussion in cmdclass.py:get_cmdclass()
        del sys.modules["versioneer"]

    root = get_root()
    cfg = get_config_from_root(root)

    assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
    handlers = HANDLERS.get(cfg.VCS)
    assert handlers, "unrecognized VCS '%s'" % cfg.VCS
    verbose = verbose or cfg.verbose
    assert cfg.versionfile_source is not None, \
        "please set versioneer.versionfile_source"
    assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"

    versionfile_abs = os.path.join(root, cfg.versionfile_source)

    # Try each extraction strategy in order, falling through on
    # NotThisMethod. This covers developers using a source checkout, users
    # of a 'setup.py sdist' tarball, and users of a 'git archive' / github
    # download-from-tag tarball.

    # Strategy 1: expanded VCS keywords ('git archive' tarballs).
    keywords_getter = handlers.get("get_keywords")
    keywords_parser = handlers.get("keywords")
    if keywords_getter and keywords_parser:
        try:
            ver = keywords_parser(keywords_getter(versionfile_abs),
                                  cfg.tag_prefix, verbose)
            if verbose:
                print("got version from expanded keyword %s" % ver)
            return ver
        except NotThisMethod:
            pass

    # Strategy 2: a previously rewritten _version.py (sdist tarballs).
    try:
        ver = versions_from_file(versionfile_abs)
        if verbose:
            print("got version from file %s %s" % (versionfile_abs, ver))
        return ver
    except NotThisMethod:
        pass

    # Strategy 3: ask the VCS directly (developer checkouts).
    pieces_getter = handlers.get("pieces_from_vcs")
    if pieces_getter:
        try:
            ver = render(pieces_getter(cfg.tag_prefix, root, verbose),
                         cfg.style)
            if verbose:
                print("got version from VCS %s" % ver)
            return ver
        except NotThisMethod:
            pass

    # Strategy 4: parent directory name (unpacked tarball, no VCS data).
    try:
        if cfg.parentdir_prefix:
            ver = versions_from_parentdir(cfg.parentdir_prefix, root,
                                          verbose)
            if verbose:
                print("got version from parentdir %s" % ver)
            return ver
    except NotThisMethod:
        pass

    if verbose:
        print("unable to compute version")

    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None, "error": "unable to compute version",
            "date": None}
1477 | ||
1478 | ||
def get_version():
    """Get the short version string for this project."""
    versions = get_versions()
    return versions["version"]
1482 | ||
1483 | ||
def get_cmdclass():
    """Get the custom setuptools/distutils subclasses used by Versioneer.

    Returns a dict mapping command names to command classes: always
    "version" and "sdist"; "build_py" unless cx_Freeze is loaded (in which
    case "build_exe" replaces it); and "py2exe" when py2exe is loaded.
    Each command keeps the generated _version.py file up to date.
    """
    if "versioneer" in sys.modules:
        del sys.modules["versioneer"]
        # this fixes the "python setup.py develop" case (also 'install' and
        # 'easy_install .'), in which subdependencies of the main project are
        # built (using setup.py bdist_egg) in the same python process. Assume
        # a main project A and a dependency B, which use different versions
        # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
        # sys.modules by the time B's setup.py is executed, causing B to run
        # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
        # sandbox that restores sys.modules to it's pre-build state, so the
        # parent is protected against the child's "import versioneer". By
        # removing ourselves from sys.modules here, before the child build
        # happens, we protect the child from the parent's versioneer too.
        # Also see https://github.com/warner/python-versioneer/issues/52

    cmds = {}

    # we add "version" to both distutils and setuptools
    from distutils.core import Command

    class cmd_version(Command):
        # 'setup.py version': print the computed version without building.
        description = "report generated version string"
        user_options = []
        boolean_options = []

        def initialize_options(self):
            pass

        def finalize_options(self):
            pass

        def run(self):
            vers = get_versions(verbose=True)
            print("Version: %s" % vers["version"])
            print(" full-revisionid: %s" % vers.get("full-revisionid"))
            print(" dirty: %s" % vers.get("dirty"))
            print(" date: %s" % vers.get("date"))
            if vers["error"]:
                print(" error: %s" % vers["error"])
    cmds["version"] = cmd_version

    # we override "build_py" in both distutils and setuptools
    #
    # most invocation pathways end up running build_py:
    #  distutils/build -> build_py
    #  distutils/install -> distutils/build ->..
    #  setuptools/bdist_wheel -> distutils/install ->..
    #  setuptools/bdist_egg -> distutils/install_lib -> build_py
    #  setuptools/install -> bdist_egg ->..
    #  setuptools/develop -> ?
    #  pip install:
    #   copies source tree to a tempdir before running egg_info/etc
    #   if .git isn't copied too, 'git describe' will fail
    #   then does setup.py bdist_wheel, or sometimes setup.py install
    #  setup.py egg_info -> ?

    # we override different "build_py" commands for both environments
    if "setuptools" in sys.modules:
        from setuptools.command.build_py import build_py as _build_py
    else:
        from distutils.command.build_py import build_py as _build_py

    class cmd_build_py(_build_py):
        # After the normal build, rewrite the copied _version.py with the
        # frozen version dict so installed code reports the right version.
        def run(self):
            root = get_root()
            cfg = get_config_from_root(root)
            versions = get_versions()
            _build_py.run(self)
            # now locate _version.py in the new build/ directory and replace
            # it with an updated value
            if cfg.versionfile_build:
                target_versionfile = os.path.join(self.build_lib,
                                                  cfg.versionfile_build)
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)
    cmds["build_py"] = cmd_build_py

    if "cx_Freeze" in sys.modules:  # cx_freeze enabled?
        from cx_Freeze.dist import build_exe as _build_exe
        # nczeczulin reports that py2exe won't like the pep440-style string
        # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
        # setup(console=[{
        #   "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION
        #   "product_version": versioneer.get_version(),
        #   ...

        class cmd_build_exe(_build_exe):
            # Freeze with a static _version.py, then restore the original
            # template afterwards so the source tree stays usable.
            def run(self):
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)

                _build_exe.run(self)
                os.unlink(target_versionfile)
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(LONG %
                            {"DOLLAR": "$",
                             "STYLE": cfg.style,
                             "TAG_PREFIX": cfg.tag_prefix,
                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
                             })
        cmds["build_exe"] = cmd_build_exe
        del cmds["build_py"]

    if 'py2exe' in sys.modules:  # py2exe enabled?
        try:
            from py2exe.distutils_buildexe import py2exe as _py2exe  # py3
        except ImportError:
            from py2exe.build_exe import py2exe as _py2exe  # py2

        class cmd_py2exe(_py2exe):
            # Same freeze-then-restore dance as cmd_build_exe above.
            def run(self):
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)

                _py2exe.run(self)
                os.unlink(target_versionfile)
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(LONG %
                            {"DOLLAR": "$",
                             "STYLE": cfg.style,
                             "TAG_PREFIX": cfg.tag_prefix,
                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
                             })
        cmds["py2exe"] = cmd_py2exe

    # we override different "sdist" commands for both environments
    if "setuptools" in sys.modules:
        from setuptools.command.sdist import sdist as _sdist
    else:
        from distutils.command.sdist import sdist as _sdist

    class cmd_sdist(_sdist):
        # Compute the version once up front and freeze it into the
        # _version.py copied into the release tree.
        def run(self):
            versions = get_versions()
            self._versioneer_generated_versions = versions
            # unless we update this, the command will keep using the old
            # version
            self.distribution.metadata.version = versions["version"]
            return _sdist.run(self)

        def make_release_tree(self, base_dir, files):
            root = get_root()
            cfg = get_config_from_root(root)
            _sdist.make_release_tree(self, base_dir, files)
            # now locate _version.py in the new base_dir directory
            # (remembering that it may be a hardlink) and replace it with an
            # updated value
            target_versionfile = os.path.join(base_dir,
                                              cfg.versionfile_source)
            print("UPDATING %s" % target_versionfile)
            write_to_version_file(target_versionfile,
                                  self._versioneer_generated_versions)
    cmds["sdist"] = cmd_sdist

    return cmds
1652 | ||
1653 | ||
# Printed to stderr by do_setup() when setup.cfg has no usable
# [versioneer] section.
CONFIG_ERROR = """
setup.cfg is missing the necessary Versioneer configuration. You need
a section like:

 [versioneer]
 VCS = git
 style = pep440
 versionfile_source = src/myproject/_version.py
 versionfile_build = myproject/_version.py
 tag_prefix =
 parentdir_prefix = myproject-

You will also need to edit your setup.py to use the results:

 import versioneer
 setup(version=versioneer.get_version(),
 cmdclass=versioneer.get_cmdclass(), ...)

Please read the docstring in ./versioneer.py for configuration instructions,
edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
"""

# Commented-out template appended to setup.cfg by do_setup() when no
# [versioneer] section exists at all.
SAMPLE_CONFIG = """
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.

[versioneer]
#VCS = git
#style = pep440
#versionfile_source =
#versionfile_build =
#tag_prefix =
#parentdir_prefix =

"""

# Snippet appended to the package __init__.py so that
# package.__version__ reflects the computed version at import time.
INIT_PY_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""
1696 | ||
1697 | ||
def do_setup():
    """Main VCS-independent setup function for installing Versioneer.

    Writes the project's _version.py from the template, appends the
    __version__ snippet to the package __init__.py, ensures MANIFEST.in
    lists the required files, and performs VCS-specific installation steps.
    Returns 0 on success, 1 when setup.cfg lacks a usable [versioneer]
    configuration.
    """
    root = get_root()
    try:
        cfg = get_config_from_root(root)
    except (EnvironmentError, configparser.NoSectionError,
            configparser.NoOptionError) as e:
        # No usable config: append a sample [versioneer] section (unless the
        # failure was a missing option inside an existing section), print
        # instructions, and bail out.
        if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
            print("Adding sample versioneer config to setup.cfg",
                  file=sys.stderr)
            with open(os.path.join(root, "setup.cfg"), "a") as f:
                f.write(SAMPLE_CONFIG)
        print(CONFIG_ERROR, file=sys.stderr)
        return 1

    print(" creating %s" % cfg.versionfile_source)
    with open(cfg.versionfile_source, "w") as f:
        LONG = LONG_VERSION_PY[cfg.VCS]
        # Substitute the per-project settings into the long template;
        # "DOLLAR" keeps literal '$' characters intact.
        f.write(LONG % {"DOLLAR": "$",
                        "STYLE": cfg.style,
                        "TAG_PREFIX": cfg.tag_prefix,
                        "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                        "VERSIONFILE_SOURCE": cfg.versionfile_source,
                        })

    # Append the __version__ snippet to the package __init__.py (if any),
    # skipping it when it is already present.
    ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
                       "__init__.py")
    if os.path.exists(ipy):
        try:
            with open(ipy, "r") as f:
                old = f.read()
        except EnvironmentError:
            old = ""
        if INIT_PY_SNIPPET not in old:
            print(" appending to %s" % ipy)
            with open(ipy, "a") as f:
                f.write(INIT_PY_SNIPPET)
        else:
            print(" %s unmodified" % ipy)
    else:
        print(" %s doesn't exist, ok" % ipy)
        ipy = None

    # Make sure both the top-level "versioneer.py" and versionfile_source
    # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
    # they'll be copied into source distributions. Pip won't be able to
    # install the package without this.
    manifest_in = os.path.join(root, "MANIFEST.in")
    simple_includes = set()
    try:
        with open(manifest_in, "r") as f:
            for line in f:
                if line.startswith("include "):
                    for include in line.split()[1:]:
                        simple_includes.add(include)
    except EnvironmentError:
        pass
    # That doesn't cover everything MANIFEST.in can do
    # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
    # it might give some false negatives. Appending redundant 'include'
    # lines is safe, though.
    if "versioneer.py" not in simple_includes:
        print(" appending 'versioneer.py' to MANIFEST.in")
        with open(manifest_in, "a") as f:
            f.write("include versioneer.py\n")
    else:
        print(" 'versioneer.py' already in MANIFEST.in")
    if cfg.versionfile_source not in simple_includes:
        print(" appending versionfile_source ('%s') to MANIFEST.in" %
              cfg.versionfile_source)
        with open(manifest_in, "a") as f:
            f.write("include %s\n" % cfg.versionfile_source)
    else:
        print(" versionfile_source already in MANIFEST.in")

    # Make VCS-specific changes. For git, this means creating/changing
    # .gitattributes to mark _version.py for export-subst keyword
    # substitution.
    do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
    return 0
1778 | ||
1779 | ||
def scan_setup_py():
    """Validate the contents of setup.py against Versioneer's expectations."""
    found = set()
    setters = False
    errors = 0
    # Markers that a correctly configured setup.py must contain, mapped to
    # the token recorded when each is seen.
    markers = {"import versioneer": "import",
               "versioneer.get_cmdclass()": "cmdclass",
               "versioneer.get_version()": "get_version"}
    with open("setup.py", "r") as f:
        for line in f:
            for needle, token in markers.items():
                if needle in line:
                    found.add(token)
            # Old-style module-level configuration is obsolete.
            if "versioneer.VCS" in line:
                setters = True
            if "versioneer.versionfile_source" in line:
                setters = True
    if len(found) != 3:
        print("")
        print("Your setup.py appears to be missing some important items")
        print("(but I might be wrong). Please make sure it has something")
        print("roughly like the following:")
        print("")
        print(" import versioneer")
        print(" setup( version=versioneer.get_version(),")
        print(" cmdclass=versioneer.get_cmdclass(), ...)")
        print("")
        errors += 1
    if setters:
        print("You should remove lines like 'versioneer.VCS = ' and")
        print("'versioneer.versionfile_source = ' . This configuration")
        print("now lives in setup.cfg, and should be removed from setup.py")
        print("")
        errors += 1
    return errors
1815 | ||
1816 | ||
if __name__ == "__main__":
    # Command-line entry point: 'python versioneer.py setup' installs
    # Versioneer into the current project and validates setup.py.
    # NOTE(review): raises IndexError when invoked with no arguments.
    cmd = sys.argv[1]
    if cmd == "setup":
        errors = do_setup()
        errors += scan_setup_py()
        # Non-zero exit when either installation or validation reported
        # problems.
        if errors:
            sys.exit(1)