Update upstream source from tag 'upstream/0.6.1'
Update to upstream version '0.6.1'
with Debian dir 6faa2c5578c3d9044029a4ed724c2444e7370de3
Sandro Tosi
1 year, 10 months ago
0 | [run] | |
1 | branch = True | |
2 | source = nb2plots | |
3 | include = */nb2plots/* | |
4 | omit = | |
5 | nb2plots/_version.py |
0 | nb2plots/_version.py export-subst |
0 | on: | |
1 | push: | |
2 | branches: | |
3 | - master | |
4 | pull_request: | |
5 | branches: | |
6 | - master | |
7 | ||
8 | name: Run tox tests | |
9 | ||
10 | jobs: | |
11 | tox_test: | |
12 | name: Run tox tests | |
13 | steps: | |
14 | - name: Checkout source code | |
15 | uses: actions/checkout@v2 | |
16 | - name: Run tox tests | |
17 | id: test | |
18 | uses: fedora-python/tox-github-action@master | |
19 | with: | |
20 | tox_env: ${{ matrix.tox_env }} | |
21 | dnf_install: /usr/bin/pandoc | |
22 | - name: Upload HTML coverage report | |
23 | uses: actions/upload-artifact@v2 | |
24 | with: | |
25 | name: html-report | |
26 | path: htmlcov | |
27 | if-no-files-found: ignore | |
28 | strategy: | |
29 | matrix: | |
30 | tox_env: [ | |
31 | # This list has to be maintained manually :( | |
32 | # You can get it from `tox -l | sed "s/$/,/"` | |
33 | py37, | |
34 | py38, | |
35 | py39, | |
36 | py310, | |
37 | ] | |
38 | ||
39 | # Use GitHub's Linux Docker host | |
40 | runs-on: ubuntu-latest |
0 | *.pyc | |
1 | *~ | |
2 | __pycache__/ | |
3 | build/ | |
4 | .coverage | |
5 | dist/ | |
6 | *.egg-info/ | |
7 | MANIFEST | |
8 | .ipynb_checkpoints/ | |
9 | htmlcov/ | |
10 | .cache/ | |
11 | .pytest_cache/ | |
12 | .tox/ |
0 | ################## | |
1 | nb2plots Changelog | |
2 | ################## | |
3 | ||
4 | MB below is short for Matthew Brett. | |
5 | ||
6 | ***** | |
7 | 0.6.1 | |
8 | ***** | |
9 | ||
10 | Bugfix release. | |
11 | ||
12 | * Fix for newer Docutils change in node attributes (MB). | |
13 | * Test fixes for newer Sphinx, Notebook, nbconvert (Lumir Balhar, MB). | |
14 | * Fix escapes in not-raw strings (Jerry James). | |
15 | * Fix import of Sequence for newer Python (Jerry James). | |
16 | * Update versioneer (MB). | |
17 | * Tox / Github actions for testing (Lumir Balhar). | |
18 | ||
19 | ***** | |
20 | 0.6.0 | |
21 | ***** | |
22 | ||
23 | New feature release. | |
24 | ||
25 | * Add hide from / show to options to nbplot directive. This gives a more | |
26 | flexible way of concealing individual plots from specific builders, such as | |
27 | the HTML or doctest builders. See the documentation of nbplot for more | |
28 | detail. | |
29 | * Fix bug writing Notebooks, Python files for ReST sources not at root of | |
30 | project tree. | |
31 | * Add options to set Jupyter kernel timeout when building full notebooks | |
32 | (thanks to Juan Nunez-Iglesias for the suggestion); | |
33 | * Make compatible with newest Numpy, nbconvert. | |
34 | * Refactor tests to use Pytest instead of Nose. | |
35 | * Refactor Sphinx testers and script running testers in own packages. | |
36 | ||
37 | ***** | |
38 | 0.5.2 | |
39 | ***** | |
40 | ||
41 | Bugfix release. | |
42 | ||
43 | * Fix compatibility with Sphinx 1.6 series; | |
44 | * Fix bug causing crash in LaTeX builder with code-links directives in | |
45 | subdirectories of main project (thanks to Jarrod Millman for the report). | |
46 | ||
47 | ***** | |
48 | 0.5.1 | |
49 | ***** | |
50 | ||
51 | New feature and bugfix release. Minor API breakage. | |
52 | ||
53 | * add explicit Markdown, Python, Notebook builders. You can build the whole | |
54 | site to Markdown with commands like ``sphinx-build -b markdown -d dt_dir | |
55 | src_dir md_dir``. | |
56 | * add ability of Markdown etc builders to resolve internal references relative | |
57 | to an HTTP build, using new ``markdown_http_base`` configuration option; | |
58 | * API break - change of template specification for epilogue to ``nbplot`` | |
59 | output; no longer include the source code in the template, but evaluate | |
60 | source code as ReST separately. This will cause anyone using custom | |
61 | templates pain, as their code will crash. To fix, simply remove the | |
62 | reference to source code in the template; | |
63 | * slightly extend range of doctree nodes that Markdown converter can deal | |
64 | with. | |
65 | ||
66 | *** | |
67 | 0.5 | |
68 | *** | |
69 | ||
70 | New feature release. | |
71 | ||
72 | * add ability to convert ReST to Markdown, Python .py files, clear and full | |
73 | Jupyter notebooks; | |
74 | * add ``mpl-interactive`` directive to signal presence of ``%matplotlib`` | |
75 | commands in notebook and ReST; | |
76 | * add ``code-links`` directive to add links to versions of ReST file as | |
77 | Python ``.py`` file, clear and full notebooks; | |
78 | * add ability to configure alternative versions of nbplot directives according | |
79 | to build-time configuration; | |
80 | * add directive to allow page-specific configuration for the directive | |
81 | alternative machinery; | |
82 | * add more general Sphinx testing machinery; | |
83 | * commit to setuptools; | |
84 | * improve conversion of matplotlib objects from output cells to doctest output | |
85 | in ReST page; | |
86 | * start basic documentation; | |
87 | * minor API breakage - do not split nbplot code blocks at ``plt.show()``. I | |
88 | discovered this horrible hack left over from the matplotlib plot directive | |
89 | when refactoring, and removed it to make refactoring easier; | |
90 | * minor API breakage - do not write individual nbplot directives as ``.py`` | |
91 | files; they don't make much sense for nbplot use, and the code was not | |
92 | linking to these files in any case; | |
93 | ||
94 | *** | |
95 | 0.4 | |
96 | *** | |
97 | ||
98 | New feature release. | |
99 | ||
100 | * ``nb2plots`` script now always outputs converted notebooks as UTF-8, rather | |
101 | than doing a ``print()`` to stdout; | |
102 | * add ``raises`` option to ``nbplot`` directive, to test that nbplot blocks | |
103 | raise errors while building the HTML; | |
104 | * fix CI testing to actually run on Python 3.3-3.5. | |
105 | ||
106 | ||
107 | .. vim: ft=rst |
0 | Metadata-Version: 1.1 | |
1 | Name: nb2plots | |
2 | Version: 0.6 | |
3 | Summary: Converting between ipython notebooks and sphinx docs | |
4 | Home-page: http://github.com/matthew-brett/nb2plots | |
5 | Author: Matthew Brett | |
6 | Author-email: matthew.brett@gmail.com | |
7 | License: BSD license | |
8 | Description-Content-Type: UNKNOWN | |
9 | Description: ################################################## | |
10 | nb2plots - converting between notebooks and sphinx | |
11 | ################################################## | |
12 | ||
13 | See the nb2plots documentation_ for more information. | |
14 | ||
15 | .. shared-text-body | |
16 | ||
17 | ************ | |
18 | What it does | |
19 | ************ | |
20 | ||
21 | ``nb2plots`` converts Jupyter_ notebooks to ReST_ files for Sphinx_, and back | |
22 | again. | |
23 | ||
24 | Nb2plots assumes that the ReST document will become the source for your Sphinx | |
25 | web pages, but also for future versions of the notebook. The notebook may | |
26 | serve as a draft for the polished ReST page, and an output format from the | |
27 | Sphinx build. Why? Read on. | |
28 | ||
29 | **************************************** | |
30 | Why convert Jupyter notebooks to Sphinx? | |
31 | **************************************** | |
32 | ||
33 | Jupyter notebooks are just what the doctor ordered when hacking up a quick | |
34 | tutorial, or preparing a software demo. The problems start when you want to | |
35 | do not-trivial edits to the notebooks, or you need features that notebooks | |
36 | don't have, such as flexible cross-referencing, extensible markup, and so on. | |
37 | Notebooks are also painful to use with version control. These times make you | |
38 | wish your notebook was in a standard extensible text format, such as ReST_. | |
39 | ||
40 | You could convert your notebook to ReST using the standard `nbconvert`_ | |
41 | command, but this gives rather ugly ReST, and you lose all the nice code | |
42 | execution and figure generation that the notebook is good at. | |
43 | ||
44 | Enter Nb2plots. The ``nb2plots`` command converts notebooks to specially | |
45 | formatted ReST pages. Use with:: | |
46 | ||
47 | nb2plots notebook.ipynb > with_plots.rst | |
48 | ||
49 | Nb2plots converts your notebook to not-very-ugly ReST, where the code cells | |
50 | become ``nbplot`` directives in your ReST file. | |
51 | ||
52 | Specifically, a notebook code cell like this:: | |
53 | ||
54 | a = 1 | |
55 | ||
56 | becomes (in the ReST document):: | |
57 | ||
58 | .. nbplot:: | |
59 | ||
60 | >>> a = 1 | |
61 | ||
62 | The ``nbplot`` directives run the contained code when Sphinx builds your ReST | |
63 | files, and embed the results of any plots that your code makes. Actually, | |
64 | ``nbplot`` is an extended and edited version of the `matplotlib plot | |
65 | directive`_. Building your pages runs all the code and regenerates the | |
66 | figures, and you get much of the reproducible goodness of the notebook | |
67 | experience. | |
68 | ||
69 | You can also run the standard Sphinx ``doctest`` extension over your pages to | |
70 | check the doctest output of the code cells. | |
71 | ||
72 | The ReST version of your notebook has many advantages - it is easier to edit | |
73 | in your favorite text editor, and you can extend and configure the execution | |
74 | and display of the code in several different ways. For example, you can hide | |
75 | some code cells (Nbplot directives) if the code is not interesting to your | |
76 | point, but you still want the generated figure. You can configure your Nbplot | |
77 | directives to run different code for different configurations. For these | |
78 | options, see |nbplot-documentation|. But - what do you lose, when going from | |
79 | a notebook to a Nb2plots ReST document? | |
80 | ||
81 | ********************************** | |
82 | I want notebooks and .py files too | |
83 | ********************************** | |
84 | ||
85 | You may also want a version of your document that your users can execute. | |
86 | Perhaps the page build is generating some tricky errors or warnings, and you | |
87 | want to experiment with the code in the page interactively. Perhaps your | |
88 | users are used to notebooks, and prefer the code in that format. | |
89 | ||
90 | Nb2plots also contains Sphinx extensions that cause the Sphinx build to | |
91 | generate Python code files and Jupyter notebooks from the ReST source. When | |
92 | you add the Nb2plots ReST directive ``code-links`` to your ReST page, it will | |
93 | cause the Sphinx build to create a Python code file and notebook versions of | |
94 | your page, and adds download links to these versions:: | |
95 | ||
96 | .. code-links:: | |
97 | ||
98 | See |code-links-documentation| for details. | |
99 | ||
100 | ************************** | |
101 | Show me what it looks like | |
102 | ************************** | |
103 | ||
104 | For a very simple example, see |worked-example|. | |
105 | ||
106 | For a moderate-sized teaching site that makes extensive use of Nb2plots, see | |
107 | https://matthew-brett.github.com/teaching. | |
108 | ||
109 | ************ | |
110 | Installation | |
111 | ************ | |
112 | ||
113 | :: | |
114 | ||
115 | pip install nb2plots | |
116 | ||
117 | You will need Pandoc_ installed and available as the ``pandoc`` command. | |
118 | ||
119 | To install Pandoc on OSX, we recommend homebrew_:: | |
120 | ||
121 | brew install pandoc | |
122 | ||
123 | ************* | |
124 | Configuration | |
125 | ************* | |
126 | ||
127 | Add the following to your Sphinx ``conf.py`` file:: | |
128 | ||
129 | extensions = ["nb2plots"] | |
130 | ||
131 | See |nbplot-documentation| for the various ``conf.py`` configuration settings. | |
132 | ||
133 | **** | |
134 | Code | |
135 | **** | |
136 | ||
137 | See https://github.com/matthew-brett/nb2plots | |
138 | ||
139 | Released under the BSD two-clause license - see the file ``LICENSE`` in the | |
140 | source distribution. | |
141 | ||
142 | `travis-ci <https://travis-ci.org/matthew-brett/nb2plots>`_ kindly tests the | |
143 | code automatically under Python versions 2.7, and 3.3 through 3.5. | |
144 | ||
145 | The latest released version is at https://pypi.python.org/pypi/nb2plots | |
146 | ||
147 | ***** | |
148 | Tests | |
149 | ***** | |
150 | ||
151 | * Install ``nb2plots`` | |
152 | * Install the pytest_ testing framework, the ``mock`` package, and the | |
153 | ``scripttester`` package:: | |
154 | ||
155 | pip install pytest mock scripttester | |
156 | ||
157 | * Run the tests with:: | |
158 | ||
159 | py.test --pyargs nb2plots | |
160 | ||
161 | ******* | |
162 | Support | |
163 | ******* | |
164 | ||
165 | Please put up issues on the `nb2plots issue tracker`_. | |
166 | ||
167 | .. standalone-references | |
168 | ||
169 | .. |nbplot-documentation| replace:: `nbplots documentation`_ | |
170 | .. |worked-example| replace:: `worked example`_ | |
171 | .. |code-links-documentation| replace:: `code-links documentation`_ | |
172 | .. _nbplots documentation: | |
173 | https://matthew-brett.github.com/nb2plots/nbplots.html | |
174 | .. _code-links documentation: | |
175 | https://matthew-brett.github.com/nb2plots/code_links.html | |
176 | .. _worked example: | |
177 | https://matthew-brett.github.com/nb2plots/worked_example.html | |
178 | .. _documentation: https://matthew-brett.github.com/nb2plots | |
179 | .. _pandoc: http://pandoc.org | |
180 | .. _jupyter: https://jupyter.org | |
181 | .. _homebrew: https://brew.sh | |
182 | .. _sphinx: http://sphinx-doc.org | |
183 | .. _rest: http://docutils.sourceforge.net/rst.html | |
184 | .. _nb2plots issue tracker: https://github.com/matthew-brett/nb2plots/issues | |
185 | .. _matplotlib plot directive: http://matplotlib.org/sampledoc/extensions.html | |
186 | .. _nbconvert: http://nbconvert.readthedocs.org/en/latest/ | |
187 | .. _pytest: https://pytest.readthedocs.io | |
188 | .. _mock: https://github.com/testing-cabal/mock | |
189 | ||
190 | Platform: UNKNOWN | |
191 | Classifier: Development Status :: 2 - Pre-Alpha | |
192 | Classifier: Environment :: Console | |
193 | Classifier: Intended Audience :: Developers | |
194 | Classifier: Intended Audience :: Science/Research | |
195 | Classifier: License :: OSI Approved :: BSD License | |
196 | Classifier: Programming Language :: Python | |
197 | Classifier: Programming Language :: Python :: 3 | |
198 | Classifier: Topic :: Scientific/Engineering | |
199 | Classifier: Operating System :: Microsoft :: Windows | |
200 | Classifier: Operating System :: POSIX | |
201 | Classifier: Operating System :: Unix | |
202 | Classifier: Operating System :: MacOS |
95 | 95 | For a very simple example, see |worked-example|. |
96 | 96 | |
97 | 97 | For a moderate-sized teaching site that makes extensive use of Nb2plots, see |
98 | https://matthew-brett.github.com/teaching. | |
98 | https://matthew-brett.github.io/teaching. | |
99 | 99 | |
100 | 100 | ************ |
101 | 101 | Installation |
131 | 131 | source distribution. |
132 | 132 | |
133 | 133 | `travis-ci <https://travis-ci.org/matthew-brett/nb2plots>`_ kindly tests the |
134 | code automatically under Python versions 2.7, and 3.3 through 3.5. | |
134 | code automatically under Python versions 2.7, and 3.5 through 3.8. | |
135 | 135 | |
136 | 136 | The latest released version is at https://pypi.python.org/pypi/nb2plots |
137 | 137 | |
161 | 161 | .. |worked-example| replace:: `worked example`_ |
162 | 162 | .. |code-links-documentation| replace:: `code-links documentation`_ |
163 | 163 | .. _nbplots documentation: |
164 | https://matthew-brett.github.com/nb2plots/nbplots.html | |
164 | https://matthew-brett.github.io/nb2plots/nbplots.html | |
165 | 165 | .. _code-links documentation: |
166 | https://matthew-brett.github.com/nb2plots/code_links.html | |
166 | https://matthew-brett.github.io/nb2plots/code_links.html | |
167 | 167 | .. _worked example: |
168 | https://matthew-brett.github.com/nb2plots/worked_example.html | |
169 | .. _documentation: https://matthew-brett.github.com/nb2plots | |
168 | https://matthew-brett.github.io/nb2plots/worked_example.html | |
169 | .. _documentation: https://matthew-brett.github.io/nb2plots | |
170 | 170 | .. _pandoc: http://pandoc.org |
171 | 171 | .. _jupyter: jupyter.org |
172 | 172 | .. _homebrew: brew.sh |
0 | # Makefile for Sphinx documentation | |
1 | # | |
2 | ||
3 | # You can set these variables from the command line. | |
4 | SPHINXOPTS ?= | |
5 | SPHINXBUILD = sphinx-build | |
6 | PAPER = | |
7 | BUILDDIR = _build | |
8 | ||
9 | # Internal variables. | |
10 | PAPEROPT_a4 = -D latex_paper_size=a4 | |
11 | PAPEROPT_letter = -D latex_paper_size=letter | |
12 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . | |
13 | # the i18n builder cannot share the environment and doctrees with the others | |
14 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . | |
15 | ||
16 | .PHONY: help | |
17 | help: | |
18 | @echo "Please use \`make <target>' where <target> is one of" | |
19 | @echo " html to make standalone HTML files" | |
20 | @echo " dirhtml to make HTML files named index.html in directories" | |
21 | @echo " singlehtml to make a single large HTML file" | |
22 | @echo " pickle to make pickle files" | |
23 | @echo " json to make JSON files" | |
24 | @echo " htmlhelp to make HTML files and a HTML help project" | |
25 | @echo " qthelp to make HTML files and a qthelp project" | |
26 | @echo " applehelp to make an Apple Help Book" | |
27 | @echo " devhelp to make HTML files and a Devhelp project" | |
28 | @echo " epub to make an epub" | |
29 | @echo " epub3 to make an epub3" | |
30 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" | |
31 | @echo " latexpdf to make LaTeX files and run them through pdflatex" | |
32 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" | |
33 | @echo " text to make text files" | |
34 | @echo " man to make manual pages" | |
35 | @echo " texinfo to make Texinfo files" | |
36 | @echo " info to make Texinfo files and run them through makeinfo" | |
37 | @echo " gettext to make PO message catalogs" | |
38 | @echo " changes to make an overview of all changed/added/deprecated items" | |
39 | @echo " xml to make Docutils-native XML files" | |
40 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" | |
41 | @echo " linkcheck to check all external links for integrity" | |
42 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" | |
43 | @echo " coverage to run coverage check of the documentation (if enabled)" | |
44 | @echo " dummy to check syntax errors of document sources" | |
45 | ||
46 | .PHONY: clean | |
47 | clean: | |
48 | rm -rf $(BUILDDIR)/* | |
49 | ||
50 | .PHONY: html | |
51 | html: example html-only | |
52 | ||
53 | .PHONY: html-only | |
54 | html-only: | |
55 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html | |
56 | @echo | |
57 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." | |
58 | ||
59 | .PHONY: dirhtml | |
60 | dirhtml: | |
61 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml | |
62 | @echo | |
63 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." | |
64 | ||
65 | .PHONY: singlehtml | |
66 | singlehtml: | |
67 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml | |
68 | @echo | |
69 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." | |
70 | ||
71 | .PHONY: pickle | |
72 | pickle: | |
73 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle | |
74 | @echo | |
75 | @echo "Build finished; now you can process the pickle files." | |
76 | ||
77 | .PHONY: json | |
78 | json: | |
79 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json | |
80 | @echo | |
81 | @echo "Build finished; now you can process the JSON files." | |
82 | ||
83 | .PHONY: htmlhelp | |
84 | htmlhelp: | |
85 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp | |
86 | @echo | |
87 | @echo "Build finished; now you can run HTML Help Workshop with the" \ | |
88 | ".hhp project file in $(BUILDDIR)/htmlhelp." | |
89 | ||
90 | .PHONY: qthelp | |
91 | qthelp: | |
92 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp | |
93 | @echo | |
94 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ | |
95 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" | |
96 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/nb2plots.qhcp" | |
97 | @echo "To view the help file:" | |
98 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/nb2plots.qhc" | |
99 | ||
100 | .PHONY: applehelp | |
101 | applehelp: | |
102 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp | |
103 | @echo | |
104 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." | |
105 | @echo "N.B. You won't be able to view it unless you put it in" \ | |
106 | "~/Library/Documentation/Help or install it in your application" \ | |
107 | "bundle." | |
108 | ||
109 | .PHONY: devhelp | |
110 | devhelp: | |
111 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp | |
112 | @echo | |
113 | @echo "Build finished." | |
114 | @echo "To view the help file:" | |
115 | @echo "# mkdir -p $$HOME/.local/share/devhelp/nb2plots" | |
116 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/nb2plots" | |
117 | @echo "# devhelp" | |
118 | ||
119 | .PHONY: epub | |
120 | epub: | |
121 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub | |
122 | @echo | |
123 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." | |
124 | ||
125 | .PHONY: epub3 | |
126 | epub3: | |
127 | $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3 | |
128 | @echo | |
129 | @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3." | |
130 | ||
131 | .PHONY: latex | |
132 | latex: | |
133 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex | |
134 | @echo | |
135 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." | |
136 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ | |
137 | "(use \`make latexpdf' here to do that automatically)." | |
138 | ||
139 | .PHONY: latexpdf | |
140 | latexpdf: | |
141 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex | |
142 | @echo "Running LaTeX files through pdflatex..." | |
143 | $(MAKE) -C $(BUILDDIR)/latex all-pdf | |
144 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." | |
145 | ||
146 | .PHONY: latexpdfja | |
147 | latexpdfja: | |
148 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex | |
149 | @echo "Running LaTeX files through platex and dvipdfmx..." | |
150 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja | |
151 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." | |
152 | ||
153 | .PHONY: text | |
154 | text: | |
155 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text | |
156 | @echo | |
157 | @echo "Build finished. The text files are in $(BUILDDIR)/text." | |
158 | ||
159 | .PHONY: man | |
160 | man: | |
161 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man | |
162 | @echo | |
163 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." | |
164 | ||
165 | .PHONY: texinfo | |
166 | texinfo: | |
167 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo | |
168 | @echo | |
169 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." | |
170 | @echo "Run \`make' in that directory to run these through makeinfo" \ | |
171 | "(use \`make info' here to do that automatically)." | |
172 | ||
173 | .PHONY: info | |
174 | info: | |
175 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo | |
176 | @echo "Running Texinfo files through makeinfo..." | |
177 | make -C $(BUILDDIR)/texinfo info | |
178 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." | |
179 | ||
180 | .PHONY: gettext | |
181 | gettext: | |
182 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale | |
183 | @echo | |
184 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." | |
185 | ||
186 | .PHONY: changes | |
187 | changes: | |
188 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes | |
189 | @echo | |
190 | @echo "The overview file is in $(BUILDDIR)/changes." | |
191 | ||
192 | .PHONY: linkcheck | |
193 | linkcheck: | |
194 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck | |
195 | @echo | |
196 | @echo "Link check complete; look for any errors in the above output " \ | |
197 | "or in $(BUILDDIR)/linkcheck/output.txt." | |
198 | ||
199 | .PHONY: doctest | |
200 | doctest: | |
201 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest | |
202 | @echo "Testing of doctests in the sources finished, look at the " \ | |
203 | "results in $(BUILDDIR)/doctest/output.txt." | |
204 | ||
205 | .PHONY: coverage | |
206 | coverage: | |
207 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage | |
208 | @echo "Testing of coverage in the sources finished, look at the " \ | |
209 | "results in $(BUILDDIR)/coverage/python.txt." | |
210 | ||
211 | .PHONY: xml | |
212 | xml: | |
213 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml | |
214 | @echo | |
215 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." | |
216 | ||
217 | .PHONY: pseudoxml | |
218 | pseudoxml: | |
219 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml | |
220 | @echo | |
221 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." | |
222 | ||
223 | .PHONY: dummy | |
224 | dummy: | |
225 | $(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy | |
226 | @echo | |
227 | @echo "Build finished. Dummy builder generates no files." | |
228 | ||
229 | example: example_notebook.ipynb | |
230 | mkdir -p $(BUILDDIR)/html | |
231 | jupyter nbconvert --to html example_notebook.ipynb --stdout > \ | |
232 | $(BUILDDIR)/html/example_notebook.html | |
233 | nb2plots example_notebook.ipynb > converted_example.rst | |
234 | cp converted_example.rst converted_plus_notebooks.rst | |
235 | echo '.. code-links::' >> converted_plus_notebooks.rst | |
236 | ||
237 | github: html | |
238 | ghp-import -n $(BUILDDIR)/html/ | |
239 | git push origin gh-pages:gh-pages --force | |
240 | @echo | |
241 | @echo "Published to Github" |
0 | ###################### | |
1 | nb2plots documentation | |
2 | ###################### | |
3 | ||
4 | To build the documentation, change to the ``doc`` directory (containing | |
5 | this file) and run:: | |
6 | ||
7 | pip install -r ../doc-requirements.txt | |
8 | make html | |
9 | ||
10 | To upload to github pages:: | |
11 | ||
12 | make github | |
13 | ||
14 | You will need repository write permissions to upload pages to the main github | |
15 | repository. |
0 | .. _code-links-documentation: | |
1 | ||
2 | ############################## | |
3 | Using the code-links directive | |
4 | ############################## | |
5 | ||
6 | The ``code-links`` directive is a short cut for adding links to your ReST page | |
7 | pointing to Python code files and Jupyter notebooks. When Sphinx sees this | |
8 | directive, it converts the ReST page to a Python code file and notebooks, and | |
9 | adds links to these files in the built HTML. Use like this: | |
10 | ||
11 | .. code-block:: rest | |
12 | ||
13 | .. code-links:: | |
14 | ||
15 | Here is an example, generating very boring code and notebooks: | |
16 | ||
17 | .. code-links:: | |
18 | ||
19 | .. _code-links-directive: | |
20 | ||
21 | ******************** | |
22 | code-links directive | |
23 | ******************** | |
24 | ||
25 | .. automodule:: nb2plots.codelinks | |
26 | ||
27 | .. include:: links_names.inc |
0 | #!/usr/bin/env python3 | |
1 | # -*- coding: utf-8 -*- | |
2 | # | |
3 | # nb2plots documentation build configuration file, created by | |
4 | # sphinx-quickstart on Fri Dec 16 08:41:58 2016. | |
5 | # | |
6 | # This file is execfile()d with the current directory set to its | |
7 | # containing dir. | |
8 | # | |
9 | # Note that not all possible configuration values are present in this | |
10 | # autogenerated file. | |
11 | # | |
12 | # All configuration values have a default; values that are commented out | |
13 | # serve to show the default. | |
14 | ||
15 | # If extensions (or modules to document with autodoc) are in another directory, | |
16 | # add these directories to sys.path here. If the directory is relative to the | |
17 | # documentation root, use os.path.abspath to make it absolute, like shown here. | |
18 | # | |
19 | # import os | |
20 | # import sys | |
21 | # sys.path.insert(0, os.path.abspath('.')) | |
22 | ||
23 | # -- General configuration ------------------------------------------------ | |
24 | ||
25 | # If your documentation needs a minimal Sphinx version, state it here. | |
26 | # | |
27 | # needs_sphinx = '1.0' | |
28 | ||
29 | # Add any Sphinx extension module names here, as strings. They can be | |
30 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom | |
31 | # ones. | |
32 | extensions = [ | |
33 | 'sphinx.ext.autodoc', | |
34 | 'sphinx.ext.doctest', | |
35 | 'sphinx.ext.mathjax', | |
36 | 'sphinx.ext.githubpages', | |
37 | 'texext.math_dollar', | |
38 | 'numpydoc', | |
39 | 'nb2plots', | |
40 | ] | |
41 | ||
42 | # Add any paths that contain templates here, relative to this directory. | |
43 | templates_path = ['_templates'] | |
44 | ||
45 | # The suffix(es) of source filenames. | |
46 | # You can specify multiple suffix as a list of string: | |
47 | # | |
48 | # source_suffix = ['.rst', '.md'] | |
49 | source_suffix = '.rst' | |
50 | ||
51 | # The encoding of source files. | |
52 | # | |
53 | # source_encoding = 'utf-8-sig' | |
54 | ||
55 | # The master toctree document. | |
56 | master_doc = 'index' | |
57 | ||
58 | # General information about the project. | |
59 | project = 'nb2plots' | |
60 | copyright = '2016-2018, Matthew Brett' | |
61 | author = 'Matthew Brett' | |
62 | ||
63 | # The version info for the project you're documenting, acts as replacement for | |
64 | # |version| and |release|, also used in various other places throughout the | |
65 | # built documents. | |
66 | # | |
67 | # The full version, including alpha/beta/rc tags. | |
68 | import nb2plots | |
69 | release = nb2plots.__version__ | |
70 | # The short X.Y version. | |
71 | version = '.'.join(release.split('.')[:2]) | |
72 | ||
73 | # The language for content autogenerated by Sphinx. Refer to documentation | |
74 | # for a list of supported languages. | |
75 | # | |
76 | # This is also used if you do content translation via gettext catalogs. | |
77 | # Usually you set "language" from the command line for these cases. | |
78 | language = None | |
79 | ||
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'README.rst']

# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
125 | ||
# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []

# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = 'nb2plots v0.4'

# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None

# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []

# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}

# If false, no module index is generated.
#
# html_domain_indices = True

# If false, no index is generated.
#
# html_use_index = True

# If true, the index is split into individual pages for each letter.
#
# html_split_index = False

# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None

# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'

# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' users can customize the `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}

# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'

# Output file base name for HTML help builder.
htmlhelp_basename = 'nb2plotsdoc'
246 | ||
# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'nb2plots.tex', 'nb2plots Documentation',
     'Matthew Brett', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False

# If true, show page references after internal links.
#
# latex_show_pagerefs = False

# If true, show URL addresses after external links.
#
# latex_show_urls = False

# Documents to append as an appendix to all manuals.
#
# latex_appendices = []

# If false, will not define \strong, \code, \titleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True

# If false, no module index is generated.
#
# latex_domain_indices = True
306 | ||
307 | ||
# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'nb2plots', 'nb2plots Documentation',
     [author], 1)
]

# If true, show URL addresses after external links.
#
# man_show_urls = False


# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'nb2plots', 'nb2plots Documentation',
     author, 'nb2plots', 'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []

# If false, no module index is generated.
#
# texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False

# nb2plots-specific setting: presumably the base web URL used when resolving
# relative links in generated Markdown / notebook output -- TODO confirm
# against the nb2plots converter documentation.
markdown_http_base = 'https://matthew-brett.github.io/nb2plots'

# Doctest configuration. Use older numpy array representation.
doctest_global_setup = 'from nb2plots.testing import setup_test; setup_test()'
0 | ########################### | |
1 | Design of a notebook writer | |
2 | ########################### | |
3 | ||
4 | Pages that will be converted to notebooks have, somewhere in them, one of two | |
interpreted text roles, like this::
6 | ||
7 | :clearnotebook:`Download as notebook without outputs` | |
8 | :fullnotebook:`Download as notebook with outputs` | |
9 | ||
10 | These generate page content equivalent to ``:download:`Download as notebook | |
11 | <pagename>.ipynb```, where ``<pagename>.rst`` is the name of the ReST page | |
12 | containing the notebook role. ``:clearnotebook:`` creates a notebook without | |
13 | outputs, and ``:fullnotebook:`` creates the notebook and executes it, writing | |
14 | the notebook with generated outputs. | |
15 | ||
16 | The text within the backticks is the link text that will appear in the build | |
17 | html, unless the text is ``.`` (a period). This special case causes the link | |
18 | text to be ``Download as IPython notebook``. | |
19 | ||
20 | It appears that docutils insists that there must be some text between the | |
21 | backticks. | |
22 | ||
23 | For both directives, you can specify the written filename of the notebook with | |
24 | the ``text <target>`` form of the role:: | |
25 | ||
26 | :clearnotebook:`to get this page as an ipynb file <my_nb.ipynb>` | |
27 | ||
28 | If you want to specify the filename, you must specify link text as well. For | |
29 | example ``:clearnotebook:`<my_nb.ipynb>``` will result in the default | |
30 | filename, and a link text of ``<my_nb.ipynb>``. | |
31 | ||
32 | You can have multiple notebook roles pointing to the same file (default or | |
33 | otherwise), as long as they are all of the same type (clear or full). For | |
example, you can have multiple ``:clearnotebook:`.``` roles in one ReST page
(all pointing to ``<pagename>.ipynb``), but you cannot have a
36 | ``:fullnotebook:`.``` role in that page, because that would mean that the | |
37 | clear and full notebook roles were trying to point to the same file. | |
38 | ||
39 | ********* | |
40 | Mechanics | |
41 | ********* | |
42 | ||
43 | On page parse (``doctree-resolved`` event), register that the page containing | |
44 | the notebook role should generate notebook, and whether this should be "full" | |
45 | or "clear". Store in ``app.env``, with link to | |
46 | ||
47 | Either collect the doctrees in the ``doctree-resolved`` event, or collect the | |
48 | docnames there, and then generate the notebooks in ``html-collect-pages`` | |
49 | event. | |
50 | ||
Add notebook visitor functions via ``app.add_node(clearnotebook,
html=(visit_notebook_node, depart_notebook_node))`` in the ``setup`` function.
53 | ||
54 | Maybe make a new ``Writer`` class to write the notebook from the doctree. |
0 | ######################################### | |
1 | Improving the ReST to Markdown conversion | |
2 | ######################################### | |
3 | ||
4 | Nb2plots includes a converter from ReST to Markdown. As you will see in the | |
5 | ``LICENSE`` file, the basis for this converter comes from the `rst2md | |
6 | project`_ by Chris Wrench, with thanks. At the time of writing (December | |
7 | 2016) the Nb2plots converter could deal with a considerably larger range of | |
8 | Markdown than the original rst2md project. However, the converter still does | |
9 | not deal specifically with a fairly large number of Sphinx / ReST constructs, | |
10 | so we would be very grateful for your help in improving the converter. These | |
11 | are some hints how to go about that. | |
12 | ||
13 | ******** | |
14 | Workflow | |
15 | ******** | |
16 | ||
17 | * Set yourself up with a git fork and clone of the `nb2plots code`_. Install | |
18 | the development code with something like: | |
19 | ||
20 | .. code-block:: bash | |
21 | ||
22 | pip install -e . | |
23 | ||
24 | from the root ``nb2plots`` directory (the directory containing the | |
``setup.py`` file).
26 | ||
27 | * Make sure the current tests pass with: | |
28 | ||
29 | .. code-block:: bash | |
30 | ||
31 | py.test nb2plots | |
32 | ||
33 | * Identify a ReST construct you would like to handle, or handle better. Let's | |
say you've decided to make the converter do something sensible with the
35 | ``:PEP:`` text role in ReST; | |
36 | ||
37 | * Make a new ReST file that uses the construct, and put it in the | |
38 | ``nb2plots/tests/rst_md_files`` directory. For example your file could be | |
39 | ``nb2plots/tests/rst_md_files/pep.rst`` with contents: | |
40 | ||
41 | .. code-block:: rest | |
42 | ||
43 | Test converting the :pep:`8` text role to Markdown | |
44 | ||
The file may have any name, as long as it has a ``.rst`` extension.
46 | ||
47 | * You might want to check what doctree the ReST file generates. The doctree | |
48 | is the form that the Markdown converter will use. See the pseudo XML | |
49 | version of the doctree with: | |
50 | ||
51 | .. code-block:: bash | |
52 | ||
53 | sphinx2pxml nb2plots/tests/rst_md_files/pep.rst | |
54 | ||
55 | This will show you the doctree node types and their attributes. | |
56 | ||
57 | * If your construct is valid in vanilla `docutils`_ ReST, then run the | |
58 | ``rst2md`` converter over the file and see what it looks like. In the case | |
59 | of the ``pep`` text role, you might see a message that the ``pep`` role is | |
60 | not yet supported, and the output will omit the ``pep`` role contents: | |
61 | ||
62 | .. code-block:: bash | |
63 | ||
64 | rst2md nb2plots/tests/rst_md_files/pep.rst | |
65 | ||
66 | * If your construct is only valid for Sphinx_ ReST, then run the ``sphinx2md`` | |
67 | converter on it, and see what happens: | |
68 | ||
69 | .. code-block:: bash | |
70 | ||
71 | sphinx2md nb2plots/tests/rst_md_files/pep.rst | |
72 | ||
73 | * Now have a look at the code in the ``nb2plots.doctree2md`` module. Add new | |
74 | ``visit_`` / ``depart_`` methods or modify the existing methods to handle | |
75 | your construct better. Try the ``rst2md`` and ``sphinx2md`` commands on the | |
76 | ReST file to see what the output looks like; | |
77 | ||
78 | * If your Markdown is valid docutils ReST, write the current output of | |
79 | ``rst2md`` to the ``rst_md_files`` directory with a ``.md`` extension: | |
80 | ||
81 | .. code-block:: bash | |
82 | ||
83 | rst2md nb2plots/tests/rst_md_files/pep.rst > nb2plots/tests/rst_md_files/pep.md | |
84 | ||
85 | If your Markdown is not valid docutils ReST, you can skip the ``rst2md`` | |
test by writing an output file containing the word "skip":
87 | ||
88 | .. code-block:: bash | |
89 | ||
90 | echo "skip" > nb2plots/tests/rst_md_files/pep.md | |
91 | ||
92 | If the ``sphinx2md`` script should give a different output from the | |
93 | ``rst2md`` docutils converter, write that output with a ``.smd`` extension: | |
94 | ||
95 | .. code-block:: bash | |
96 | ||
97 | sphinx2md nb2plots/tests/rst_md_files/pep.rst > nb2plots/tests/rst_md_files/pep.smd | |
98 | ||
These tests also test the output of the original ReST page (here ``pep.rst``)
100 | to Jupyter notebooks, and Python ``.py`` files. Check these conversions | |
101 | with the matching scripts: | |
102 | ||
103 | .. code-block:: bash | |
104 | ||
105 | sphinx2py nb2plots/tests/rst_md_files/pep.rst | |
106 | ||
107 | .. code-block:: bash | |
108 | ||
109 | sphinx2nb nb2plots/tests/rst_md_files/pep.rst | |
110 | ||
111 | When you are satisfied, build the test files to check against: | |
112 | ||
113 | .. code-block:: bash | |
114 | ||
115 | sphinx2py nb2plots/tests/rst_md_files/pep.rst > nb2plots/tests/rst_md_files/pep.py | |
116 | ||
117 | .. code-block:: bash | |
118 | ||
119 | sphinx2nb nb2plots/tests/rst_md_files/pep.rst > nb2plots/tests/rst_md_files/pep.ipynb | |
120 | ||
121 | * Run the relevant tests: | |
122 | ||
123 | .. code-block:: bash | |
124 | ||
125 | py.test nb2plots/tests/test_doctree2md.py | |
126 | py.test nb2plots/tests/test_sphinx2md.py | |
127 | py.test nb2plots/tests/test_doctree2py.py | |
128 | py.test nb2plots/tests/test_doctree2nb.py | |
129 | ||
130 | These will test your new ReST file, and the various other example ReST | |
131 | files, against their expected Markdown, code and notebook outputs. | |
132 | ||
133 | * Once the relevant tests are fixed, run all the tests to check that the rest | |
134 | of the code (such as notebook conversion) is still working as expected. If | |
135 | the tests fail, but you think your output is better than the previous output | |
136 | that the tests are using, feel free to edit. | |
137 | ||
138 | .. code-block:: bash | |
139 | ||
140 | py.test nb2plots | |
141 | ||
142 | * Make a pull request to `nb2plots github`_. | |
143 | ||
144 | .. include:: ../links_names.inc |
0 | ################## | |
1 | Releasing nb2plots | |
2 | ################## | |
3 | ||
4 | * Review the open list of `nb2plots issues`_. Check whether there are | |
5 | outstanding issues that can be closed, and whether there are any issues that | |
6 | should delay the release. Label them. | |
7 | ||
8 | * Review and update the release notes. Review and update the :file:`Changelog` | |
9 | file. Get a partial list of contributors with something like:: | |
10 | ||
11 | git log 0.2.0.. | grep '^Author' | cut -d' ' -f 2- | sort | uniq | |
12 | ||
13 | where ``0.2.0`` was the last release tag name. | |
14 | ||
15 | Then manually go over ``git shortlog 0.2.0..`` to make sure the release notes | |
16 | are as complete as possible and that every contributor was recognized. | |
17 | ||
18 | * Use the opportunity to update the ``.mailmap`` file if there are any | |
19 | duplicate authors listed from ``git shortlog -ns``. | |
20 | ||
21 | * Check the copyright years in ``doc/conf.py`` and ``LICENSE``; | |
22 | ||
23 | * Check the output of:: | |
24 | ||
25 | rst2html.py README.rst > ~/tmp/readme.html | |
26 | ||
because this will be the output used by PyPi_.
28 | ||
29 | * Check `nb2plots travis-ci`_. | |
30 | ||
31 | * Once everything looks good, you are ready to upload the source release to | |
32 | PyPi. See `setuptools intro`_. Make sure you have a file | |
``$HOME/.pypirc``, of form::
34 | ||
35 | [distutils] | |
36 | index-servers = | |
37 | pypi | |
38 | ||
39 | [pypi] | |
40 | repository: https://upload.pypi.io/legacy/ | |
41 | username:your.pypi.username | |
42 | password:your-password | |
43 | ||
44 | * Tag the release. This will also set the version (we are using versioneer_ | |
45 | to manage versions via git tags):: | |
46 | ||
47 | git tag -s 0.3 | |
48 | ||
49 | * Clean:: | |
50 | ||
51 | make distclean | |
52 | # Check no files outside version control that you want to keep | |
53 | git status | |
54 | # Nuke | |
55 | git clean -fxd | |
56 | ||
57 | * When ready:: | |
58 | ||
59 | python setup.py sdist --formats=zip | |
60 | # -s flag to sign the release | |
61 | twine upload -r warehouse -s dist/nb2plots*zip | |
62 | ||
63 | * Upload the release commit and tag to github:: | |
64 | ||
65 | git push | |
66 | git push --tags | |
67 | ||
68 | * Push the docs to github pages with:: | |
69 | ||
70 | cd doc | |
71 | make github | |
72 | ||
73 | .. include:: ../links_names.inc |
0 | ######################################## | |
1 | Test Sphinx builds on an example project | |
2 | ######################################## | |
3 | ||
4 | If you've developed Sphinx extensions before, you'll know that they can be | |
5 | hard to test. | |
6 | ||
7 | Have a look at the machinery in `sphinxtesters | |
8 | <https://github.com/matthew-brett/sphinxtesters>`_ for a somewhat general way | |
9 | of writing tests for Sphinx builds, and ``nb2plots/tests/test_nbplots.py`` for | |
10 | many examples using that machinery. | |
11 | ||
12 | Sometimes, what you really want to do, is try an actual Sphinx build from the | |
13 | command line. | |
14 | ||
15 | At least, that is what I often want to do, so I made an example Sphinx project | |
16 | to play with at ``nb2plots/tests/futz``. To get started: | |
17 | ||
18 | .. code-block:: bash | |
19 | ||
20 | cd nb2plots/tests/futz | |
21 | make init | |
22 | ||
23 | Now you can edit the files in the example Sphinx project in the ``proj1`` | |
24 | directory. For example, you might want to try out some ReST by editing the | |
25 | example page ``proj1/a_page.rst``. Try the HTML build with: | |
26 | ||
27 | .. code-block:: bash | |
28 | ||
29 | make html | |
30 | ||
31 | Have a look at the simple ``Makefile`` for some other ``make`` targets. |
0 | { | |
1 | "cells": [ | |
2 | { | |
3 | "cell_type": "markdown", | |
4 | "metadata": {}, | |
5 | "source": [ | |
6 | "## An interesting example\n", | |
7 | "\n", | |
8 | "This is an interesting example." | |
9 | ] | |
10 | }, | |
11 | { | |
12 | "cell_type": "code", | |
13 | "execution_count": 1, | |
14 | "metadata": { | |
15 | "collapsed": false | |
16 | }, | |
17 | "outputs": [ | |
18 | { | |
19 | "data": { | |
20 | "text/plain": [ | |
21 | "array([ 0. , 0.00628947, 0.01257895, 0.01886842, 0.0251579 ,\n", | |
22 | " 0.03144737, 0.03773685, 0.04402632, 0.0503158 , 0.05660527])" | |
23 | ] | |
24 | }, | |
25 | "execution_count": 1, | |
26 | "metadata": {}, | |
27 | "output_type": "execute_result" | |
28 | } | |
29 | ], | |
30 | "source": [ | |
31 | "import numpy as np\n", | |
32 | "x = np.linspace(0, 2 * np.pi, 1000)\n", | |
33 | "x[:10]" | |
34 | ] | |
35 | }, | |
36 | { | |
37 | "cell_type": "markdown", | |
38 | "metadata": {}, | |
39 | "source": [ | |
40 | "Even more interesting than that:" | |
41 | ] | |
42 | }, | |
43 | { | |
44 | "cell_type": "code", | |
45 | "execution_count": 2, | |
46 | "metadata": { | |
47 | "collapsed": true | |
48 | }, | |
49 | "outputs": [], | |
50 | "source": [ | |
51 | "%matplotlib inline" | |
52 | ] | |
53 | }, | |
54 | { | |
55 | "cell_type": "code", | |
56 | "execution_count": 3, | |
57 | "metadata": { | |
58 | "collapsed": false | |
59 | }, | |
60 | "outputs": [ | |
61 | { | |
62 | "data": { | |
63 | "text/plain": [ | |
64 | "[<matplotlib.lines.Line2D at 0x107e9f8d0>]" | |
65 | ] | |
66 | }, | |
67 | "execution_count": 3, | |
68 | "metadata": {}, | |
69 | "output_type": "execute_result" | |
70 | }, | |
71 | { | |
72 | "data": { | |
73 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXwAAAEACAYAAACwB81wAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xm81nPex/HXJylMJi1UU6ZVyDIoo0IdssQYNbaIsYxB\nYlqM7N1yMzcxabclZ9TdaFEpiYROstxpUzhF0oqWMYnK0vK9//ieZs7k1DnXuZbv73dd7+fj0eMs\n/VzXW50+1+f6/r6LOecQEZHsVyF0ABERyQwVfBGRHKGCLyKSI1TwRURyhAq+iEiOUMEXEckRKSn4\nZjbczNaZ2aK9XDPIzJaa2ftmdlwqnldERMouVR1+PnD2nn7TzM4BGjvnDgNuAJ5I0fOKiEgZpaTg\nO+feAjbu5ZIOwIiia2cDVc2sViqeW0REyiZTY/h1gdXFvv686HsiIpIhmSr4VsL3tKeDiEgGVczQ\n86wBDi32dT3gi90vMjO9CIiIlINzrqTG+j+kssM3Su7kASYDVwKYWUvga+fcupIudM7F9te9996b\n1H+/aZPjoYccdes6TjvNkZ/v2LgxscfYutUxcaKjY0dHtWqO225zrFuXmfyhfym/8udidufK3ien\nalrm34F3gKZmtsrMrjGzG8zs+qIiPhVYbmafAk8CXVPxvNli+3Z44glo2hQWLYIpU+CNN+Dqq+Gg\ngxJ7rP33h44dYeJEWLAANm+GI46A//ov2Lo1LfFFJCZSMqTjnOtchmtuTsVzZZv5831hr1kTXn4Z\njj8+dY9dvz4MHQp33AG33gpHHQWDB8N556XuOUQkPrTSNoXy8vLKfO327dCnD7RvD7fdBq+/ntpi\nX9yhh8KYMfD009CtG3TpAlu2/PS6RPJHkfKHFef8cc6eCEtk/CfdzMxFKU+6rFsHnTpBpUqQnw91\nMzhBddMm+NOf4L33YMIEaNYsc88tIulhZrgM37SVMnjvPWjRAk491Q/hZLLYA1StCiNG+GGetm39\n/QIRyQ3q8DPopZf8eP2wYf7GamjvvgsXXeTH93v2DJ1GRMqrrB2+Cn6G5OfDnXfCCy9Ay5ah0/zb\nqlVw5pl+iOm++8BK/ZERkahRwY+QwYOhXz945RU/RTJq1q+Hs8+GNm2gf3+ooIE+kVjRGH5EPP64\nL/YFBdEs9gCHHAIzZsCcOdCjB2Tha66IoIKfVsOHw4MP+imXDRqETrN3Bx3kbyK//Tb07h06jYik\nQ6b20sk5Eyf61a0FBdC4ceg0ZVO1qh92atsWDjwQbr89dCIRSSUV/DSYPRuuv94Xz8MOC50mMQcf\nDK+9Bq1bQ716cPnloROJSKqo4KfYsmV+ymV+PjRvHjpN+fziF35+/umn++0ZTjkldCIRSQWN4afQ\n11/Duef6oZy471dz9NEwcqSfp79sWeg0IpIKmpaZIjt3QocO0LAhDBoUOk3qPP44DBnih6mqVAmd\nRkRKonn4GXb//TBtmt/WuFKl0GlSxzn4wx/ghx9g1CgtzBKJIs3Dz6CXX/b72Y8bl13FHnyBHzoU\nCgvhscdCpxGRZKjDT9KqVXDiiTB+fHbf3Pz0Uz9zZ/LkaG0NISLq8DNixw644gq/8Vg2F3uAJk38\npm+XXea3WBaR+FGHn4S//MXPWX/tNdhnn9BpMqNrV/jmG/jf/w2dRER20U3bNJs9G84/H+bN8wuU\ncsXWrX59Qe/e0LnUgy1FJBNU8NPo22/9cYR9+8KFF4ZOk3nz5/ujGefM8QuzRCQsFfw0uvFGP03x\nmWdCJwnn4Yf97KTXX9d2yiKh6aZtmsyY4bcd6N8/dJKw/vxn+O47eOqp0ElEpKzU4SdgyxY49lgY\nODD+WyekwkcfQV6eH+I59NDQaURyl4Z00qBnT9iwQTNUinvgAXjnHX9er1bhioShgp9i77zjb9B+\n+CHUqBE6TXRs2+YXnt1yC1x5Zeg0Irl
JBT+Ftm2D446De++FSy4JnSZ6ds3a+egjv5++iGSWbtqm\n0IABfoz64otDJ4mmE07wB6XccUfoJCKyN+rwS7Fmje/u3303fqdXZdKmTdCsmd9ArnXr0GlEcos6\n/BTp2RNuuknFvjRVq8Ijj/itF7ZvD51GREqigr8X06b58WkNVZTNZZdB9er+0BQRiR4N6ezB99/D\nMcf4Offnnhs6TXwUFkLbtvDBB1C7dug0IrlBQzpJGjjQj0mr2CemWTN/QpbeFYlEjzr8EqxbB0cd\npRu15fXNN3D44X4LiubNQ6cRyX6ah5+ELl3ggAPg0UdDJ4mvYcP8iuSCAq3AFUk3Ffxy+uADaNcO\nPv4YqlULGiXWduzwW0j36QMXXBA6jUh2U8EvB+fg7LPht7+FP/0pWIysMX2630r6o4+gcuXQaUSy\nl27alsMrr8DKlX5IR5J35plwxBEwdGjoJCIC6vD/Zft2v/Vx376+w5fUWLwY2rTxH2vWDJ1GJDup\nw09Qfr6fN6597lPryCP9HkQPPRQ6iYiow8ef3NS0KTz/PJx0UsafPut9+aWf5rpwoQ5KEUkHdfgJ\nGDoUWrRQsU+XOnXghhvgvvtCJxHJbTnf4W/a5BdXzZjhu1BJj40b/buoWbP8jVwRSR11+GXUrx+c\nc46KfbpVqwa33gq9e4dOIpK7crrDX7/e31ScNw8aNMjY0+asrVv9u6kXXvDHIopIamjhVRn06AE7\nd8KgQRl7ypz3xBMwfrxflCUiqaGCX4qVK/3RfIWFUKtWRp5S8OcDH3EEDB8OeXmh04hkB43hl+Iv\nf/EralXsM2vfff04vmbsiGReSgq+mbU3syVm9omZ3V7C719lZuvNbH7Rrz+k4nnLa+VKP6xwyy0h\nU+SuK66A1av9TpoikjlJD+mYWQXgE6Ad8AUwB7jUObek2DVXAc2dc91KeayMDOnccAPUqAH/8z9p\nfyrZg2ef9aubVfRFkpfJIZ1fA0udcyudc9uA0UCHkjKl4LmStmqVX1Gr7j6syy+HNWtU8EUyKRUF\nvy6wutjXa4q+t7sLzOx9MxtrZvVS8Lzl8uCDcP312sgrtIoV/Vh+nz6hk4jkjoopeIySOvfdx2Um\nA393zm0zsxuAZ/FDQD/Rp1gFyMvLIy+FUzlWr4axY/3hJhLe5ZfDAw/4Ll8zdkTKrqCggIJyvD1O\nxRh+S6CPc6590dd3AM4513cP11cA/umcO6iE30vrGH7XrnDggX4LZImGESPgmWc0tCOSjEyO4c8B\nmphZfTOrBFyK7+iLh6ld7MsOQGEKnjcha9bAmDF+eb9ER+fO8MUXKvgimZB0wXfO7QBuBl4FPgJG\nO+cWm9l9ZrZrd/luZvahmS0ouvbqZJ83UQ89BNdeCwcfnOlnlr2pWBHuvFMzpkQyISdW2u7aj33J\nEjjkkJQ/vCTpxx+hSRO/NkJ77IgkTittixkwwC/2UbGPpkqVoFcvP4NKRNIn6zv8r7+Gxo1h/nyo\nXz+lDy0ptHUrNGzozyVo1ix0GpF4UYdfZOhQf06tin20HXAAdO+us29F0imrO3x1jfGy693Y3Ln+\n701EykYdPn4L3tatVezj4qCD/D5HjzwSOolIdsraDn/bNj/zY+xYHU4eJ+vX+/3yCwuhdu3SrxcR\ndfg895wfHlCxj5dDDvEzqvr3D51EJPtkZYe/cyccfTQMHAhnnpmCYJJRq1bB8cfDp5/6w89FZO9y\nusOfPNnP+jjjjNBJpDx++Us/s+rJJ0MnEckuWdfhOwetWvmFPBdemKJgknGLFkH79rB8OVSuHDqN\nSLTlbIc/axb885/QsWPoJJKMY4/1v0aNCp1EJHtkXcHv18+fZrXPPqGTSLJuvRX++ld/T0ZEkpdV\nBf/jj+Hdd+HKK0MnkVRo184P57z8cugkItkhqwp+//7QpYu/YSvxZ/bvLl9Ekpc1N203bICmTf0W\nyLV
qpTiYBLNtm19PMWECtGgROo1INOXcTdvHHoOLLlKxzzb77gs9eqjLF0mFrOjwv/sOGjTwx+Qd\neWTKY0lg337r/361qZpIyXKqwx850p+UpGKfnQ48EP74R223IJKs2Hf4O3f63TAffxxOOy1NwSS4\nzz/322UsWwbVq4dOIxItOdPhT53qZ+Xk5YVOIulUty506OBf2EWkfGLf4eflwfXXQ+fO6ckk0bFw\nIZx7rt9uoVKl0GlEoiMnOvx58+Czz+Dii0MnkUz41a/8XvnjxoVOIhJPsS74/fpBt25+6p7khh49\n/M3bCL0xFYmN2A7prFoFxx3n395XrZrmYBIZO3fC4YdDfj6cckroNCLRkPVDOoMHw9VXq9jnmgoV\noHt3TdEUKY9YdvhbtkD9+vDee9CoUQaCSaRs3uz//rUQS8TL6g5/5Ej/dl7FPjdVqQJ/+AMMGRI6\niUi8xK7Ddw6OOgqGDtVCq1y2ciWccAKsWOFX4orksqzt8KdP94ebaKFVbqtf3++Xn58fOolIfMSu\n4A8c6G/aWamvZZLtevTwPw87doROIhIPsSr4S5fCnDlw+eWhk0gUtGoFNWrAlCmhk4jEQ6wK/uDB\nftfE/fcPnUSiwAx69oQBA0InEYmH2Ny03bTJT8FbtAjq1ctwMImsbdv8z8WUKX4hnkguyrqbtvn5\ncNZZKvbyn/bdF26+WV2+SFnEosPfscOfVztyJLRuHSCYRNpXX0GTJrB4MdSuHTqNSOZlVYc/dao/\n9KJVq9BJJIpq1IBOnbRXvkhpYtHhn3GG3zfniisyn0niobAQTj/dL8iqXDl0GpHMypoO/8MP/T/m\nSy4JnUSirFkzv1/+6NGhk4hEV+QL/qBB0KWLTjiS0nXv7m/eRuhNq0ikRHpIZ9fNuCVLoFatgMEk\nFnbuhCOPhGHDoE2b0GlEMicrhnSGDYPzz1exl7KpUMGfgDZwYOgkItEU2Q5/+3a/oGbSJL8rokhZ\n7Norf948aNAgdBqRzIh9hz9xov8Hq2IviahSxc/o0l75Ij8V2Q7/lFP8bogXXRQ4lMTO8uXQooWf\nolmlSug0IukX6w5/3jx/SHnHjqGTSBw1bAht28KIEaGTiERLJAv+wIFw001QsWLoJBJX3bv7n6Od\nO0MnEYmOyBX8tWvhxRfhuutCJ5E4a9PGb6M9bVroJCLRkZKCb2btzWyJmX1iZreX8PuVzGy0mS01\ns3fN7Jd7eqwnn/SraqtXT0UyyVVm/z4RS0S8pG/amlkF4BOgHfAFMAe41Dm3pNg1NwLHOOe6mlkn\n4HfOuUtLeCxXu7bjtdf8QeUiyfj+ez9Fs6DAL8gSyVaZvGn7a2Cpc26lc24bMBrosNs1HYBniz5/\nHv/iUKKjj1axl9TYbz+44Qa/PYeIpKbg1wVWF/t6TdH3SrzGObcD+NrMShy06dYtBYlEitx4o99Q\nbePG0Ekk2xQWwooVoVMkJhXzYEp6G7H7ONHu11gJ1wAwd24f5s3zn+fl5ZGXl5dkPMllderAeefB\n009Dr16h00g2ufVWuPhiuOaazD93QUEBBQUFCf93qRjDbwn0cc61L/r6DsA55/oWu+blomtmm9k+\nwJfOuUNKeKw9nmkrUl5z58KFF8KyZZrqK6mxdCmcfLJf3Lf//qHTZHYMfw7QxMzqm1kl4FJg8m7X\nvAhcVfT5xcAbKXhekTJp0cKfhfzCC6GTSLYYMgSuvTYaxT4RKdlawczaAwPxLyDDnXMPmdl9wBzn\n3BQzqwyMBI4HvsLP4llRwuOow5e0GDfO37ydNSt0Eom7b77x+3wtXAiHHho6jVfWDj+ye+mIpNL2\n7dCokd+Ur3nz0GkkzgYNgrffhjFjQif5t1jvpSOSahUr+u06tBBLkrFzJwwe7LfuiCPdwpKccd11\n0Lix376jdu3QaSSOXn4ZqlaFVq1CJykfdfiSM6pXh06d4IknQieRu
Bo40Hf3VurgSTRpDF9ySmEh\nnH66n05XuXLoNBInhYXQrp1fbBW1nx2N4YuUoFkzOPZYv/pWJBGDB/utOqJW7BOhDl9yztSpcM89\n/qCduL41l8zauNHP8lq8OJr3f9Thi+xB+/awZQu89VboJBIXw4f7LTqiWOwToQ5fctKQITBjBowf\nHzqJRN327dCkiV+8d+KJodOUTB2+yF5cfbXfJz9uux1K5k2eDL/4RXSLfSJU8CUnVanii/7QoaGT\nSNQNGhTfhVa705CO5Kzly/3GaitX+hcAkd29/74fu1++HPbdN3SaPdOQjkgpGjaEtm1hxIjQSSSq\nBg+Grl2jXewToQ5fctrMmX5udWEhVFD7I8Vs2ABNm/q972vWDJ1m79Thi5RBmzb+7NtXXw2dRKLm\nqafggguiX+wToQ5fcl5+vt/q9pVXQieRqNi2zQ/5TZ3qV2ZHnTp8kTK67DJYsACWLAmdRKJi/Hg4\n7LB4FPtEqOBLzttvPz+OP2hQ6CQSFYMGQbduoVOknoZ0RIAvv/Qbq332GVSrFjqNhDRnDlx8sT/0\nfp99QqcpGw3piCSgTh0/3/rpp0MnkdAGDICbb45PsU+EOnyRInPnwoUX+s6uos6Cy0mrV8OvfuUX\nWlWtGjpN2anDF0lQixZQrx5MmhQ6iYQyZAhcdVW8in0i1OGLFDN2rF9dOWtW6CSSaZs3Q4MGfgy/\nYcPQaRKjDl+kHC64wO+tM39+6CSSafn5cNpp8Sv2iVDBFymmYkW46SZ/WLXkjh07/M3aW24JnSS9\ndGtKZDfXXQeNG8PatfE/4UjKZvJkOOQQaNUqdJL0Uocvspvq1aFTJ3jiidBJJFMefTT7u3vQTVuR\nEhUWwumn+/H8ypVDp5F0eu89uOQS+PTT+E7H1U1bkSQ0a+b3URkzJnQSSbdHH/UnWsW12CdCHb7I\nHrz0EvTuDfPmgZXaO0kcrVwJJ5zgF1r9/Oeh05SfOnyRJJ1zjp+b/dZboZNIugweDNdcE+9inwh1\n+CJ7MWQIFBTA88+HTiKp9s03fs79/PlQv37oNMlRhy+SAlddBTNmwIoVoZNIqj3zDJx5ZvyLfSLU\n4YuUolcvfwLSgAGhk0iqbNvmDzgZMwZOOil0muSVtcNXwRcpxeefwzHH+MOsa9QInUZSYdQoGDbM\nD9dlAw3piKRI3brQsSMMHRo6iaSCc/Dww3D77aGTZJ4KvkgZ9Orlb+Bu3Ro6iSRr2jRf9Nu3D50k\n81TwRcrgyCOhdWu/o6LEW9++cNttubm2QmP4ImX07rvQubMfy8+FVZnZaNc2CkuXwr77hk6TOhrD\nF0mxVq3g0ENh3LjQSaS8+vb1m6RlU7FPhDp8kQS89BLcfTcsWJCbQwJx9vHHcOqpfhuFn/0sdJrU\nUocvkgbnnusPy3j11dBJJFH9+kHXrtlX7BOhDl8kQSNH+pu3b7wROomU1ZdfwlFHwSefQM2aodOk\nnjp8kTS59FK/d/qcOaGTSFkNHAiXX56dxT4R6vBFymHAAJg1C8aPD51ESvP119CkCcydCw0ahE6T\nHtpaQSSNtmyBRo38sM5RR4VOI3tz//2wbBn87W+hk6SPCr5Imj30ECxaBH//e+gksifffutfmN9+\nG5o2DZ0mfVTwRdIsV4pJnPXtC++/D889FzpJemWk4JtZNWAMUB9YAVzinNtUwnU7gIWAASudcx33\n8Hgq+BIr//3f8Nln2T1cEFdbt/oX5Ndeg6OPDp0mvTJV8PsCXznnHjaz24Fqzrk7SrjuG+dcqYeI\nqeBL3Hz9NTRu7GfsNGoUOo0Ul0s31jNV8JcAbZ1z68ysNlDgnDuihOu+dc4dWIbHU8GX2LnnHli/\nHp56KnQS2eX77/0L8Ysv+kPKs12m5uEf4pxbB+CcWwscvIfrKpvZe2b2jpl1SPI5RSKlRw9/5u2q\nVaGTyC75+XD88blR7BNR6p5/Z
jYdqFX8W4AD7kngeX7pnFtrZg2BN8xskXNueUkX9unT51+f5+Xl\nkZeXl8DTiGRezZrwxz/6QzWGDAmdRn780c+gGjMmdJL0KSgooKAcx3UlO6SzGMgrNqQzwzl3ZCn/\nTT7wonNuQgm/pyEdiaV16/ye+R99BHXqhE6T255+2hf76dNDJ8mcTA3pTAauLvr8KmBSCUEOMrNK\nRZ/XBFoDhUk+r0ik1KoFV17ppwFKOD/84BdaFRsokGKS7fCrA2OBQ4FVwMXOua/NrDlwg3PuejNr\nBTwJ7MC/wPR3zv1tD4+nDl9ia+1aaNbML8aqVy90mtz02GMwZQpMnRo6SWZp4ZVIALffDps2wRNP\nhE6Se777zu+ZM2kStGgROk1mqeCLBPDVV3D44f4oPc3Lz6z+/eHNN2HixNBJMk8FXySQPn38qUrP\nPhs6Se7YvNl399OnwzHHhE6TeSr4IoFs2gSHHQYzZ/qZO5J+Dz4ICxfC6NGhk4Shgi8SUN++MG8e\njB0bOkn227TJd/ezZsERP1nnnxtU8EUC2rLFF6GpU/2KT0mfe++FlStzewM7FXyRwAYNgmnT4KWX\nQifJXmvX+gNo5s6Fhg1DpwlHBV8ksB9+8GP4w4fDaaeFTpOdbrwRDjgA+vULnSQsFXyRCBg9Gh55\nxG+fXCHZde3yHz7+GE45xX+sXj10mrAytbWCiOxFp05QsWL2n7gUwp13wm23qdgnQh2+SJrNmgW/\n/z0sWQL77Rc6TXZ4+23o3Nl39/ozVYcvEhmnngrHHQeDB4dOkh2cg1694IEHVOwTpQ5fJAN2jTcv\nWQI1aoROE2/jx/sdMefP132RXXTTViRiunaFSpX8WatSPt9956dhPvUUnHFG6DTRoYIvEjEbNvhi\nVVDgt1GWxD3wACxYkBsHkydCBV8kggYP9tv3Tp8OVuo/Tylu9Wp/LyTXF1mVRDdtRSLoxhth/XqY\n8JMDPqU0t90GN92kYp8MdfgiGTZzJlx1FRQW+lWiUro334QrrvA3vfVn9lPq8EUiqm1baNlS59+W\n1Y4d0K0b/PWvKvbJUocvEsCaNX48WidjlW7gQHjhBXjjDd332BPdtBWJuIcfhtdfh1deUSHbk9Wr\n/fbSb7/tj46UkmlIRyTievaEdetg1KjQSaLJObj5Zj+co2KfGurwRQKaOxfOOw8+/BBq1gydJlom\nTIC774b334fKlUOniTYN6YjExC23wD/+ASNGhE4SHZs2+UVqzz3n9yKSvVPBF4mJzZvh6KP9dgFn\nnRU6TTR06eJn5wwbFjpJPJS14FfMRBgR2bMqVeDJJ+G662DRIqhaNXSisKZN8zeyFy0KnST7qMMX\niYgbb/Sbg+XyYdwbN8Kxx8Kzz8Lpp4dOEx8a0hGJmc2b/dz8Rx6B3/0udJowfv97qFbNHwAvZach\nHZGYqVLF37i94AJo3Rpq1QqdKLMmTIDZs/2sHEkPdfgiEXPXXX6a5qRJubMga80aaNECJk6EVq1C\np4kfLbwSiak+feDLL/2WArlg+3a47DLo3l3FPt3U4YtE0PLlcNJJ8OKL/mM2u+cev6fQK6/oyMLy\nUocvEmMNG/qpmp06wT//GTpN+kyfDvn5MHKkin0mqMMXibCePeHTT/14frYVxBUr/BDOqFGagpks\ndfgiWaBvX7/NQO/eoZOk1ubN0KGDP8VKxT5z1OGLRNyGDX4c/4EHoHPn0GmSt3MnXHIJHHggPPNM\n7sxESifNwxfJEgcf7Id02rWDxo3jfxP3/vvh88+hoEDFPtM0pCMSA8ccA8OH+0VZy5aFTlN+w4f7\nrSMmTNCWxyGowxeJid/+1nfGZ50Fb70FdeqETpSYyZP9FMyZM+OXPVuo4IvESJcu8NVXcPbZvnBW\nqxY6Udm8+SZcey1MnQpNm4ZOk7s0pCMSM3fdBWec4Yv+xo2h05Ru5ky48EJ/mMmJJ4ZOk9tU8EV
i\nxgz69YNTTvFTGjdsCJ1oz2bMgIsugtGj/YuUhKWCLxJDu4r+b34DeXnwxRehE/3UlCl++uW4cX6G\nkYSngi8SU2Z+bv6VV0LLlrBgQehE//bYY/4ErylT/AuSRIMWXolkgeef9ydmDRsGHTuGy7FtG/Tq\n5TdCmzoVGjUKlyWXaOGVSA656CJo0MCflPXOO77zr1QpsxlWr/abvdWo4TNUr57Z55fSaUhHJEu0\naOGHdZYsgZNP9h8zwTm/AdqJJ/p3F5MmqdhHVVIF38wuMrMPzWyHmZ2wl+vam9kSM/vEzG5P5jlF\nZM9q1vQF9+qr/Sye3r39wejpsmIFnHee3+TtxRf9ZmjZtqtnNkn2r+YD4HfAzD1dYGYVgCHA2cBR\nwGVmdkSSzxtJBQUFoSMkRfnDSlV+M7jpJli4ED7+GI48Ep5+Gn78MSUPD/ipoN27Q/Pm/vzduXNh\ny5aC1D1BhsX9Z6eskir4zrmPnXNLgb3dLPg1sNQ5t9I5tw0YDXRI5nmjKu4/NMofVqrz160LY8f6\n4ZaxY/0K10cegfXry/+YixbB9df7x9q5EwoL4e67/f2COP/5xzl7IjJx07YusLrY12vwLwIikgEn\nnwyvvgr/93/+FK2mTf2Om+eeC23aQLNme97IbNMmmD3bb40wYQJ8+62fbrl4MdSundn/D0leqQXf\nzKYDtYp/C3DA3c65F8vwHCV1/5p7KZJhLVv6X4MG+aMFp0710ziXLfPvBmrVgipV/KHi334Lq1b5\njy1a+BeNZ56BX/9aY/RxlpJ5+GY2A/izc25+Cb/XEujjnGtf9PUdgHPO9S3hWr0QiIiUQ6bn4e/p\nyeYATcysPvAlcClwWUkXliWwiIiUT7LTMjua2WqgJTDFzF4u+n4dM5sC4JzbAdwMvAp8BIx2zi1O\nLraIiCQqUlsriIhI+kTm9kucF2eZ2XAzW2dmi0JnKQ8zq2dmb5hZoZl9YGbdQmdKhJlVNrPZZrag\nKP+9oTMlyswqmNl8M5scOkuizGyFmS0s+vN/L3SeRJlZVTMbZ2aLzewjM4vNqcFm1rToz31+0cdN\ne/v3G4kOv2hx1idAO+AL/Lj/pc65DC0OT46ZnQJsBkY4544NnSdRZlYbqO2ce9/MqgDzgA5x+fMH\nMLMDnHNbzWwf4G2gm3MuNsXHzHoCzYGfO+fOD50nEWb2GdDcOReD41h+ysz+Bsx0zuWbWUXgAOfc\nN4FjJayojq4BTnLOrS7pmqh0+LFenOWcewuI5Q87gHNurXPu/aLPNwOL8esnYsM5t7Xo08r4yQjh\nO5kyMrNxid2tAAACKklEQVR6wLnA06GzlJMRnVqSEDM7EDjVOZcP4JzbHsdiX+QMYNmeij1E5y+p\npMVZsSo42cLMGgDHAbPDJklM0ZDIAmAtMN05Nyd0pgT0B3oRoxep3ThgmpnNMbPrQodJUCPgH2aW\nXzQs8pSZ7R86VDl1Ap7b2wVRKfhanBUBRcM5zwPdizr92HDO7XTOHQ/UA04ys2ahM5WFmf0GWFf0\nDsvY+zYlUdXaOdcC/y7lpqIhzrioCJwADHXOnQBsBe4IGylxZrYvcD4wbm/XRaXgrwF+Wezrevix\nfMmQorHL54GRzrlJofOUV9Hb8QKgfeAoZXUycH7ROPhzwGlmNiJwpoQ459YWfdwATCReW6esAVY7\n5+YWff08/gUgbs4B5hX9HexRVAr+vxZnmVkl/OKsuM1WiGt3tsszQKFzbmDoIIkys5pmVrXo8/3x\nY5mxuOHsnLvLOfdL51wj/M/9G865K0PnKiszO6DonSFm9jPgLODDsKnKzjm3DlhtZk2LvtUOKAwY\nqbwuo5ThHIjIiVfOuR1mtmtxVgVgeJwWZ5nZ34E8oIaZrQLu3XUTKA7M7GTgcuCDonFwB9zlnHsl\nbLIyqwM8WzRLoQIwxjk3NXCmXFELmFi0LUpFYJRz7tXAmRL
VDRhVNCzyGXBN4DwJKdbkXF/qtVGY\nlikiIukXlSEdERFJMxV8EZEcoYIvIpIjVPBFRHKECr6ISI5QwRcRyREq+CIiOUIFX0QkR/w/qrXc\nuLlO7OYAAAAASUVORK5CYII=\n", | |
74 | "text/plain": [ | |
75 | "<matplotlib.figure.Figure at 0x106cd1dd8>" | |
76 | ] | |
77 | }, | |
78 | "metadata": {}, | |
79 | "output_type": "display_data" | |
80 | } | |
81 | ], | |
82 | "source": [ | |
83 | "import matplotlib.pyplot as plt\n", | |
84 | "\n", | |
85 | "plt.plot(x, np.sin(x))" | |
86 | ] | |
87 | } | |
88 | ], | |
89 | "metadata": { | |
90 | "kernelspec": { | |
91 | "display_name": "Python 3", | |
92 | "language": "python", | |
93 | "name": "python3" | |
94 | }, | |
95 | "language_info": { | |
96 | "codemirror_mode": { | |
97 | "name": "ipython", | |
98 | "version": 3 | |
99 | }, | |
100 | "file_extension": ".py", | |
101 | "mimetype": "text/x-python", | |
102 | "name": "python", | |
103 | "nbconvert_exporter": "python", | |
104 | "pygments_lexer": "ipython3", | |
105 | "version": "3.5.2" | |
106 | } | |
107 | }, | |
108 | "nbformat": 4, | |
109 | "nbformat_minor": 2 | |
110 | } |
0 | .. nb2plots documentation master file, created by | |
1 | sphinx-quickstart on Fri Dec 16 08:41:58 2016. | |
2 | You can adapt this file completely to your liking, but it should at least | |
3 | contain the root `toctree` directive. | |
4 | ||
5 | nb2plots - the documentation that is not missing | |
6 | ================================================ | |
7 | ||
8 | Introduction: | |
9 | ||
10 | .. toctree:: | |
11 | :maxdepth: 2 | |
12 | ||
13 | introduction | |
14 | worked_example | |
15 | scripts | |
16 | ||
17 | In more depth: | |
18 | ||
19 | .. toctree:: | |
20 | :maxdepth: 2 | |
21 | ||
22 | nbplots | |
23 | mpl_interactive | |
24 | code_links | |
25 | markdown_base | |
26 | ||
27 | Developer docs: | |
28 | ||
29 | .. toctree:: | |
30 | :maxdepth: 2 | |
31 | ||
32 | devel/improving_markdown | |
33 | devel/sphinx_example_project | |
34 | devel/make_release | |
35 | ||
36 | .. toctree:: | |
37 | :hidden: | |
38 | ||
39 | converted_example | |
40 | converted_plus_notebooks | |
41 | devel/design | |
42 | ||
43 | ||
44 | Indices and tables | |
45 | ================== | |
46 | ||
47 | * :ref:`genindex` | |
48 | * :ref:`modindex` | |
49 | * :ref:`search` | |
50 |
0 | ###### | |
1 | Basics | |
2 | ###### | |
3 | ||
4 | .. include:: ../README.rst | |
5 | :start-after: shared-text-body | |
6 | :end-before: standalone-references | |
7 | ||
8 | .. include:: links_names.inc |
0 | .. links, substitutions to include across pages. | |
1 | .. vim: ft=rst | |
2 | ||
3 | .. This project | |
4 | .. _nb2plots code: | |
5 | .. _nb2plots github: http://github.com/matthew-brett/nb2plots | |
6 | .. _nb2plots pypi: http://pypi.python.org/pypi/nb2plots | |
7 | .. _nb2plots issue tracker: | |
8 | .. _nb2plots issues: http://github.com/matthew-brett/nb2plots/issues | |
9 | .. _nb2plots travis-ci: https://travis-ci.org/matthew-brett/nb2plots | |
10 | .. _documentation: https://matthew-brett.github.io/nb2plots | |
11 | ||
12 | .. Code support | |
13 | .. _versioneer: https://github.com/warner/python-versioneer | |
14 | .. _setuptools intro: | |
15 | http://packages.python.org/an_example_pypi_project/setuptools.html | |
16 | ||
17 | .. Relevant projects | |
18 | .. _pandoc: http://pandoc.org | |
19 | .. _jupyter: https://jupyter.org | |
20 | .. _homebrew: https://brew.sh | |
21 | .. _sphinx: http://sphinx-doc.org | |
22 | .. _rest: http://docutils.sourceforge.net/rst.html | |
23 | .. _`Sphinx reST`: http://sphinx.pocoo.org/rest.html | |
24 | .. _reST: http://docutils.sourceforge.net/rst.html | |
25 | .. _matplotlib plot directive: http://matplotlib.org/sampledoc/extensions.html | |
26 | .. _nbconvert: http://nbconvert.readthedocs.org/en/latest/ | |
27 | .. _ipython: http://ipython.org/ | |
28 | .. _docutils: http://docutils.sourceforge.net | |
29 | .. _rst2md project: https://github.com/cgwrench/rst2md | |
30 | ||
31 | .. Python and common libraries | |
32 | .. _python: https://www.python.org | |
33 | .. _python standard library: http://docs.python.org/3/library | |
34 | .. _python.org releases: http://www.python.org/download/releases | |
35 | .. _setuptools: http://pypi.python.org/pypi/setuptools | |
36 | .. _matplotlib: http://matplotlib.org/ | |
37 | .. _pytest: https://pytest.readthedocs.io | |
38 | .. _nose: http://readthedocs.org/docs/nose/en/latest | |
39 | .. _mock: https://github.com/testing-cabal/mock | |
40 | .. _pip: http://pypi.python.org/pypi/pip | |
41 | .. _PEP8: | |
42 | https://www.python.org/dev/peps/pep-0008/ | |
43 | .. _Python glossary: | |
44 | https://docs.python.org/3/glossary.html | |
45 | .. _pypi: http://pypi.python.org/pypi | |
46 | ||
47 | .. Licenses | |
48 | .. _GPL: http://www.gnu.org/licenses/gpl.html | |
49 | .. _BSD: http://www.opensource.org/licenses/bsd-license.php | |
50 | ||
51 | .. Substitutions | |
52 | .. |emdash| unicode:: U+02014 | |
53 | .. |--| unicode:: U+2013 .. en dash | |
54 | .. |---| unicode:: U+2014 .. em dash, trimming surrounding whitespace | |
55 | :trim: | |
56 | .. |nbplot-documentation| replace:: :ref:`nbplots-documentation` | |
57 | .. |code-links-documentation| replace:: :ref:`code-links-documentation` | |
58 | .. |worked-example| replace:: :ref:`worked-example` |
0 | @ECHO OFF | |
1 | ||
2 | REM Command file for Sphinx documentation | |
3 | ||
4 | if "%SPHINXBUILD%" == "" ( | |
5 | set SPHINXBUILD=sphinx-build | |
6 | ) | |
7 | set BUILDDIR=_build | |
8 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . | |
9 | set I18NSPHINXOPTS=%SPHINXOPTS% . | |
10 | if NOT "%PAPER%" == "" ( | |
11 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% | |
12 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% | |
13 | ) | |
14 | ||
15 | if "%1" == "" goto help | |
16 | ||
17 | if "%1" == "help" ( | |
18 | :help | |
19 | echo.Please use `make ^<target^>` where ^<target^> is one of | |
20 | echo. html to make standalone HTML files | |
21 | echo. dirhtml to make HTML files named index.html in directories | |
22 | echo. singlehtml to make a single large HTML file | |
23 | echo. pickle to make pickle files | |
24 | echo. json to make JSON files | |
25 | echo. htmlhelp to make HTML files and a HTML help project | |
26 | echo. qthelp to make HTML files and a qthelp project | |
27 | echo. devhelp to make HTML files and a Devhelp project | |
28 | echo. epub to make an epub | |
29 | echo. epub3 to make an epub3 | |
30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter | |
31 | echo. text to make text files | |
32 | echo. man to make manual pages | |
33 | echo. texinfo to make Texinfo files | |
34 | echo. gettext to make PO message catalogs | |
35 | echo. changes to make an overview over all changed/added/deprecated items | |
36 | echo. xml to make Docutils-native XML files | |
37 | echo. pseudoxml to make pseudoxml-XML files for display purposes | |
38 | echo. linkcheck to check all external links for integrity | |
39 | echo. doctest to run all doctests embedded in the documentation if enabled | |
40 | echo. coverage to run coverage check of the documentation if enabled | |
41 | echo. dummy to check syntax errors of document sources | |
42 | goto end | |
43 | ) | |
44 | ||
45 | if "%1" == "clean" ( | |
46 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i | |
47 | del /q /s %BUILDDIR%\* | |
48 | goto end | |
49 | ) | |
50 | ||
51 | ||
52 | REM Check if sphinx-build is available and fallback to Python version if any | |
53 | %SPHINXBUILD% 1>NUL 2>NUL | |
54 | if errorlevel 9009 goto sphinx_python | |
55 | goto sphinx_ok | |
56 | ||
57 | :sphinx_python | |
58 | ||
59 | set SPHINXBUILD=python -m sphinx.__init__ | |
60 | %SPHINXBUILD% 2> nul | |
61 | if errorlevel 9009 ( | |
62 | echo. | |
63 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx | |
64 | echo.installed, then set the SPHINXBUILD environment variable to point | |
65 | echo.to the full path of the 'sphinx-build' executable. Alternatively you | |
66 | echo.may add the Sphinx directory to PATH. | |
67 | echo. | |
68 | echo.If you don't have Sphinx installed, grab it from | |
69 | echo.http://sphinx-doc.org/ | |
70 | exit /b 1 | |
71 | ) | |
72 | ||
73 | :sphinx_ok | |
74 | ||
75 | ||
76 | if "%1" == "html" ( | |
77 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html | |
78 | if errorlevel 1 exit /b 1 | |
79 | echo. | |
80 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. | |
81 | goto end | |
82 | ) | |
83 | ||
84 | if "%1" == "dirhtml" ( | |
85 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml | |
86 | if errorlevel 1 exit /b 1 | |
87 | echo. | |
88 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. | |
89 | goto end | |
90 | ) | |
91 | ||
92 | if "%1" == "singlehtml" ( | |
93 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml | |
94 | if errorlevel 1 exit /b 1 | |
95 | echo. | |
96 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. | |
97 | goto end | |
98 | ) | |
99 | ||
100 | if "%1" == "pickle" ( | |
101 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle | |
102 | if errorlevel 1 exit /b 1 | |
103 | echo. | |
104 | echo.Build finished; now you can process the pickle files. | |
105 | goto end | |
106 | ) | |
107 | ||
108 | if "%1" == "json" ( | |
109 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json | |
110 | if errorlevel 1 exit /b 1 | |
111 | echo. | |
112 | echo.Build finished; now you can process the JSON files. | |
113 | goto end | |
114 | ) | |
115 | ||
116 | if "%1" == "htmlhelp" ( | |
117 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp | |
118 | if errorlevel 1 exit /b 1 | |
119 | echo. | |
120 | echo.Build finished; now you can run HTML Help Workshop with the ^ | |
121 | .hhp project file in %BUILDDIR%/htmlhelp. | |
122 | goto end | |
123 | ) | |
124 | ||
125 | if "%1" == "qthelp" ( | |
126 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp | |
127 | if errorlevel 1 exit /b 1 | |
128 | echo. | |
129 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ | |
130 | .qhcp project file in %BUILDDIR%/qthelp, like this: | |
131 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\nb2plots.qhcp | |
132 | echo.To view the help file: | |
133 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\nb2plots.ghc | |
134 | goto end | |
135 | ) | |
136 | ||
137 | if "%1" == "devhelp" ( | |
138 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp | |
139 | if errorlevel 1 exit /b 1 | |
140 | echo. | |
141 | echo.Build finished. | |
142 | goto end | |
143 | ) | |
144 | ||
145 | if "%1" == "epub" ( | |
146 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub | |
147 | if errorlevel 1 exit /b 1 | |
148 | echo. | |
149 | echo.Build finished. The epub file is in %BUILDDIR%/epub. | |
150 | goto end | |
151 | ) | |
152 | ||
153 | if "%1" == "epub3" ( | |
154 | %SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3 | |
155 | if errorlevel 1 exit /b 1 | |
156 | echo. | |
157 | echo.Build finished. The epub3 file is in %BUILDDIR%/epub3. | |
158 | goto end | |
159 | ) | |
160 | ||
161 | if "%1" == "latex" ( | |
162 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex | |
163 | if errorlevel 1 exit /b 1 | |
164 | echo. | |
165 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. | |
166 | goto end | |
167 | ) | |
168 | ||
169 | if "%1" == "latexpdf" ( | |
170 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex | |
171 | cd %BUILDDIR%/latex | |
172 | make all-pdf | |
173 | cd %~dp0 | |
174 | echo. | |
175 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. | |
176 | goto end | |
177 | ) | |
178 | ||
179 | if "%1" == "latexpdfja" ( | |
180 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex | |
181 | cd %BUILDDIR%/latex | |
182 | make all-pdf-ja | |
183 | cd %~dp0 | |
184 | echo. | |
185 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. | |
186 | goto end | |
187 | ) | |
188 | ||
189 | if "%1" == "text" ( | |
190 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text | |
191 | if errorlevel 1 exit /b 1 | |
192 | echo. | |
193 | echo.Build finished. The text files are in %BUILDDIR%/text. | |
194 | goto end | |
195 | ) | |
196 | ||
197 | if "%1" == "man" ( | |
198 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man | |
199 | if errorlevel 1 exit /b 1 | |
200 | echo. | |
201 | echo.Build finished. The manual pages are in %BUILDDIR%/man. | |
202 | goto end | |
203 | ) | |
204 | ||
205 | if "%1" == "texinfo" ( | |
206 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo | |
207 | if errorlevel 1 exit /b 1 | |
208 | echo. | |
209 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. | |
210 | goto end | |
211 | ) | |
212 | ||
213 | if "%1" == "gettext" ( | |
214 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale | |
215 | if errorlevel 1 exit /b 1 | |
216 | echo. | |
217 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. | |
218 | goto end | |
219 | ) | |
220 | ||
221 | if "%1" == "changes" ( | |
222 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes | |
223 | if errorlevel 1 exit /b 1 | |
224 | echo. | |
225 | echo.The overview file is in %BUILDDIR%/changes. | |
226 | goto end | |
227 | ) | |
228 | ||
229 | if "%1" == "linkcheck" ( | |
230 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck | |
231 | if errorlevel 1 exit /b 1 | |
232 | echo. | |
233 | echo.Link check complete; look for any errors in the above output ^ | |
234 | or in %BUILDDIR%/linkcheck/output.txt. | |
235 | goto end | |
236 | ) | |
237 | ||
238 | if "%1" == "doctest" ( | |
239 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest | |
240 | if errorlevel 1 exit /b 1 | |
241 | echo. | |
242 | echo.Testing of doctests in the sources finished, look at the ^ | |
243 | results in %BUILDDIR%/doctest/output.txt. | |
244 | goto end | |
245 | ) | |
246 | ||
247 | if "%1" == "coverage" ( | |
248 | %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage | |
249 | if errorlevel 1 exit /b 1 | |
250 | echo. | |
251 | echo.Testing of coverage in the sources finished, look at the ^ | |
252 | results in %BUILDDIR%/coverage/python.txt. | |
253 | goto end | |
254 | ) | |
255 | ||
256 | if "%1" == "xml" ( | |
257 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml | |
258 | if errorlevel 1 exit /b 1 | |
259 | echo. | |
260 | echo.Build finished. The XML files are in %BUILDDIR%/xml. | |
261 | goto end | |
262 | ) | |
263 | ||
264 | if "%1" == "pseudoxml" ( | |
265 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml | |
266 | if errorlevel 1 exit /b 1 | |
267 | echo. | |
268 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. | |
269 | goto end | |
270 | ) | |
271 | ||
272 | if "%1" == "dummy" ( | |
273 | %SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy | |
274 | if errorlevel 1 exit /b 1 | |
275 | echo. | |
276 | echo.Build finished. Dummy builder generates no files. | |
277 | goto end | |
278 | ) | |
279 | ||
280 | if "%1" == "github" ( | |
281 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html | |
282 | ghp-import -n -p -b gh-pages -f %BUILDDIR%/html/ | |
283 | if errorlevel 1 exit /b 1 | |
284 | echo. | |
285 | echo Published to Github | |
286 | goto end | |
287 | ) | |
288 | ||
289 | :end |
0 | .. _links-in-markdown: | |
1 | ||
2 | ################# | |
3 | Links in Markdown | |
4 | ################# | |
5 | ||
6 | By default, if your ReST documents have internal links, then generated Markdown | |
7 | output like the Notebook files or the Python code files drop these links, | |
8 | replacing them with some suitable text. | |
9 | ||
10 | Internal links can be :ref:`internal to the document <links-in-markdown>`, via | |
11 | the ``:ref:`` role, or to :doc:`other ReST pages <scripts>`, using the | |
12 | ``:doc:`` role. Links can also be to files to download, such as | |
13 | :download:`conf.py`. | |
14 | ||
15 | It's often the case that you have built HTTP pages on the web somewhere, so it | |
16 | would make sense for the built Markdown to point to the HTTP versions of these | |
17 | links, rather than dropping them. | |
18 | ||
19 | You can tell ``nb2plots`` where your built HTTP docs are, by setting the | |
20 | ``markdown_http_base`` value in your ``conf.py`` file, like this:: | |
21 | ||
22 | markdown_http_base = 'https://example.com' | |
23 | ||
24 | If you set this value to any string other than the empty string, the Markdown | |
25 | builders will use this base URL to resolve internal links. | |
26 | ||
27 | In fact, I've set that option in the ``conf.py`` for this project, like this:: | |
28 | ||
29 | markdown_http_base = 'https://matthew-brett.github.io/nb2plots' | |
30 | ||
31 | Have a look at the generated Python code and Notebook code for this page, via | |
32 | the links below. You should see that the Markdown links resolve to the HTTP | |
33 | pages at https://matthew-brett.github.io/nb2plots. | |
34 | ||
35 | .. code-links:: |
0 | ################################### | |
1 | Using the mpl-interactive directive | |
2 | ################################### | |
3 | ||
4 | Many notebooks have code cells generating matplotlib_ plots. Nearly all of | |
5 | these have a code cell near the top with the IPython magic command | |
6 | ``%matplotlib inline`` or ``%matplotlib nbagg``. These commands tell the | |
7 | notebook to embed the plots inside the notebook rather than generate them in | |
8 | separate windows. | |
9 | ||
10 | When ``nb2plots`` converts a notebook to ReST, it detects these commands, and | |
11 | adds a new directive ``.. mpl-interactive::`` at the matching place in the | |
12 | ReST page. When Sphinx builds the ReST page to HTML, the directive puts | |
13 | boilerplate text into the page reminding the user that they may want to use | |
14 | the ``%matplotlib`` magic command in the IPython_ console, or ``%matplotlib | |
15 | inline`` when they are running the commands in the Jupyter notebook. The | |
16 | ``mpl-interactive`` directive also serves as a marker when converting the ReST | |
17 | page to a notebook again; the directive generates a notebook code cell with | |
18 | the ``%matplotlib inline`` magic at the matching location, like this: | |
19 | ||
20 | .. mpl-interactive:: | |
21 | ||
22 | .. _mpl-interactive-directive: | |
23 | ||
24 | ************************* | |
25 | mpl-interactive directive | |
26 | ************************* | |
27 | ||
28 | .. automodule:: nb2plots.mpl_interactive | |
29 | ||
30 | .. include:: links_names.inc |
0 | .. _nbplots-documentation: | |
1 | ||
2 | ########################## | |
3 | Using the nbplot directive | |
4 | ########################## | |
5 | ||
6 | The ``nbplot`` directive is very similar to the `matplotlib plot directive`_, | |
7 | and started life as a fork of that code. It differs mainly in that its | |
8 | default is to keep the name-space from one ``nbplot`` directive to the next in | |
9 | a given page. It also has output defaults adapted to directive contents with | |
10 | source code rather than pointing to a standalone script. | |
11 | ||
12 | For example, here is the source for an Nbplot directive: | |
13 | ||
14 | .. code-block:: rest | |
15 | ||
16 | .. nbplot:: | |
17 | ||
18 | >>> a = 1 | |
19 | >>> a | |
20 | 1 | |
21 | ||
22 | This renders as: | |
23 | ||
24 | .. nbplot:: | |
25 | ||
26 | >>> a = 1 | |
27 | >>> a | |
28 | 1 | |
29 | ||
30 | All Nbplot directives in a page share a namespace with the first (by default). | |
31 | Here is another Nbplot directive: | |
32 | ||
33 | .. nbplot:: | |
34 | ||
35 | >>> b = a | |
36 | >>> b | |
37 | 1 | |
38 | ||
39 | Nbplot directives can also make |--| plots: | |
40 | ||
41 | .. nbplot:: | |
42 | ||
43 | >>> import matplotlib.pyplot as plt | |
44 | >>> plt.plot(range(10)) | |
45 | [...] | |
46 | ||
47 | Notice that the HTML version of the page contains links to high and low | |
48 | resolution PNG versions of the plot, and a PDF version. | |
49 | ||
50 | The code in Nbplot directives gets executed during the page build, so your | |
51 | build will detect any errors. With doctest code blocks, like the above, you | |
52 | can also test the doctest output, using the Sphinx ``doctest`` builder, which | |
53 | you might be able to run with: | |
54 | ||
55 | .. code-block:: bash | |
56 | ||
57 | make doctest | |
58 | ||
59 | See the ``run-parts`` and ``render-parts`` options to run and render different | |
60 | code according to your local configuration. | |
61 | ||
62 | .. _nbplot-directive: | |
63 | ||
64 | **************** | |
65 | nbplot directive | |
66 | **************** | |
67 | ||
68 | .. automodule:: nb2plots.nbplots | |
69 | ||
70 | .. include:: links_names.inc |
0 | ################### | |
1 | Scripts in nb2plots | |
2 | ################### | |
3 | ||
4 | Nb2plots installs the following command-line scripts: | |
5 | ||
6 | * ``nb2plots`` |--| converts Jupyter notebooks to a ReST page with | |
7 | :doc:`nbplot directives <nbplots>` for the code cells; | |
8 | * ``sphinx2py`` |--| converts a ReST page that may have nbplot directives or | |
9 | doctest blocks into a Python ``.py`` code file, where everything other than | |
10 | the nbplot directives and doctest blocks become comments in Markdown text | |
11 | format; | |
12 | * ``sphinx2nb`` |--| converts a ReST page that may have nbplot directives or | |
13 | doctest blocks into a Jupyter notebook, where the nbplot directives and | |
14 | doctest blocks become code cells; | |
15 | * ``sphinx2md`` |--| converts a ReST page into a Markdown page, where the | |
16 | conversion assumes the Sphinx versions of directives and roles; | |
17 | * ``sphinx2pxml`` |--| converts a ReST page into a Sphinx pseudo XML page, | |
18 | where the conversion assumes the Sphinx versions of directives and roles; | |
19 | * ``rst2md`` |--| converts a ReST page into a Markdown page, where the | |
20 | conversion assumes the `docutils`_ versions of directives and roles; | |
21 | ||
22 | All these scripts write their output to standard output (stdout). | |
23 | ||
24 | .. include:: links_names.inc |
0 | .. _worked-example: | |
1 | ||
2 | ############## | |
3 | Worked example | |
4 | ############## | |
5 | ||
6 | Let's say I have a notebook `example notebook <example_notebook.html>`_. You | |
7 | can download the notebook from :download:`example_notebook.ipynb`. | |
8 | ||
9 | I want to make this notebook into a ReST page to include in my Sphinx | |
10 | project. First I convert the notebook with the ``nb2plots`` script:: | |
11 | ||
12 | nb2plots example_notebook.ipynb > converted_example.rst | |
13 | ||
14 | This results in the ReST page `converted_example.rst | |
15 | <_sources/converted_example.rst.txt>`_, which builds as HTML to | |
16 | :doc:`converted_example`. | |
17 | ||
18 | If you look at the `source of the converted notebook | |
19 | <_sources/converted_example.rst.txt>`_ you will see two custom Nb2plots | |
20 | directives: | |
21 | ||
22 | * :ref:`nbplot <nbplot-directive>` |--| housing the content from the code | |
23 | cells in the original notebook; | |
24 | * :ref:`mpl-interactive <mpl-interactive-directive>` |--| noting that the user | |
25 | may want to use the ``%matplotlib`` magic for interactive plots. | |
26 | ||
27 | Notice that each Nbplot directive on a single page uses the same namespace, by | |
28 | default, so the Nbplot directives on your page can build up variables in the | |
29 | same way that a notebook does. | |
30 | ||
31 | The notebook code cells convert to doctest blocks, so I can check the | |
32 | correctness of the code on my page with the Sphinx doctest extension: | |
33 | ||
34 | .. code-block:: bash | |
35 | ||
36 | make doctest | |
37 | ||
38 | This example page converts well, but, in practice, you may well want to edit | |
39 | the ReST document to clean up some differences between the notebook code cells | |
40 | and doctest blocks in Nbplot directives. | |
41 | ||
42 | Now let's say that I would like to make this page available to my users as | |
43 | Jupyter notebooks and / or a Python code file. To do this, I add the | |
44 | following directive to the end of my page (or wherever I like): | |
45 | ||
46 | .. code-block:: rest | |
47 | ||
48 | .. code-links:: | |
49 | ||
50 | See the :ref:`code-links-directive` for more detail. | |
51 | ||
52 | When I do this, I get a built HTML page :doc:`like this | |
53 | <converted_plus_notebooks>`. Note the three links to the Python code file and | |
54 | notebooks at the end of the page. The first link is to the Python code file. | |
55 | The second is to a notebook that has not been executed, and has no outputs. | |
56 | The third is to a version of the same notebook that has been executed, and has | |
57 | the code output cells. See the :ref:`code-links-directive` documentation for | |
58 | options to select which of these links to add. | |
59 | ||
60 | .. include:: links_names.inc |
0 | 0 | # nb2plots package |
1 | 1 | |
2 | from ._version import get_versions | |
3 | __version__ = get_versions()['version'] | |
4 | del get_versions | |
2 | from . import _version | |
3 | __version__ = _version.get_versions()['version'] | |
5 | 4 | |
6 | 5 | from . import nbplots |
7 | 6 | from . import runroles |
0 | 0 | |
1 | # This file was generated by 'versioneer.py' (0.17) from | |
2 | # revision-control system data, or from the parent directory name of an | |
3 | # unpacked source archive. Distribution tarballs contain a pre-generated copy | |
4 | # of this file. | |
5 | ||
6 | import json | |
7 | ||
8 | version_json = ''' | |
9 | { | |
10 | "date": "2018-02-03T21:09:07+0000", | |
11 | "dirty": false, | |
12 | "error": null, | |
13 | "full-revisionid": "8873eeca7d7ad617dcac9aa317ee0a7079e17c79", | |
14 | "version": "0.6" | |
15 | } | |
16 | ''' # END VERSION_JSON | |
1 | # This file helps to compute a version number in source trees obtained from | |
2 | # git-archive tarball (such as those provided by githubs download-from-tag | |
3 | # feature). Distribution tarballs (built by setup.py sdist) and build | |
4 | # directories (produced by setup.py build) will contain a much shorter file | |
5 | # that just contains the computed version number. | |
6 | ||
7 | # This file is released into the public domain. Generated by | |
8 | # versioneer-0.22 (https://github.com/python-versioneer/python-versioneer) | |
9 | ||
10 | """Git implementation of _version.py.""" | |
11 | ||
12 | import errno | |
13 | import os | |
14 | import re | |
15 | import subprocess | |
16 | import sys | |
17 | from typing import Callable, Dict | |
18 | import functools | |
19 | ||
20 | ||
def get_keywords():
    """Return the git-archive keyword values used to derive the version."""
    # The string values below are substituted by git during `git archive`
    # (see the export-subst attribute).  setup.py/versioneer.py greps for
    # these variable names, so each assignment must stay on its own line.
    git_refnames = " (HEAD -> main, tag: 0.6.1)"
    git_full = "369f12c15eb151de929a81658fabdea0d95cf6ad"
    git_date = "2022-06-02 16:13:30 +0100"
    return {"refnames": git_refnames, "full": git_full, "date": git_date}
32 | ||
33 | ||
class VersioneerConfig:
    """Plain attribute bag holding Versioneer configuration values."""
36 | ||
37 | ||
def get_config():
    """Build and return the VersioneerConfig for this project."""
    # These settings were filled in when 'setup.py versioneer' generated
    # _version.py for nb2plots.
    settings = {
        "VCS": "git",
        "style": "pep440",
        "tag_prefix": "",
        "parentdir_prefix": "nb2plots-",
        "versionfile_source": "nb2plots/_version.py",
        "verbose": False,
    }
    cfg = VersioneerConfig()
    for attr, value in settings.items():
        setattr(cfg, attr, value)
    return cfg
50 | ||
51 | ||
class NotThisMethod(Exception):
    """Signal that one version-discovery strategy does not apply here."""
54 | ||
55 | ||
# Long-form _version.py templates keyed by VCS name (populated by versioneer).
LONG_VERSION_PY: Dict[str, str] = {}
# Nested registry of VCS handler callables: HANDLERS[vcs][method] -> function,
# filled in by the register_vcs_handler decorator below.
HANDLERS: Dict[str, Dict[str, Callable]] = {}
58 | ||
59 | ||
def register_vcs_handler(vcs, method):  # decorator
    """Create decorator to mark a method as the handler of a VCS."""
    def decorate(f):
        """Record f as HANDLERS[vcs][method] and return it unchanged."""
        HANDLERS.setdefault(vcs, {})[method] = f
        return f
    return decorate
69 | ||
70 | ||
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
                env=None):
    """Run the first launchable command from *commands* with *args*.

    Returns a ``(stdout, returncode)`` pair.  ``stdout`` is the stripped,
    decoded output on success and ``None`` otherwise; ``returncode`` is
    ``None`` when no command could be launched at all.
    """
    assert isinstance(commands, list)

    popen_kwargs = {}
    if sys.platform == "win32":
        # Suppress the console window that would otherwise appear when
        # running under pythonw.exe.
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        popen_kwargs["startupinfo"] = startupinfo

    process = None
    dispcmd = None
    for candidate in commands:
        dispcmd = str([candidate] + args)
        try:
            # shell=False, so on Windows the caller must pass git.cmd,
            # not plain git.
            process = subprocess.Popen(
                [candidate] + args, cwd=cwd, env=env,
                stdout=subprocess.PIPE,
                stderr=(subprocess.PIPE if hide_stderr else None),
                **popen_kwargs)
            break
        except OSError:
            err = sys.exc_info()[1]
            if err.errno == errno.ENOENT:
                # Command not installed; try the next spelling.
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(err)
            return None, None
    else:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None

    stdout = process.communicate()[0].strip().decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
            print("stdout was %s" % stdout)
        return None, process.returncode
    return stdout, process.returncode
112 | ||
113 | ||
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes
    both the project name and a version string.  Also search up to two
    directory levels above *root* for an appropriately named directory.

    Raises NotThisMethod when no candidate directory matches.
    """
    tried = []
    candidate = root
    for _ in range(3):
        name = os.path.basename(candidate)
        if name.startswith(parentdir_prefix):
            return {
                "version": name[len(parentdir_prefix):],
                "full-revisionid": None,
                "dirty": False,
                "error": None,
                "date": None,
            }
        tried.append(candidate)
        candidate = os.path.dirname(candidate)  # up a level

    if verbose:
        print("Tried directories %s but none started with prefix %s"
              % (str(tried), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
136 | ||
137 | ||
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract version information from the given file.

    Scans *versionfile_abs* for the ``git_refnames``, ``git_full`` and
    ``git_date`` assignments and returns whatever quoted values it finds.
    """
    # the code embedded in _version.py can just fetch the value of these
    # keywords. When used from setup.py, we don't want to import _version.py,
    # so we do it with a regexp instead. This function is not used from
    # _version.py.
    wanted = (("git_refnames =", "refnames"),
              ("git_full =", "full"),
              ("git_date =", "date"))
    keywords = {}
    try:
        with open(versionfile_abs, "r") as fobj:
            for line in fobj:
                stripped = line.strip()
                for assign_prefix, key in wanted:
                    if stripped.startswith(assign_prefix):
                        mo = re.search(r'=\s*"(.*)"', line)
                        if mo:
                            keywords[key] = mo.group(1)
    except OSError:
        # Missing/unreadable file: return whatever (possibly nothing) we got.
        pass
    return keywords
164 | ||
165 | ||
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Get version information from git keywords.

    Parameters
    ----------
    keywords : dict
        As returned by ``git_get_keywords``: "refnames" and "full" strings,
        optionally "date".
    tag_prefix : str
        Prefix carried by release tags; stripped from the chosen ref to
        produce the version string.
    verbose : bool
        If true, print diagnostic messages.

    Returns
    -------
    dict
        With "version", "full-revisionid", "dirty", "error", "date" keys.

    Raises
    ------
    NotThisMethod
        If keywords are absent or are still unexpanded ``$Format`` markers
        (i.e. this is not a git-archive tarball).
    """
    if "refnames" not in keywords:
        raise NotThisMethod("Short version file found")
    date = keywords.get("date")
    if date is not None:
        # Use only the last line. Previous lines may contain GPG signature
        # information.
        date = date.splitlines()[-1]

        # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
        # datestamp. However we prefer "%ci" (which expands to an "ISO-8601
        # -like" string, which we must then edit to make compliant), because
        # it's been around since git-1.5.3, and it's too difficult to
        # discover which version we're using, or to work around using an
        # older one.
        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = {r.strip() for r in refnames.strip("()").split(",")}
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = {r[len(TAG):] for r in refs if r.startswith(TAG)}
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = {r for r in refs if re.search(r'\d', r)}
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            r = ref[len(tag_prefix):]
            # Filter out refs that exactly match prefix or that don't start
            # with a number once the prefix is stripped (mostly a concern
            # when prefix is '')
            if not re.match(r'\d', r):
                continue
            if verbose:
                print("picking %s" % r)
            return {"version": r,
                    "full-revisionid": keywords["full"].strip(),
                    "dirty": False, "error": None,
                    "date": date}
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags", "date": None}
228 | ||
229 | ||
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
    """Get version from 'git describe' in the root of the source tree.

    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.

    Parameters
    ----------
    tag_prefix : str
        Prefix carried by release tags (may be empty).
    root : str
        Directory expected to be under git control.
    verbose : bool
        If true, print diagnostics.
    runner : callable, optional
        Command runner with the signature of ``run_command``; injectable
        for testing.

    Returns
    -------
    pieces : dict
        Keys "long", "short", "error", "branch", "dirty", plus either
        "closest-tag"/"distance" on success or a non-None "error" message.

    Raises
    ------
    NotThisMethod
        If *root* is not under git control or required git commands fail.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        # shell=False means we must name the .cmd/.exe wrappers on Windows.
        GITS = ["git.cmd", "git.exe"]

    # GIT_DIR can interfere with correct operation of Versioneer.
    # It may be intended to be passed to the Versioneer-versioned project,
    # but that should not change where we get our version from.
    env = os.environ.copy()
    env.pop("GIT_DIR", None)
    runner = functools.partial(runner, env=env)

    _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root,
                   hide_stderr=True)
    if rc != 0:
        if verbose:
            print("Directory %s not under git control" % root)
        raise NotThisMethod("'git rev-parse --git-dir' returned error")

    MATCH_ARGS = ["--match", "%s*" % tag_prefix] if tag_prefix else []

    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty",
                                     "--always", "--long", *MATCH_ARGS],
                              cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()

    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None

    branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
                             cwd=root)
    # --abbrev-ref was added in git-1.6.3
    if rc != 0 or branch_name is None:
        raise NotThisMethod("'git rev-parse --abbrev-ref' returned error")
    branch_name = branch_name.strip()

    if branch_name == "HEAD":
        # If we aren't exactly on a branch, pick a branch which represents
        # the current commit. If all else fails, we are on a branchless
        # commit.
        branches, rc = runner(GITS, ["branch", "--contains"], cwd=root)
        # --contains was added in git-1.5.4
        if rc != 0 or branches is None:
            raise NotThisMethod("'git branch --contains' returned error")
        branches = branches.split("\n")

        # Remove the first line if we're running detached
        if "(" in branches[0]:
            branches.pop(0)

        # Strip off the leading "* " from the list of branches.
        branches = [branch[2:] for branch in branches]
        if "master" in branches:
            branch_name = "master"
        elif not branches:
            branch_name = None
        else:
            # Pick the first branch that is returned. Good or bad.
            branch_name = branches[0]

    pieces["branch"] = branch_name

    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out

    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]

    # now we have TAG-NUM-gHEX or HEX

    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparsable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%s'"
                               % describe_out)
            return pieces

        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
                               % (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]

        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))

        # commit: short hex revision ID
        pieces["short"] = mo.group(3)

    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root)
        # NOTE(review): count_out would be None if rev-list fails, making
        # int() raise TypeError here — confirm against upstream versioneer.
        pieces["distance"] = int(count_out)  # total number of commits

    # commit date: see ISO-8601 comment in git_versions_from_keywords()
    date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip()
    # Use only the last line. Previous lines may contain GPG signature
    # information.
    date = date.splitlines()[-1]
    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)

    return pieces
362 | ||
363 | ||
def plus_or_dot(pieces):
    """Return "+" if the closest tag has no local segment yet, else ".".

    Used when appending to the PEP 440 local version identifier: the first
    local segment is introduced with "+", later ones are joined with ".".

    Bug fix: ``pieces.get("closest-tag", "")`` returned ``None`` when the
    key was present with value ``None``, so ``"+" in None`` raised
    TypeError; ``or ""`` guards that case.
    """
    if "+" in (pieces.get("closest-tag") or ""):
        return "."
    return "+"
369 | ||
370 | ||
def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]].  Note that if you get a tagged
    build and then dirty it, you'll get TAG+0.gHEX.dirty.

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if tag:
        version = tag
        if pieces["distance"] or pieces["dirty"]:
            # First local segment gets "+", later ones are joined with "."
            sep = "." if "+" in tag else "+"
            version += "%s%d.g%s" % (sep, pieces["distance"], pieces["short"])
            if pieces["dirty"]:
                version += ".dirty"
        return version
    # exception #1: no tag at all
    version = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
    if pieces["dirty"]:
        version += ".dirty"
    return version
394 | ||
395 | ||
def render_pep440_branch(pieces):
    """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] .

    The ".dev0" means not master branch.  Note that .dev0 sorts backwards
    (a feature branch will appear "older" than the master branch).

    Exceptions:
    1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    distance = pieces["distance"]
    dirty = pieces["dirty"]
    off_master = pieces["branch"] != "master"

    if tag:
        version = tag
        if distance or dirty:
            if off_master:
                version += ".dev0"
            version += "." if "+" in tag else "+"
            version += "%d.g%s" % (distance, pieces["short"])
            if dirty:
                version += ".dirty"
        return version

    # exception #1: no tag at all
    version = "0"
    if off_master:
        version += ".dev0"
    version += "+untagged.%d.g%s" % (distance, pieces["short"])
    if dirty:
        version += ".dirty"
    return version
424 | ||
425 | ||
def pep440_split_post(ver):
    """Split pep440 version string at the post-release segment.

    Returns a 2-tuple of the release segments before the post-release and
    the post-release version number, or ``None`` if no post-release segment
    is present.  (The previous docstring incorrectly said ``-1``.)

    A bare ``.post`` suffix counts as post-release 0.
    """
    parts = ver.split(".post")
    if len(parts) == 2:
        # "X.post" (empty number) is treated as post-release 0.
        return parts[0], int(parts[1] or 0)
    return parts[0], None
434 | ||
435 | ||
def render_pep440_pre(pieces):
    """TAG[.postN.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post0.devDISTANCE
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: no tag at all
        return "0.post0.dev%d" % pieces["distance"]
    if not pieces["distance"]:
        # sitting exactly on the tag: the tag is the version
        return tag
    # bump (or introduce) the post-release segment of the tag
    base, post = pep440_split_post(tag)
    if post is not None:
        return base + ".post%d.dev%d" % (post + 1, pieces["distance"])
    return base + ".post0.dev%d" % pieces["distance"]
458 | ||
459 | ||
def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty.  Note that .dev0 sorts backwards (a dirty tree
    will appear "older" than the corresponding clean one), but you shouldn't
    be releasing software with -dirty anyways.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if tag:
        version = tag
        if pieces["distance"] or pieces["dirty"]:
            version += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                version += ".dev0"
            # First local segment gets "+", later ones are joined with "."
            version += "." if "+" in tag else "+"
            version += "g%s" % pieces["short"]
        return version
    # exception #1: no tag at all
    version = "0.post%d" % pieces["distance"]
    if pieces["dirty"]:
        version += ".dev0"
    version += "+g%s" % pieces["short"]
    return version
485 | ||
486 | ||
def render_pep440_post_branch(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] .

    The ".dev0" means not master branch.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    distance = pieces["distance"]
    dirty = pieces["dirty"]
    off_master = pieces["branch"] != "master"

    if tag:
        version = tag
        if distance or dirty:
            version += ".post%d" % distance
            if off_master:
                version += ".dev0"
            # First local segment gets "+", later ones are joined with "."
            version += "." if "+" in tag else "+"
            version += "g%s" % pieces["short"]
            if dirty:
                version += ".dirty"
        return version

    # exception #1: no tag at all
    version = "0.post%d" % distance
    if off_master:
        version += ".dev0"
    version += "+g%s" % pieces["short"]
    if dirty:
        version += ".dirty"
    return version
514 | ||
515 | ||
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.

    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    suffix = ""
    # A suffix is needed when untagged, away from the tag, or dirty.
    if not tag or pieces["distance"] or pieces["dirty"]:
        suffix = ".post%d" % pieces["distance"]
        if pieces["dirty"]:
            suffix += ".dev0"
    return (tag or "0") + suffix
536 | ||
537 | ||
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: no tag at all, bare short hash
        rendered = pieces["short"]
    elif pieces["distance"]:
        rendered = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        rendered = tag
    return rendered + ("-dirty" if pieces["dirty"] else "")
556 | ||
557 | ||
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        base = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        # exception #1: no tag at all, bare short hash
        base = pieces["short"]
    return base + ("-dirty" if pieces["dirty"] else "")
576 | ||
577 | ||
def render(pieces, style):
    """Render the given version pieces into the requested style.

    Returns a dict with "version", "full-revisionid", "dirty", "error" and
    "date" keys; raises ValueError for an unrecognised *style*.
    """
    if pieces["error"]:
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"],
                "date": None}

    if not style or style == "default":
        style = "pep440"  # the default

    renderers = {
        "pep440": render_pep440,
        "pep440-branch": render_pep440_branch,
        "pep440-pre": render_pep440_pre,
        "pep440-post": render_pep440_post,
        "pep440-post-branch": render_pep440_post_branch,
        "pep440-old": render_pep440_old,
        "git-describe": render_git_describe,
        "git-describe-long": render_git_describe_long,
    }
    if style not in renderers:
        raise ValueError("unknown style '%s'" % style)

    return {"version": renderers[style](pieces),
            "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"],
            "error": None,
            "date": pieces.get("date")}
17 | 612 | |
18 | 613 | |
def get_versions():
    """Get version information or return default if unable to do so.

    Tries, in order: expanded git-archive keywords, ``git describe`` on a
    checked-out source tree, then the parent-directory name; falls back to
    a "0+unknown" dict when every method raises NotThisMethod.
    """
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.

    cfg = get_config()
    verbose = cfg.verbose

    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
                                          verbose)
    except NotThisMethod:
        pass

    try:
        root = os.path.realpath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file. Invert
        # this to find the root from __file__.
        for _ in cfg.versionfile_source.split('/'):
            root = os.path.dirname(root)
    except NameError:
        # __file__ undefined (frozen interpreter): cannot locate the tree.
        return {"version": "0+unknown", "full-revisionid": None,
                "dirty": None,
                "error": "unable to find root of source tree",
                "date": None}

    try:
        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
        return render(pieces, cfg.style)
    except NotThisMethod:
        pass

    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
    except NotThisMethod:
        pass

    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None,
            "error": "unable to compute version", "date": None}
21 | 21 | -------------- |
22 | 22 | |
23 | 23 | When you build the full notebook, Jupyter will execute the code in each cell. |
24 | By default, Jupyter will time out of any cell takes longer than 30 seconds to | |
25 | execute. You can change this default for the whole project with the | |
24 | By default, Jupyter will time out for any cell that takes longer than 30 | |
25 | seconds to execute. You can change this default for the whole project with the | |
26 | 26 | ``fill_notebook_timeout`` setting in the ``conf.py`` file (see below). If you |
27 | 27 | just want to change the setting for a single page, you can add the ``timeout`` |
28 | 28 | option to the ``code-links`` directive. For example:: |
30 | 30 | .. code-links: |
31 | 31 | :timeout: 120 |
32 | 32 | |
33 | Set the timeout value to -1 or ``none`` to disable timeout entirely for this | |
34 | directive / page. | |
33 | Set the ``timeout`` value to -1 or ``none`` to disable timeout entirely for | |
34 | this directive / page. | |
35 | 35 | |
36 | 36 | Configuration options |
37 | 37 | --------------------- |
75 | 75 | (``app.cleanup()``) after use. |
76 | 76 | """ |
77 | 77 | app = self._make_app(rst_text) |
78 | out_fname = pjoin(app.tmp_dir, 'contents.rst') | |
78 | master_doc = app.config.master_doc | |
79 | out_fname = pjoin(app.tmp_dir, master_doc + '.rst') | |
79 | 80 | with open(out_fname, 'wt') as fobj: |
80 | 81 | fobj.write(rst_text) |
81 | 82 | # Force build of everything |
82 | 83 | app.build(True, []) |
83 | 84 | if resolve: |
84 | dt = app.env.get_and_resolve_doctree('contents', app.builder) | |
85 | dt = app.env.get_and_resolve_doctree(master_doc, app.builder) | |
85 | 86 | else: |
86 | dt = app.env.get_doctree('contents') | |
87 | dt = app.env.get_doctree(master_doc) | |
87 | 88 | return dt, app |
88 | 89 | |
89 | 90 | def from_doctree(self, doctree, builder): |
101 | 102 | output : str |
102 | 103 | Representation in output format |
103 | 104 | """ |
104 | builder.prepare_writing(['contents']) | |
105 | builder.prepare_writing([builder.config.master_doc]) | |
105 | 106 | return builder.writer.write(doctree, UnicodeOutput()) |
106 | 107 | |
107 | 108 | def from_rst(self, rst_text, resolve=True): |
142 | 143 | |
143 | 144 | |
144 | 145 | DEFAULT_CONF = """\ |
146 | master_doc = 'contents' # For compatibility with Sphinx 2 | |
145 | 147 | extensions = [{}] |
146 | 148 | """.format(',\n'.join('"{}"'.format(ext_name) |
147 | 149 | for ext_name in DEFAULT_EXTENSIONS)) |
343 | 343 | |
344 | 344 | def visit_literal_block(self, node): |
345 | 345 | self._escape_text = False |
346 | code_type = node['classes'][1] if 'code' in node['classes'] else '' | |
346 | if 'code' in node['classes']: # Sphinx < 2 | |
347 | code_type = node['classes'][1] | |
348 | else: # Sphinx >= 2 | |
349 | language = node.get('language', '') | |
350 | code_type = '' if language == 'default' else language | |
347 | 351 | self.add('```' + code_type + '\n') |
348 | 352 | |
349 | 353 | def depart_literal_block(self, node): |
13 | 13 | |
14 | 14 | # Template to label code and output and plot blocks |
15 | 15 | dl = DictLoader({'rst_plots.tpl': """\ |
16 | {%- extends 'rst.tpl' -%} | |
16 | {%- extends '__RST_DEFAULT_TEMPLATE__' -%} | |
17 | 17 | |
18 | 18 | {% block input %} |
19 | 19 | {%- if cell.source.strip() | has_mpl_inline -%} |
49 | 49 | {{ output.data['text/plain'] | ellipse_mpl | indent }} |
50 | 50 | ##END_OUT_END## |
51 | 51 | {%- endblock data_text -%} |
52 | """}) | |
52 | """.replace('__RST_DEFAULT_TEMPLATE__', | |
53 | nbconvert.RSTExporter().template_file)}) | |
53 | 54 | |
54 | 55 | |
55 | 56 | def has_mpl_inline(code): |
87 | 88 | return '\n'.join(new_code) |
88 | 89 | |
89 | 90 | |
90 | MPL_LIST_OUT = re.compile('\[<matplotlib\..*?>\]') | |
91 | MPL_OBJ_OUT = re.compile('<matplotlib\..*?>') | |
91 | MPL_LIST_OUT = re.compile(r'\[<matplotlib\..*?>\]') | |
92 | MPL_OBJ_OUT = re.compile(r'<matplotlib\..*?>') | |
92 | 93 | |
93 | 94 | def ellipse_mpl(text): |
94 | 95 | """ Replace outputs of matplotlib objects with ellipses |
112 | 113 | '^##CODE_START##\n' |
113 | 114 | '(?P<code>.*?)' |
114 | 115 | '^##CODE_END##(\n|$)' |
115 | '([\s\\n]*?' | |
116 | '([\\s\\n]*?' | |
116 | 117 | '^##STDOUT_START##\n' |
117 | 118 | '(?P<stdout>.*?)' |
118 | 119 | '^##STDOUT_END##(\n|$))?' |
119 | '([\s\\n]*?' | |
120 | '([\\s\\n]*?' | |
120 | 121 | '^##END_OUT_START##\n' |
121 | 122 | '(?P<end_out>.*?)' |
122 | 123 | '^##END_OUT_END##(\n|$))?', re.S | re.M) |
176 | 176 | |
177 | 177 | import six |
178 | 178 | |
179 | from collections import defaultdict, Sequence | |
179 | try: | |
180 | from collections.abc import Sequence | |
181 | except ImportError: | |
182 | from collections import Sequence | |
183 | from collections import defaultdict | |
180 | 184 | import sys, os, shutil, io, re, textwrap |
181 | 185 | from os.path import (relpath, abspath, join as pjoin, dirname, exists, |
182 | 186 | basename, splitext, isdir) |
201 | 205 | from matplotlib._pylab_helpers import Gcf |
202 | 206 | |
203 | 207 | __version__ = 2 |
208 | ||
209 | ||
210 | def _get_rawsource(node): | |
211 | # Docutils < 0.18 has rawsource attribute, otherwise, build it. | |
212 | if hasattr(node, 'rawsource'): | |
213 | return node.rawsource | |
214 | return nodes.unescape(node, restore_backslashes=True) | |
204 | 215 | |
205 | 216 | |
206 | 217 | class NBPlotFlags(Directive): |
340 | 351 | """ Replace ``doctest_block`` nodes with ``dont_doctest_block`` nodes |
341 | 352 | """ |
342 | 353 | for node in tree.traverse(doctest_filter): |
343 | new_node = self.dont_doctest_block(node.rawsource, node.rawsource) | |
354 | raw_source = _get_rawsource(node) | |
355 | new_node = self.dont_doctest_block(raw_source, raw_source) | |
344 | 356 | node.replace_self(new_node) |
345 | 357 | |
346 | 358 | def rst2nodes(self, lines, node_class, node_attrs=None): |
672 | 684 | """ |
673 | 685 | Remove the coding comment, which six.exec_ doesn't like. |
674 | 686 | """ |
675 | sub_re = re.compile("^#\s*-\*-\s*coding:\s*.*-\*-$", flags=re.MULTILINE) | |
687 | sub_re = re.compile(r"^#\s*-\*-\s*coding:\s*.*-\*-$", flags=re.MULTILINE) | |
676 | 688 | return sub_re.sub("", text) |
677 | 689 | |
678 | 690 | #------------------------------------------------------------------------------ |
355 | 355 | # code visit, depart methods with app.add_node as we have just done for the |
356 | 356 | # html translator in the lines above. See: |
357 | 357 | # http://www.sphinx-doc.org/en/1.4.8/extdev/tutorial.html#the-setup-function |
358 | app.set_translator('markdown', doctree2py.Translator) | |
358 | 359 | app.set_translator('pyfile', doctree2py.Translator) |
359 | 360 | app.set_translator('ipynb', doctree2nb.Translator) |
5 | 5 | import numpy as np |
6 | 6 | |
7 | 7 | from sphinxtesters import SourcesBuilder |
8 | ||
9 | 8 | |
10 | 9 | DATA_PATH = abspath(pjoin( |
11 | 10 | dirname(__file__), |
29 | 28 | Used by several test functions. |
30 | 29 | """ |
31 | 30 | |
32 | conf_source = 'extensions = ["nb2plots", "sphinx.ext.doctest"]' | |
31 | conf_source = """\ | |
32 | master_doc = "contents" # Compatibility with Sphinx 2 | |
33 | extensions = ["nb2plots", "sphinx.ext.doctest"] | |
34 | """ | |
35 | ||
36 | def stripeq(actual, expected): | |
37 | """ True if LR stripped `actual` equal to LR stripped `expected` | |
38 | """ | |
39 | return actual.strip() == expected.strip() |
0 | 0 | """ Utils for testing notebooks |
1 | 1 | """ |
2 | 2 | |
3 | from copy import deepcopy | |
4 | ||
3 | 5 | from nb2plots.ipython_shim import nbf |
6 | ||
7 | ||
8 | def rm_ids(nb): | |
9 | nb2 = deepcopy(nb) | |
10 | for cell in nb2['cells']: | |
11 | if 'id' in cell: | |
12 | del cell['id'] | |
13 | return nb2 | |
4 | 14 | |
5 | 15 | |
6 | 16 | def assert_nb_equiv(ipynb, expected): |
10 | 20 | # It does not appear to be possible to request specific minor versions of |
11 | 21 | # the Notebook format. |
12 | 22 | expected_nb['nbformat_minor'] = actual_nb['nbformat_minor'] |
13 | assert actual_nb == expected_nb | |
23 | # 'language_info' key seems to have arrived in metadata as a result of | |
24 | # nbconvert 5.3.1 -> 5.4.0 (5.4.0 released September 7 2018). Previously | |
25 | # it was empty. | |
26 | actual_nb['metadata'].pop('language_info', None) | |
27 | # 'execution' in cell metadata from nbconvert 6.0 | |
28 | for cell in actual_nb['cells']: | |
29 | if 'execution' in cell['metadata']: | |
30 | cell['metadata'].pop('execution') | |
31 | assert rm_ids(actual_nb) == rm_ids(expected_nb) |
0 | """ Configuration for py.test test run | |
0 | """ Skip the origin Gohlke transforms for doctests. | |
1 | ||
2 | That file needs some specific doctest setup. | |
1 | 3 | """ |
2 | 4 | |
3 | def pytest_ignore_collect(path, config): | |
4 | """ Skip the origin Gohlke transforms for doctests. | |
5 | from os.path import join as pjoin | |
5 | 6 | |
6 | That file needs some specific doctest setup. | |
7 | """ | |
8 | return path.basename in ('conf.py', 'rst_md_files') | |
7 | collect_ignore = [pjoin('proj1', "conf.py"), 'rst_md_files'] |
0 | BUILDDIR = build | |
1 | SRCDIR = proj1 | |
2 | ||
3 | all: build-html | |
4 | ||
5 | clean: | |
6 | rm -rf $(BUILDDIR) | |
7 | ||
8 | init: clean | |
9 | rm -rf $(SRCDIR) | |
10 | cp -r ../$(SRCDIR) . | |
11 | ||
12 | build-% : | |
13 | sphinx-build -W -b $* -d $(BUILDDIR) $(SRCDIR) $(BUILDDIR) | |
14 | ||
15 | debug-% : | |
16 | sphinx-build -P -W -b $* -d $(BUILDDIR) $(SRCDIR) $(BUILDDIR) | |
17 | ||
18 | permissive-% : | |
19 | sphinx-build -b $* -d $(BUILDDIR) $(SRCDIR) $(BUILDDIR) | |
20 | ||
21 | html: build-html | |
22 | ||
23 | doctest: build-doctest | |
24 | ||
25 | text: build-text |
0 | ################# | |
1 | Futzing directory | |
2 | ################# | |
3 | ||
4 | The tests are all well and good in their temporary directory, but sometimes I | |
5 | need to do an actual sphinx build and check what the output is, for debugging | |
6 | and so on. | |
7 | ||
8 | I do that in this directory. Something like:: | |
9 | ||
10 | make init | |
11 | make html | |
12 | ||
13 | Then I might copy some stuff into the ``proj1`` subdirectory that ``make | |
14 | init`` made for me, before retrying the build. |
2 | 2 | from __future__ import unicode_literals |
3 | 3 | |
4 | 4 | from os.path import join as pjoin, isfile |
5 | import re | |
5 | 6 | |
6 | 7 | from nb2plots.testing import PlotsBuilder |
7 | 8 | |
112 | 113 | """ Markdown builder with specified base URL |
113 | 114 | """ |
114 | 115 | |
115 | conf_source = ('extensions = ["nb2plots"]\n' | |
116 | conf_source = ('master_doc = "contents"\n' | |
117 | 'extensions = ["nb2plots"]\n' | |
116 | 118 | 'markdown_http_base = "https://dynevor.org"') |
117 | 119 | |
118 | 120 | def test_output(self): |
119 | 121 | assert self.get_built_file('contents.md').strip() == '' |
120 | assert self.get_built_file('a_page.md') == """\ | |
121 | ## Refereed section | |
122 | ||
123 | This section refers to [itself](https://dynevor.org/a_page.html#a-ref). | |
124 | ||
125 | It also refers forward to the [next section](https://dynevor.org/a_page.html#b-ref). | |
126 | ||
127 | Then, and finally, it refers to itself with its own name: [Refereed section](https://dynevor.org/a_page.html#a-ref). | |
122 | expected_re = r"""## Refereed section | |
123 | ||
124 | This section refers to \[itself\]\(https://dynevor.org/a_page.html#a-ref\)\. | |
125 | ||
126 | It also refers forward to the \[next section\]\(https://dynevor.org/a_page.html#b-ref\)\. | |
127 | ||
128 | Then, and finally, it refers to itself with its own name: \[Refereed section\]\(https://dynevor.org/a_page\.html#a-ref\)\. | |
128 | 129 | |
129 | 130 | ## Rerefereed |
130 | 131 | |
131 | This section refers to this document at [Refereed section](https://dynevor.org/a_page.html), and with an | |
132 | explicit title, to [this document](https://dynevor.org/a_page.html). | |
133 | ||
134 | Then to [Refereed section](https://dynevor.org/a_page.html). Again to [another doc](https://dynevor.org/a_page.html). | |
135 | ||
136 | Now [a_page.rst](https://dynevor.org/_downloads/a_page.rst). | |
137 | ||
138 | Then [another page](https://dynevor.org/_downloads/a_page.rst). | |
139 | ||
140 | Then [a link](https://another-place.com/page.html). | |
141 | ||
142 | Again, we [link to another doc](https://dynevor.org/subdir1/b_page.html). | |
143 | """ | |
132 | This section refers to this document at \[Refereed section\]\(https://dynevor\.org/a_page\.html\), and with an | |
133 | explicit title, to \[this document\]\(https://dynevor\.org/a_page.html\)\. | |
134 | ||
135 | Then to \[Refereed section\]\(https://dynevor\.org/a_page.html\)\. Again to \[another doc\]\(https://dynevor\.org/a_page.html\)\. | |
136 | ||
137 | Now \[a_page\.rst\]\(https://dynevor.org/_downloads/([a-f0-9]+/)?a_page.rst\)\. | |
138 | ||
139 | Then \[another page\]\(https://dynevor\.org/_downloads/([a-f0-9]+/)?a_page.rst\)\. | |
140 | ||
141 | Then \[a link\]\(https://another-place.com/page.html\)\. | |
142 | ||
143 | Again, we \[link to another doc\]\(https://dynevor.org/subdir1/b_page\.html\)\. | |
144 | """ | |
145 | actual = self.get_built_file('a_page.md') | |
146 | assert re.match(expected_re, actual) | |
144 | 147 | assert self.get_built_file(pjoin('subdir1', 'b_page.md')) == """\ |
145 | 148 | ## Another page |
146 | 149 | |
197 | 200 | """ Python builder with specified base URL |
198 | 201 | """ |
199 | 202 | |
200 | conf_source = ('extensions = ["nb2plots"]\n' | |
203 | conf_source = ('master_doc = "contents"\n' | |
204 | 'extensions = ["nb2plots"]\n' | |
201 | 205 | 'markdown_http_base = "https://dynevor.org"') |
202 | 206 | |
203 | 207 | def test_output(self): |
229 | 233 | .. code-links:: |
230 | 234 | """} |
231 | 235 | |
236 | toctree_pages = list(rst_sources) | |
237 | ||
232 | 238 | def test_output(self): |
233 | 239 | for suffix in ('.py', '.ipynb', '_full.ipynb'): |
234 | 240 | assert isfile(pjoin(self.out_dir, 'foo', 'a_page' + suffix)) |
19 | 19 | .. code-links:: |
20 | 20 | |
21 | 21 | More text here.""" |
22 | both_re = re.compile("""\ | |
23 | <document source=".*?"> | |
22 | both_re = re.compile(r"""<document source=".*?"> | |
24 | 23 | <paragraph> |
25 | 24 | Text here |
26 | 25 | <code_links> |
60 | 59 | |
61 | 60 | More text here.""" |
62 | 61 | pxml = as_pxml(page) |
63 | assert re.match("""\ | |
64 | <document source=".*?"> | |
62 | assert re.match(r"""<document source=".*?"> | |
65 | 63 | <paragraph> |
66 | 64 | Text here |
67 | 65 | <code_links> |
80 | 78 | |
81 | 79 | More text here.""" |
82 | 80 | pxml = as_pxml(page) |
83 | assert re.match("""\ | |
84 | <document source=".*?"> | |
81 | assert re.match(r"""<document source=".*?"> | |
85 | 82 | <paragraph> |
86 | 83 | Text here |
87 | 84 | <code_links> |
100 | 97 | |
101 | 98 | More text here.""" |
102 | 99 | pxml = as_pxml(page) |
103 | assert re.match("""\ | |
104 | <document source=".*?"> | |
100 | assert re.match(r"""<document source=".*?"> | |
105 | 101 | <paragraph> |
106 | 102 | Text here |
107 | 103 | <code_links> |
24 | 24 | # pseudoxml converter |
25 | 25 | conv = Converter('pseudoxml') |
26 | 26 | pxml = conv.from_rst(NEW_PAGE) |
27 | assert re.search(r"""<document source=".*/contents.rst"> | |
27 | assert re.search(r"""<document source=".*/(contents|index)\.rst"> | |
28 | 28 | <section ids="more-fancy-title" names="more\\ fancy\\ title"> |
29 | 29 | <title> |
30 | 30 | More fancy title |
1 | 1 | """ |
2 | 2 | from os.path import join as pjoin |
3 | 3 | from glob import glob |
4 | import re | |
4 | 5 | |
5 | 6 | from nb2plots.converters import to_notebook |
6 | 7 | from nb2plots.ipython_shim import nbf |
15 | 16 | from nb2plots.testing.nbtesters import assert_nb_equiv |
16 | 17 | |
17 | 18 | |
19 | ID_RE = re.compile(r'"id":\s+".*?",\s*\n?') | |
20 | ||
21 | ||
18 | 22 | def to_nb_safe(rst_str): |
19 | 23 | out = to_notebook.from_rst(rst_str) |
20 | 24 | return unsmart_nb(out) |
26 | 30 | return nbf.writes(nb) |
27 | 31 | |
28 | 32 | |
33 | def rm_json_id(s): | |
34 | return ID_RE.sub(s, '') | |
35 | ||
36 | ||
29 | 37 | def assert_rst_cells_equal(rst_text, cells): |
30 | 38 | actual = to_notebook.from_rst(rst_text) |
31 | 39 | expected = cells2json(cells) |
32 | assert actual == expected | |
40 | assert rm_json_id(actual) == rm_json_id(expected) | |
33 | 41 | |
34 | 42 | |
35 | 43 | def test_basic(): |
5 | 5 | from ..ipython_shim import nbformat |
6 | 6 | from ..from_notebook import (convert_nb, convert_nb_fname, to_doctests, |
7 | 7 | has_mpl_inline, CODE_WITH_OUTPUT) |
8 | from ..testing import stripeq | |
8 | 9 | |
9 | 10 | |
10 | 11 | DATA_PATH = pjoin(dirname(__file__), 'data') |
19 | 20 | md_cell = v4.new_markdown_cell('# Some text') |
20 | 21 | nb['cells'] = [md_cell] |
21 | 22 | exp_text = "\nSome text\n=========\n" |
22 | assert convert_nb(nb) == exp_text | |
23 | assert stripeq(convert_nb(nb), exp_text) | |
23 | 24 | # Code -> replaced with plot directive / doctest markers |
24 | 25 | code_cell = v4.new_code_cell('a = 10') |
25 | 26 | nb['cells'] = [code_cell] |
26 | 27 | exp_code = PLT_HDR + " >>> a = 10\n" |
27 | assert convert_nb(nb) == exp_code | |
28 | assert stripeq(convert_nb(nb), exp_code) | |
28 | 29 | # Empty code -> no output |
29 | 30 | empty_code_cell = v4.new_code_cell('') |
30 | 31 | nb['cells'] = [empty_code_cell] |
34 | 35 | # magic lines get stripped |
35 | 36 | magic_code_cell = v4.new_code_cell('%timeit a = 1') |
36 | 37 | nb['cells'] = [magic_code_cell] |
37 | assert convert_nb(nb) == exp_empty_code | |
38 | assert stripeq(convert_nb(nb), exp_empty_code) | |
38 | 39 | # Magic lines stripped from within other code lines |
39 | 40 | mixed_magic_code_cell = v4.new_code_cell('%timeit a = 1\nb = 2') |
40 | 41 | exp_mixed_magic = PLT_HDR + " >>> b = 2\n" |
41 | 42 | nb['cells'] = [mixed_magic_code_cell] |
42 | assert convert_nb(nb) == exp_mixed_magic | |
43 | assert stripeq(convert_nb(nb), exp_mixed_magic) | |
43 | 44 | |
44 | 45 | |
45 | 46 | def test_mpl_inline_works(): |
49 | 50 | code_cell = v4.new_code_cell('%matplotlib inline\na = 10') |
50 | 51 | nb['cells'] = [code_cell] |
51 | 52 | exp_code = "\n.. mpl-interactive::\n{} >>> a = 10\n".format(PLT_HDR) |
52 | assert convert_nb(nb) == exp_code | |
53 | assert stripeq(convert_nb(nb), exp_code) | |
53 | 54 | |
54 | 55 | |
55 | 56 | def test_mpl_inline(): |
125 | 126 | rst_fname = pjoin(DATA_PATH, 'small.rst') |
126 | 127 | out = convert_nb_fname(nb_fname) |
127 | 128 | with open(rst_fname, 'rt') as fobj: |
128 | assert out + '\n' == fobj.read() | |
129 | assert stripeq(out, fobj.read()) | |
129 | 130 | |
130 | 131 | |
131 | 132 | code = \ |
6 | 6 | |
7 | 7 | from docutils.nodes import paragraph, title |
8 | 8 | |
9 | import sphinx | |
10 | ||
11 | SPHINX_ge_1p8 = sphinx.version_info[:2] >= (1, 8) | |
12 | ||
9 | 13 | from nb2plots.nbplots import (run_code, parse_parts, nbplot_container, |
10 | 14 | nbplot_epilogue) |
11 | 15 | from sphinxtesters import SourcesBuilder |
18 | 22 | |
19 | 23 | |
20 | 24 | HERE = dirname(__file__) |
25 | ||
26 | # Variation in doctest block. | |
27 | DOCTEST_BLOCK_RE = r'<doctest_block (classes="doctest" )?xml:space="preserve">' | |
21 | 28 | |
22 | 29 | |
23 | 30 | def get_otherpage(fname): |
444 | 451 | # Check that reference correctly included |
445 | 452 | built = self.get_built_file('a_page.pseudoxml') |
446 | 453 | expected_regexp = re.compile( |
447 | r"""<document _plot_counter="1" source=".*?a_page.rst"> | |
454 | r"""<document _plot_counter="1" source=".*?a_page.rst"( xmlns.*)?> | |
448 | 455 | <section ids="a-title" names="a\\ title"> |
449 | 456 | <title> |
450 | 457 | A title |
469 | 476 | |
470 | 477 | builder = 'pseudoxml' |
471 | 478 | |
479 | literal_header = ( | |
480 | r'<literal_block ' + | |
481 | (r'force(_highlighting)?="False" language="default" linenos="False" ' | |
482 | if SPHINX_ge_1p8 else '') + | |
483 | 'xml:space="preserve">') | |
484 | ||
472 | 485 | rst_sources=dict(a_page="""\ |
473 | 486 | A title |
474 | 487 | ------- |
492 | 505 | def test_flags(self): |
493 | 506 | # Check that flags correctly set from flag directives |
494 | 507 | built = self.get_built_file('a_page.pseudoxml') |
495 | assert """ | |
508 | expected = r""" | |
496 | 509 | <title> |
497 | 510 | A title |
498 | <literal_block xml:space="preserve"> | |
499 | {'a': 1, 'b': 2} | |
511 | {literal_header} | |
512 | {{'a': 1, 'b': 2}} | |
500 | 513 | <paragraph> |
501 | 514 | Some text |
502 | <literal_block xml:space="preserve"> | |
503 | {'a': 1, 'b': 2, 'c': 3}""" in built | |
515 | {literal_header} | |
516 | {{'a': 1, 'b': 2, 'c': 3}}""".format( | |
517 | literal_header=self.literal_header) | |
518 | assert re.search(expected, built) | |
504 | 519 | |
505 | 520 | |
506 | 521 | class TestFlagsConfig(TestFlags): |
514 | 529 | def test_flags(self): |
515 | 530 | # Check that global flags merged with local |
516 | 531 | built = self.get_built_file('a_page.pseudoxml') |
517 | assert (""" | |
532 | expected = r""" | |
518 | 533 | <title> |
519 | 534 | A title |
520 | <literal_block xml:space="preserve"> | |
521 | {'a': 1, 'b': 2, 'flag1': 5, 'flag2': 6} | |
535 | {literal_header} | |
536 | {{'a': 1, 'b': 2, 'flag1': 5, 'flag2': 6}} | |
522 | 537 | <paragraph> |
523 | 538 | Some text |
524 | <literal_block xml:space="preserve"> | |
525 | {'a': 1, 'b': 2, 'c': 3, 'flag1': 5, 'flag2': 6}""" | |
526 | in built) | |
539 | {literal_header} | |
540 | {{'a': 1, 'b': 2, 'c': 3, 'flag1': 5, 'flag2': 6}}""".format( | |
541 | literal_header=self.literal_header) | |
542 | assert re.search(expected, built) | |
527 | 543 | |
528 | 544 | |
529 | 545 | class TestWithoutSkip(PlotsBuilder): |
625 | 641 | <title> |
626 | 642 | A title |
627 | 643 | <nbplot_container> |
628 | <doctest_block xml:space="preserve"> | |
644 | {DOCTEST_BLOCK_RE} | |
629 | 645 | >>> # always |
630 | 646 | >>> a = 'default' |
631 | 647 | <nbplot_epilogue> |
635 | 651 | <paragraph> |
636 | 652 | Some text |
637 | 653 | <nbplot_container> |
638 | <doctest_block xml:space="preserve"> | |
654 | {DOCTEST_BLOCK_RE} | |
639 | 655 | >>> a = 'skip is False' |
640 | 656 | <nbplot_epilogue> |
641 | 657 | <comment xml:space="preserve"> |
644 | 660 | <paragraph> |
645 | 661 | Keep text coming |
646 | 662 | <nbplot_container> |
647 | <doctest_block xml:space="preserve"> | |
663 | {DOCTEST_BLOCK_RE} | |
648 | 664 | >>> b = 'skip appears to be False' |
649 | 665 | >>> a == 'skip is False' |
650 | 666 | True |
655 | 671 | <paragraph> |
656 | 672 | Text continues |
657 | 673 | <nbplot_container> |
658 | <doctest_block xml:space="preserve"> | |
674 | {DOCTEST_BLOCK_RE} | |
659 | 675 | >>> # doctest only run when skip flag False, always rendered |
660 | 676 | >>> b == 'skip appears to be False' |
661 | True""") | |
677 | True""".format(**globals())) | |
662 | 678 | assert(regex.match(p_xml) is not None) |
663 | 679 | |
664 | 680 | |
694 | 710 | <title> |
695 | 711 | A title |
696 | 712 | <nbplot_container> |
697 | <doctest_block xml:space="preserve"> | |
713 | {DOCTEST_BLOCK_RE} | |
698 | 714 | >>> # always |
699 | 715 | >>> a = 'default' |
700 | 716 | <nbplot_epilogue> |
704 | 720 | <paragraph> |
705 | 721 | Some text |
706 | 722 | <nbplot_container> |
707 | <doctest_block xml:space="preserve"> | |
723 | {DOCTEST_BLOCK_RE} | |
708 | 724 | >>> a = 'skip is True' |
709 | 725 | <nbplot_epilogue> |
710 | 726 | <comment xml:space="preserve"> |
713 | 729 | <paragraph> |
714 | 730 | Keep text coming |
715 | 731 | <nbplot_container> |
716 | <doctest_block xml:space="preserve"> | |
732 | {DOCTEST_BLOCK_RE} | |
717 | 733 | >>> b = 'skip appears to be True' |
718 | 734 | >>> a == 'skip is True' |
719 | 735 | True |
733 | 749 | <comment xml:space="preserve"> |
734 | 750 | <comment xml:space="preserve"> |
735 | 751 | <nbplot_container hide-from="all" show-to="doctest"> |
736 | <doctest_block xml:space="preserve"> | |
752 | {DOCTEST_BLOCK_RE} | |
737 | 753 | >>> # only when skip flag True |
738 | 754 | >>> b == 'skip appears to be True' |
739 | True""") | |
755 | True""".format(**globals())) | |
740 | 756 | assert(regex.match(p_xml) is not None) |
741 | 757 | |
742 | 758 |
1 | 1 | |
2 | 2 | from os.path import (join as pjoin, dirname, isdir, exists) |
3 | 3 | |
4 | from sphinxtesters import ModifiedPageBuilder | |
4 | from sphinxtesters import PageBuilder | |
5 | 5 | |
6 | 6 | HERE = dirname(__file__) |
7 | 7 | |
11 | 11 | |
12 | 12 | """ |
13 | 13 | |
14 | class Proj1Builder(ModifiedPageBuilder): | |
14 | class Proj1Builder(PageBuilder): | |
15 | 15 | """ Build using 'proj1' directory as template to modify |
16 | 16 | """ |
17 | 17 | |
18 | 18 | page_source_template = pjoin(HERE, 'proj1') |
19 | ||
20 | # default_page used in 'replace_page' class method | |
21 | default_page = 'a_page.rst' | |
22 | 19 | |
23 | 20 | |
24 | 21 | class TestProj1(Proj1Builder): |
5 | 5 | from nb2plots.from_notebook import convert_nb_fname |
6 | 6 | from nb2plots.converters import to_py, to_notebook |
7 | 7 | |
8 | from nb2plots.testing import stripeq | |
8 | 9 | from nb2plots.testing.convutils import fcontents |
9 | 10 | from nb2plots.testing.nbtesters import assert_nb_equiv |
10 | 11 | |
18 | 19 | output_rst_fname = pjoin(DATA, 'converted_example.rst') |
19 | 20 | # Convert to ReST, add trailing CR from output script |
20 | 21 | rst = convert_nb_fname(input_nb_fname) + '\n' |
21 | assert rst.encode('utf8') == fcontents(output_rst_fname) | |
22 | assert stripeq(rst.encode('utf8'), fcontents(output_rst_fname)) | |
22 | 23 | # Convert ReST to output formats |
23 | 24 | py_file = to_py.from_rst(rst) |
24 | 25 | assert (py_file.encode('utf8') == |
71 | 71 | dict(code_type='clearnotebook', |
72 | 72 | filebase='contents', |
73 | 73 | base='/contents', |
74 | descr='Download this page as a Jupyter notebook \(no outputs\)'), | |
74 | descr=r'Download this page as a Jupyter notebook \(no outputs\)'), | |
75 | 75 | "Text then :clearnotebook:`.` then text.") |
76 | 76 | assert_rst_pxml( |
77 | 77 | dict(code_type='fullnotebook', |
78 | 78 | filebase='contents', |
79 | 79 | base='/contents', |
80 | 80 | descr=('Download this page as a Jupyter notebook ' |
81 | '\(with outputs\)')), | |
81 | r'\(with outputs\)')), | |
82 | 82 | "Text then :fullnotebook:`.` then text.") |
83 | 83 | assert_rst_pxml( |
84 | 84 | dict(code_type='pyfile', |
18 | 18 | .. nbplot:: |
19 | 19 | |
20 | 20 | >>> from time import sleep |
21 | >>> sleep(2) | |
21 | >>> sleep(5) | |
22 | 22 | """} |
23 | 23 | |
24 | 24 | |
47 | 47 | .. nbplot:: |
48 | 48 | |
49 | 49 | >>> from time import sleep |
50 | >>> sleep(2) | |
50 | >>> sleep(5) | |
51 | 51 | """} |
52 | 52 | |
53 | 53 | should_error = True |
69 | 69 | .. nbplot:: |
70 | 70 | |
71 | 71 | >>> from time import sleep |
72 | >>> sleep(2) | |
72 | >>> sleep(5) | |
73 | 73 | """} |
74 | 74 | |
75 | 75 | should_error = True |
102 | 102 | .. nbplot:: |
103 | 103 | |
104 | 104 | >>> from time import sleep |
105 | >>> sleep(2) | |
105 | >>> sleep(5) | |
106 | 106 | """} |
107 | 107 | |
108 | 108 | |
130 | 130 | .. nbplot:: |
131 | 131 | |
132 | 132 | >>> from time import sleep |
133 | >>> sleep(3) | |
133 | >>> sleep(5) | |
134 | 134 | """} |
0 | Metadata-Version: 1.1 | |
1 | Name: nb2plots | |
2 | Version: 0.6 | |
3 | Summary: Converting between ipython notebooks and sphinx docs | |
4 | Home-page: http://github.com/matthew-brett/nb2plots | |
5 | Author: Matthew Brett | |
6 | Author-email: matthew.brett@gmail.com | |
7 | License: BSD license | |
8 | Description-Content-Type: UNKNOWN | |
9 | Description: ################################################## | |
10 | nb2plots - converting between notebooks and sphinx | |
11 | ################################################## | |
12 | ||
13 | See the nb2plots documentation_ for more information. | |
14 | ||
15 | .. shared-text-body | |
16 | ||
17 | ************ | |
18 | What it does | |
19 | ************ | |
20 | ||
21 | ``nb2plots`` converts Jupyter_ notebooks to ReST_ files for Sphinx_, and back | |
22 | again. | |
23 | ||
24 | Nb2plots assumes that the ReST document will become the source for your Sphinx | |
25 | web pages, but also for future versions of the notebook. The notebook may | |
26 | serve as a draft for the polished ReST page, and an output format from the | |
27 | Sphinx build. Why? Read on. | |
28 | ||
29 | **************************************** | |
30 | Why convert Jupyter notebooks to Sphinx? | |
31 | **************************************** | |
32 | ||
33 | Jupyter notebooks are just what the doctor ordered when hacking up a quick | |
34 | tutorial, or preparing a software demo. The problems start when you want to | |
35 | do not-trivial edits to the notebooks, or you need features that notebooks | |
36 | don't have, such as flexible cross-referencing, extensible markup, and so on. | |
37 | Notebooks are also painful to use with version control. These times make you | |
38 | wish your notebook was in a standard extensible text format, such as ReST_. | |
39 | ||
40 | You could convert your notebook to ReST using the standard `nbconvert`_ | |
41 | command, but this gives rather ugly ReST, and you lose all the nice code | |
42 | execution and figure generation that the notebook is good at. | |
43 | ||
44 | Enter Nb2plots. The ``nb2plots`` command converts notebooks to specially | |
45 | formatted ReST pages. Use with:: | |
46 | ||
47 | nb2plots notebook.ipynb > with_plots.rst | |
48 | ||
49 | Nb2plots converts your notebook to not-very-ugly ReST, where the code cells | |
50 | become ``nbplot`` directives in your ReST file. | |
51 | ||
52 | Specifically, a notebook code cell like this:: | |
53 | ||
54 | a = 1 | |
55 | ||
56 | becomes (in the ReST document):: | |
57 | ||
58 | .. nbplot:: | |
59 | ||
60 | >>> a = 1 | |
61 | ||
62 | The ``nbplot`` directives run the contained code when Sphinx builds your ReST | |
63 | files, and embed the results of any plots that your code makes. Actually, | |
64 | ``nbplot`` is an extended and edited version of the `matplotlib plot | |
65 | directive`_. Building your pages runs all the code and regenerates the | |
66 | figures, and you get much of the reproducible goodness of the notebook | |
67 | experience. | |
68 | ||
69 | You can also run the standard Sphinx ``doctest`` extension over your pages to | |
70 | check the doctest output of the code cells. | |
71 | ||
72 | The ReST version of your notebook has many advantages - it is easier to edit | |
73 | in your favorite text editor, and you can extend and configure the execution | |
74 | and display of the code in several different ways. For example, you can hide | |
75 | some code cells (Nbplot directives) if the code is not interesting to your | |
76 | point, but you still want the generated figure. You can configure your Nbplot | |
77 | directives to run different code for different configurations. For these | |
78 | options, see |nbplot-documentation|. But - what do you lose, when going from | |
79 | a notebook to a Nb2plots ReST document? | |
80 | ||
81 | ********************************** | |
82 | I want notebooks and .py files too | |
83 | ********************************** | |
84 | ||
85 | You may also want a version of your document that your users can execute. | |
86 | Perhaps the page build is generating some tricky errors or warnings, and you | |
87 | want to experiment with the code in the page interactively. Perhaps your | |
88 | users are used to notebooks, and prefer the code in that format. | |
89 | ||
90 | Nb2plots also contains Sphinx extensions that cause the Sphinx build to | |
91 | generate Python code files and Jupyter notebooks from the ReST source. When | |
92 | you add the Nb2plots ReST directive ``code-links`` to your ReST page, it will | |
93 | cause the Sphinx build to create a Python code file and notebook versions of | |
94 | your page, and adds download links to these versions:: | |
95 | ||
96 | .. code-links:: | |
97 | ||
98 | See |code-links-documentation| for details. | |
99 | ||
100 | ************************** | |
101 | Show me what it looks like | |
102 | ************************** | |
103 | ||
104 | For a very simple example, see |worked-example|. | |
105 | ||
106 | For a moderate-sized teaching site that makes extensive use of Nb2plots, see | |
107 | https://matthew-brett.github.com/teaching. | |
108 | ||
109 | ************ | |
110 | Installation | |
111 | ************ | |
112 | ||
113 | :: | |
114 | ||
115 | pip install nb2plots | |
116 | ||
117 | You will need Pandoc_ installed and available as the ``pandoc`` command. | |
118 | ||
119 | To install Pandoc on OSX, we recommend homebrew_:: | |
120 | ||
121 | brew install pandoc | |
122 | ||
123 | ************* | |
124 | Configuration | |
125 | ************* | |
126 | ||
127 | Add the following to your Sphinx ``conf.py`` file:: | |
128 | ||
129 | extensions = ["nb2plots"] | |
130 | ||
131 | See |nbplot-documentation| for the various ``conf.py`` configuration settings. | |
132 | ||
133 | **** | |
134 | Code | |
135 | **** | |
136 | ||
137 | See https://github.com/matthew-brett/nb2plots | |
138 | ||
139 | Released under the BSD two-clause license - see the file ``LICENSE`` in the | |
140 | source distribution. | |
141 | ||
142 | `travis-ci <https://travis-ci.org/matthew-brett/nb2plots>`_ kindly tests the | |
143 | code automatically under Python versions 2.7, and 3.3 through 3.5. | |
144 | ||
145 | The latest released version is at https://pypi.python.org/pypi/nb2plots | |
146 | ||
147 | ***** | |
148 | Tests | |
149 | ***** | |
150 | ||
151 | * Install ``nb2plots`` | |
152 | * Install the pytest_ testing framework, the ``mock`` package, and the | |
153 | ``scripttester`` package. | |
154 | ||
155 | pip install pytest mock scripttester | |
156 | ||
157 | * Run the tests with:: | |
158 | ||
159 | py.test --pyargs nb2plots | |
160 | ||
161 | ******* | |
162 | Support | |
163 | ******* | |
164 | ||
165 | Please put up issues on the `nb2plots issue tracker`_. | |
166 | ||
167 | .. standalone-references | |
168 | ||
169 | .. |nbplot-documentation| replace:: `nbplots documentation`_ | |
170 | .. |worked-example| replace:: `worked example`_ | |
171 | .. |code-links-documentation| replace:: `code-links documentation`_ | |
172 | .. _nbplots documentation: | |
173 | https://matthew-brett.github.com/nb2plots/nbplots.html | |
174 | .. _code-links documentation: | |
175 | https://matthew-brett.github.com/nb2plots/code_links.html | |
176 | .. _worked example: | |
177 | https://matthew-brett.github.com/nb2plots/worked_example.html | |
178 | .. _documentation: https://matthew-brett.github.com/nb2plots | |
179 | .. _pandoc: http://pandoc.org | |
180 | .. _jupyter: jupyter.org | |
181 | .. _homebrew: brew.sh | |
182 | .. _sphinx: http://sphinx-doc.org | |
183 | .. _rest: http://docutils.sourceforge.net/rst.html | |
184 | .. _nb2plots issue tracker: https://github.com/matthew-brett/nb2plots/issues | |
185 | .. _matplotlib plot directive: http://matplotlib.org/sampledoc/extensions.html | |
186 | .. _nbconvert: http://nbconvert.readthedocs.org/en/latest/ | |
187 | .. _pytest: https://pytest.readthedocs.io | |
188 | .. _mock: https://github.com/testing-cabal/mock | |
189 | ||
190 | Platform: UNKNOWN | |
191 | Classifier: Development Status :: 2 - Pre-Alpha | |
192 | Classifier: Environment :: Console | |
193 | Classifier: Intended Audience :: Developers | |
194 | Classifier: Intended Audience :: Science/Research | |
195 | Classifier: License :: OSI Approved :: BSD License | |
196 | Classifier: Programming Language :: Python | |
197 | Classifier: Programming Language :: Python :: 3 | |
198 | Classifier: Topic :: Scientific/Engineering | |
199 | Classifier: Operating System :: Microsoft :: Windows | |
200 | Classifier: Operating System :: POSIX | |
201 | Classifier: Operating System :: Unix | |
202 | Classifier: Operating System :: MacOS |
0 | LICENSE | |
1 | MANIFEST.in | |
2 | README.rst | |
3 | doc-requirements.txt | |
4 | requirements.txt | |
5 | setup.cfg | |
6 | setup.py | |
7 | test-requirements.txt | |
8 | versioneer.py | |
9 | nb2plots/__init__.py | |
10 | nb2plots/_version.py | |
11 | nb2plots/codelinks.py | |
12 | nb2plots/commands.py | |
13 | nb2plots/converters.py | |
14 | nb2plots/doctree2md.py | |
15 | nb2plots/doctree2nb.py | |
16 | nb2plots/doctree2py.py | |
17 | nb2plots/from_notebook.py | |
18 | nb2plots/ipython_shim.py | |
19 | nb2plots/mpl_interactive.py | |
20 | nb2plots/nbplots.py | |
21 | nb2plots/runroles.py | |
22 | nb2plots/sphinx2foos.py | |
23 | nb2plots/strdiff.py | |
24 | nb2plots.egg-info/PKG-INFO | |
25 | nb2plots.egg-info/SOURCES.txt | |
26 | nb2plots.egg-info/dependency_links.txt | |
27 | nb2plots.egg-info/requires.txt | |
28 | nb2plots.egg-info/top_level.txt | |
29 | nb2plots/testing/__init__.py | |
30 | nb2plots/testing/convutils.py | |
31 | nb2plots/testing/mockapp.py | |
32 | nb2plots/testing/nbtesters.py | |
33 | nb2plots/tests/__init__.py | |
34 | nb2plots/tests/conftest.py | |
35 | nb2plots/tests/test_builders.py | |
36 | nb2plots/tests/test_codelinks.py | |
37 | nb2plots/tests/test_config.py | |
38 | nb2plots/tests/test_converters.py | |
39 | nb2plots/tests/test_doctree2md.py | |
40 | nb2plots/tests/test_doctree2nb.py | |
41 | nb2plots/tests/test_doctree2py.py | |
42 | nb2plots/tests/test_from_notebook.py | |
43 | nb2plots/tests/test_mpl_interactive.py | |
44 | nb2plots/tests/test_nbplots.py | |
45 | nb2plots/tests/test_proj1.py | |
46 | nb2plots/tests/test_regression.py | |
47 | nb2plots/tests/test_runroles.py | |
48 | nb2plots/tests/test_scripts.py | |
49 | nb2plots/tests/test_sphinx2md.py | |
50 | nb2plots/tests/test_strdiff.py | |
51 | nb2plots/tests/test_timeout.py | |
52 | nb2plots/tests/data/converted_example.rst | |
53 | nb2plots/tests/data/converted_plus_notebooks.ipynb | |
54 | nb2plots/tests/data/converted_plus_notebooks.py | |
55 | nb2plots/tests/data/example_notebook.ipynb | |
56 | nb2plots/tests/data/small.ipynb | |
57 | nb2plots/tests/data/small.rst | |
58 | nb2plots/tests/otherpages/some_plots.rst | |
59 | nb2plots/tests/proj1/.gitignore | |
60 | nb2plots/tests/proj1/README.md | |
61 | nb2plots/tests/proj1/a_page.rst | |
62 | nb2plots/tests/proj1/conf.py | |
63 | nb2plots/tests/proj1/index.rst | |
64 | nb2plots/tests/proj1/_static/.gitignore | |
65 | nb2plots/tests/proj1/_static/README.txt | |
66 | nb2plots/tests/rst_md_files/blockquotes.ipynb | |
67 | nb2plots/tests/rst_md_files/blockquotes.md | |
68 | nb2plots/tests/rst_md_files/blockquotes.py | |
69 | nb2plots/tests/rst_md_files/blockquotes.rst | |
70 | nb2plots/tests/rst_md_files/code.ipynb | |
71 | nb2plots/tests/rst_md_files/code.md | |
72 | nb2plots/tests/rst_md_files/code.py | |
73 | nb2plots/tests/rst_md_files/code.rst | |
74 | nb2plots/tests/rst_md_files/comment.ipynb | |
75 | nb2plots/tests/rst_md_files/comment.md | |
76 | nb2plots/tests/rst_md_files/comment.py | |
77 | nb2plots/tests/rst_md_files/comment.rst | |
78 | nb2plots/tests/rst_md_files/cross_references.ipynb | |
79 | nb2plots/tests/rst_md_files/cross_references.md | |
80 | nb2plots/tests/rst_md_files/cross_references.py | |
81 | nb2plots/tests/rst_md_files/cross_references.rst | |
82 | nb2plots/tests/rst_md_files/cross_references.smd | |
83 | nb2plots/tests/rst_md_files/definitions.ipynb | |
84 | nb2plots/tests/rst_md_files/definitions.md | |
85 | nb2plots/tests/rst_md_files/definitions.py | |
86 | nb2plots/tests/rst_md_files/definitions.rst | |
87 | nb2plots/tests/rst_md_files/docinfo.ipynb | |
88 | nb2plots/tests/rst_md_files/docinfo.md | |
89 | nb2plots/tests/rst_md_files/docinfo.py | |
90 | nb2plots/tests/rst_md_files/docinfo.rst | |
91 | nb2plots/tests/rst_md_files/docinfo.smd | |
92 | nb2plots/tests/rst_md_files/doctests.ipynb | |
93 | nb2plots/tests/rst_md_files/doctests.md | |
94 | nb2plots/tests/rst_md_files/doctests.py | |
95 | nb2plots/tests/rst_md_files/doctests.rst | |
96 | nb2plots/tests/rst_md_files/escaping.ipynb | |
97 | nb2plots/tests/rst_md_files/escaping.md | |
98 | nb2plots/tests/rst_md_files/escaping.py | |
99 | nb2plots/tests/rst_md_files/escaping.rst | |
100 | nb2plots/tests/rst_md_files/index.ipynb | |
101 | nb2plots/tests/rst_md_files/index.md | |
102 | nb2plots/tests/rst_md_files/index.py | |
103 | nb2plots/tests/rst_md_files/index.rst | |
104 | nb2plots/tests/rst_md_files/index.smd | |
105 | nb2plots/tests/rst_md_files/links.ipynb | |
106 | nb2plots/tests/rst_md_files/links.md | |
107 | nb2plots/tests/rst_md_files/links.py | |
108 | nb2plots/tests/rst_md_files/links.rst | |
109 | nb2plots/tests/rst_md_files/links.smd | |
110 | nb2plots/tests/rst_md_files/lists.ipynb | |
111 | nb2plots/tests/rst_md_files/lists.md | |
112 | nb2plots/tests/rst_md_files/lists.py | |
113 | nb2plots/tests/rst_md_files/lists.rst | |
114 | nb2plots/tests/rst_md_files/literals.ipynb | |
115 | nb2plots/tests/rst_md_files/literals.md | |
116 | nb2plots/tests/rst_md_files/literals.py | |
117 | nb2plots/tests/rst_md_files/literals.rst | |
118 | nb2plots/tests/rst_md_files/literals.smd | |
119 | nb2plots/tests/rst_md_files/math_markup.ipynb | |
120 | nb2plots/tests/rst_md_files/math_markup.md | |
121 | nb2plots/tests/rst_md_files/math_markup.py | |
122 | nb2plots/tests/rst_md_files/math_markup.rst | |
123 | nb2plots/tests/rst_md_files/math_markup.smd | |
124 | nb2plots/tests/rst_md_files/nbplot.ipynb | |
125 | nb2plots/tests/rst_md_files/nbplot.md | |
126 | nb2plots/tests/rst_md_files/nbplot.py | |
127 | nb2plots/tests/rst_md_files/nbplot.rst | |
128 | nb2plots/tests/rst_md_files/nbplot.smd | |
129 | nb2plots/tests/rst_md_files/only.ipynb | |
130 | nb2plots/tests/rst_md_files/only.md | |
131 | nb2plots/tests/rst_md_files/only.py | |
132 | nb2plots/tests/rst_md_files/only.rst | |
133 | nb2plots/tests/rst_md_files/only.smd | |
134 | nb2plots/tests/rst_md_files/sect_text.ipynb | |
135 | nb2plots/tests/rst_md_files/sect_text.md | |
136 | nb2plots/tests/rst_md_files/sect_text.py | |
137 | nb2plots/tests/rst_md_files/sect_text.rst | |
138 | nb2plots/tests/rst_md_files/sect_text.smd | |
139 | nb2plots/tests/rst_md_files/sections.ipynb | |
140 | nb2plots/tests/rst_md_files/sections.md | |
141 | nb2plots/tests/rst_md_files/sections.py | |
142 | nb2plots/tests/rst_md_files/sections.rst | |
143 | nb2plots/tests/rst_md_files/sections.smd | |
144 | nb2plots/tests/rst_md_files/substitution.ipynb | |
145 | nb2plots/tests/rst_md_files/substitution.md | |
146 | nb2plots/tests/rst_md_files/substitution.py | |
147 | nb2plots/tests/rst_md_files/substitution.rst | |
148 | nb2plots/tests/rst_md_files/subtitle.ipynb | |
149 | nb2plots/tests/rst_md_files/subtitle.md | |
150 | nb2plots/tests/rst_md_files/subtitle.py | |
151 | nb2plots/tests/rst_md_files/subtitle.rst | |
152 | nb2plots/tests/rst_md_files/subtitle.smd | |
153 | scripts/nb2plots | |
154 | scripts/rst2md | |
155 | scripts/sphinx2md | |
156 | scripts/sphinx2nb | |
157 | scripts/sphinx2pxml | |
158 | scripts/sphinx2py⏎ |
0 | ipython[notebook]>=3.0 | |
1 | sphinx>=1.4 | |
2 | numpy>=1.6.1 | |
3 | matplotlib>=1.1.0 | |
4 | six>=1.7.0 | |
5 | sphinxtesters | |
6 | texext | |
7 | ||
8 | [test] | |
9 | pytest | |
10 | mock | |
11 | scripttester |
0 | 0 | # Minimum requirements |
1 | 1 | # |
2 | ipython[notebook]>=3.0 | |
2 | ipython[notebook]>=4.0 | |
3 | 3 | sphinx>=1.4 |
4 | numpy>=1.6.1 | |
5 | matplotlib>=1.1.0 | |
6 | six>=1.7.0 | |
7 | sphinxtesters | |
4 | numpy>=1.7.1 | |
5 | matplotlib>=2.0 | |
6 | six>=1.10 | |
7 | sphinxtesters>=0.2 | |
8 | 8 | texext |
0 | ||
1 | # See the docstring in versioneer.py for instructions. Note that you must | |
2 | # re-run 'versioneer.py setup' after changing this section, and commit the | |
3 | # resulting files. | |
4 | ||
0 | 5 | [versioneer] |
1 | vcs = git | |
6 | VCS = git | |
2 | 7 | style = pep440 |
3 | 8 | versionfile_source = nb2plots/_version.py |
4 | 9 | versionfile_build = nb2plots/_version.py |
5 | 10 | tag_prefix = "" |
6 | 11 | parentdir_prefix = nb2plots- |
7 | ||
8 | [egg_info] | |
9 | tag_build = | |
10 | tag_date = 0 | |
11 |
0 | # Move to futzing directory, get started | |
1 | # Source this, otherwise it won't work | |
2 | FUTZ_DIR="$(dirname "${BASH_SOURCE[0]}")/../nb2plots/tests/futz" | |
3 | cd $FUTZ_DIR && make init | |
4 | echo "Now futz with files in proj1 directory and 'make'" |
0 | [tox] | |
1 | # Don't forget to adjust CI when changing this | |
2 | envlist = py{37,38,39,310} | |
3 | ||
4 | [testenv] | |
5 | commands = pytest {posargs} nb2plots/tests | |
6 | extras = test | |
7 | ||
8 | [testenv:py310] | |
9 | commands = | |
10 | coverage run -m pytest {posargs} nb2plots/tests | |
11 | coverage report | |
12 | coverage html | |
13 | deps = coverage |
0 | 0 | |
1 | # Version: 0.17 | |
1 | # Version: 0.22 | |
2 | 2 | |
3 | 3 | """The Versioneer - like a rocketeer, but for versions. |
4 | 4 | |
6 | 6 | ============== |
7 | 7 | |
8 | 8 | * like a rocketeer, but for versions! |
9 | * https://github.com/warner/python-versioneer | |
9 | * https://github.com/python-versioneer/python-versioneer | |
10 | 10 | * Brian Warner |
11 | 11 | * License: Public Domain |
12 | * Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, and pypy | |
13 | * [![Latest Version] | |
14 | (https://pypip.in/version/versioneer/badge.svg?style=flat) | |
15 | ](https://pypi.python.org/pypi/versioneer/) | |
16 | * [![Build Status] | |
17 | (https://travis-ci.org/warner/python-versioneer.png?branch=master) | |
18 | ](https://travis-ci.org/warner/python-versioneer) | |
19 | ||
20 | This is a tool for managing a recorded version number in distutils-based | |
12 | * Compatible with: Python 3.6, 3.7, 3.8, 3.9, 3.10 and pypy3 | |
13 | * [![Latest Version][pypi-image]][pypi-url] | |
14 | * [![Build Status][travis-image]][travis-url] | |
15 | ||
16 | This is a tool for managing a recorded version number in distutils/setuptools-based | |
21 | 17 | python projects. The goal is to remove the tedious and error-prone "update |
22 | 18 | the embedded version string" step from your release process. Making a new |
23 | 19 | release should be as easy as recording a new tag in your version-control |
26 | 22 | |
27 | 23 | ## Quick Install |
28 | 24 | |
29 | * `pip install versioneer` to somewhere to your $PATH | |
30 | * add a `[versioneer]` section to your setup.cfg (see below) | |
25 | * `pip install versioneer` to somewhere in your $PATH | |
26 | * add a `[versioneer]` section to your setup.cfg (see [Install](INSTALL.md)) | |
31 | 27 | * run `versioneer install` in your source tree, commit the results |
28 | * Verify version information with `python setup.py version` | |
32 | 29 | |
33 | 30 | ## Version Identifiers |
34 | 31 | |
60 | 57 | for example `git describe --tags --dirty --always` reports things like |
61 | 58 | "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the |
62 | 59 | 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has |
63 | uncommitted changes. | |
60 | uncommitted changes). | |
64 | 61 | |
65 | 62 | The version identifier is used for multiple purposes: |
66 | 63 | |
150 | 147 | software (exactly equal to a known tag), the identifier will only contain the |
151 | 148 | stripped tag, e.g. "0.11". |
152 | 149 | |
153 | Other styles are available. See details.md in the Versioneer source tree for | |
154 | descriptions. | |
150 | Other styles are available. See [details.md](details.md) in the Versioneer | |
151 | source tree for descriptions. | |
155 | 152 | |
156 | 153 | ## Debugging |
157 | 154 | |
165 | 162 | |
166 | 163 | Some situations are known to cause problems for Versioneer. This details the |
167 | 164 | most significant ones. More can be found on Github |
168 | [issues page](https://github.com/warner/python-versioneer/issues). | |
165 | [issues page](https://github.com/python-versioneer/python-versioneer/issues). | |
169 | 166 | |
170 | 167 | ### Subprojects |
171 | 168 | |
179 | 176 | `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI |
180 | 177 | distributions (and upload multiple independently-installable tarballs). |
181 | 178 | * Source trees whose main purpose is to contain a C library, but which also |
182 | provide bindings to Python (and perhaps other langauges) in subdirectories. | |
179 | provide bindings to Python (and perhaps other languages) in subdirectories. | |
183 | 180 | |
184 | 181 | Versioneer will look for `.git` in parent directories, and most operations |
185 | 182 | should get the right version string. However `pip` and `setuptools` have bugs |
193 | 190 | Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in |
194 | 191 | some later version. |
195 | 192 | |
196 | [Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking | |
193 | [Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking | |
197 | 194 | this issue. The discussion in |
198 | [PR #61](https://github.com/warner/python-versioneer/pull/61) describes the | |
195 | [PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the | |
199 | 196 | issue from the Versioneer side in more detail. |
200 | 197 | [pip PR#3176](https://github.com/pypa/pip/pull/3176) and |
201 | 198 | [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve |
223 | 220 | cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into |
224 | 221 | a different virtualenv), so this can be surprising. |
225 | 222 | |
226 | [Bug #83](https://github.com/warner/python-versioneer/issues/83) describes | |
223 | [Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes | |
227 | 224 | this one, but upgrading to a newer version of setuptools should probably |
228 | 225 | resolve it. |
229 | ||
230 | ### Unicode version strings | |
231 | ||
232 | While Versioneer works (and is continually tested) with both Python 2 and | |
233 | Python 3, it is not entirely consistent with bytes-vs-unicode distinctions. | |
234 | Newer releases probably generate unicode version strings on py2. It's not | |
235 | clear that this is wrong, but it may be surprising for applications when then | |
236 | write these strings to a network connection or include them in bytes-oriented | |
237 | APIs like cryptographic checksums. | |
238 | ||
239 | [Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates | |
240 | this question. | |
241 | 226 | |
242 | 227 | |
243 | 228 | ## Updating Versioneer |
264 | 249 | direction and include code from all supported VCS systems, reducing the |
265 | 250 | number of intermediate scripts. |
266 | 251 | |
252 | ## Similar projects | |
253 | ||
254 | * [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time | |
255 | dependency | |
256 | * [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of | |
257 | versioneer | |
258 | * [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools | |
259 | plugin | |
267 | 260 | |
268 | 261 | ## License |
269 | 262 | |
273 | 266 | Dedication" license (CC0-1.0), as described in |
274 | 267 | https://creativecommons.org/publicdomain/zero/1.0/ . |
275 | 268 | |
269 | [pypi-image]: https://img.shields.io/pypi/v/versioneer.svg | |
270 | [pypi-url]: https://pypi.python.org/pypi/versioneer/ | |
271 | [travis-image]: | |
272 | https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg | |
273 | [travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer | |
274 | ||
276 | 275 | """ |
277 | ||
278 | from __future__ import print_function | |
279 | try: | |
280 | import configparser | |
281 | except ImportError: | |
282 | import ConfigParser as configparser | |
276 | # pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring | |
277 | # pylint:disable=missing-class-docstring,too-many-branches,too-many-statements | |
278 | # pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error | |
279 | # pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with | |
280 | # pylint:disable=attribute-defined-outside-init,too-many-arguments | |
281 | ||
282 | import configparser | |
283 | 283 | import errno |
284 | 284 | import json |
285 | 285 | import os |
286 | 286 | import re |
287 | 287 | import subprocess |
288 | 288 | import sys |
289 | from typing import Callable, Dict | |
290 | import functools | |
289 | 291 | |
290 | 292 | |
291 | 293 | class VersioneerConfig: |
320 | 322 | # module-import table will cache the first one. So we can't use |
321 | 323 | # os.path.dirname(__file__), as that will find whichever |
322 | 324 | # versioneer.py was first imported, even in later projects. |
323 | me = os.path.realpath(os.path.abspath(__file__)) | |
324 | me_dir = os.path.normcase(os.path.splitext(me)[0]) | |
325 | my_path = os.path.realpath(os.path.abspath(__file__)) | |
326 | me_dir = os.path.normcase(os.path.splitext(my_path)[0]) | |
325 | 327 | vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) |
326 | 328 | if me_dir != vsr_dir: |
327 | 329 | print("Warning: build in %s is using versioneer.py from %s" |
328 | % (os.path.dirname(me), versioneer_py)) | |
330 | % (os.path.dirname(my_path), versioneer_py)) | |
329 | 331 | except NameError: |
330 | 332 | pass |
331 | 333 | return root |
333 | 335 | |
334 | 336 | def get_config_from_root(root): |
335 | 337 | """Read the project setup.cfg file to determine Versioneer config.""" |
336 | # This might raise EnvironmentError (if setup.cfg is missing), or | |
338 | # This might raise OSError (if setup.cfg is missing), or | |
337 | 339 | # configparser.NoSectionError (if it lacks a [versioneer] section), or |
338 | 340 | # configparser.NoOptionError (if it lacks "VCS="). See the docstring at |
339 | 341 | # the top of versioneer.py for instructions on writing your setup.cfg . |
340 | 342 | setup_cfg = os.path.join(root, "setup.cfg") |
341 | parser = configparser.SafeConfigParser() | |
342 | with open(setup_cfg, "r") as f: | |
343 | parser.readfp(f) | |
343 | parser = configparser.ConfigParser() | |
344 | with open(setup_cfg, "r") as cfg_file: | |
345 | parser.read_file(cfg_file) | |
344 | 346 | VCS = parser.get("versioneer", "VCS") # mandatory |
345 | 347 | |
346 | def get(parser, name): | |
347 | if parser.has_option("versioneer", name): | |
348 | return parser.get("versioneer", name) | |
349 | return None | |
348 | # Dict-like interface for non-mandatory entries | |
349 | section = parser["versioneer"] | |
350 | ||
350 | 351 | cfg = VersioneerConfig() |
351 | 352 | cfg.VCS = VCS |
352 | cfg.style = get(parser, "style") or "" | |
353 | cfg.versionfile_source = get(parser, "versionfile_source") | |
354 | cfg.versionfile_build = get(parser, "versionfile_build") | |
355 | cfg.tag_prefix = get(parser, "tag_prefix") | |
353 | cfg.style = section.get("style", "") | |
354 | cfg.versionfile_source = section.get("versionfile_source") | |
355 | cfg.versionfile_build = section.get("versionfile_build") | |
356 | cfg.tag_prefix = section.get("tag_prefix") | |
356 | 357 | if cfg.tag_prefix in ("''", '""'): |
357 | 358 | cfg.tag_prefix = "" |
358 | cfg.parentdir_prefix = get(parser, "parentdir_prefix") | |
359 | cfg.verbose = get(parser, "verbose") | |
359 | cfg.parentdir_prefix = section.get("parentdir_prefix") | |
360 | cfg.verbose = section.get("verbose") | |
360 | 361 | return cfg |
361 | 362 | |
362 | 363 | |
363 | 364 | class NotThisMethod(Exception): |
364 | 365 | """Exception raised if a method is not valid for the current scenario.""" |
365 | 366 | |
367 | ||
366 | 368 | # these dictionaries contain VCS-specific tools |
367 | LONG_VERSION_PY = {} | |
368 | HANDLERS = {} | |
369 | LONG_VERSION_PY: Dict[str, str] = {} | |
370 | HANDLERS: Dict[str, Dict[str, Callable]] = {} | |
369 | 371 | |
370 | 372 | |
371 | 373 | def register_vcs_handler(vcs, method): # decorator |
372 | """Decorator to mark a method as the handler for a particular VCS.""" | |
374 | """Create decorator to mark a method as the handler of a VCS.""" | |
373 | 375 | def decorate(f): |
374 | 376 | """Store f in HANDLERS[vcs][method].""" |
375 | if vcs not in HANDLERS: | |
376 | HANDLERS[vcs] = {} | |
377 | HANDLERS[vcs][method] = f | |
377 | HANDLERS.setdefault(vcs, {})[method] = f | |
378 | 378 | return f |
379 | 379 | return decorate |
380 | 380 | |
383 | 383 | env=None): |
384 | 384 | """Call the given command(s).""" |
385 | 385 | assert isinstance(commands, list) |
386 | p = None | |
387 | for c in commands: | |
386 | process = None | |
387 | ||
388 | popen_kwargs = {} | |
389 | if sys.platform == "win32": | |
390 | # This hides the console window if pythonw.exe is used | |
391 | startupinfo = subprocess.STARTUPINFO() | |
392 | startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW | |
393 | popen_kwargs["startupinfo"] = startupinfo | |
394 | ||
395 | for command in commands: | |
388 | 396 | try: |
389 | dispcmd = str([c] + args) | |
397 | dispcmd = str([command] + args) | |
390 | 398 | # remember shell=False, so use git.cmd on windows, not just git |
391 | p = subprocess.Popen([c] + args, cwd=cwd, env=env, | |
392 | stdout=subprocess.PIPE, | |
393 | stderr=(subprocess.PIPE if hide_stderr | |
394 | else None)) | |
399 | process = subprocess.Popen([command] + args, cwd=cwd, env=env, | |
400 | stdout=subprocess.PIPE, | |
401 | stderr=(subprocess.PIPE if hide_stderr | |
402 | else None), **popen_kwargs) | |
395 | 403 | break |
396 | except EnvironmentError: | |
404 | except OSError: | |
397 | 405 | e = sys.exc_info()[1] |
398 | 406 | if e.errno == errno.ENOENT: |
399 | 407 | continue |
405 | 413 | if verbose: |
406 | 414 | print("unable to find command, tried %s" % (commands,)) |
407 | 415 | return None, None |
408 | stdout = p.communicate()[0].strip() | |
409 | if sys.version_info[0] >= 3: | |
410 | stdout = stdout.decode() | |
411 | if p.returncode != 0: | |
416 | stdout = process.communicate()[0].strip().decode() | |
417 | if process.returncode != 0: | |
412 | 418 | if verbose: |
413 | 419 | print("unable to run %s (error)" % dispcmd) |
414 | 420 | print("stdout was %s" % stdout) |
415 | return None, p.returncode | |
416 | return stdout, p.returncode | |
417 | LONG_VERSION_PY['git'] = ''' | |
421 | return None, process.returncode | |
422 | return stdout, process.returncode | |
423 | ||
424 | ||
425 | LONG_VERSION_PY['git'] = r''' | |
418 | 426 | # This file helps to compute a version number in source trees obtained from |
419 | 427 | # git-archive tarball (such as those provided by githubs download-from-tag |
420 | 428 | # feature). Distribution tarballs (built by setup.py sdist) and build |
422 | 430 | # that just contains the computed version number. |
423 | 431 | |
424 | 432 | # This file is released into the public domain. Generated by |
425 | # versioneer-0.17 (https://github.com/warner/python-versioneer) | |
433 | # versioneer-0.22 (https://github.com/python-versioneer/python-versioneer) | |
426 | 434 | |
427 | 435 | """Git implementation of _version.py.""" |
428 | 436 | |
431 | 439 | import re |
432 | 440 | import subprocess |
433 | 441 | import sys |
442 | from typing import Callable, Dict | |
443 | import functools | |
434 | 444 | |
435 | 445 | |
436 | 446 | def get_keywords(): |
468 | 478 | """Exception raised if a method is not valid for the current scenario.""" |
469 | 479 | |
470 | 480 | |
471 | LONG_VERSION_PY = {} | |
472 | HANDLERS = {} | |
481 | LONG_VERSION_PY: Dict[str, str] = {} | |
482 | HANDLERS: Dict[str, Dict[str, Callable]] = {} | |
473 | 483 | |
474 | 484 | |
475 | 485 | def register_vcs_handler(vcs, method): # decorator |
476 | """Decorator to mark a method as the handler for a particular VCS.""" | |
486 | """Create decorator to mark a method as the handler of a VCS.""" | |
477 | 487 | def decorate(f): |
478 | 488 | """Store f in HANDLERS[vcs][method].""" |
479 | 489 | if vcs not in HANDLERS: |
487 | 497 | env=None): |
488 | 498 | """Call the given command(s).""" |
489 | 499 | assert isinstance(commands, list) |
490 | p = None | |
491 | for c in commands: | |
500 | process = None | |
501 | ||
502 | popen_kwargs = {} | |
503 | if sys.platform == "win32": | |
504 | # This hides the console window if pythonw.exe is used | |
505 | startupinfo = subprocess.STARTUPINFO() | |
506 | startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW | |
507 | popen_kwargs["startupinfo"] = startupinfo | |
508 | ||
509 | for command in commands: | |
492 | 510 | try: |
493 | dispcmd = str([c] + args) | |
511 | dispcmd = str([command] + args) | |
494 | 512 | # remember shell=False, so use git.cmd on windows, not just git |
495 | p = subprocess.Popen([c] + args, cwd=cwd, env=env, | |
496 | stdout=subprocess.PIPE, | |
497 | stderr=(subprocess.PIPE if hide_stderr | |
498 | else None)) | |
513 | process = subprocess.Popen([command] + args, cwd=cwd, env=env, | |
514 | stdout=subprocess.PIPE, | |
515 | stderr=(subprocess.PIPE if hide_stderr | |
516 | else None), **popen_kwargs) | |
499 | 517 | break |
500 | except EnvironmentError: | |
518 | except OSError: | |
501 | 519 | e = sys.exc_info()[1] |
502 | 520 | if e.errno == errno.ENOENT: |
503 | 521 | continue |
509 | 527 | if verbose: |
510 | 528 | print("unable to find command, tried %%s" %% (commands,)) |
511 | 529 | return None, None |
512 | stdout = p.communicate()[0].strip() | |
513 | if sys.version_info[0] >= 3: | |
514 | stdout = stdout.decode() | |
515 | if p.returncode != 0: | |
530 | stdout = process.communicate()[0].strip().decode() | |
531 | if process.returncode != 0: | |
516 | 532 | if verbose: |
517 | 533 | print("unable to run %%s (error)" %% dispcmd) |
518 | 534 | print("stdout was %%s" %% stdout) |
519 | return None, p.returncode | |
520 | return stdout, p.returncode | |
535 | return None, process.returncode | |
536 | return stdout, process.returncode | |
521 | 537 | |
522 | 538 | |
523 | 539 | def versions_from_parentdir(parentdir_prefix, root, verbose): |
529 | 545 | """ |
530 | 546 | rootdirs = [] |
531 | 547 | |
532 | for i in range(3): | |
548 | for _ in range(3): | |
533 | 549 | dirname = os.path.basename(root) |
534 | 550 | if dirname.startswith(parentdir_prefix): |
535 | 551 | return {"version": dirname[len(parentdir_prefix):], |
536 | 552 | "full-revisionid": None, |
537 | 553 | "dirty": False, "error": None, "date": None} |
538 | else: | |
539 | rootdirs.append(root) | |
540 | root = os.path.dirname(root) # up a level | |
554 | rootdirs.append(root) | |
555 | root = os.path.dirname(root) # up a level | |
541 | 556 | |
542 | 557 | if verbose: |
543 | 558 | print("Tried directories %%s but none started with prefix %%s" %% |
554 | 569 | # _version.py. |
555 | 570 | keywords = {} |
556 | 571 | try: |
557 | f = open(versionfile_abs, "r") | |
558 | for line in f.readlines(): | |
559 | if line.strip().startswith("git_refnames ="): | |
560 | mo = re.search(r'=\s*"(.*)"', line) | |
561 | if mo: | |
562 | keywords["refnames"] = mo.group(1) | |
563 | if line.strip().startswith("git_full ="): | |
564 | mo = re.search(r'=\s*"(.*)"', line) | |
565 | if mo: | |
566 | keywords["full"] = mo.group(1) | |
567 | if line.strip().startswith("git_date ="): | |
568 | mo = re.search(r'=\s*"(.*)"', line) | |
569 | if mo: | |
570 | keywords["date"] = mo.group(1) | |
571 | f.close() | |
572 | except EnvironmentError: | |
572 | with open(versionfile_abs, "r") as fobj: | |
573 | for line in fobj: | |
574 | if line.strip().startswith("git_refnames ="): | |
575 | mo = re.search(r'=\s*"(.*)"', line) | |
576 | if mo: | |
577 | keywords["refnames"] = mo.group(1) | |
578 | if line.strip().startswith("git_full ="): | |
579 | mo = re.search(r'=\s*"(.*)"', line) | |
580 | if mo: | |
581 | keywords["full"] = mo.group(1) | |
582 | if line.strip().startswith("git_date ="): | |
583 | mo = re.search(r'=\s*"(.*)"', line) | |
584 | if mo: | |
585 | keywords["date"] = mo.group(1) | |
586 | except OSError: | |
573 | 587 | pass |
574 | 588 | return keywords |
575 | 589 | |
577 | 591 | @register_vcs_handler("git", "keywords") |
578 | 592 | def git_versions_from_keywords(keywords, tag_prefix, verbose): |
579 | 593 | """Get version information from git keywords.""" |
580 | if not keywords: | |
581 | raise NotThisMethod("no keywords at all, weird") | |
594 | if "refnames" not in keywords: | |
595 | raise NotThisMethod("Short version file found") | |
582 | 596 | date = keywords.get("date") |
583 | 597 | if date is not None: |
598 | # Use only the last line. Previous lines may contain GPG signature | |
599 | # information. | |
600 | date = date.splitlines()[-1] | |
601 | ||
584 | 602 | # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant |
585 | 603 | # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 |
586 | 604 | # -like" string, which we must then edit to make compliant), because |
593 | 611 | if verbose: |
594 | 612 | print("keywords are unexpanded, not using") |
595 | 613 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") |
596 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) | |
614 | refs = {r.strip() for r in refnames.strip("()").split(",")} | |
597 | 615 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of |
598 | 616 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. |
599 | 617 | TAG = "tag: " |
600 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) | |
618 | tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} | |
601 | 619 | if not tags: |
602 | 620 | # Either we're using git < 1.8.3, or there really are no tags. We use |
603 | 621 | # a heuristic: assume all version tags have a digit. The old git %%d |
606 | 624 | # between branches and tags. By ignoring refnames without digits, we |
607 | 625 | # filter out many common branch names like "release" and |
608 | 626 | # "stabilization", as well as "HEAD" and "master". |
609 | tags = set([r for r in refs if re.search(r'\d', r)]) | |
627 | tags = {r for r in refs if re.search(r'\d', r)} | |
610 | 628 | if verbose: |
611 | 629 | print("discarding '%%s', no digits" %% ",".join(refs - tags)) |
612 | 630 | if verbose: |
615 | 633 | # sorting will prefer e.g. "2.0" over "2.0rc1" |
616 | 634 | if ref.startswith(tag_prefix): |
617 | 635 | r = ref[len(tag_prefix):] |
636 | # Filter out refs that exactly match prefix or that don't start | |
637 | # with a number once the prefix is stripped (mostly a concern | |
638 | # when prefix is '') | |
639 | if not re.match(r'\d', r): | |
640 | continue | |
618 | 641 | if verbose: |
619 | 642 | print("picking %%s" %% r) |
620 | 643 | return {"version": r, |
630 | 653 | |
631 | 654 | |
632 | 655 | @register_vcs_handler("git", "pieces_from_vcs") |
633 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): | |
656 | def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): | |
634 | 657 | """Get version from 'git describe' in the root of the source tree. |
635 | 658 | |
636 | 659 | This only gets called if the git-archive 'subst' keywords were *not* |
641 | 664 | if sys.platform == "win32": |
642 | 665 | GITS = ["git.cmd", "git.exe"] |
643 | 666 | |
644 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, | |
645 | hide_stderr=True) | |
667 | # GIT_DIR can interfere with correct operation of Versioneer. | |
668 | # It may be intended to be passed to the Versioneer-versioned project, | |
669 | # but that should not change where we get our version from. | |
670 | env = os.environ.copy() | |
671 | env.pop("GIT_DIR", None) | |
672 | runner = functools.partial(runner, env=env) | |
673 | ||
674 | _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, | |
675 | hide_stderr=True) | |
646 | 676 | if rc != 0: |
647 | 677 | if verbose: |
648 | 678 | print("Directory %%s not under git control" %% root) |
649 | 679 | raise NotThisMethod("'git rev-parse --git-dir' returned error") |
650 | 680 | |
681 | MATCH_ARGS = ["--match", "%%s*" %% tag_prefix] if tag_prefix else [] | |
682 | ||
651 | 683 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] |
652 | 684 | # if there isn't one, this yields HEX[-dirty] (no NUM) |
653 | describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", | |
654 | "--always", "--long", | |
655 | "--match", "%%s*" %% tag_prefix], | |
656 | cwd=root) | |
685 | describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", | |
686 | "--always", "--long", *MATCH_ARGS], | |
687 | cwd=root) | |
657 | 688 | # --long was added in git-1.5.5 |
658 | 689 | if describe_out is None: |
659 | 690 | raise NotThisMethod("'git describe' failed") |
660 | 691 | describe_out = describe_out.strip() |
661 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) | |
692 | full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) | |
662 | 693 | if full_out is None: |
663 | 694 | raise NotThisMethod("'git rev-parse' failed") |
664 | 695 | full_out = full_out.strip() |
667 | 698 | pieces["long"] = full_out |
668 | 699 | pieces["short"] = full_out[:7] # maybe improved later |
669 | 700 | pieces["error"] = None |
701 | ||
702 | branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], | |
703 | cwd=root) | |
704 | # --abbrev-ref was added in git-1.6.3 | |
705 | if rc != 0 or branch_name is None: | |
706 | raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") | |
707 | branch_name = branch_name.strip() | |
708 | ||
709 | if branch_name == "HEAD": | |
710 | # If we aren't exactly on a branch, pick a branch which represents | |
711 | # the current commit. If all else fails, we are on a branchless | |
712 | # commit. | |
713 | branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) | |
714 | # --contains was added in git-1.5.4 | |
715 | if rc != 0 or branches is None: | |
716 | raise NotThisMethod("'git branch --contains' returned error") | |
717 | branches = branches.split("\n") | |
718 | ||
719 | # Remove the first line if we're running detached | |
720 | if "(" in branches[0]: | |
721 | branches.pop(0) | |
722 | ||
723 | # Strip off the leading "* " from the list of branches. | |
724 | branches = [branch[2:] for branch in branches] | |
725 | if "master" in branches: | |
726 | branch_name = "master" | |
727 | elif not branches: | |
728 | branch_name = None | |
729 | else: | |
730 | # Pick the first branch that is returned. Good or bad. | |
731 | branch_name = branches[0] | |
732 | ||
733 | pieces["branch"] = branch_name | |
670 | 734 | |
671 | 735 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] |
672 | 736 | # TAG might have hyphens. |
684 | 748 | # TAG-NUM-gHEX |
685 | 749 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) |
686 | 750 | if not mo: |
687 | # unparseable. Maybe git-describe is misbehaving? | |
751 | # unparsable. Maybe git-describe is misbehaving? | |
688 | 752 | pieces["error"] = ("unable to parse git-describe output: '%%s'" |
689 | 753 | %% describe_out) |
690 | 754 | return pieces |
709 | 773 | else: |
710 | 774 | # HEX: no tags |
711 | 775 | pieces["closest-tag"] = None |
712 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], | |
713 | cwd=root) | |
776 | count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) | |
714 | 777 | pieces["distance"] = int(count_out) # total number of commits |
715 | 778 | |
716 | 779 | # commit date: see ISO-8601 comment in git_versions_from_keywords() |
717 | date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], | |
718 | cwd=root)[0].strip() | |
780 | date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() | |
781 | # Use only the last line. Previous lines may contain GPG signature | |
782 | # information. | |
783 | date = date.splitlines()[-1] | |
719 | 784 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) |
720 | 785 | |
721 | 786 | return pieces |
753 | 818 | return rendered |
754 | 819 | |
755 | 820 | |
756 | def render_pep440_pre(pieces): | |
757 | """TAG[.post.devDISTANCE] -- No -dirty. | |
821 | def render_pep440_branch(pieces): | |
822 | """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . | |
823 | ||
824 | The ".dev0" means not master branch. Note that .dev0 sorts backwards | |
825 | (a feature branch will appear "older" than the master branch). | |
758 | 826 | |
759 | 827 | Exceptions: |
760 | 1: no tags. 0.post.devDISTANCE | |
828 | 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] | |
761 | 829 | """ |
762 | 830 | if pieces["closest-tag"]: |
763 | 831 | rendered = pieces["closest-tag"] |
832 | if pieces["distance"] or pieces["dirty"]: | |
833 | if pieces["branch"] != "master": | |
834 | rendered += ".dev0" | |
835 | rendered += plus_or_dot(pieces) | |
836 | rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) | |
837 | if pieces["dirty"]: | |
838 | rendered += ".dirty" | |
839 | else: | |
840 | # exception #1 | |
841 | rendered = "0" | |
842 | if pieces["branch"] != "master": | |
843 | rendered += ".dev0" | |
844 | rendered += "+untagged.%%d.g%%s" %% (pieces["distance"], | |
845 | pieces["short"]) | |
846 | if pieces["dirty"]: | |
847 | rendered += ".dirty" | |
848 | return rendered | |
849 | ||
850 | ||
851 | def pep440_split_post(ver): | |
852 | """Split pep440 version string at the post-release segment. | |
853 | ||
854 | Returns the release segments before the post-release and the | |
855 | post-release version number (or -1 if no post-release segment is present). | |
856 | """ | |
857 | vc = str.split(ver, ".post") | |
858 | return vc[0], int(vc[1] or 0) if len(vc) == 2 else None | |
859 | ||
860 | ||
861 | def render_pep440_pre(pieces): | |
862 | """TAG[.postN.devDISTANCE] -- No -dirty. | |
863 | ||
864 | Exceptions: | |
865 | 1: no tags. 0.post0.devDISTANCE | |
866 | """ | |
867 | if pieces["closest-tag"]: | |
764 | 868 | if pieces["distance"]: |
765 | rendered += ".post.dev%%d" %% pieces["distance"] | |
869 | # update the post release segment | |
870 | tag_version, post_version = pep440_split_post(pieces["closest-tag"]) | |
871 | rendered = tag_version | |
872 | if post_version is not None: | |
873 | rendered += ".post%%d.dev%%d" %% (post_version+1, pieces["distance"]) | |
874 | else: | |
875 | rendered += ".post0.dev%%d" %% (pieces["distance"]) | |
876 | else: | |
877 | # no commits, use the tag as the version | |
878 | rendered = pieces["closest-tag"] | |
766 | 879 | else: |
767 | 880 | # exception #1 |
768 | rendered = "0.post.dev%%d" %% pieces["distance"] | |
881 | rendered = "0.post0.dev%%d" %% pieces["distance"] | |
769 | 882 | return rendered |
770 | 883 | |
771 | 884 | |
796 | 909 | return rendered |
797 | 910 | |
798 | 911 | |
912 | def render_pep440_post_branch(pieces): | |
913 | """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . | |
914 | ||
915 | The ".dev0" means not master branch. | |
916 | ||
917 | Exceptions: | |
918 | 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] | |
919 | """ | |
920 | if pieces["closest-tag"]: | |
921 | rendered = pieces["closest-tag"] | |
922 | if pieces["distance"] or pieces["dirty"]: | |
923 | rendered += ".post%%d" %% pieces["distance"] | |
924 | if pieces["branch"] != "master": | |
925 | rendered += ".dev0" | |
926 | rendered += plus_or_dot(pieces) | |
927 | rendered += "g%%s" %% pieces["short"] | |
928 | if pieces["dirty"]: | |
929 | rendered += ".dirty" | |
930 | else: | |
931 | # exception #1 | |
932 | rendered = "0.post%%d" %% pieces["distance"] | |
933 | if pieces["branch"] != "master": | |
934 | rendered += ".dev0" | |
935 | rendered += "+g%%s" %% pieces["short"] | |
936 | if pieces["dirty"]: | |
937 | rendered += ".dirty" | |
938 | return rendered | |
939 | ||
940 | ||
799 | 941 | def render_pep440_old(pieces): |
800 | 942 | """TAG[.postDISTANCE[.dev0]] . |
801 | 943 | |
802 | 944 | The ".dev0" means dirty. |
803 | 945 | |
804 | Eexceptions: | |
946 | Exceptions: | |
805 | 947 | 1: no tags. 0.postDISTANCE[.dev0] |
806 | 948 | """ |
807 | 949 | if pieces["closest-tag"]: |
872 | 1014 | |
873 | 1015 | if style == "pep440": |
874 | 1016 | rendered = render_pep440(pieces) |
1017 | elif style == "pep440-branch": | |
1018 | rendered = render_pep440_branch(pieces) | |
875 | 1019 | elif style == "pep440-pre": |
876 | 1020 | rendered = render_pep440_pre(pieces) |
877 | 1021 | elif style == "pep440-post": |
878 | 1022 | rendered = render_pep440_post(pieces) |
1023 | elif style == "pep440-post-branch": | |
1024 | rendered = render_pep440_post_branch(pieces) | |
879 | 1025 | elif style == "pep440-old": |
880 | 1026 | rendered = render_pep440_old(pieces) |
881 | 1027 | elif style == "git-describe": |
911 | 1057 | # versionfile_source is the relative path from the top of the source |
912 | 1058 | # tree (where the .git directory might live) to this file. Invert |
913 | 1059 | # this to find the root from __file__. |
914 | for i in cfg.versionfile_source.split('/'): | |
1060 | for _ in cfg.versionfile_source.split('/'): | |
915 | 1061 | root = os.path.dirname(root) |
916 | 1062 | except NameError: |
917 | 1063 | return {"version": "0+unknown", "full-revisionid": None, |
946 | 1092 | # _version.py. |
947 | 1093 | keywords = {} |
948 | 1094 | try: |
949 | f = open(versionfile_abs, "r") | |
950 | for line in f.readlines(): | |
951 | if line.strip().startswith("git_refnames ="): | |
952 | mo = re.search(r'=\s*"(.*)"', line) | |
953 | if mo: | |
954 | keywords["refnames"] = mo.group(1) | |
955 | if line.strip().startswith("git_full ="): | |
956 | mo = re.search(r'=\s*"(.*)"', line) | |
957 | if mo: | |
958 | keywords["full"] = mo.group(1) | |
959 | if line.strip().startswith("git_date ="): | |
960 | mo = re.search(r'=\s*"(.*)"', line) | |
961 | if mo: | |
962 | keywords["date"] = mo.group(1) | |
963 | f.close() | |
964 | except EnvironmentError: | |
1095 | with open(versionfile_abs, "r") as fobj: | |
1096 | for line in fobj: | |
1097 | if line.strip().startswith("git_refnames ="): | |
1098 | mo = re.search(r'=\s*"(.*)"', line) | |
1099 | if mo: | |
1100 | keywords["refnames"] = mo.group(1) | |
1101 | if line.strip().startswith("git_full ="): | |
1102 | mo = re.search(r'=\s*"(.*)"', line) | |
1103 | if mo: | |
1104 | keywords["full"] = mo.group(1) | |
1105 | if line.strip().startswith("git_date ="): | |
1106 | mo = re.search(r'=\s*"(.*)"', line) | |
1107 | if mo: | |
1108 | keywords["date"] = mo.group(1) | |
1109 | except OSError: | |
965 | 1110 | pass |
966 | 1111 | return keywords |
967 | 1112 | |
969 | 1114 | @register_vcs_handler("git", "keywords") |
970 | 1115 | def git_versions_from_keywords(keywords, tag_prefix, verbose): |
971 | 1116 | """Get version information from git keywords.""" |
972 | if not keywords: | |
973 | raise NotThisMethod("no keywords at all, weird") | |
1117 | if "refnames" not in keywords: | |
1118 | raise NotThisMethod("Short version file found") | |
974 | 1119 | date = keywords.get("date") |
975 | 1120 | if date is not None: |
1121 | # Use only the last line. Previous lines may contain GPG signature | |
1122 | # information. | |
1123 | date = date.splitlines()[-1] | |
1124 | ||
976 | 1125 | # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant |
977 | 1126 | # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 |
978 | 1127 | # -like" string, which we must then edit to make compliant), because |
985 | 1134 | if verbose: |
986 | 1135 | print("keywords are unexpanded, not using") |
987 | 1136 | raise NotThisMethod("unexpanded keywords, not a git-archive tarball") |
988 | refs = set([r.strip() for r in refnames.strip("()").split(",")]) | |
1137 | refs = {r.strip() for r in refnames.strip("()").split(",")} | |
989 | 1138 | # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of |
990 | 1139 | # just "foo-1.0". If we see a "tag: " prefix, prefer those. |
991 | 1140 | TAG = "tag: " |
992 | tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) | |
1141 | tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} | |
993 | 1142 | if not tags: |
994 | 1143 | # Either we're using git < 1.8.3, or there really are no tags. We use |
995 | 1144 | # a heuristic: assume all version tags have a digit. The old git %d |
998 | 1147 | # between branches and tags. By ignoring refnames without digits, we |
999 | 1148 | # filter out many common branch names like "release" and |
1000 | 1149 | # "stabilization", as well as "HEAD" and "master". |
1001 | tags = set([r for r in refs if re.search(r'\d', r)]) | |
1150 | tags = {r for r in refs if re.search(r'\d', r)} | |
1002 | 1151 | if verbose: |
1003 | 1152 | print("discarding '%s', no digits" % ",".join(refs - tags)) |
1004 | 1153 | if verbose: |
1007 | 1156 | # sorting will prefer e.g. "2.0" over "2.0rc1" |
1008 | 1157 | if ref.startswith(tag_prefix): |
1009 | 1158 | r = ref[len(tag_prefix):] |
1159 | # Filter out refs that exactly match prefix or that don't start | |
1160 | # with a number once the prefix is stripped (mostly a concern | |
1161 | # when prefix is '') | |
1162 | if not re.match(r'\d', r): | |
1163 | continue | |
1010 | 1164 | if verbose: |
1011 | 1165 | print("picking %s" % r) |
1012 | 1166 | return {"version": r, |
1022 | 1176 | |
1023 | 1177 | |
1024 | 1178 | @register_vcs_handler("git", "pieces_from_vcs") |
1025 | def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): | |
1179 | def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): | |
1026 | 1180 | """Get version from 'git describe' in the root of the source tree. |
1027 | 1181 | |
1028 | 1182 | This only gets called if the git-archive 'subst' keywords were *not* |
1033 | 1187 | if sys.platform == "win32": |
1034 | 1188 | GITS = ["git.cmd", "git.exe"] |
1035 | 1189 | |
1036 | out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, | |
1037 | hide_stderr=True) | |
1190 | # GIT_DIR can interfere with correct operation of Versioneer. | |
1191 | # It may be intended to be passed to the Versioneer-versioned project, | |
1192 | # but that should not change where we get our version from. | |
1193 | env = os.environ.copy() | |
1194 | env.pop("GIT_DIR", None) | |
1195 | runner = functools.partial(runner, env=env) | |
1196 | ||
1197 | _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, | |
1198 | hide_stderr=True) | |
1038 | 1199 | if rc != 0: |
1039 | 1200 | if verbose: |
1040 | 1201 | print("Directory %s not under git control" % root) |
1041 | 1202 | raise NotThisMethod("'git rev-parse --git-dir' returned error") |
1042 | 1203 | |
1204 | MATCH_ARGS = ["--match", "%s*" % tag_prefix] if tag_prefix else [] | |
1205 | ||
1043 | 1206 | # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] |
1044 | 1207 | # if there isn't one, this yields HEX[-dirty] (no NUM) |
1045 | describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", | |
1046 | "--always", "--long", | |
1047 | "--match", "%s*" % tag_prefix], | |
1048 | cwd=root) | |
1208 | describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", | |
1209 | "--always", "--long", *MATCH_ARGS], | |
1210 | cwd=root) | |
1049 | 1211 | # --long was added in git-1.5.5 |
1050 | 1212 | if describe_out is None: |
1051 | 1213 | raise NotThisMethod("'git describe' failed") |
1052 | 1214 | describe_out = describe_out.strip() |
1053 | full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) | |
1215 | full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) | |
1054 | 1216 | if full_out is None: |
1055 | 1217 | raise NotThisMethod("'git rev-parse' failed") |
1056 | 1218 | full_out = full_out.strip() |
1059 | 1221 | pieces["long"] = full_out |
1060 | 1222 | pieces["short"] = full_out[:7] # maybe improved later |
1061 | 1223 | pieces["error"] = None |
1224 | ||
1225 | branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], | |
1226 | cwd=root) | |
1227 | # --abbrev-ref was added in git-1.6.3 | |
1228 | if rc != 0 or branch_name is None: | |
1229 | raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") | |
1230 | branch_name = branch_name.strip() | |
1231 | ||
1232 | if branch_name == "HEAD": | |
1233 | # If we aren't exactly on a branch, pick a branch which represents | |
1234 | # the current commit. If all else fails, we are on a branchless | |
1235 | # commit. | |
1236 | branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) | |
1237 | # --contains was added in git-1.5.4 | |
1238 | if rc != 0 or branches is None: | |
1239 | raise NotThisMethod("'git branch --contains' returned error") | |
1240 | branches = branches.split("\n") | |
1241 | ||
1242 | # Remove the first line if we're running detached | |
1243 | if "(" in branches[0]: | |
1244 | branches.pop(0) | |
1245 | ||
1246 | # Strip off the leading "* " from the list of branches. | |
1247 | branches = [branch[2:] for branch in branches] | |
1248 | if "master" in branches: | |
1249 | branch_name = "master" | |
1250 | elif not branches: | |
1251 | branch_name = None | |
1252 | else: | |
1253 | # Pick the first branch that is returned. Good or bad. | |
1254 | branch_name = branches[0] | |
1255 | ||
1256 | pieces["branch"] = branch_name | |
1062 | 1257 | |
1063 | 1258 | # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] |
1064 | 1259 | # TAG might have hyphens. |
1076 | 1271 | # TAG-NUM-gHEX |
1077 | 1272 | mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) |
1078 | 1273 | if not mo: |
1079 | # unparseable. Maybe git-describe is misbehaving? | |
1274 | # unparsable. Maybe git-describe is misbehaving? | |
1080 | 1275 | pieces["error"] = ("unable to parse git-describe output: '%s'" |
1081 | 1276 | % describe_out) |
1082 | 1277 | return pieces |
1101 | 1296 | else: |
1102 | 1297 | # HEX: no tags |
1103 | 1298 | pieces["closest-tag"] = None |
1104 | count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], | |
1105 | cwd=root) | |
1299 | count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) | |
1106 | 1300 | pieces["distance"] = int(count_out) # total number of commits |
1107 | 1301 | |
1108 | 1302 | # commit date: see ISO-8601 comment in git_versions_from_keywords() |
1109 | date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], | |
1110 | cwd=root)[0].strip() | |
1303 | date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() | |
1304 | # Use only the last line. Previous lines may contain GPG signature | |
1305 | # information. | |
1306 | date = date.splitlines()[-1] | |
1111 | 1307 | pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) |
1112 | 1308 | |
1113 | 1309 | return pieces |
1126 | 1322 | if ipy: |
1127 | 1323 | files.append(ipy) |
1128 | 1324 | try: |
1129 | me = __file__ | |
1130 | if me.endswith(".pyc") or me.endswith(".pyo"): | |
1131 | me = os.path.splitext(me)[0] + ".py" | |
1132 | versioneer_file = os.path.relpath(me) | |
1325 | my_path = __file__ | |
1326 | if my_path.endswith(".pyc") or my_path.endswith(".pyo"): | |
1327 | my_path = os.path.splitext(my_path)[0] + ".py" | |
1328 | versioneer_file = os.path.relpath(my_path) | |
1133 | 1329 | except NameError: |
1134 | 1330 | versioneer_file = "versioneer.py" |
1135 | 1331 | files.append(versioneer_file) |
1136 | 1332 | present = False |
1137 | 1333 | try: |
1138 | f = open(".gitattributes", "r") | |
1139 | for line in f.readlines(): | |
1140 | if line.strip().startswith(versionfile_source): | |
1141 | if "export-subst" in line.strip().split()[1:]: | |
1142 | present = True | |
1143 | f.close() | |
1144 | except EnvironmentError: | |
1334 | with open(".gitattributes", "r") as fobj: | |
1335 | for line in fobj: | |
1336 | if line.strip().startswith(versionfile_source): | |
1337 | if "export-subst" in line.strip().split()[1:]: | |
1338 | present = True | |
1339 | break | |
1340 | except OSError: | |
1145 | 1341 | pass |
1146 | 1342 | if not present: |
1147 | f = open(".gitattributes", "a+") | |
1148 | f.write("%s export-subst\n" % versionfile_source) | |
1149 | f.close() | |
1343 | with open(".gitattributes", "a+") as fobj: | |
1344 | fobj.write(f"{versionfile_source} export-subst\n") | |
1150 | 1345 | files.append(".gitattributes") |
1151 | 1346 | run_command(GITS, ["add", "--"] + files) |
1152 | 1347 | |
1160 | 1355 | """ |
1161 | 1356 | rootdirs = [] |
1162 | 1357 | |
1163 | for i in range(3): | |
1358 | for _ in range(3): | |
1164 | 1359 | dirname = os.path.basename(root) |
1165 | 1360 | if dirname.startswith(parentdir_prefix): |
1166 | 1361 | return {"version": dirname[len(parentdir_prefix):], |
1167 | 1362 | "full-revisionid": None, |
1168 | 1363 | "dirty": False, "error": None, "date": None} |
1169 | else: | |
1170 | rootdirs.append(root) | |
1171 | root = os.path.dirname(root) # up a level | |
1364 | rootdirs.append(root) | |
1365 | root = os.path.dirname(root) # up a level | |
1172 | 1366 | |
1173 | 1367 | if verbose: |
1174 | 1368 | print("Tried directories %s but none started with prefix %s" % |
1175 | 1369 | (str(rootdirs), parentdir_prefix)) |
1176 | 1370 | raise NotThisMethod("rootdir doesn't start with parentdir_prefix") |
1177 | 1371 | |
1372 | ||
1178 | 1373 | SHORT_VERSION_PY = """ |
1179 | # This file was generated by 'versioneer.py' (0.17) from | |
1374 | # This file was generated by 'versioneer.py' (0.22) from | |
1180 | 1375 | # revision-control system data, or from the parent directory name of an |
1181 | 1376 | # unpacked source archive. Distribution tarballs contain a pre-generated copy |
1182 | 1377 | # of this file. |
1198 | 1393 | try: |
1199 | 1394 | with open(filename) as f: |
1200 | 1395 | contents = f.read() |
1201 | except EnvironmentError: | |
1396 | except OSError: | |
1202 | 1397 | raise NotThisMethod("unable to read _version.py") |
1203 | 1398 | mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", |
1204 | 1399 | contents, re.M | re.S) |
1253 | 1448 | return rendered |
1254 | 1449 | |
1255 | 1450 | |
1256 | def render_pep440_pre(pieces): | |
1257 | """TAG[.post.devDISTANCE] -- No -dirty. | |
1451 | def render_pep440_branch(pieces): | |
1452 | """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . | |
1453 | ||
1454 | The ".dev0" means not master branch. Note that .dev0 sorts backwards | |
1455 | (a feature branch will appear "older" than the master branch). | |
1258 | 1456 | |
1259 | 1457 | Exceptions: |
1260 | 1: no tags. 0.post.devDISTANCE | |
1458 | 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] | |
1261 | 1459 | """ |
1262 | 1460 | if pieces["closest-tag"]: |
1263 | 1461 | rendered = pieces["closest-tag"] |
1462 | if pieces["distance"] or pieces["dirty"]: | |
1463 | if pieces["branch"] != "master": | |
1464 | rendered += ".dev0" | |
1465 | rendered += plus_or_dot(pieces) | |
1466 | rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) | |
1467 | if pieces["dirty"]: | |
1468 | rendered += ".dirty" | |
1469 | else: | |
1470 | # exception #1 | |
1471 | rendered = "0" | |
1472 | if pieces["branch"] != "master": | |
1473 | rendered += ".dev0" | |
1474 | rendered += "+untagged.%d.g%s" % (pieces["distance"], | |
1475 | pieces["short"]) | |
1476 | if pieces["dirty"]: | |
1477 | rendered += ".dirty" | |
1478 | return rendered | |
1479 | ||
1480 | ||
1481 | def pep440_split_post(ver): | |
1482 | """Split pep440 version string at the post-release segment. | |
1483 | ||
1484 | Returns the release segments before the post-release and the | |
1485 | post-release version number (or -1 if no post-release segment is present). | |
1486 | """ | |
1487 | vc = str.split(ver, ".post") | |
1488 | return vc[0], int(vc[1] or 0) if len(vc) == 2 else None | |
1489 | ||
1490 | ||
1491 | def render_pep440_pre(pieces): | |
1492 | """TAG[.postN.devDISTANCE] -- No -dirty. | |
1493 | ||
1494 | Exceptions: | |
1495 | 1: no tags. 0.post0.devDISTANCE | |
1496 | """ | |
1497 | if pieces["closest-tag"]: | |
1264 | 1498 | if pieces["distance"]: |
1265 | rendered += ".post.dev%d" % pieces["distance"] | |
1499 | # update the post release segment | |
1500 | tag_version, post_version = pep440_split_post(pieces["closest-tag"]) | |
1501 | rendered = tag_version | |
1502 | if post_version is not None: | |
1503 | rendered += ".post%d.dev%d" % (post_version+1, pieces["distance"]) | |
1504 | else: | |
1505 | rendered += ".post0.dev%d" % (pieces["distance"]) | |
1506 | else: | |
1507 | # no commits, use the tag as the version | |
1508 | rendered = pieces["closest-tag"] | |
1266 | 1509 | else: |
1267 | 1510 | # exception #1 |
1268 | rendered = "0.post.dev%d" % pieces["distance"] | |
1511 | rendered = "0.post0.dev%d" % pieces["distance"] | |
1269 | 1512 | return rendered |
1270 | 1513 | |
1271 | 1514 | |
1296 | 1539 | return rendered |
1297 | 1540 | |
1298 | 1541 | |
1542 | def render_pep440_post_branch(pieces): | |
1543 | """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . | |
1544 | ||
1545 | The ".dev0" means not master branch. | |
1546 | ||
1547 | Exceptions: | |
1548 | 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] | |
1549 | """ | |
1550 | if pieces["closest-tag"]: | |
1551 | rendered = pieces["closest-tag"] | |
1552 | if pieces["distance"] or pieces["dirty"]: | |
1553 | rendered += ".post%d" % pieces["distance"] | |
1554 | if pieces["branch"] != "master": | |
1555 | rendered += ".dev0" | |
1556 | rendered += plus_or_dot(pieces) | |
1557 | rendered += "g%s" % pieces["short"] | |
1558 | if pieces["dirty"]: | |
1559 | rendered += ".dirty" | |
1560 | else: | |
1561 | # exception #1 | |
1562 | rendered = "0.post%d" % pieces["distance"] | |
1563 | if pieces["branch"] != "master": | |
1564 | rendered += ".dev0" | |
1565 | rendered += "+g%s" % pieces["short"] | |
1566 | if pieces["dirty"]: | |
1567 | rendered += ".dirty" | |
1568 | return rendered | |
1569 | ||
1570 | ||
1299 | 1571 | def render_pep440_old(pieces): |
1300 | 1572 | """TAG[.postDISTANCE[.dev0]] . |
1301 | 1573 | |
1302 | 1574 | The ".dev0" means dirty. |
1303 | 1575 | |
1304 | Eexceptions: | |
1576 | Exceptions: | |
1305 | 1577 | 1: no tags. 0.postDISTANCE[.dev0] |
1306 | 1578 | """ |
1307 | 1579 | if pieces["closest-tag"]: |
1372 | 1644 | |
1373 | 1645 | if style == "pep440": |
1374 | 1646 | rendered = render_pep440(pieces) |
1647 | elif style == "pep440-branch": | |
1648 | rendered = render_pep440_branch(pieces) | |
1375 | 1649 | elif style == "pep440-pre": |
1376 | 1650 | rendered = render_pep440_pre(pieces) |
1377 | 1651 | elif style == "pep440-post": |
1378 | 1652 | rendered = render_pep440_post(pieces) |
1653 | elif style == "pep440-post-branch": | |
1654 | rendered = render_pep440_post_branch(pieces) | |
1379 | 1655 | elif style == "pep440-old": |
1380 | 1656 | rendered = render_pep440_old(pieces) |
1381 | 1657 | elif style == "git-describe": |
1475 | 1751 | return get_versions()["version"] |
1476 | 1752 | |
1477 | 1753 | |
1478 | def get_cmdclass(): | |
1479 | """Get the custom setuptools/distutils subclasses used by Versioneer.""" | |
1754 | def get_cmdclass(cmdclass=None): | |
1755 | """Get the custom setuptools/distutils subclasses used by Versioneer. | |
1756 | ||
1757 | If the package uses a different cmdclass (e.g. one from numpy), it | |
1758 | should be provide as an argument. | |
1759 | """ | |
1480 | 1760 | if "versioneer" in sys.modules: |
1481 | 1761 | del sys.modules["versioneer"] |
1482 | 1762 | # this fixes the "python setup.py develop" case (also 'install' and |
1490 | 1770 | # parent is protected against the child's "import versioneer". By |
1491 | 1771 | # removing ourselves from sys.modules here, before the child build |
1492 | 1772 | # happens, we protect the child from the parent's versioneer too. |
1493 | # Also see https://github.com/warner/python-versioneer/issues/52 | |
1494 | ||
1495 | cmds = {} | |
1773 | # Also see https://github.com/python-versioneer/python-versioneer/issues/52 | |
1774 | ||
1775 | cmds = {} if cmdclass is None else cmdclass.copy() | |
1496 | 1776 | |
1497 | 1777 | # we add "version" to both distutils and setuptools |
1498 | from distutils.core import Command | |
1778 | try: | |
1779 | from setuptools import Command | |
1780 | except ImportError: | |
1781 | from distutils.core import Command | |
1499 | 1782 | |
1500 | 1783 | class cmd_version(Command): |
1501 | 1784 | description = "report generated version string" |
1534 | 1817 | # setup.py egg_info -> ? |
1535 | 1818 | |
1536 | 1819 | # we override different "build_py" commands for both environments |
1537 | if "setuptools" in sys.modules: | |
1820 | if 'build_py' in cmds: | |
1821 | _build_py = cmds['build_py'] | |
1822 | elif "setuptools" in sys.modules: | |
1538 | 1823 | from setuptools.command.build_py import build_py as _build_py |
1539 | 1824 | else: |
1540 | 1825 | from distutils.command.build_py import build_py as _build_py |
1554 | 1839 | write_to_version_file(target_versionfile, versions) |
1555 | 1840 | cmds["build_py"] = cmd_build_py |
1556 | 1841 | |
1842 | if 'build_ext' in cmds: | |
1843 | _build_ext = cmds['build_ext'] | |
1844 | elif "setuptools" in sys.modules: | |
1845 | from setuptools.command.build_ext import build_ext as _build_ext | |
1846 | else: | |
1847 | from distutils.command.build_ext import build_ext as _build_ext | |
1848 | ||
1849 | class cmd_build_ext(_build_ext): | |
1850 | def run(self): | |
1851 | root = get_root() | |
1852 | cfg = get_config_from_root(root) | |
1853 | versions = get_versions() | |
1854 | _build_ext.run(self) | |
1855 | if self.inplace: | |
1856 | # build_ext --inplace will only build extensions in | |
1857 | # build/lib<..> dir with no _version.py to write to. | |
1858 | # As in place builds will already have a _version.py | |
1859 | # in the module dir, we do not need to write one. | |
1860 | return | |
1861 | # now locate _version.py in the new build/ directory and replace | |
1862 | # it with an updated value | |
1863 | target_versionfile = os.path.join(self.build_lib, | |
1864 | cfg.versionfile_build) | |
1865 | print("UPDATING %s" % target_versionfile) | |
1866 | write_to_version_file(target_versionfile, versions) | |
1867 | cmds["build_ext"] = cmd_build_ext | |
1868 | ||
1557 | 1869 | if "cx_Freeze" in sys.modules: # cx_freeze enabled? |
1558 | 1870 | from cx_Freeze.dist import build_exe as _build_exe |
1559 | 1871 | # nczeczulin reports that py2exe won't like the pep440-style string |
1587 | 1899 | del cmds["build_py"] |
1588 | 1900 | |
1589 | 1901 | if 'py2exe' in sys.modules: # py2exe enabled? |
1590 | try: | |
1591 | from py2exe.distutils_buildexe import py2exe as _py2exe # py3 | |
1592 | except ImportError: | |
1593 | from py2exe.build_exe import py2exe as _py2exe # py2 | |
1902 | from py2exe.distutils_buildexe import py2exe as _py2exe | |
1594 | 1903 | |
1595 | 1904 | class cmd_py2exe(_py2exe): |
1596 | 1905 | def run(self): |
1615 | 1924 | cmds["py2exe"] = cmd_py2exe |
1616 | 1925 | |
1617 | 1926 | # we override different "sdist" commands for both environments |
1618 | if "setuptools" in sys.modules: | |
1927 | if 'sdist' in cmds: | |
1928 | _sdist = cmds['sdist'] | |
1929 | elif "setuptools" in sys.modules: | |
1619 | 1930 | from setuptools.command.sdist import sdist as _sdist |
1620 | 1931 | else: |
1621 | 1932 | from distutils.command.sdist import sdist as _sdist |
1682 | 1993 | |
1683 | 1994 | """ |
1684 | 1995 | |
1685 | INIT_PY_SNIPPET = """ | |
1996 | OLD_SNIPPET = """ | |
1686 | 1997 | from ._version import get_versions |
1687 | 1998 | __version__ = get_versions()['version'] |
1688 | 1999 | del get_versions |
1689 | 2000 | """ |
1690 | 2001 | |
2002 | INIT_PY_SNIPPET = """ | |
2003 | from . import {0} | |
2004 | __version__ = {0}.get_versions()['version'] | |
2005 | """ | |
2006 | ||
1691 | 2007 | |
1692 | 2008 | def do_setup(): |
1693 | """Main VCS-independent setup function for installing Versioneer.""" | |
2009 | """Do main VCS-independent setup function for installing Versioneer.""" | |
1694 | 2010 | root = get_root() |
1695 | 2011 | try: |
1696 | 2012 | cfg = get_config_from_root(root) |
1697 | except (EnvironmentError, configparser.NoSectionError, | |
2013 | except (OSError, configparser.NoSectionError, | |
1698 | 2014 | configparser.NoOptionError) as e: |
1699 | if isinstance(e, (EnvironmentError, configparser.NoSectionError)): | |
2015 | if isinstance(e, (OSError, configparser.NoSectionError)): | |
1700 | 2016 | print("Adding sample versioneer config to setup.cfg", |
1701 | 2017 | file=sys.stderr) |
1702 | 2018 | with open(os.path.join(root, "setup.cfg"), "a") as f: |
1720 | 2036 | try: |
1721 | 2037 | with open(ipy, "r") as f: |
1722 | 2038 | old = f.read() |
1723 | except EnvironmentError: | |
2039 | except OSError: | |
1724 | 2040 | old = "" |
1725 | if INIT_PY_SNIPPET not in old: | |
2041 | module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0] | |
2042 | snippet = INIT_PY_SNIPPET.format(module) | |
2043 | if OLD_SNIPPET in old: | |
2044 | print(" replacing boilerplate in %s" % ipy) | |
2045 | with open(ipy, "w") as f: | |
2046 | f.write(old.replace(OLD_SNIPPET, snippet)) | |
2047 | elif snippet not in old: | |
1726 | 2048 | print(" appending to %s" % ipy) |
1727 | 2049 | with open(ipy, "a") as f: |
1728 | f.write(INIT_PY_SNIPPET) | |
2050 | f.write(snippet) | |
1729 | 2051 | else: |
1730 | 2052 | print(" %s unmodified" % ipy) |
1731 | 2053 | else: |
1744 | 2066 | if line.startswith("include "): |
1745 | 2067 | for include in line.split()[1:]: |
1746 | 2068 | simple_includes.add(include) |
1747 | except EnvironmentError: | |
2069 | except OSError: | |
1748 | 2070 | pass |
1749 | 2071 | # That doesn't cover everything MANIFEST.in can do |
1750 | 2072 | # (http://docs.python.org/2/distutils/sourcedist.html#commands), so |
1807 | 2129 | errors += 1 |
1808 | 2130 | return errors |
1809 | 2131 | |
2132 | ||
1810 | 2133 | if __name__ == "__main__": |
1811 | 2134 | cmd = sys.argv[1] |
1812 | 2135 | if cmd == "setup": |