New upstream version 2.4.16
Yao Wei (魏銘廷)
1 year, 10 months ago
0 | 0 | # These are supported funding model platforms |
1 | 1 | |
2 | custom: willmcgugan | |
2 | github: willmcgugan | |
3 | ko_fi: willmcgugan | |
4 | tidelift: "pypi/rich" |
0 | name: Package | |
1 | ||
2 | on: | |
3 | push: | |
4 | tags: | |
5 | - 'v2.*' | |
6 | ||
7 | jobs: | |
8 | ||
9 | build-wheel: | |
10 | runs-on: ubuntu-latest | |
11 | name: Build wheel distribution | |
12 | steps: | |
13 | - name: Checkout code | |
14 | uses: actions/checkout@v2 | |
15 | with: | |
16 | submodules: true | |
17 | - name: Setup Python 3.9 | |
18 | uses: actions/setup-python@v2 | |
19 | with: | |
20 | python-version: 3.9 | |
21 | - name: Update build dependencies | |
22 | run: python -m pip install -U pip wheel setuptools | |
23 | - name: Build wheel distribution | |
24 | run: python setup.py bdist_wheel | |
25 | - name: Store built wheel | |
26 | uses: actions/upload-artifact@v2 | |
27 | with: | |
28 | name: dist | |
29 | path: dist/* | |
30 | ||
31 | build-sdist: | |
32 | runs-on: ubuntu-latest | |
33 | name: Build source distribution | |
34 | steps: | |
35 | - name: Checkout code | |
36 | uses: actions/checkout@v2 | |
37 | with: | |
38 | submodules: true | |
39 | - name: Set up Python 3.9 | |
40 | uses: actions/setup-python@v2 | |
41 | with: | |
42 | python-version: 3.9 | |
43 | - name: Update build dependencies | |
44 | run: python -m pip install -U pip wheel setuptools | |
45 | - name: Build source distribution | |
46 | run: python setup.py sdist | |
47 | - name: Store source distribution | |
48 | uses: actions/upload-artifact@v2 | |
49 | with: | |
50 | name: dist | |
51 | path: dist/* | |
52 | ||
53 | test-sdist: | |
54 | runs-on: ubuntu-latest | |
55 | name: Test source distribution | |
56 | needs: | |
57 | - build-sdist | |
58 | steps: | |
59 | - name: Checkout code | |
60 | uses: actions/checkout@v2 | |
61 | with: | |
62 | submodules: true | |
63 | - name: Setup Python 3.9 | |
64 | uses: actions/setup-python@v2 | |
65 | with: | |
66 | python-version: 3.9 | |
67 | - name: Download source distribution | |
68 | uses: actions/download-artifact@v2 | |
69 | with: | |
70 | name: dist | |
71 | path: dist | |
72 | - name: Install source distribution | |
73 | run: python -m pip install dist/fs-*.tar.gz | |
74 | - name: Remove source code | |
75 | run: rm -rvd fs | |
76 | - name: Install test requirements | |
77 | run: python -m pip install -r tests/requirements.txt | |
78 | - name: Test installed package | |
79 | run: python -m unittest discover -vv | |
80 | ||
81 | test-wheel: | |
82 | runs-on: ubuntu-latest | |
83 | name: Test wheel distribution | |
84 | needs: | |
85 | - build-wheel | |
86 | steps: | |
87 | - name: Checkout code | |
88 | uses: actions/checkout@v2 | |
89 | with: | |
90 | submodules: true | |
91 | - name: Setup Python 3.9 | |
92 | uses: actions/setup-python@v2 | |
93 | with: | |
94 | python-version: 3.9 | |
95 | - name: Download wheel distribution | |
96 | uses: actions/download-artifact@v2 | |
97 | with: | |
98 | name: dist | |
99 | path: dist | |
100 | - name: Install wheel distribution | |
101 | run: python -m pip install dist/fs-*.whl | |
102 | - name: Remove source code | |
103 | run: rm -rvd fs | |
104 | - name: Install test requirements | |
105 | run: python -m pip install -r tests/requirements.txt | |
106 | - name: Test installed package | |
107 | run: python -m unittest discover -vv | |
108 | ||
109 | upload: | |
110 | environment: PyPI | |
111 | runs-on: ubuntu-latest | |
112 | name: Upload | |
113 | needs: | |
114 | - build-sdist | |
115 | - build-wheel | |
116 | - test-sdist | |
117 | - test-wheel | |
118 | steps: | |
119 | - name: Download built distributions | |
120 | uses: actions/download-artifact@v2 | |
121 | with: | |
122 | name: dist | |
123 | path: dist | |
124 | - name: Publish distributions to PyPI | |
125 | if: startsWith(github.ref, 'refs/tags/v') | |
126 | uses: pypa/gh-action-pypi-publish@master | |
127 | with: | |
128 | user: __token__ | |
129 | password: ${{ secrets.PYPI_API_TOKEN }} | |
130 | skip_existing: false | |
131 | ||
132 | release: | |
133 | environment: GitHub Releases | |
134 | runs-on: ubuntu-latest | |
135 | if: "startsWith(github.ref, 'refs/tags/v')" | |
136 | name: Release | |
137 | needs: upload | |
138 | steps: | |
139 | - name: Checkout code | |
140 | uses: actions/checkout@v1 | |
141 | - name: Release a Changelog | |
142 | uses: rasmus-saks/release-a-changelog-action@v1.0.1 | |
143 | with: | |
144 | github-token: '${{ secrets.GITHUB_TOKEN }}' |
0 | name: Test | |
1 | ||
2 | on: | |
3 | - push | |
4 | - pull_request | |
5 | ||
6 | jobs: | |
7 | test: | |
8 | runs-on: ubuntu-latest | |
9 | strategy: | |
10 | fail-fast: false | |
11 | matrix: | |
12 | python-version: | |
13 | - 2.7 | |
14 | - 3.5 | |
15 | - 3.6 | |
16 | - 3.7 | |
17 | - 3.8 | |
18 | - 3.9 | |
19 | - '3.10' | |
20 | - pypy-2.7 | |
21 | - pypy-3.6 | |
22 | - pypy-3.7 | |
23 | steps: | |
24 | - name: Checkout code | |
25 | uses: actions/checkout@v1 | |
26 | - name: Setup Python ${{ matrix.python-version }} | |
27 | uses: actions/setup-python@v2 | |
28 | with: | |
29 | python-version: ${{ matrix.python-version }} | |
30 | - name: Update pip | |
31 | run: python -m pip install -U pip wheel setuptools | |
32 | - name: Install tox | |
33 | run: python -m pip install tox tox-gh-actions | |
34 | - name: Test with tox | |
35 | run: python -m tox | |
36 | - name: Store partial coverage reports | |
37 | uses: actions/upload-artifact@v2 | |
38 | with: | |
39 | name: coverage | |
40 | path: .coverage.* | |
41 | ||
42 | coveralls: | |
43 | needs: test | |
44 | runs-on: ubuntu-latest | |
45 | steps: | |
46 | - name: Checkout code | |
47 | uses: actions/checkout@v1 | |
48 | - name: Setup Python 3.10 | |
49 | uses: actions/setup-python@v2 | |
50 | with: | |
51 | python-version: '3.10' | |
52 | - name: Install coverage package | |
53 | run: python -m pip install -U coverage | |
54 | - name: Download partial coverage reports | |
55 | uses: actions/download-artifact@v2 | |
56 | with: | |
57 | name: coverage | |
58 | - name: Combine coverage | |
59 | run: python -m coverage combine | |
60 | - name: Report coverage | |
61 | run: python -m coverage report | |
62 | - name: Export coverage to XML | |
63 | run: python -m coverage xml | |
64 | - name: Upload coverage statistics to Coveralls | |
65 | uses: AndreMiras/coveralls-python-action@develop | |
66 | ||
67 | lint: | |
68 | runs-on: ubuntu-latest | |
69 | strategy: | |
70 | fail-fast: false | |
71 | matrix: | |
72 | linter: | |
73 | - typecheck | |
74 | - codestyle | |
75 | - docstyle | |
76 | - codeformat | |
77 | steps: | |
78 | - name: Checkout code | |
79 | uses: actions/checkout@v1 | |
80 | - name: Setup Python 3.10 | |
81 | uses: actions/setup-python@v2 | |
82 | with: | |
83 | python-version: '3.10' | |
84 | - name: Update pip | |
85 | run: python -m pip install -U pip wheel setuptools | |
86 | - name: Install tox | |
87 | run: python -m pip install tox tox-gh-actions | |
88 | - name: Run ${{ matrix.linter }} linter | |
89 | run: python -m tox -e ${{ matrix.linter }} |
0 | dist: xenial | |
1 | sudo: false | |
2 | language: python | |
3 | ||
4 | python: | |
5 | - "2.7" | |
6 | - "3.4" | |
7 | - "3.5" | |
8 | - "3.6" | |
9 | - "3.7" | |
10 | - "3.8" | |
11 | - "3.9" | |
12 | - "pypy" | |
13 | - "pypy3.5-7.0" # Need 7.0+ due to a bug in earlier versions that broke our tests. | |
14 | ||
15 | matrix: | |
16 | include: | |
17 | - name: "Type checking" | |
18 | python: "3.7" | |
19 | env: TOXENV=typecheck | |
20 | - name: "Lint" | |
21 | python: "3.7" | |
22 | env: TOXENV=lint | |
23 | ||
24 | # Temporary bandaid for https://github.com/PyFilesystem/pyfilesystem2/issues/342 | |
25 | allow_failures: | |
26 | - python: pypy | |
27 | - python: pypy3.5-7.0 | |
28 | ||
29 | before_install: | |
30 | - pip install -U tox tox-travis | |
31 | - pip --version | |
32 | - pip install -r testrequirements.txt | |
33 | - pip freeze | |
34 | ||
35 | install: | |
36 | - pip install -e . | |
37 | ||
38 | # command to run tests | |
39 | script: tox | |
40 | ||
41 | after_success: | |
42 | - coveralls | |
43 | ||
44 | before_deploy: | |
45 | - pip install -U twine wheel | |
46 | - python setup.py sdist bdist_wheel | |
47 | ||
48 | deploy: | |
49 | provider: script | |
50 | script: twine upload dist/* | |
51 | skip_cleanup: true | |
52 | on: | |
53 | python: 3.9 | |
54 | tags: true | |
55 | repo: PyFilesystem/pyfilesystem2 | |
56 |
3 | 3 | |
4 | 4 | The format is based on [Keep a Changelog](http://keepachangelog.com/) |
5 | 5 | and this project adheres to [Semantic Versioning](http://semver.org/). |
6 | ||
7 | ||
8 | ## Unreleased | |
9 | ||
10 | ||
11 | ## [2.4.16] - 2022-05-02 | |
12 | ||
13 | ### Changed | |
14 | ||
15 | - Make `fs.zipfs._ZipExtFile` use the seeking mechanism implemented | |
16 | in the Python standard library in Python version 3.7 and later | |
17 | ([#527](https://github.com/PyFilesystem/pyfilesystem2/pull/527)). | |
18 | - Mark `fs.zipfs.ReadZipFS` as a case-sensitive filesystem | |
19 | ([#527](https://github.com/PyFilesystem/pyfilesystem2/pull/527)). | |
20 | - Optimized moving files between filesystems with syspaths. | |
21 | ([#523](https://github.com/PyFilesystem/pyfilesystem2/pull/523)). | |
22 | - Fixed `fs.move.move_file` to clean up the copy on the destination in case of errors. | |
23 | - `fs.opener.manage_fs` with `writeable=True` will now raise a `ResourceReadOnly` | |
24 | exception if the managed filesystem is not writeable. | |
25 | - Marked filesystems wrapped with `fs.wrap.WrapReadOnly` as read-only. | |
26 | ||
27 | ||
28 | ## [2.4.15] - 2022-02-07 | |
29 | ||
30 | ### Changed | |
31 | ||
32 | - Support more lenient usernames and group names in FTP servers | |
33 | ([#507](https://github.com/PyFilesystem/pyfilesystem2/pull/507)). | |
34 | Closes [#506](https://github.com/PyFilesystem/pyfilesystem2/issues/506). | |
35 | - Removed dependency on pytz ([#518](https://github.com/PyFilesystem/pyfilesystem2/pull/518)). | |
36 | Closes [#516](https://github.com/PyFilesystem/pyfilesystem2/issues/516). | |
37 | ||
38 | ### Fixed | |
39 | ||
40 | - Fixed `MemoryFS.move` and `MemoryFS.movedir` not updating the name of moved | |
41 | resources, causing `MemoryFS.scandir` to use the old name. | |
42 | ([#510](https://github.com/PyFilesystem/pyfilesystem2/pull/510)). | |
43 | Closes [#509](https://github.com/PyFilesystem/pyfilesystem2/issues/509). | |
44 | - Make `WrapFS.move` and `WrapFS.movedir` use the delegate FS methods instead | |
45 | of `fs.move` functions, which was causing optimized implementation of | |
46 | `movedir` to be always skipped. | |
47 | ([#511](https://github.com/PyFilesystem/pyfilesystem2/pull/511)). | |
48 | ||
49 | ||
50 | ## [2.4.14] - 2021-11-16 | |
51 | ||
52 | ### Added | |
53 | ||
54 | - Added `fs.copy.copy_file_if`, `fs.copy.copy_dir_if`, and `fs.copy.copy_fs_if`. | |
55 | Closes [#458](https://github.com/PyFilesystem/pyfilesystem2/issues/458). | |
56 | - Added `fs.base.FS.getmodified`. | |
57 | ||
58 | ### Changed | |
59 | ||
60 | - FTP servers that do not support the MLST command now try to use the MDTM command to | |
61 | retrieve the last modification timestamp of a resource. | |
62 | Closes [#456](https://github.com/PyFilesystem/pyfilesystem2/pull/456). | |
63 | ||
64 | ### Fixed | |
65 | ||
66 | - Fixed performance bugs in `fs.copy.copy_dir_if_newer`. Test cases were adapted to catch those bugs in the future. | |
67 | - Fixed precision bug for timestamps in `fs.OSFS.setinfo`. | |
68 | ||
69 | ||
70 | ## [2.4.13] - 2021-03-27 | |
71 | ||
72 | ### Added | |
73 | ||
74 | - Added FTP over TLS (FTPS) support to FTPFS. | |
75 | Closes [#437](https://github.com/PyFilesystem/pyfilesystem2/issues/437), | |
76 | [#449](https://github.com/PyFilesystem/pyfilesystem2/pull/449). | |
77 | - `PathError` now supports wrapping an exception using the `exc` argument. | |
78 | Closes [#453](https://github.com/PyFilesystem/pyfilesystem2/issues/453). | |
79 | - Better documentation of the `writable` parameter of `fs.open_fs`, and | |
80 | hint about using `fs.wrap.read_only` when a read-only filesystem is | |
81 | required. Closes [#441](https://github.com/PyFilesystem/pyfilesystem2/issues/441). | |
82 | - Copy and move operations now provide a parameter `preserve_time` that, when | |
83 | passed as `True`, makes sure the "mtime" of the destination file will be | |
84 | the same as that of the source file. | |
85 | ||
86 | ### Changed | |
87 | ||
88 | - Make `FS.upload` explicit about the expected error when the parent directory of the destination does not exist. | |
89 | Closes [#445](https://github.com/PyFilesystem/pyfilesystem2/pull/445). | |
90 | - Migrate continuous integration from Travis-CI to GitHub Actions and introduce several linters | |
91 | again in the build steps ([#448](https://github.com/PyFilesystem/pyfilesystem2/pull/448)). | |
92 | Closes [#446](https://github.com/PyFilesystem/pyfilesystem2/issues/446). | |
93 | - Stop requiring `pytest` to run tests, allowing any test runner supporting `unittest`-style | |
94 | test suites. | |
95 | - `FSTestCases` now builds the large data required for `upload` and `download` tests only | |
96 | once in order to reduce the total testing time. | |
97 | - `MemoryFS.move` and `MemoryFS.movedir` will now avoid copying data. | |
98 | Closes [#452](https://github.com/PyFilesystem/pyfilesystem2/issues/452). | |
99 | - `FS.removetree("/")` behaviour has been standardized in all filesystems, and | |
100 | is expected to clear the contents of the root folder without deleting it. | |
101 | Closes [#471](https://github.com/PyFilesystem/pyfilesystem2/issues/471). | |
102 | - `FS.getbasic` is now deprecated, as it is redundant with `FS.getinfo`, | |
103 | and `FS.getinfo` is now explicitly expected to return the *basic* info | |
104 | namespace unconditionally. Closes [#469](https://github.com/PyFilesystem/pyfilesystem2/issues/469). | |
105 | ||
106 | ### Fixed | |
107 | ||
108 | - Make `FTPFile`, `MemoryFile` and `RawWrapper` accept [`array.array`](https://docs.python.org/3/library/array.html) | |
109 | arguments for the `write` and `writelines` methods, as expected by their base class [`io.RawIOBase`](https://docs.python.org/3/library/io.html#io.RawIOBase). | |
110 | - Various documentation issues, including `MemoryFS` docstring not rendering properly. | |
111 | - Avoid creating a new connection on every call of `FTPFS.upload`. Closes [#455](https://github.com/PyFilesystem/pyfilesystem2/issues/455). | |
112 | - `WrapReadOnly.removetree` not raising a `ResourceReadOnly` when called. Closes [#468](https://github.com/PyFilesystem/pyfilesystem2/issues/468). | |
113 | - `WrapCachedDir.isdir` and `WrapCachedDir.isfile` raising a `ResourceNotFound` error on non-existing path ([#470](https://github.com/PyFilesystem/pyfilesystem2/pull/470)). | |
114 | - `FTPFS` not listing certain entries with sticky/SUID/SGID permissions set by Linux server ([#473](https://github.com/PyFilesystem/pyfilesystem2/pull/473)). | |
115 | Closes [#451](https://github.com/PyFilesystem/pyfilesystem2/issues/451). | |
116 | - `scandir` iterator not being closed explicitly in `OSFS.scandir`, occasionally causing a `ResourceWarning` | |
117 | to be thrown. Closes [#311](https://github.com/PyFilesystem/pyfilesystem2/issues/311). | |
118 | - Incomplete type annotations for the `temp_fs` parameter of `WriteTarFS` and `WriteZipFS`. | |
119 | Closes [#410](https://github.com/PyFilesystem/pyfilesystem2/issues/410). | |
120 | ||
6 | 121 | |
7 | 122 | ## [2.4.12] - 2021-01-14 |
8 | 123 | |
13 | 128 | [#380](https://github.com/PyFilesystem/pyfilesystem2/issues/380). |
14 | 129 | - Added compatibility if a Windows FTP server returns file information to the |
15 | 130 | `LIST` command with 24-hour times. Closes [#438](https://github.com/PyFilesystem/pyfilesystem2/issues/438). |
131 | - Added Python 3.9 support. Closes [#443](https://github.com/PyFilesystem/pyfilesystem2/issues/443). | |
16 | 132 | |
17 | 133 | ### Changed |
18 | 134 | |
21 | 137 | be able to see if we break something aside from known issues with FTP tests. |
22 | 138 | - Include docs in source distributions as well as the whole tests folder, |
23 | 139 | ensuring `conftest.py` is present, fixes [#364](https://github.com/PyFilesystem/pyfilesystem2/issues/364). |
24 | - Stop patching copy with Python 3.8+ because it already uses `sendfile`. | |
140 | - Stop patching copy with Python 3.8+ because it already uses `sendfile` | |
141 | ([#424](https://github.com/PyFilesystem/pyfilesystem2/pull/424)). | |
142 | Closes [#421](https://github.com/PyFilesystem/pyfilesystem2/issues/421). | |
25 | 143 | |
26 | 144 | ### Fixed |
27 | 145 | |
28 | 146 | - Fixed crash when CPython's -OO flag is used |
29 | - Fixed error when parsing timestamps from a FTP directory served from a WindowsNT FTP Server, fixes [#395](https://github.com/PyFilesystem/pyfilesystem2/issues/395). | |
147 | - Fixed error when parsing timestamps from a FTP directory served from a WindowsNT FTP Server. | |
148 | Closes [#395](https://github.com/PyFilesystem/pyfilesystem2/issues/395). | |
30 | 149 | - Fixed documentation of `Mode.to_platform_bin`. Closes [#382](https://github.com/PyFilesystem/pyfilesystem2/issues/382). |
31 | 150 | - Fixed the code example in the "Testing Filesystems" section of the |
32 | 151 | "Implementing Filesystems" guide. Closes [#407](https://github.com/PyFilesystem/pyfilesystem2/issues/407). |
33 | 152 | - Fixed `FTPFS.openbin` not implicitly opening files in binary mode like expected |
34 | 153 | from `openbin`. Closes [#406](https://github.com/PyFilesystem/pyfilesystem2/issues/406). |
35 | 154 | |
155 | ||
36 | 156 | ## [2.4.11] - 2019-09-07 |
37 | 157 | |
38 | 158 | ### Added |
39 | 159 | |
40 | 160 | - Added geturl for TarFS and ZipFS for 'fs' purpose. NoURL for 'download' purpose. |
41 | - Added helpful root path in CreateFailed exception [#340](https://github.com/PyFilesystem/pyfilesystem2/issues/340) | |
42 | - Added Python 3.8 support | |
161 | - Added helpful root path in CreateFailed exception. | |
162 | Closes [#340](https://github.com/PyFilesystem/pyfilesystem2/issues/340). | |
163 | - Added Python 3.8 support. | |
43 | 164 | |
44 | 165 | ### Fixed |
45 | 166 | |
67 | 188 | |
68 | 189 | ### Fixed |
69 | 190 | |
70 | - Fixed broken WrapFS.movedir [#322](https://github.com/PyFilesystem/pyfilesystem2/issues/322) | |
191 | - Fixed broken WrapFS.movedir [#322](https://github.com/PyFilesystem/pyfilesystem2/issues/322). | |
71 | 192 | |
72 | 193 | ## [2.4.9] - 2019-07-28 |
73 | 194 | |
449 | 570 | |
450 | 571 | ### Added |
451 | 572 | |
452 | - New `copy_if_newer' functionality in`copy` module. | |
573 | - New `copy_if_newer` functionality in `copy` module. | |
453 | 574 | |
454 | 575 | ### Fixed |
455 | 576 | |
460 | 581 | ### Changed |
461 | 582 | |
462 | 583 | - Improved FTP support for non-compliant servers |
463 | - Fix for ZipFS implied directories | |
584 | - Fix for `ZipFS` implied directories | |
464 | 585 | |
465 | 586 | ## [2.0.1] - 2017-03-11 |
466 | 587 | |
467 | 588 | ### Added |
468 | 589 | |
469 | - TarFS contributed by Martin Larralde | |
470 | ||
471 | ### Fixed | |
472 | ||
473 | - FTPFS bugs. | |
590 | - `TarFS` contributed by Martin Larralde. | |
591 | ||
592 | ### Fixed | |
593 | ||
594 | - `FTPFS` bugs. | |
474 | 595 | |
475 | 596 | ## [2.0.0] - 2016-12-07 |
476 | 597 |
1 | 1 | |
2 | 2 | Pull Requests are very welcome for this project! |
3 | 3 | |
4 | For bug fixes or new features, please file an issue before submitting a pull request. If the change isn't trivial, it may be best to wait for feedback. For a quicker response, contact [Will McGugan](mailto:willmcgugan+pyfs@gmail.com) directly. | |
4 | For bug fixes or new features, please file an issue before submitting a pull | |
5 | request. If the change isn't trivial, it may be best to wait for feedback. | |
6 | For a quicker response, contact [Will McGugan](mailto:willmcgugan+pyfs@gmail.com) | |
7 | directly. | |
8 | ||
9 | ||
10 | ## `tox` | |
11 | ||
12 | Most of the guidelines that follow can be checked with a particular | |
13 | [`tox`](https://pypi.org/project/tox/) environment. Having it installed will | |
14 | help you develop and verify your code locally without having to wait for | |
15 | our Continuous Integration pipeline to finish. | |
16 | ||
17 | ||
18 | ## Tests | |
19 | ||
20 | New code should have unit tests. We strive to have near 100% coverage. | |
21 | Get in touch if you need assistance with the tests. You shouldn't refrain | |
22 | from opening a Pull Request even if not all of the tests have been added | |
23 | yet, or if not all of them are passing yet. | |
24 | ||
25 | ### Dependencies | |
26 | ||
27 | The dependencies for running the tests can be found in the `tests/requirements.txt` file. | |
28 | If you're using `tox`, you won't have to install them manually. Otherwise, | |
29 | they can be installed with `pip`: | |
30 | ```console | |
31 | $ pip install -r tests/requirements.txt | |
32 | ``` | |
33 | ||
34 | ### Running (with `tox`) | |
35 | ||
36 | Simply run in the repository folder to execute the tests for all available | |
37 | environments: | |
38 | ```console | |
39 | $ tox | |
40 | ``` | |
41 | ||
42 | Since this can take some time, you can use a single environment to run | |
43 | tests only once, for instance to run tests only with Python 3.9: | |
44 | ```console | |
45 | $ tox -e py39 | |
46 | ``` | |
47 | ||
48 | ### Running (without `tox`) | |
49 | ||
50 | Tests are written using the standard [`unittest`](https://docs.python.org/3/library/unittest.html) | |
51 | framework. You should be able to run them using the standard library runner: | |
52 | ```console | |
53 | $ python -m unittest discover -vv | |
54 | ``` | |
55 | ||
5 | 56 | |
6 | 57 | ## Coding Guidelines |
7 | 58 | |
8 | This project runs on Python2.7 and Python3.X. Python2.7 will be dropped at some point, but for now, please maintain compatibility. | |
59 | This project runs on Python2.7 and Python3.X. Python2.7 will be dropped at | |
60 | some point, but for now, please maintain compatibility. PyFilesystem2 uses | |
61 | the [`six`](https://pypi.org/project/six/) library to write version-agnostic | |
62 | Python code. | |
9 | 63 | |
10 | Please format new code with [black](https://github.com/ambv/black), using the default settings. | |
64 | ### Style | |
11 | 65 | |
12 | ## Tests | |
66 | The code (including the tests) should follow PEP8. You can check for the | |
67 | code style with: | |
68 | ```console | |
69 | $ tox -e codestyle | |
70 | ``` | |
13 | 71 | |
14 | New code should have unit tests. We strive to have near 100% coverage. Get in touch, if you need assistance with the tests. | |
72 | This will invoke [`flake8`](https://pypi.org/project/flake8/) with some common | |
73 | plugins such as [`flake8-comprehensions`](https://pypi.org/project/flake8-comprehensions/). | |
74 | ||
75 | ### Format | |
76 | ||
77 | Please format new code with [black](https://github.com/ambv/black), using the | |
78 | default settings. You can check whether the code is well-formatted with: | |
79 | ```console | |
80 | $ tox -e codeformat | |
81 | ``` | |
82 | ||
83 | ### Type annotations | |
84 | ||
85 | The code is typechecked with [`mypy`](https://pypi.org/project/mypy/), and | |
86 | type annotations written as comments, to stay compatible with Python2. Run | |
87 | the typechecking with: | |
88 | ```console | |
89 | $ tox -e typecheck | |
90 | ``` | |
91 | ||
92 | ||
93 | ## Documentation | |
94 | ||
95 | ### Dependencies | |
96 | ||
97 | The documentation is built with [Sphinx](https://pypi.org/project/Sphinx/), | |
98 | using the [ReadTheDocs](https://pypi.org/project/sphinx-rtd-theme/) theme. | |
99 | The dependencies are listed in `docs/requirements.txt` and can be installed with | |
100 | `pip`: | |
101 | ```console | |
102 | $ pip install -r docs/requirements.txt | |
103 | ``` | |
104 | ||
105 | ### Building | |
106 | ||
107 | Run the following command to build the HTML documentation: | |
108 | ```console | |
109 | $ python setup.py build_sphinx | |
110 | ``` | |
111 | ||
112 | The documentation index will be written to the `build/sphinx/html/` | |
113 | directory. | |
114 | ||
115 | ### Style | |
116 | ||
117 | The API reference is written in the Python source, using docstrings in | |
118 | [Google format](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html). | |
119 | The documentation style can be checked with: | |
120 | ```console | |
121 | $ tox -e docstyle | |
122 | ``` |
1 | 1 | |
2 | 2 | Many thanks to the following developers for contributing to this project: |
3 | 3 | |
4 | - [Adrian Garcia Badaracco](https://github.com/adriangb) | |
5 | - [Alex Povel](https://github.com/alexpovel) | |
4 | 6 | - [Andreas Tollkötter](https://github.com/atollk) |
5 | - [C. W.](https://github.com/chfw) | |
7 | - [Andrew Scheller](https://github.com/lurch) | |
8 | - [Andrey Serov](https://github.com/zmej-serow) | |
9 | - [Ben Lindsay](https://github.com/benlindsay) | |
10 | - [Bernhard M. Wiedemann](https://github.com/bmwiedemann) | |
11 | - [@chfw](https://github.com/chfw) | |
12 | - [Dafna Hirschfeld](https://github.com/kamomil) | |
6 | 13 | - [Diego Argueta](https://github.com/dargueta) |
14 | - [Eelke van den Bos](https://github.com/eelkevdbos) | |
15 | - [Egor Namakonov](https://github.com/fresheed) | |
16 | - [Felix Yan](https://github.com/felixonmars) | |
17 | - [@FooBarQuaxx](https://github.com/FooBarQuaxx) | |
7 | 18 | - [Geoff Jukes](https://github.com/geoffjukes) |
19 | - [George Macon](https://github.com/gmacon) | |
8 | 20 | - [Giampaolo Cimino](https://github.com/gpcimino) |
21 | - [@Hoboneer](https://github.com/Hoboneer) | |
22 | - [Jon Hagg](https://github.com/jon-hagg) | |
23 | - [Joseph Atkins-Turkish](https://github.com/Spacerat) | |
24 | - [Joshua Tauberer](https://github.com/JoshData) | |
9 | 25 | - [Justin Charlong](https://github.com/jcharlong) |
10 | 26 | - [Louis Sautier](https://github.com/sbraz) |
27 | - [Martin Durant](https://github.com/martindurant) | |
11 | 28 | - [Martin Larralde](https://github.com/althonos) |
29 | - [Masaya Nakamura](https://github.com/mashabow) | |
30 | - [Matthew Gamble](https://github.com/djmattyg007) | |
12 | 31 | - [Morten Engelhardt Olsen](https://github.com/xoriath) |
32 | - [@mrg0029](https://github.com/mrg0029) | |
33 | - [Nathan Goldbaum](https://github.com/ngoldbaum) | |
13 | 34 | - [Nick Henderson](https://github.com/nwh) |
35 | - [Oliver Galvin](https://github.com/odgalvin) | |
36 | - [Philipp Wiesner](https://github.com/birnbaum) | |
37 | - [Philippe Ombredanne](https://github.com/pombredanne) | |
38 | - [Rehan Khwaja](https://github.com/rkhwaja) | |
39 | - [Silvan Spross](https://github.com/sspross) | |
40 | - [@sqwishy](https://github.com/sqwishy) | |
41 | - [Sven Schliesing](https://github.com/muffl0n) | |
42 | - [Thomas Feldmann](https://github.com/tfeldmann) | |
43 | - [Tim Gates](https://github.com/timgates42/) | |
44 | - [@tkossak](https://github.com/tkossak) | |
45 | - [Todd Levi](https://github.com/televi) | |
46 | - [Vilius Grigaliūnas](https://github.com/vilius-g) | |
14 | 47 | - [Will McGugan](https://github.com/willmcgugan) |
15 | 48 | - [Zmej Serow](https://github.com/zmej-serow) |
0 | include CHANGELOG.md | |
1 | include CONTRIBUTING.md | |
2 | include CONTRIBUTORS.md | |
0 | 3 | include LICENSE |
1 | 4 | graft tests |
2 | 5 | graft docs |
1 | 1 | |
2 | 2 | Python's Filesystem abstraction layer. |
3 | 3 | |
4 | [![PyPI version](https://badge.fury.io/py/fs.svg)](https://badge.fury.io/py/fs) | |
4 | [![PyPI version](https://img.shields.io/pypi/v/fs)](https://pypi.org/project/fs/) | |
5 | 5 | [![PyPI](https://img.shields.io/pypi/pyversions/fs.svg)](https://pypi.org/project/fs/) |
6 | [![Downloads](https://pepy.tech/badge/fs/month)](https://pepy.tech/project/fs/month) | |
7 | ||
8 | ||
9 | [![Build Status](https://travis-ci.org/PyFilesystem/pyfilesystem2.svg?branch=master)](https://travis-ci.org/PyFilesystem/pyfilesystem2) | |
10 | [![Windows Build Status](https://ci.appveyor.com/api/projects/status/github/pyfilesystem/pyfilesystem2?branch=master&svg=true)](https://ci.appveyor.com/project/willmcgugan/pyfilesystem2) | |
11 | [![Coverage Status](https://coveralls.io/repos/github/PyFilesystem/pyfilesystem2/badge.svg)](https://coveralls.io/github/PyFilesystem/pyfilesystem2) | |
12 | [![Codacy Badge](https://api.codacy.com/project/badge/Grade/30ad6445427349218425d93886ade9ee)](https://www.codacy.com/app/will-mcgugan/pyfilesystem2?utm_source=github.com&utm_medium=referral&utm_content=PyFilesystem/pyfilesystem2&utm_campaign=Badge_Grade) | |
6 | [![Downloads](https://pepy.tech/badge/fs/month)](https://pepy.tech/project/fs/) | |
7 | [![Build Status](https://img.shields.io/github/workflow/status/PyFilesystem/pyfilesystem2/Test/master?logo=github&cacheSeconds=600)](https://github.com/PyFilesystem/pyfilesystem2/actions?query=branch%3Amaster) | |
8 | [![Windows Build Status](https://img.shields.io/appveyor/build/willmcgugan/pyfilesystem2/master?logo=appveyor&cacheSeconds=600)](https://ci.appveyor.com/project/willmcgugan/pyfilesystem2) | |
9 | [![Coverage Status](https://img.shields.io/coveralls/github/PyFilesystem/pyfilesystem2/master?cacheSeconds=600)](https://coveralls.io/github/PyFilesystem/pyfilesystem2) | |
10 | [![Codacy Badge](https://img.shields.io/codacy/grade/30ad6445427349218425d93886ade9ee/master?logo=codacy)](https://www.codacy.com/app/will-mcgugan/pyfilesystem2?utm_source=github.com&utm_medium=referral&utm_content=PyFilesystem/pyfilesystem2&utm_campaign=Badge_Grade) | |
11 | [![Docs](https://img.shields.io/readthedocs/pyfilesystem2?maxAge=3600)](http://pyfilesystem2.readthedocs.io/en/stable/?badge=stable) | |
13 | 12 | |
14 | 13 | ## Documentation |
15 | 14 |
19 | 19 | |
20 | 20 | install: |
21 | 21 | # We need wheel installed to build wheels |
22 | - "%PYTHON%\\python.exe -m pip install pytest pytest-randomly pytest-cov psutil pyftpdlib mock" | |
22 | - "%PYTHON%\\python.exe -m pip install -U pip wheel setuptools" | |
23 | - "%PYTHON%\\python.exe -m pip install pytest" | |
24 | - "%PYTHON%\\python.exe -m pip install -r tests/requirements.txt" | |
23 | 25 | - "%PYTHON%\\python.exe setup.py install" |
24 | 26 | |
25 | 27 | build: off |
26 | 28 | |
27 | 29 | test_script: |
28 | - "%PYTHON%\\python.exe -m pytest -v tests" | |
30 | - "%PYTHON%\\python.exe -m pytest" |
0 | # the bare requirements for building docs | |
1 | Sphinx ~=3.0 | |
2 | sphinx-rtd-theme ~=0.5.1 | |
3 | recommonmark ~=0.6 |
12 | 12 | # serve to show the default. |
13 | 13 | |
14 | 14 | import sys |
15 | ||
15 | 16 | import os |
16 | ||
17 | ||
18 | 17 | import sphinx_rtd_theme |
19 | 18 | |
20 | 19 | html_theme = "sphinx_rtd_theme" |
38 | 37 | 'sphinx.ext.autodoc', |
39 | 38 | 'sphinx.ext.viewcode', |
40 | 39 | 'sphinx.ext.napoleon', |
41 | 'sphinx.ext.intersphinx' | |
40 | 'sphinx.ext.intersphinx', | |
41 | "recommonmark", | |
42 | 42 | ] |
43 | 43 | |
44 | 44 | # Add any paths that contain templates here, relative to this directory. |
62 | 62 | |
63 | 63 | # General information about the project. |
64 | 64 | project = u'PyFilesystem' |
65 | copyright = u'2016-2017, Will McGugan' | |
65 | copyright = u'2016-2021, Will McGugan and the PyFilesystem2 contributors' | |
66 | 66 | author = u'Will McGugan' |
67 | 67 | |
68 | 68 | # The version info for the project you're documenting, acts as replacement for |
70 | 70 | # built documents. |
71 | 71 | # |
72 | 72 | from fs import __version__ |
73 | ||
73 | 74 | # The short X.Y version. |
74 | 75 | version = '.'.join(__version__.split('.')[:2]) |
75 | 76 | # The full version, including alpha/beta/rc tags. |
303 | 304 | #texinfo_no_detailmenu = False |
304 | 305 | |
305 | 306 | napoleon_include_special_with_doc = True |
307 | ||
308 | ||
309 | # -- Options for autodoc ----------------------------------------------------- | |
310 | ||
311 | # Configure autodoc so that it doesn't skip building the documentation for | |
312 | # __init__ methods, since the arguments to instantiate classes should be in | |
313 | # the __init__ docstring and not at the class-level. | |
314 | ||
# Always generate documentation for ``__init__`` (constructor arguments
# are documented there rather than at class level in this project).
autodoc_default_options = {
    'special-members': '__init__',
}
0 | ../../CONTRIBUTING.md⏎ |
28 | 28 | |
29 | 29 | __all__ = ['S3FSOpener'] |
30 | 30 | |
31 | from fs.opener import Opener, OpenerError | |
31 | from fs.opener import Opener | |
32 | from fs.opener.errors import OpenerError | |
32 | 33 | |
33 | 34 | from ._s3fs import S3FS |
34 | 35 |
175 | 175 | |
176 | 176 | In the case of a ``OSFS``, a standard file-like object will be returned. Other filesystems may return a different object supporting the same methods. For instance, :class:`~fs.memoryfs.MemoryFS` will return a ``io.BytesIO`` object. |
177 | 177 | |
178 | PyFilesystem also offers a number of shortcuts for common file related operations. For instance, :meth:`~fs.base.FS.readbytes` will return the file contents as a bytes, and :meth:`~fs.base.FS.readtext` will read unicode text. These methods is generally preferable to explicitly opening files, as the FS object may have an optimized implementation. | |
178 | PyFilesystem also offers a number of shortcuts for common file related operations. For instance, :meth:`~fs.base.FS.readbytes` will return the file contents as bytes, and :meth:`~fs.base.FS.readtext` will read unicode text. These methods are generally preferable to explicitly opening files, as the FS object may have an optimized implementation. | |
179 | 179 | |
180 | 180 | Other *shortcut* methods are :meth:`~fs.base.FS.download`, :meth:`~fs.base.FS.upload`, :meth:`~fs.base.FS.writebytes`, :meth:`~fs.base.FS.writetext`. |
181 | 181 |
24 | 24 | external.rst |
25 | 25 | interface.rst |
26 | 26 | reference.rst |
27 | ||
27 | contributing.md | |
28 | 28 | |
29 | 29 | |
30 | 30 | Indices and tables |
19 | 19 | * :meth:`~fs.base.FS.getdetails` Get details info namespace for a resource. |
20 | 20 | * :meth:`~fs.base.FS.getinfo` Get info regarding a file or directory. |
21 | 21 | * :meth:`~fs.base.FS.getmeta` Get meta information for a resource. |
22 | * :meth:`~fs.base.FS.getmodified` Get the last modified time of a resource. | |
22 | 23 | * :meth:`~fs.base.FS.getospath` Get path with encoding expected by the OS. |
23 | 24 | * :meth:`~fs.base.FS.getsize` Get the size of a file. |
24 | 25 | * :meth:`~fs.base.FS.getsyspath` Get the system path of a resource, if one exists. |
55 | 55 | |
56 | 56 | from fs import open_fs |
57 | 57 | projects_fs = open_fs('osfs://~/projects') |
58 | ||
59 | ||
60 | Manually registering Openers | |
61 | ---------------------------- | |
62 | ||
63 | The ``fs.opener`` registry uses an entry point to install external openers | |
64 | (see :ref:`extension`), and it does so once, when you import `fs` for the first | |
65 | time. In some rare cases, entry points may not be available (for instance, | 
66 | when running an embedded interpreter), or extensions may be installed | 
67 | *after* the interpreter has started (for instance in a notebook, see | 
68 | `PyFilesystem2#485 <https://github.com/PyFilesystem/pyfilesystem2/issues/485>`_). | 
69 | ||
70 | However, a new opener can be installed manually at any time with the | |
71 | `fs.opener.registry.install` method. For instance, here's how the opener for | |
72 | the `s3fs <https://github.com/PyFilesystem/s3fs>`_ extension can be added to | |
73 | the registry:: | |
74 | ||
75 | import fs.opener | |
76 | from fs_s3fs.opener import S3FSOpener | |
77 | ||
78 | fs.opener.registry.install(S3FSOpener) | |
79 | # fs.open_fs("s3fs://...") should now work |
10 | 10 | from fs import open_fs |
11 | 11 | from fs.filesize import traditional |
12 | 12 | |
13 | ||
14 | 13 | fs_url = sys.argv[1] |
15 | 14 | count = 0 |
16 | 15 |
6 | 6 | |
7 | 7 | """ |
8 | 8 | |
9 | from collections import defaultdict | |
10 | 9 | import sys |
11 | 10 | |
11 | from collections import defaultdict | |
12 | ||
12 | 13 | from fs import open_fs |
13 | ||
14 | 14 | |
15 | 15 | hashes = defaultdict(list) |
16 | 16 | with open_fs(sys.argv[1]) as fs: |
10 | 10 | |
11 | 11 | from fs import open_fs |
12 | 12 | |
13 | ||
14 | 13 | with open_fs(sys.argv[1]) as fs: |
15 | 14 | count = fs.glob("**/*.pyc").remove() |
16 | 15 | print(f"{count} .pyc files remove") |
11 | 11 | |
12 | 12 | """ |
13 | 13 | |
14 | import sys | |
15 | ||
14 | 16 | import os |
15 | import sys | |
16 | 17 | |
17 | 18 | from fs import open_fs |
18 | 19 |
2 | 2 | |
3 | 3 | __import__("pkg_resources").declare_namespace(__name__) # type: ignore |
4 | 4 | |
5 | from . import path | |
6 | from ._fscompat import fsdecode, fsencode | |
5 | 7 | from ._version import __version__ |
6 | 8 | from .enums import ResourceType, Seek |
7 | 9 | from .opener import open_fs |
8 | from ._fscompat import fsencode, fsdecode | |
9 | from . import path | |
10 | 10 | |
11 | 11 | __all__ = ["__version__", "ResourceType", "Seek", "open_fs"] |
5 | 5 | |
6 | 6 | from __future__ import unicode_literals |
7 | 7 | |
8 | import threading | |
9 | 8 | import typing |
10 | 9 | |
10 | import threading | |
11 | 11 | from six.moves.queue import Queue |
12 | 12 | |
13 | from .copy import copy_file_internal | |
13 | from .copy import copy_file_internal, copy_modified_time | |
14 | 14 | from .errors import BulkCopyFailed |
15 | 15 | from .tools import copy_file_data |
16 | 16 | |
17 | 17 | if typing.TYPE_CHECKING: |
18 | from typing import IO, List, Optional, Text, Tuple, Type | |
19 | ||
20 | from types import TracebackType | |
21 | ||
18 | 22 | from .base import FS |
19 | from types import TracebackType | |
20 | from typing import IO, List, Optional, Text, Type | |
21 | 23 | |
22 | 24 | |
23 | 25 | class _Worker(threading.Thread): |
74 | 76 | class Copier(object): |
75 | 77 | """Copy files in worker threads.""" |
76 | 78 | |
77 | def __init__(self, num_workers=4): | |
78 | # type: (int) -> None | |
79 | def __init__(self, num_workers=4, preserve_time=False): | |
80 | # type: (int, bool) -> None | |
79 | 81 | if num_workers < 0: |
80 | 82 | raise ValueError("num_workers must be >= 0") |
81 | 83 | self.num_workers = num_workers |
84 | self.preserve_time = preserve_time | |
85 | self.all_tasks = [] # type: List[Tuple[FS, Text, FS, Text]] | |
82 | 86 | self.queue = None # type: Optional[Queue[_Task]] |
83 | 87 | self.workers = [] # type: List[_Worker] |
84 | 88 | self.errors = [] # type: List[Exception] |
96 | 100 | def stop(self): |
97 | 101 | """Stop the workers (will block until they are finished).""" |
98 | 102 | if self.running and self.num_workers: |
103 | # Notify the workers that all tasks have arrived | |
104 | # and wait for them to finish. | |
99 | 105 | for _worker in self.workers: |
100 | 106 | self.queue.put(None) |
101 | 107 | for worker in self.workers: |
102 | 108 | worker.join() |
109 | ||
110 | # If the "last modified" time is to be preserved, do it now. | |
111 | if self.preserve_time: | |
112 | for args in self.all_tasks: | |
113 | copy_modified_time(*args) | |
114 | ||
103 | 115 | # Free up references held by workers |
104 | 116 | del self.workers[:] |
105 | 117 | self.queue.join() |
123 | 135 | if traceback is None and self.errors: |
124 | 136 | raise BulkCopyFailed(self.errors) |
125 | 137 | |
126 | def copy(self, src_fs, src_path, dst_fs, dst_path): | |
127 | # type: (FS, Text, FS, Text) -> None | |
138 | def copy(self, src_fs, src_path, dst_fs, dst_path, preserve_time=False): | |
139 | # type: (FS, Text, FS, Text, bool) -> None | |
128 | 140 | """Copy a file from one fs to another.""" |
129 | 141 | if self.queue is None: |
130 | 142 | # This should be the most performant for a single-thread |
131 | copy_file_internal(src_fs, src_path, dst_fs, dst_path) | |
143 | copy_file_internal( | |
144 | src_fs, src_path, dst_fs, dst_path, preserve_time=self.preserve_time | |
145 | ) | |
132 | 146 | else: |
147 | self.all_tasks.append((src_fs, src_path, dst_fs, dst_path)) | |
133 | 148 | src_file = src_fs.openbin(src_path, "r") |
134 | 149 | try: |
135 | 150 | dst_file = dst_fs.openbin(dst_path, "w") |
0 | 0 | import six |
1 | 1 | |
2 | 2 | try: |
3 | from os import fsencode, fsdecode | |
3 | from os import fsdecode, fsencode | |
4 | 4 | except ImportError: |
5 | from backports.os import fsencode, fsdecode # type: ignore | |
5 | from backports.os import fsdecode, fsencode # type: ignore | |
6 | 6 | |
7 | 7 | try: |
8 | 8 | from os import fspath |
0 | from __future__ import absolute_import | |
1 | from __future__ import print_function | |
2 | from __future__ import unicode_literals | |
3 | ||
4 | import unicodedata | |
5 | import datetime | |
0 | from __future__ import absolute_import, print_function, unicode_literals | |
1 | ||
6 | 2 | import re |
7 | 3 | import time |
8 | ||
9 | from pytz import UTC | |
4 | import unicodedata | |
5 | from datetime import datetime | |
6 | ||
7 | try: | |
8 | from datetime import timezone | |
9 | except ImportError: | |
10 | from ._tzcompat import timezone # type: ignore | |
10 | 11 | |
11 | 12 | from .enums import ResourceType |
12 | 13 | from .permissions import Permissions |
13 | 14 | |
14 | ||
15 | EPOCH_DT = datetime.datetime.fromtimestamp(0, UTC) | |
15 | EPOCH_DT = datetime.fromtimestamp(0, timezone.utc) | |
16 | 16 | |
17 | 17 | |
18 | 18 | RE_LINUX = re.compile( |
19 | 19 | r""" |
20 | 20 | ^ |
21 | ([ldrwx-]{10}) | |
21 | ([-dlpscbD]) | |
22 | ([r-][w-][xsS-][r-][w-][xsS-][r-][w-][xtT-][\.\+]?) | |
22 | 23 | \s+? |
23 | 24 | (\d+) |
24 | 25 | \s+? |
25 | ([\w\-]+) | |
26 | \s+? | |
27 | ([\w\-]+) | |
26 | ([A-Za-z0-9][A-Za-z0-9\-\.\_\@]*\$?) | |
27 | \s+? | |
28 | ([A-Za-z0-9][A-Za-z0-9\-\.\_\@]*\$?) | |
28 | 29 | \s+? |
29 | 30 | (\d+) |
30 | 31 | \s+? |
54 | 55 | |
55 | 56 | |
56 | 57 | def get_decoders(): |
57 | """ | |
58 | Returns all available FTP LIST line decoders with their matching regexes. | |
59 | """ | |
58 | """Return all available FTP LIST line decoders with their matching regexes.""" | |
60 | 59 | decoders = [ |
61 | 60 | (RE_LINUX, decode_linux), |
62 | 61 | (RE_WINDOWSNT, decode_windowsnt), |
98 | 97 | day = _t.tm_mday |
99 | 98 | hour = _t.tm_hour |
100 | 99 | minutes = _t.tm_min |
101 | dt = datetime.datetime(year, month, day, hour, minutes, tzinfo=UTC) | |
100 | dt = datetime(year, month, day, hour, minutes, tzinfo=timezone.utc) | |
102 | 101 | |
103 | 102 | epoch_time = (dt - EPOCH_DT).total_seconds() |
104 | 103 | return epoch_time |
109 | 108 | |
110 | 109 | |
111 | 110 | def decode_linux(line, match): |
112 | perms, links, uid, gid, size, mtime, name = match.groups() | |
113 | is_link = perms.startswith("l") | |
114 | is_dir = perms.startswith("d") or is_link | |
111 | ty, perms, links, uid, gid, size, mtime, name = match.groups() | |
112 | is_link = ty == "l" | |
113 | is_dir = ty == "d" or is_link | |
115 | 114 | if is_link: |
116 | 115 | name, _, _link_name = name.partition("->") |
117 | 116 | name = name.strip() |
118 | 117 | _link_name = _link_name.strip() |
119 | permissions = Permissions.parse(perms[1:]) | |
118 | permissions = Permissions.parse(perms) | |
120 | 119 | |
121 | 120 | mtime_epoch = _decode_linux_time(mtime) |
122 | 121 | |
147 | 146 | |
148 | 147 | |
149 | 148 | def decode_windowsnt(line, match): |
150 | """ | |
151 | Decodes a Windows NT FTP LIST line like one of these: | |
152 | ||
153 | `11-02-18 02:12PM <DIR> images` | |
154 | `11-02-18 03:33PM 9276 logo.gif` | |
155 | ||
156 | Alternatively, the time (02:12PM) might also be present in 24-hour format (14:12). | |
149 | """Decode a Windows NT FTP LIST line. | |
150 | ||
151 | Examples: | |
152 | Decode a directory line:: | |
153 | ||
154 | >>> line = "11-02-18 02:12PM <DIR> images" | |
155 | >>> match = RE_WINDOWSNT.match(line) | |
156 | >>> pprint(decode_windowsnt(line, match)) | |
157 | {'basic': {'is_dir': True, 'name': 'images'}, | |
158 | 'details': {'modified': 1518358320.0, 'type': 1}, | |
159 | 'ftp': {'ls': '11-02-18 02:12PM <DIR> images'}} | |
160 | ||
161 | Decode a file line:: | |
162 | ||
163 | >>> line = "11-02-18 03:33PM 9276 logo.gif" | |
164 | >>> match = RE_WINDOWSNT.match(line) | |
165 | >>> pprint(decode_windowsnt(line, match)) | |
166 | {'basic': {'is_dir': False, 'name': 'logo.gif'}, | |
167 | 'details': {'modified': 1518363180.0, 'size': 9276, 'type': 2}, | |
168 | 'ftp': {'ls': '11-02-18 03:33PM 9276 logo.gif'}} | |
169 | ||
170 | Alternatively, the time might also be present in 24-hour format:: | |
171 | ||
172 | >>> line = "11-02-18 15:33 9276 logo.gif" | |
173 | >>> match = RE_WINDOWSNT.match(line) | |
174 | >>> decode_windowsnt(line, match)["details"]["modified"] | |
175 | 1518363180.0 | |
176 | ||
157 | 177 | """ |
158 | 178 | is_dir = match.group("size") == "<DIR>" |
159 | 179 |
# mypy: ignore-errors
try:
    from os.path import commonpath
except ImportError:
    # Pre-3.4 fallback: longest common sub-path of the given paths.
    # Inputs are not normalized (that is the caller's responsibility) and
    # any trailing separator is stripped from the result.

    def commonpath(paths):
        """Given a sequence of path names, returns the longest common sub-path."""
        if not paths:
            raise ValueError("commonpath() arg is an empty sequence")

        paths = tuple(paths)
        # Work in bytes or str consistently, based on the first path.
        if isinstance(paths[0], bytes):
            sep, curdir = b"/", b"."
        else:
            sep, curdir = "/", "."

        # All paths must agree on absoluteness; a mixed set is an error.
        try:
            (isabs,) = {p[:1] == sep for p in paths}
        except ValueError:
            raise ValueError("Can't mix absolute and relative paths")

        # Split each path and drop empty and "." components.
        components = [
            [part for part in p.split(sep) if part and part != curdir]
            for p in paths
        ]

        # The common prefix of all lists equals the common prefix of the
        # lexicographic min and max alone.
        lowest, highest = min(components), max(components)
        shared = lowest
        for index, part in enumerate(lowest):
            if part != highest[index]:
                shared = lowest[:index]
                break

        prefix = sep if isabs else sep[:0]
        return prefix + sep.join(shared)
26 | 26 | >>> MyClass('Will') |
27 | 27 | MyClass('foo', name='Will') |
28 | 28 | >>> MyClass(None) |
29 | MyClass() | |
29 | MyClass('foo') | |
30 | 30 | |
31 | 31 | """ |
32 | 32 | arguments = [repr(arg) for arg in args] |
0 | """Compatibility shim for python2's lack of datetime.timezone. | |
1 | ||
2 | This is the example code from the Python 2 documentation: | |
3 | https://docs.python.org/2.7/library/datetime.html#tzinfo-objects | |
4 | """ | |
5 | ||
6 | from datetime import timedelta, tzinfo | |
7 | ||
8 | ZERO = timedelta(0) | |
9 | ||
10 | ||
class UTC(tzinfo):
    """The UTC timezone: zero offset from UTC and no daylight saving."""

    def utcoffset(self, dt):
        # By definition, UTC is at zero offset from itself.
        return timedelta(0)

    def tzname(self, dt):
        return "UTC"

    def dst(self, dt):
        # UTC never observes daylight saving time.
        return timedelta(0)
22 | ||
23 | ||
# Singleton UTC instance, mirroring ``datetime.timezone.utc``.
utc = UTC()


class timezone:
    """Minimal stand-in for Python 3's `datetime.timezone`, exposing only ``utc``."""

    utc = utc
0 | import typing | |
1 | ||
2 | import platform | |
0 | 3 | import re |
1 | 4 | import six |
2 | import platform | |
3 | import typing | |
4 | 5 | |
5 | 6 | if typing.TYPE_CHECKING: |
6 | 7 | from typing import Text |
10 | 11 | |
11 | 12 | def url_quote(path_snippet): |
12 | 13 | # type: (Text) -> Text |
13 | """ | |
14 | On Windows, it will separate drive letter and quote windows | |
15 | path alone. No magic on Unix-alie path, just pythonic | |
16 | `pathname2url` | |
14 | """Quote a URL without quoting the Windows drive letter, if any. | |
15 | ||
16 | On Windows, it will separate drive letter and quote Windows | |
17 | path alone. No magic on Unix-like path, just pythonic | |
18 | `~urllib.request.pathname2url`. | |
17 | 19 | |
18 | 20 | Arguments: |
19 | path_snippet: a file path, relative or absolute. | |
21 | path_snippet (str): a file path, relative or absolute. | |
22 | ||
20 | 23 | """ |
21 | 24 | if _WINDOWS_PLATFORM and _has_drive_letter(path_snippet): |
22 | 25 | drive_letter, path = path_snippet.split(":", 1) |
33 | 36 | |
34 | 37 | def _has_drive_letter(path_snippet): |
35 | 38 | # type: (Text) -> bool |
36 | """ | |
37 | The following path will get True | |
38 | D:/Data | |
39 | C:\\My Dcouments\\ test | |
40 | ||
41 | And will get False | |
42 | ||
43 | /tmp/abc:test | |
39 | """Check whether a path contains a drive letter. | |
44 | 40 | |
45 | 41 | Arguments: |
46 | path_snippet: a file path, relative or absolute. | |
42 | path_snippet (str): a file path, relative or absolute. | |
43 | ||
44 | Example: | |
45 | >>> _has_drive_letter("D:/Data") | |
46 | True | |
47 | >>> _has_drive_letter(r"C:\\System32\\ test") | |
48 | True | |
49 | >>> _has_drive_letter("/tmp/abc:test") | |
50 | False | |
51 | ||
47 | 52 | """ |
48 | 53 | windows_drive_pattern = ".:[/\\\\].*$" |
49 | 54 | return re.match(windows_drive_pattern, path_snippet) is not None |
0 | 0 | """Version, used in module and setup.py. |
1 | 1 | """ |
2 | __version__ = "2.4.12" | |
2 | __version__ = "2.4.16" |
10 | 10 | |
11 | 11 | import typing |
12 | 12 | |
13 | import abc | |
14 | import six | |
15 | from appdirs import AppDirs | |
16 | ||
17 | from ._repr import make_repr | |
13 | 18 | from .osfs import OSFS |
14 | from ._repr import make_repr | |
15 | from appdirs import AppDirs | |
16 | 19 | |
17 | 20 | if typing.TYPE_CHECKING: |
18 | 21 | from typing import Optional, Text |
28 | 31 | ] |
29 | 32 | |
30 | 33 | |
class _CopyInitMeta(abc.ABCMeta):
    """A metaclass that performs a hard copy of the `__init__`.

    This is a fix for Sphinx, which is a pain to configure in a way that
    it documents the ``__init__`` method of a class when it is inherited.
    Copying ``__init__`` makes it think it is not inherited, and let us
    share the documentation between all the `_AppFS` subclasses.

    """

    def __new__(mcls, classname, bases, cls_dict):
        # Give the new class its own ``__init__`` reference, copied from
        # its first base, unless the class body defines one explicitly.
        cls_dict.setdefault("__init__", bases[0].__init__)
        # NOTE(review): ``super(abc.ABCMeta, mcls)`` skips past
        # ``ABCMeta.__new__`` in the MRO and reaches ``type.__new__``
        # directly -- presumably deliberate; confirm before changing, as
        # it bypasses ABCMeta's usual class-creation bookkeeping.
        return super(abc.ABCMeta, mcls).__new__(mcls, classname, bases, cls_dict)
47 | ||
48 | ||
49 | @six.add_metaclass(_CopyInitMeta) | |
31 | 50 | class _AppFS(OSFS): |
32 | """Abstract base class for an app FS. | |
33 | """ | |
51 | """Abstract base class for an app FS.""" | |
34 | 52 | |
35 | 53 | # FIXME(@althonos): replace by ClassVar[Text] once |
36 | 54 | # https://github.com/python/mypy/pull/4718 is accepted |
46 | 64 | create=True, # type: bool |
47 | 65 | ): |
48 | 66 | # type: (...) -> None |
67 | """Create a new application-specific filesystem. | |
68 | ||
69 | Arguments: | |
70 | appname (str): The name of the application. | |
71 | author (str): The name of the author (used on Windows). | |
72 | version (str): Optional version string, if a unique location | |
73 | per version of the application is required. | |
74 | roaming (bool): If `True`, use a *roaming* profile on | |
75 | Windows. | |
76 | create (bool): If `True` (the default) the directory | |
77 | will be created if it does not exist. | |
78 | ||
79 | """ | |
49 | 80 | self.app_dirs = AppDirs(appname, author, version, roaming) |
50 | 81 | self._create = create |
51 | 82 | super(_AppFS, self).__init__( |
76 | 107 | May also be opened with |
77 | 108 | ``open_fs('userdata://appname:author:version')``. |
78 | 109 | |
79 | Arguments: | |
80 | appname (str): The name of the application. | |
81 | author (str): The name of the author (used on Windows). | |
82 | version (str): Optional version string, if a unique location | |
83 | per version of the application is required. | |
84 | roaming (bool): If `True`, use a *roaming* profile on | |
85 | Windows. | |
86 | create (bool): If `True` (the default) the directory | |
87 | will be created if it does not exist. | |
88 | ||
89 | 110 | """ |
90 | 111 | |
91 | 112 | app_dir = "user_data_dir" |
96 | 117 | |
97 | 118 | May also be opened with |
98 | 119 | ``open_fs('userconf://appname:author:version')``. |
99 | ||
100 | Arguments: | |
101 | appname (str): The name of the application. | |
102 | author (str): The name of the author (used on Windows). | |
103 | version (str): Optional version string, if a unique location | |
104 | per version of the application is required. | |
105 | roaming (bool): If `True`, use a *roaming* profile on | |
106 | Windows. | |
107 | create (bool): If `True` (the default) the directory | |
108 | will be created if it does not exist. | |
109 | 120 | |
110 | 121 | """ |
111 | 122 | |
118 | 129 | May also be opened with |
119 | 130 | ``open_fs('usercache://appname:author:version')``. |
120 | 131 | |
121 | Arguments: | |
122 | appname (str): The name of the application. | |
123 | author (str): The name of the author (used on Windows). | |
124 | version (str): Optional version string, if a unique location | |
125 | per version of the application is required. | |
126 | roaming (bool): If `True`, use a *roaming* profile on | |
127 | Windows. | |
128 | create (bool): If `True` (the default) the directory | |
129 | will be created if it does not exist. | |
130 | ||
131 | 132 | """ |
132 | 133 | |
133 | 134 | app_dir = "user_cache_dir" |
138 | 139 | |
139 | 140 | May also be opened with |
140 | 141 | ``open_fs('sitedata://appname:author:version')``. |
141 | ||
142 | Arguments: | |
143 | appname (str): The name of the application. | |
144 | author (str): The name of the author (used on Windows). | |
145 | version (str): Optional version string, if a unique location | |
146 | per version of the application is required. | |
147 | roaming (bool): If `True`, use a *roaming* profile on | |
148 | Windows. | |
149 | create (bool): If `True` (the default) the directory | |
150 | will be created if it does not exist. | |
151 | 142 | |
152 | 143 | """ |
153 | 144 | |
160 | 151 | May also be opened with |
161 | 152 | ``open_fs('siteconf://appname:author:version')``. |
162 | 153 | |
163 | Arguments: | |
164 | appname (str): The name of the application. | |
165 | author (str): The name of the author (used on Windows). | |
166 | version (str): Optional version string, if a unique location | |
167 | per version of the application is required. | |
168 | roaming (bool): If `True`, use a *roaming* profile on | |
169 | Windows. | |
170 | create (bool): If `True` (the default) the directory | |
171 | will be created if it does not exist. | |
172 | ||
173 | 154 | """ |
174 | 155 | |
175 | 156 | app_dir = "site_config_dir" |
181 | 162 | May also be opened with |
182 | 163 | ``open_fs('userlog://appname:author:version')``. |
183 | 164 | |
184 | Arguments: | |
185 | appname (str): The name of the application. | |
186 | author (str): The name of the author (used on Windows). | |
187 | version (str): Optional version string, if a unique location | |
188 | per version of the application is required. | |
189 | roaming (bool): If `True`, use a *roaming* profile on | |
190 | Windows. | |
191 | create (bool): If `True` (the default) the directory | |
192 | will be created if it does not exist. | |
193 | ||
194 | 165 | """ |
195 | 166 | |
196 | 167 | app_dir = "user_log_dir" |
6 | 6 | """ |
7 | 7 | |
8 | 8 | from __future__ import absolute_import, print_function, unicode_literals |
9 | ||
10 | import typing | |
9 | 11 | |
10 | 12 | import abc |
11 | 13 | import hashlib |
12 | 14 | import itertools |
13 | 15 | import os |
16 | import six | |
14 | 17 | import threading |
15 | 18 | import time |
16 | import typing | |
19 | import warnings | |
17 | 20 | from contextlib import closing |
18 | 21 | from functools import partial, wraps |
19 | import warnings | |
20 | ||
21 | import six | |
22 | ||
23 | from . import copy, errors, fsencode, iotools, move, tools, walk, wildcard | |
22 | ||
23 | from . import copy, errors, fsencode, iotools, tools, walk, wildcard | |
24 | from .copy import copy_modified_time | |
24 | 25 | from .glob import BoundGlobber |
25 | 26 | from .mode import validate_open_mode |
26 | 27 | from .path import abspath, join, normpath |
28 | 29 | from .walk import Walker |
29 | 30 | |
30 | 31 | if typing.TYPE_CHECKING: |
31 | from datetime import datetime | |
32 | from threading import RLock | |
33 | 32 | from typing import ( |
33 | IO, | |
34 | 34 | Any, |
35 | 35 | BinaryIO, |
36 | 36 | Callable, |
37 | 37 | Collection, |
38 | 38 | Dict, |
39 | IO, | |
40 | 39 | Iterable, |
41 | 40 | Iterator, |
42 | 41 | List, |
47 | 46 | Type, |
48 | 47 | Union, |
49 | 48 | ) |
49 | ||
50 | from datetime import datetime | |
51 | from threading import RLock | |
50 | 52 | from types import TracebackType |
53 | ||
51 | 54 | from .enums import ResourceType |
52 | 55 | from .info import Info, RawInfo |
56 | from .permissions import Permissions | |
53 | 57 | from .subfs import SubFS |
54 | from .permissions import Permissions | |
55 | 58 | from .walk import BoundWalker |
56 | 59 | |
57 | 60 | _F = typing.TypeVar("_F", bound="FS") |
91 | 94 | |
92 | 95 | @six.add_metaclass(abc.ABCMeta) |
93 | 96 | class FS(object): |
94 | """Base class for FS objects. | |
95 | """ | |
97 | """Base class for FS objects.""" | |
96 | 98 | |
97 | 99 | # This is the "standard" meta namespace. |
98 | 100 | _meta = {} # type: Dict[Text, Union[Text, int, bool, None]] |
105 | 107 | |
106 | 108 | def __init__(self): |
107 | 109 | # type: (...) -> None |
108 | """Create a filesystem. See help(type(self)) for accurate signature. | |
109 | """ | |
110 | """Create a filesystem. See help(type(self)) for accurate signature.""" | |
110 | 111 | self._closed = False |
111 | 112 | self._lock = threading.RLock() |
112 | 113 | super(FS, self).__init__() |
117 | 118 | |
118 | 119 | def __enter__(self): |
119 | 120 | # type: (...) -> FS |
120 | """Allow use of filesystem as a context manager. | |
121 | """ | |
121 | """Allow use of filesystem as a context manager.""" | |
122 | 122 | return self |
123 | 123 | |
124 | 124 | def __exit__( |
128 | 128 | traceback, # type: Optional[TracebackType] |
129 | 129 | ): |
130 | 130 | # type: (...) -> None |
131 | """Close filesystem on exit. | |
132 | """ | |
131 | """Close filesystem on exit.""" | |
133 | 132 | self.close() |
134 | 133 | |
135 | 134 | @property |
136 | 135 | def glob(self): |
137 | """`~fs.glob.BoundGlobber`: a globber object.. | |
138 | """ | |
136 | """`~fs.glob.BoundGlobber`: a globber object..""" | |
139 | 137 | return BoundGlobber(self) |
140 | 138 | |
141 | 139 | @property |
142 | 140 | def walk(self): |
143 | 141 | # type: (_F) -> BoundWalker[_F] |
144 | """`~fs.walk.BoundWalker`: a walker bound to this filesystem. | |
145 | """ | |
142 | """`~fs.walk.BoundWalker`: a walker bound to this filesystem.""" | |
146 | 143 | return self.walker_class.bind(self) |
147 | 144 | |
148 | 145 | # ---------------------------------------------------------------- # |
157 | 154 | |
158 | 155 | Arguments: |
159 | 156 | path (str): A path to a resource on the filesystem. |
160 | namespaces (list, optional): Info namespaces to query | |
161 | (defaults to *[basic]*). | |
157 | namespaces (list, optional): Info namespaces to query. The | |
158 | `"basic"` namespace is alway included in the returned | |
159 | info, whatever the value of `namespaces` may be. | |
162 | 160 | |
163 | 161 | Returns: |
164 | 162 | ~fs.info.Info: resource information object. |
163 | ||
164 | Raises: | |
165 | fs.errors.ResourceNotFound: If ``path`` does not exist. | |
165 | 166 | |
166 | 167 | For more information regarding resource information, see :ref:`info`. |
167 | 168 | |
240 | 241 | io.IOBase: a *file-like* object. |
241 | 242 | |
242 | 243 | Raises: |
243 | fs.errors.FileExpected: If the path is not a file. | |
244 | fs.errors.FileExists: If the file exists, and *exclusive mode* | |
245 | is specified (``x`` in the mode). | |
246 | fs.errors.ResourceNotFound: If the path does not exist. | |
244 | fs.errors.FileExpected: If ``path`` exists and is not a file. | |
245 | fs.errors.FileExists: If the ``path`` exists, and | |
246 | *exclusive mode* is specified (``x`` in the mode). | |
247 | fs.errors.ResourceNotFound: If ``path`` does not exist and | |
248 | ``mode`` does not imply creating the file, or if any | |
249 | ancestor of ``path`` does not exist. | |
247 | 250 | |
248 | 251 | """ |
249 | 252 | |
272 | 275 | Raises: |
273 | 276 | fs.errors.DirectoryNotEmpty: If the directory is not empty ( |
274 | 277 | see `~fs.base.FS.removetree` for a way to remove the |
275 | directory contents.). | |
278 | directory contents). | |
276 | 279 | fs.errors.DirectoryExpected: If the path does not refer to |
277 | 280 | a directory. |
278 | 281 | fs.errors.ResourceNotFound: If no resource exists at the |
392 | 395 | """ |
393 | 396 | self._closed = True |
394 | 397 | |
395 | def copy(self, src_path, dst_path, overwrite=False): | |
396 | # type: (Text, Text, bool) -> None | |
398 | def copy( | |
399 | self, | |
400 | src_path, # type: Text | |
401 | dst_path, # type: Text | |
402 | overwrite=False, # type: bool | |
403 | preserve_time=False, # type: bool | |
404 | ): | |
405 | # type: (...) -> None | |
397 | 406 | """Copy file contents from ``src_path`` to ``dst_path``. |
398 | 407 | |
399 | 408 | Arguments: |
401 | 410 | dst_path (str): Path to destination file. |
402 | 411 | overwrite (bool): If `True`, overwrite the destination file |
403 | 412 | if it exists (defaults to `False`). |
413 | preserve_time (bool): If `True`, try to preserve mtime of the | |
414 | resource (defaults to `False`). | |
404 | 415 | |
405 | 416 | Raises: |
406 | 417 | fs.errors.DestinationExists: If ``dst_path`` exists, |
407 | 418 | and ``overwrite`` is `False`. |
408 | 419 | fs.errors.ResourceNotFound: If a parent directory of |
409 | 420 | ``dst_path`` does not exist. |
421 | fs.errors.FileExpected: If ``src_path`` is not a file. | |
410 | 422 | |
411 | 423 | """ |
412 | 424 | with self._lock: |
415 | 427 | with closing(self.open(src_path, "rb")) as read_file: |
416 | 428 | # FIXME(@althonos): typing complains because open return IO |
417 | 429 | self.upload(dst_path, read_file) # type: ignore |
418 | ||
419 | def copydir(self, src_path, dst_path, create=False): | |
420 | # type: (Text, Text, bool) -> None | |
430 | if preserve_time: | |
431 | copy_modified_time(self, src_path, self, dst_path) | |
432 | ||
433 | def copydir( | |
434 | self, | |
435 | src_path, # type: Text | |
436 | dst_path, # type: Text | |
437 | create=False, # type: bool | |
438 | preserve_time=False, # type: bool | |
439 | ): | |
440 | # type: (...) -> None | |
421 | 441 | """Copy the contents of ``src_path`` to ``dst_path``. |
422 | 442 | |
423 | 443 | Arguments: |
425 | 445 | dst_path (str): Path to destination directory. |
426 | 446 | create (bool): If `True`, then ``dst_path`` will be created |
427 | 447 | if it doesn't exist already (defaults to `False`). |
448 | preserve_time (bool): If `True`, try to preserve mtime of the | |
449 | resource (defaults to `False`). | |
428 | 450 | |
429 | 451 | Raises: |
430 | 452 | fs.errors.ResourceNotFound: If the ``dst_path`` |
431 | 453 | does not exist, and ``create`` is not `True`. |
454 | fs.errors.DirectoryExpected: If ``src_path`` is not a | |
455 | directory. | |
432 | 456 | |
433 | 457 | """ |
434 | 458 | with self._lock: |
436 | 460 | raise errors.ResourceNotFound(dst_path) |
437 | 461 | if not self.getinfo(src_path).is_dir: |
438 | 462 | raise errors.DirectoryExpected(src_path) |
439 | copy.copy_dir(self, src_path, self, dst_path) | |
463 | copy.copy_dir(self, src_path, self, dst_path, preserve_time=preserve_time) | |
440 | 464 | |
441 | 465 | def create(self, path, wipe=False): |
442 | 466 | # type: (Text, bool) -> bool |
471 | 495 | |
472 | 496 | Returns: |
473 | 497 | str: a short description of the path. |
498 | ||
499 | Raises: | |
500 | fs.errors.ResourceNotFound: If ``path`` does not exist. | |
474 | 501 | |
475 | 502 | """ |
476 | 503 | if not self.exists(path): |
543 | 570 | |
544 | 571 | def match_dir(patterns, info): |
545 | 572 | # type: (Optional[Iterable[Text]], Info) -> bool |
546 | """Pattern match info.name. | |
547 | """ | |
573 | """Pattern match info.name.""" | |
548 | 574 | return info.is_file or self.match(patterns, info.name) |
549 | 575 | |
550 | 576 | def match_file(patterns, info): |
551 | 577 | # type: (Optional[Iterable[Text]], Info) -> bool |
552 | """Pattern match info.name. | |
553 | """ | |
578 | """Pattern match info.name.""" | |
554 | 579 | return info.is_dir or self.match(patterns, info.name) |
555 | 580 | |
556 | 581 | def exclude_dir(patterns, info): |
557 | 582 | # type: (Optional[Iterable[Text]], Info) -> bool |
558 | """Pattern match info.name. | |
559 | """ | |
583 | """Pattern match info.name.""" | |
560 | 584 | return info.is_file or not self.match(patterns, info.name) |
561 | 585 | |
562 | 586 | def exclude_file(patterns, info): |
563 | 587 | # type: (Optional[Iterable[Text]], Info) -> bool |
564 | """Pattern match info.name. | |
565 | """ | |
588 | """Pattern match info.name.""" | |
566 | 589 | return info.is_dir or not self.match(patterns, info.name) |
567 | 590 | |
568 | 591 | if files: |
596 | 619 | bytes: the file contents. |
597 | 620 | |
598 | 621 | Raises: |
622 | fs.errors.FileExpected: if ``path`` exists but is not a file. | |
599 | 623 | fs.errors.ResourceNotFound: if ``path`` does not exist. |
600 | 624 | |
601 | 625 | """ |
607 | 631 | |
608 | 632 | def download(self, path, file, chunk_size=None, **options): |
609 | 633 | # type: (Text, BinaryIO, Optional[int], **Any) -> None |
610 | """Copies a file from the filesystem to a file-like object. | |
634 | """Copy a file from the filesystem to a file-like object. | |
611 | 635 | |
612 | 636 | This may be more efficient than opening and copying files |
613 | 637 | manually if the filesystem supplies an optimized method. |
638 | ||
639 | Note that the file object ``file`` will *not* be closed by this | |
640 | method. Take care to close it after this method completes | |
641 | (ideally with a context manager). | |
614 | 642 | |
615 | 643 | Arguments: |
616 | 644 | path (str): Path to a resource. |
622 | 650 | **options: Implementation specific options required to open |
623 | 651 | the source file. |
624 | 652 | |
625 | Note that the file object ``file`` will *not* be closed by this | |
626 | method. Take care to close it after this method completes | |
627 | (ideally with a context manager). | |
628 | ||
629 | 653 | Example: |
630 | 654 | >>> with open('starwars.mov', 'wb') as write_file: |
631 | ... my_fs.download('/movies/starwars.mov', write_file) | |
655 | ... my_fs.download('/Videos/starwars.mov', write_file) | |
656 | ||
657 | Raises: | |
658 | fs.errors.ResourceNotFound: if ``path`` does not exist. | |
632 | 659 | |
633 | 660 | """ |
634 | 661 | with self._lock: |
670 | 697 | return contents |
671 | 698 | |
672 | 699 | gettext = _new_name(readtext, "gettext") |
700 | ||
701 | def getmodified(self, path): | |
702 | # type: (Text) -> Optional[datetime] | |
703 | """Get the timestamp of the last modifying access of a resource. | |
704 | ||
705 | Arguments: | |
706 | path (str): A path to a resource. | |
707 | ||
708 | Returns: | |
709 | datetime: The timestamp of the last modification. | |
710 | ||
711 | The *modified timestamp* of a file is the point in time | |
712 | that the file was last changed. Depending on the file system, | |
713 | it might only have limited accuracy. | |
714 | ||
715 | """ | |
716 | return self.getinfo(path, namespaces=["details"]).modified | |
673 | 717 | |
674 | 718 | def getmeta(self, namespace="standard"): |
675 | 719 | # type: (Text) -> Mapping[Text, object] |
735 | 779 | Returns: |
736 | 780 | int: the *size* of the resource. |
737 | 781 | |
782 | Raises: | |
783 | fs.errors.ResourceNotFound: if ``path`` does not exist. | |
784 | ||
738 | 785 | The *size* of a file is the total number of readable bytes, |
739 | 786 | which may not reflect the exact number of bytes of reserved |
740 | 787 | disk space (or other storage medium). |
750 | 797 | # type: (Text) -> Text |
751 | 798 | """Get the *system path* of a resource. |
752 | 799 | |
753 | Parameters: | |
800 | Arguments: | |
754 | 801 | path (str): A path on the filesystem. |
755 | 802 | |
756 | 803 | Returns: |
786 | 833 | |
787 | 834 | def getospath(self, path): |
788 | 835 | # type: (Text) -> bytes |
789 | """Get a *system path* to a resource, encoded in the operating | |
790 | system's prefered encoding. | |
791 | ||
792 | Parameters: | |
836 | """Get the *system path* to a resource, in the OS' preferred encoding. |
837 | ||
838 | Arguments: | |
793 | 839 | path (str): A path on the filesystem. |
794 | 840 | |
795 | 841 | Returns: |
806 | 852 | |
807 | 853 | Note: |
808 | 854 | If you want your code to work in Python2.7 and Python3 then |
809 | use this method if you want to work will the OS filesystem | |
855 | use this method if you want to work with the OS filesystem | |
810 | 856 | outside of the OSFS interface. |
811 | 857 | |
812 | 858 | """ |
818 | 864 | # type: (Text) -> ResourceType |
819 | 865 | """Get the type of a resource. |
820 | 866 | |
821 | Parameters: | |
867 | Arguments: | |
822 | 868 | path (str): A path on the filesystem. |
823 | 869 | |
824 | 870 | Returns: |
825 | 871 | ~fs.enums.ResourceType: the type of the resource. |
872 | ||
873 | Raises: | |
874 | fs.errors.ResourceNotFound: if ``path`` does not exist. | |
826 | 875 | |
827 | 876 | A type of a resource is an integer that identifies what |
828 | 877 | the resource references. The standard type integers may be one |
856 | 905 | # type: (Text, Text) -> Text |
857 | 906 | """Get the URL to a given resource. |
858 | 907 | |
859 | Parameters: | |
908 | Arguments: | |
860 | 909 | path (str): A path on the filesystem |
861 | 910 | purpose (str): A short string that indicates which URL |
862 | 911 | to retrieve for the given path (if there is more than |
863 | 912 | one). The default is ``'download'``, which should return |
864 | 913 | a URL that serves the file. Other filesystems may support |
865 | other values for ``purpose``. | |
914 | other values for ``purpose``: for instance, `OSFS` supports | |
915 | ``'fs'``, which returns a FS URL (see :ref:`fs-urls`). | |
866 | 916 | |
867 | 917 | Returns: |
868 | 918 | str: a URL. |
877 | 927 | # type: (Text) -> bool |
878 | 928 | """Check if a path maps to a system path. |
879 | 929 | |
880 | Parameters: | |
930 | Arguments: | |
881 | 931 | path (str): A path on the filesystem. |
882 | 932 | |
883 | 933 | Returns: |
895 | 945 | # type: (Text, Text) -> bool |
896 | 946 | """Check if a path has a corresponding URL. |
897 | 947 | |
898 | Parameters: | |
948 | Arguments: | |
899 | 949 | path (str): A path on the filesystem. |
900 | 950 | purpose (str): A purpose parameter, as given in |
901 | 951 | `~fs.base.FS.geturl`. |
913 | 963 | |
914 | 964 | def isclosed(self): |
915 | 965 | # type: () -> bool |
916 | """Check if the filesystem is closed. | |
917 | """ | |
966 | """Check if the filesystem is closed.""" | |
918 | 967 | return getattr(self, "_closed", False) |
919 | 968 | |
920 | 969 | def isdir(self, path): |
921 | 970 | # type: (Text) -> bool |
922 | 971 | """Check if a path maps to an existing directory. |
923 | 972 | |
924 | Parameters: | |
973 | Arguments: | |
925 | 974 | path (str): A path on the filesystem. |
926 | 975 | |
927 | 976 | Returns: |
940 | 989 | A directory is considered empty when it does not contain |
941 | 990 | any file or any directory. |
942 | 991 | |
943 | Parameters: | |
992 | Arguments: | |
944 | 993 | path (str): A path to a directory on the filesystem. |
945 | 994 | |
946 | 995 | Returns: |
957 | 1006 | # type: (Text) -> bool |
958 | 1007 | """Check if a path maps to an existing file. |
959 | 1008 | |
960 | Parameters: | |
1009 | Arguments: | |
961 | 1010 | path (str): A path on the filesystem. |
962 | 1011 | |
963 | 1012 | Returns: |
973 | 1022 | # type: (Text) -> bool |
974 | 1023 | """Check if a path maps to a symlink. |
975 | 1024 | |
976 | Parameters: | |
1025 | Arguments: | |
977 | 1026 | path (str): A path on the filesystem. |
978 | 1027 | |
979 | 1028 | Returns: |
997 | 1046 | Example: |
998 | 1047 | >>> with my_fs.lock(): # May block |
999 | 1048 | ... # code here has exclusive access to the filesystem |
1049 | ... pass | |
1000 | 1050 | |
1001 | 1051 | It is a good idea to put a lock around any operations that you |
1002 | 1052 | would like to be *atomic*. For instance if you are copying |
1015 | 1065 | """ |
1016 | 1066 | return self._lock |
1017 | 1067 | |
1018 | def movedir(self, src_path, dst_path, create=False): | |
1019 | # type: (Text, Text, bool) -> None | |
1068 | def movedir(self, src_path, dst_path, create=False, preserve_time=False): | |
1069 | # type: (Text, Text, bool, bool) -> None | |
1020 | 1070 | """Move directory ``src_path`` to ``dst_path``. |
1021 | 1071 | |
1022 | Parameters: | |
1072 | Arguments: | |
1023 | 1073 | src_path (str): Path of source directory on the filesystem. |
1024 | 1074 | dst_path (str): Path to destination directory. |
1025 | 1075 | create (bool): If `True`, then ``dst_path`` will be created |
1026 | 1076 | if it doesn't exist already (defaults to `False`). |
1077 | preserve_time (bool): If `True`, try to preserve mtime of the | |
1078 | resources (defaults to `False`). | |
1027 | 1079 | |
1028 | 1080 | Raises: |
1029 | 1081 | fs.errors.ResourceNotFound: if ``dst_path`` does not exist, |
1030 | 1082 | and ``create`` is `False`. |
1031 | ||
1032 | """ | |
1083 | fs.errors.DirectoryExpected: if ``src_path`` or one of its | |
1084 | ancestors is not a directory. | |
1085 | ||
1086 | """ | |
1087 | from .move import move_dir | |
1088 | ||
1033 | 1089 | with self._lock: |
1034 | 1090 | if not create and not self.exists(dst_path): |
1035 | 1091 | raise errors.ResourceNotFound(dst_path) |
1036 | move.move_dir(self, src_path, self, dst_path) | |
1092 | move_dir(self, src_path, self, dst_path, preserve_time=preserve_time) | |
1037 | 1093 | |
1038 | 1094 | def makedirs( |
1039 | 1095 | self, |
1078 | 1134 | raise |
1079 | 1135 | return self.opendir(path) |
1080 | 1136 | |
1081 | def move(self, src_path, dst_path, overwrite=False): | |
1082 | # type: (Text, Text, bool) -> None | |
1137 | def move(self, src_path, dst_path, overwrite=False, preserve_time=False): | |
1138 | # type: (Text, Text, bool, bool) -> None | |
1083 | 1139 | """Move a file from ``src_path`` to ``dst_path``. |
1084 | 1140 | |
1085 | 1141 | Arguments: |
1088 | 1144 | file will be written to. |
1089 | 1145 | overwrite (bool): If `True`, destination path will be |
1090 | 1146 | overwritten if it exists. |
1147 | preserve_time (bool): If `True`, try to preserve mtime of the | |
1148 | resources (defaults to `False`). | |
1091 | 1149 | |
1092 | 1150 | Raises: |
1093 | 1151 | fs.errors.FileExpected: If ``src_path`` maps to a |
1114 | 1172 | except OSError: |
1115 | 1173 | pass |
1116 | 1174 | else: |
1175 | if preserve_time: | |
1176 | copy_modified_time(self, src_path, self, dst_path) | |
1117 | 1177 | return |
1118 | 1178 | with self._lock: |
1119 | 1179 | with self.open(src_path, "rb") as read_file: |
1120 | 1180 | # FIXME(@althonos): typing complains because open return IO |
1121 | 1181 | self.upload(dst_path, read_file) # type: ignore |
1182 | if preserve_time: | |
1183 | copy_modified_time(self, src_path, self, dst_path) | |
1122 | 1184 | self.remove(src_path) |
1123 | 1185 | |
1124 | 1186 | def open( |
1195 | 1257 | ~fs.subfs.SubFS: A filesystem representing a sub-directory. |
1196 | 1258 | |
1197 | 1259 | Raises: |
1198 | fs.errors.DirectoryExpected: If ``dst_path`` does not | |
1199 | exist or is not a directory. | |
1260 | fs.errors.ResourceNotFound: If ``path`` does not exist. | |
1261 | fs.errors.DirectoryExpected: If ``path`` is not a directory. | |
1200 | 1262 | |
1201 | 1263 | """ |
1202 | 1264 | from .subfs import SubFS |
1203 | 1265 | |
1204 | 1266 | _factory = factory or self.subfs_class or SubFS |
1205 | 1267 | |
1206 | if not self.getbasic(path).is_dir: | |
1268 | if not self.getinfo(path).is_dir: | |
1207 | 1269 | raise errors.DirectoryExpected(path=path) |
1208 | 1270 | return _factory(self, path) |
1209 | 1271 | |
1210 | 1272 | def removetree(self, dir_path): |
1211 | 1273 | # type: (Text) -> None |
1212 | """Recursively remove the contents of a directory. | |
1213 | ||
1214 | This method is similar to `~fs.base.removedir`, but will | |
1274 | """Recursively remove a directory and all its contents. | |
1275 | ||
1276 | This method is similar to `~fs.base.FS.removedir`, but will | |
1215 | 1277 | remove the contents of the directory if it is not empty. |
1216 | 1278 | |
1217 | 1279 | Arguments: |
1218 | 1280 | dir_path (str): Path to a directory on the filesystem. |
1281 | ||
1282 | Raises: | |
1283 | fs.errors.ResourceNotFound: If ``dir_path`` does not exist. | |
1284 | fs.errors.DirectoryExpected: If ``dir_path`` is not a directory. | |
1285 | ||
1286 | Caution: | |
1287 | A filesystem should never delete its root folder, so | |
1288 | ``FS.removetree("/")`` has different semantics: the | |
1289 | contents of the root folder will be deleted, but the | |
1290 | root will be untouched:: | |
1291 | ||
1292 | >>> home_fs = fs.open_fs("~") | |
1293 | >>> home_fs.removetree("/") | |
1294 | >>> home_fs.exists("/") | |
1295 | True | |
1296 | >>> home_fs.isempty("/") | |
1297 | True | |
1298 | ||
1299 | Combined with `~fs.base.FS.opendir`, this can be used | |
1300 | to clear a directory without removing the directory | |
1301 | itself:: | |
1302 | ||
1303 | >>> home_fs = fs.open_fs("~") | |
1304 | >>> home_fs.opendir("/Videos").removetree("/") | |
1305 | >>> home_fs.exists("/Videos") | |
1306 | True | |
1307 | >>> home_fs.isempty("/Videos") | |
1308 | True | |
1219 | 1309 | |
1220 | 1310 | """ |
1221 | 1311 | _dir_path = abspath(normpath(dir_path)) |
1308 | 1398 | **options: Implementation specific options required to open |
1309 | 1399 | the source file. |
1310 | 1400 | |
1401 | Raises: | |
1402 | fs.errors.ResourceNotFound: If a parent directory of | |
1403 | ``path`` does not exist. | |
1404 | ||
1311 | 1405 | Note that the file object ``file`` will *not* be closed by this |
1312 | 1406 | method. Take care to close it after this method completes |
1313 | 1407 | (ideally with a context manager). |
1449 | 1543 | |
1450 | 1544 | def validatepath(self, path): |
1451 | 1545 | # type: (Text) -> Text |
1452 | """Check if a path is valid, returning a normalized absolute | |
1453 | path. | |
1546 | """Validate a path, returning a normalized absolute path on success. |
1454 | 1547 | |
1455 | 1548 | Many filesystems have restrictions on the format of paths they |
1456 | 1549 | support. This method will check that ``path`` is valid on the |
1464 | 1557 | str: A normalized, absolute path. |
1465 | 1558 | |
1466 | 1559 | Raises: |
1560 | fs.errors.InvalidPath: If the path is invalid. | |
1561 | fs.errors.FilesystemClosed: if the filesystem is closed. | |
1467 | 1562 | fs.errors.InvalidCharsInPath: If the path contains |
1468 | 1563 | invalid characters. |
1469 | fs.errors.InvalidPath: If the path is invalid. | |
1470 | fs.errors.FilesystemClosed: if the filesystem | |
1471 | is closed. | |
1472 | 1564 | |
1473 | 1565 | """ |
1474 | 1566 | self.check() |
1520 | 1612 | Returns: |
1521 | 1613 | ~fs.info.Info: Resource information object for ``path``. |
1522 | 1614 | |
1523 | """ | |
1615 | Note: | |
1616 | .. deprecated:: 2.4.13 | |
1617 | Please use `~FS.getinfo` directly, which is | |
1618 | required to always return the *basic* namespace. | |
1619 | ||
1620 | """ | |
1621 | warnings.warn( | |
1622 | "method 'getbasic' has been deprecated, please use 'getinfo'", | |
1623 | DeprecationWarning, | |
1624 | ) | |
1524 | 1625 | return self.getinfo(path, namespaces=["basic"]) |
1525 | 1626 | |
1526 | 1627 | def getdetails(self, path): |
1554 | 1655 | def match(self, patterns, name): |
1555 | 1656 | # type: (Optional[Iterable[Text]], Text) -> bool |
1556 | 1657 | """Check if a name matches any of a list of wildcards. |
1557 | ||
1558 | Arguments: | |
1559 | patterns (list): A list of patterns, e.g. ``['*.py']`` | |
1560 | name (str): A file or directory name (not a path) | |
1561 | ||
1562 | Returns: | |
1563 | bool: `True` if ``name`` matches any of the patterns. | |
1564 | 1658 | |
1565 | 1659 | If a filesystem is case *insensitive* (such as Windows) then |
1566 | 1660 | this method will perform a case insensitive match (i.e. ``*.py`` |
1568 | 1662 | be case sensitive (``*.py`` and ``*.PY`` will match different |
1569 | 1663 | names). |
1570 | 1664 | |
1665 | Arguments: | |
1666 | patterns (list, optional): A list of patterns, e.g. | |
1667 | ``['*.py']``, or `None` to match everything. | |
1668 | name (str): A file or directory name (not a path) | |
1669 | ||
1670 | Returns: | |
1671 | bool: `True` if ``name`` matches any of the patterns. | |
1672 | ||
1673 | Raises: | |
1674 | TypeError: If ``patterns`` is a single string instead of | |
1675 | a list (or `None`). | |
1676 | ||
1571 | 1677 | Example: |
1572 | >>> home_fs.match(['*.py'], '__init__.py') | |
1678 | >>> my_fs.match(['*.py'], '__init__.py') | |
1573 | 1679 | True |
1574 | >>> home_fs.match(['*.jpg', '*.png'], 'foo.gif') | |
1680 | >>> my_fs.match(['*.jpg', '*.png'], 'foo.gif') | |
1575 | 1681 | False |
1576 | 1682 | |
1577 | 1683 | Note: |
1624 | 1730 | Arguments: |
1625 | 1731 | path(str): A path on the filesystem. |
1626 | 1732 | name(str): |
1627 | One of the algorithms supported by the hashlib module, e.g. `"md5"` | |
1733 | One of the algorithms supported by the `hashlib` module, | |
1734 | e.g. `"md5"` or `"sha256"`. | |
1628 | 1735 | |
1629 | 1736 | Returns: |
1630 | 1737 | str: The hex digest of the hash. |
1631 | 1738 | |
1632 | 1739 | Raises: |
1633 | 1740 | fs.errors.UnsupportedHash: If the requested hash is not supported. |
1741 | fs.errors.ResourceNotFound: If ``path`` does not exist. | |
1742 | fs.errors.FileExpected: If ``path`` exists but is not a file. | |
1634 | 1743 | |
1635 | 1744 | """ |
1636 | 1745 | self.validatepath(path) |
3 | 3 | `tarfile` modules from the standard library. |
4 | 4 | """ |
5 | 5 | |
6 | from __future__ import absolute_import | |
7 | from __future__ import print_function | |
8 | from __future__ import unicode_literals | |
6 | from __future__ import absolute_import, print_function, unicode_literals | |
9 | 7 | |
8 | import typing | |
9 | ||
10 | import six | |
11 | import tarfile | |
10 | 12 | import time |
11 | import tarfile | |
12 | import typing | |
13 | 13 | import zipfile |
14 | 14 | from datetime import datetime |
15 | 15 | |
16 | import six | |
17 | ||
18 | 16 | from .enums import ResourceType |
17 | from .errors import MissingInfoNamespace, NoSysPath | |
19 | 18 | from .path import relpath |
20 | 19 | from .time import datetime_to_epoch |
21 | from .errors import NoSysPath, MissingInfoNamespace | |
22 | 20 | from .walk import Walker |
23 | 21 | |
24 | 22 | if typing.TYPE_CHECKING: |
25 | 23 | from typing import BinaryIO, Optional, Text, Tuple, Union |
24 | ||
26 | 25 | from .base import FS |
27 | 26 | |
28 | 27 | ZipTime = Tuple[int, int, int, int, int, int] |
45 | 44 | compression (int): Compression to use (one of the constants |
46 | 45 | defined in the `zipfile` module in the stdlib). Defaults |
47 | 46 | to `zipfile.ZIP_DEFLATED`. |
48 | encoding (str): | |
49 | The encoding to use for filenames. The default is ``"utf-8"``, | |
50 | use ``"CP437"`` if compatibility with WinZip is desired. | |
47 | encoding (str): The encoding to use for filenames. The default | |
48 | is ``"utf-8"``, use ``"CP437"`` if compatibility with WinZip | |
49 | is desired. | |
51 | 50 | walker (~fs.walk.Walker, optional): A `Walker` instance, or `None` |
52 | 51 | to use default walker. You can use this to specify which files |
53 | 52 | you want to compress. |
115 | 114 | """Write the contents of a filesystem to a tar file. |
116 | 115 | |
117 | 116 | Arguments: |
117 | src_fs (~fs.base.FS): The source filesystem to compress. | |
118 | 118 | file (str or io.IOBase): Destination file, may be a file |
119 | 119 | name or an open file object. |
120 | 120 | compression (str, optional): Compression to use, or `None` |
2 | 2 | |
3 | 3 | import io |
4 | 4 | |
5 | ||
6 | 5 | DEFAULT_CHUNK_SIZE = io.DEFAULT_BUFFER_SIZE * 16 |
7 | 6 | """`int`: the size of a single chunk read from or written to a file. |
8 | 7 | """ |
4 | 4 | |
5 | 5 | import typing |
6 | 6 | |
7 | from .errors import FSError | |
7 | import warnings | |
8 | ||
9 | from .errors import ResourceNotFound | |
8 | 10 | from .opener import manage_fs |
9 | 11 | from .path import abspath, combine, frombase, normpath |
10 | 12 | from .tools import is_thread_safe |
12 | 14 | |
13 | 15 | if typing.TYPE_CHECKING: |
14 | 16 | from typing import Callable, Optional, Text, Union |
17 | ||
15 | 18 | from .base import FS |
16 | 19 | |
17 | 20 | _OnCopy = Callable[[FS, Text, FS, Text], object] |
23 | 26 | walker=None, # type: Optional[Walker] |
24 | 27 | on_copy=None, # type: Optional[_OnCopy] |
25 | 28 | workers=0, # type: int |
29 | preserve_time=False, # type: bool | |
26 | 30 | ): |
27 | 31 | # type: (...) -> None |
28 | 32 | """Copy the contents of one filesystem to another. |
38 | 42 | dst_path)``. |
39 | 43 | workers (int): Use `worker` threads to copy data, or ``0`` (default) for |
40 | 44 | a single-threaded copy. |
41 | ||
42 | """ | |
43 | return copy_dir( | |
44 | src_fs, "/", dst_fs, "/", walker=walker, on_copy=on_copy, workers=workers | |
45 | preserve_time (bool): If `True`, try to preserve mtime of the | |
46 | resources (defaults to `False`). | |
47 | ||
48 | """ | |
49 | return copy_fs_if( | |
50 | src_fs, dst_fs, "always", walker, on_copy, workers, preserve_time=preserve_time | |
45 | 51 | ) |
46 | 52 | |
47 | 53 | |
51 | 57 | walker=None, # type: Optional[Walker] |
52 | 58 | on_copy=None, # type: Optional[_OnCopy] |
53 | 59 | workers=0, # type: int |
60 | preserve_time=False, # type: bool | |
54 | 61 | ): |
55 | 62 | # type: (...) -> None |
56 | 63 | """Copy the contents of one filesystem to another, checking times. |
57 | 64 | |
58 | If both source and destination files exist, the copy is executed | |
59 | only if the source file is newer than the destination file. In case | |
60 | modification times of source or destination files are not available, | |
61 | copy file is always executed. | |
65 | .. deprecated:: 2.5.0 | |
66 | Use `~fs.copy.copy_fs_if` with ``condition="newer"`` instead. | |
67 | ||
68 | """ | |
69 | warnings.warn( | |
70 | "copy_fs_if_newer is deprecated. Use copy_fs_if instead.", DeprecationWarning | |
71 | ) | |
72 | return copy_fs_if( | |
73 | src_fs, dst_fs, "newer", walker, on_copy, workers, preserve_time=preserve_time | |
74 | ) | |
75 | ||
76 | ||
77 | def copy_fs_if( | |
78 | src_fs, # type: Union[FS, Text] | |
79 | dst_fs, # type: Union[FS, Text] | |
80 | condition="always", # type: Text | |
81 | walker=None, # type: Optional[Walker] | |
82 | on_copy=None, # type: Optional[_OnCopy] | |
83 | workers=0, # type: int | |
84 | preserve_time=False, # type: bool | |
85 | ): | |
86 | # type: (...) -> None | |
87 | """Copy the contents of one filesystem to another, depending on a condition. | |
62 | 88 | |
63 | 89 | Arguments: |
64 | 90 | src_fs (FS or str): Source filesystem (URL or instance). |
65 | 91 | dst_fs (FS or str): Destination filesystem (URL or instance). |
92 | condition (str): Name of the condition to check for each file. | |
66 | 93 | walker (~fs.walk.Walker, optional): A walker object that will be |
67 | 94 | used to scan for files in ``src_fs``. Set this if you only want |
68 | 95 | to consider a sub-set of the resources in ``src_fs``. |
69 | 96 | on_copy (callable): A function callback called after a single file copy |
70 | 97 | is executed. Expected signature is ``(src_fs, src_path, dst_fs, |
71 | 98 | dst_path)``. |
72 | workers (int): Use ``worker`` threads to copy data, or ``0`` (default) for | |
73 | a single-threaded copy. | |
74 | ||
75 | """ | |
76 | return copy_dir_if_newer( | |
77 | src_fs, "/", dst_fs, "/", walker=walker, on_copy=on_copy, workers=workers | |
78 | ) | |
79 | ||
80 | ||
81 | def _source_is_newer(src_fs, src_path, dst_fs, dst_path): | |
82 | # type: (FS, Text, FS, Text) -> bool | |
83 | """Determine if source file is newer than destination file. | |
84 | ||
85 | Arguments: | |
86 | src_fs (FS): Source filesystem (instance or URL). | |
87 | src_path (str): Path to a file on the source filesystem. | |
88 | dst_fs (FS): Destination filesystem (instance or URL). | |
89 | dst_path (str): Path to a file on the destination filesystem. | |
90 | ||
91 | Returns: | |
92 | bool: `True` if the source file is newer than the destination | |
93 | file or file modification time cannot be determined, `False` | |
94 | otherwise. | |
95 | ||
96 | """ | |
97 | try: | |
98 | if dst_fs.exists(dst_path): | |
99 | namespace = ("details", "modified") | |
100 | src_modified = src_fs.getinfo(src_path, namespace).modified | |
101 | if src_modified is not None: | |
102 | dst_modified = dst_fs.getinfo(dst_path, namespace).modified | |
103 | return dst_modified is None or src_modified > dst_modified | |
104 | return True | |
105 | except FSError: # pragma: no cover | |
106 | # todo: should log something here | |
107 | return True | |
99 | workers (int): Use ``worker`` threads to copy data, or ``0`` (default) | |
100 | for a single-threaded copy. | |
101 | preserve_time (bool): If `True`, try to preserve mtime of the | |
102 | resources (defaults to `False`). | |
103 | ||
104 | See Also: | |
105 | `~fs.copy.copy_file_if` for the full list of supported values for the | |
106 | ``condition`` argument. | |
107 | ||
108 | """ | |
109 | return copy_dir_if( | |
110 | src_fs, | |
111 | "/", | |
112 | dst_fs, | |
113 | "/", | |
114 | condition, | |
115 | walker=walker, | |
116 | on_copy=on_copy, | |
117 | workers=workers, | |
118 | preserve_time=preserve_time, | |
119 | ) | |
108 | 120 | |
109 | 121 | |
110 | 122 | def copy_file( |
112 | 124 | src_path, # type: Text |
113 | 125 | dst_fs, # type: Union[FS, Text] |
114 | 126 | dst_path, # type: Text |
127 | preserve_time=False, # type: bool | |
115 | 128 | ): |
116 | 129 | # type: (...) -> None |
117 | 130 | """Copy a file from one filesystem to another. |
123 | 136 | src_path (str): Path to a file on the source filesystem. |
124 | 137 | dst_fs (FS or str): Destination filesystem (instance or URL). |
125 | 138 | dst_path (str): Path to a file on the destination filesystem. |
126 | ||
127 | """ | |
128 | with manage_fs(src_fs, writeable=False) as _src_fs: | |
129 | with manage_fs(dst_fs, create=True) as _dst_fs: | |
130 | if _src_fs is _dst_fs: | |
131 | # Same filesystem, so we can do a potentially optimized | |
132 | # copy | |
133 | _src_fs.copy(src_path, dst_path, overwrite=True) | |
134 | else: | |
135 | # Standard copy | |
136 | with _src_fs.lock(), _dst_fs.lock(): | |
137 | if _dst_fs.hassyspath(dst_path): | |
138 | with _dst_fs.openbin(dst_path, "w") as write_file: | |
139 | _src_fs.download(src_path, write_file) | |
140 | else: | |
141 | with _src_fs.openbin(src_path) as read_file: | |
142 | _dst_fs.upload(dst_path, read_file) | |
143 | ||
144 | ||
145 | def copy_file_internal( | |
146 | src_fs, # type: FS | |
147 | src_path, # type: Text | |
148 | dst_fs, # type: FS | |
149 | dst_path, # type: Text | |
150 | ): | |
151 | # type: (...) -> None | |
152 | """Low level copy, that doesn't call manage_fs or lock. | |
153 | ||
154 | If the destination exists, and is a file, it will be first truncated. | |
155 | ||
156 | This method exists to optimize copying in loops. In general you | |
157 | should prefer `copy_file`. | |
158 | ||
159 | Arguments: | |
160 | src_fs (FS): Source filesystem. | |
161 | src_path (str): Path to a file on the source filesystem. | |
162 | dst_fs (FS: Destination filesystem. | |
163 | dst_path (str): Path to a file on the destination filesystem. | |
164 | ||
165 | """ | |
166 | if src_fs is dst_fs: | |
167 | # Same filesystem, so we can do a potentially optimized | |
168 | # copy | |
169 | src_fs.copy(src_path, dst_path, overwrite=True) | |
170 | elif dst_fs.hassyspath(dst_path): | |
171 | with dst_fs.openbin(dst_path, "w") as write_file: | |
172 | src_fs.download(src_path, write_file) | |
173 | else: | |
174 | with src_fs.openbin(src_path) as read_file: | |
175 | dst_fs.upload(dst_path, read_file) | |
139 | preserve_time (bool): If `True`, try to preserve mtime of the | |
140 | resource (defaults to `False`). | |
141 | ||
142 | """ | |
143 | copy_file_if( | |
144 | src_fs, src_path, dst_fs, dst_path, "always", preserve_time=preserve_time | |
145 | ) | |
176 | 146 | |
177 | 147 | |
178 | 148 | def copy_file_if_newer( |
180 | 150 | src_path, # type: Text |
181 | 151 | dst_fs, # type: Union[FS, Text] |
182 | 152 | dst_path, # type: Text |
153 | preserve_time=False, # type: bool | |
183 | 154 | ): |
184 | 155 | # type: (...) -> bool |
185 | 156 | """Copy a file from one filesystem to another, checking times. |
186 | 157 | |
187 | If the destination exists, and is a file, it will be first truncated. | |
188 | If both source and destination files exist, the copy is executed only | |
189 | if the source file is newer than the destination file. In case | |
190 | modification times of source or destination files are not available, | |
191 | copy is always executed. | |
158 | .. deprecated:: 2.5.0 | |
159 | Use `~fs.copy.copy_file_if` with ``condition="newer"`` instead. | |
160 | ||
161 | """ | |
162 | warnings.warn( | |
163 | "copy_file_if_newer is deprecated. Use copy_file_if instead.", | |
164 | DeprecationWarning, | |
165 | ) | |
166 | return copy_file_if( | |
167 | src_fs, src_path, dst_fs, dst_path, "newer", preserve_time=preserve_time | |
168 | ) | |
169 | ||
170 | ||
171 | def copy_file_if( | |
172 | src_fs, # type: Union[FS, Text] | |
173 | src_path, # type: Text | |
174 | dst_fs, # type: Union[FS, Text] | |
175 | dst_path, # type: Text | |
176 | condition, # type: Text | |
177 | preserve_time=False, # type: bool | |
178 | ): | |
179 | # type: (...) -> bool | |
180 | """Copy a file from one filesystem to another, depending on a condition. | |
181 | ||
182 | Depending on the value of ``condition``, certain requirements must | |
183 | be fulfilled for a file to be copied to ``dst_fs``. The following | |
184 | values are supported: | |
185 | ||
186 | ``"always"`` | |
187 | The source file is always copied. | |
188 | ``"newer"`` | |
189 | The last modification time of the source file must be newer than that | |
190 | of the destination file. If either file has no modification time, the | |
191 | copy is performed always. | |
192 | ``"older"`` | |
193 | The last modification time of the source file must be older than that | |
194 | of the destination file. If either file has no modification time, the | |
195 | copy is performed always. | |
196 | ``"exists"`` | |
197 | The source file is only copied if a file of the same path already | |
198 | exists in ``dst_fs``. | |
199 | ``"not_exists"`` | |
200 | The source file is only copied if no file of the same path already | |
201 | exists in ``dst_fs``. | |
192 | 202 | |
193 | 203 | Arguments: |
194 | 204 | src_fs (FS or str): Source filesystem (instance or URL). |
195 | 205 | src_path (str): Path to a file on the source filesystem. |
196 | 206 | dst_fs (FS or str): Destination filesystem (instance or URL). |
197 | 207 | dst_path (str): Path to a file on the destination filesystem. |
208 | condition (str): Name of the condition to check for each file. | |
209 | preserve_time (bool): If `True`, try to preserve mtime of the | |
210 | resource (defaults to `False`). | |
198 | 211 | |
199 | 212 | Returns: |
200 | 213 | bool: `True` if the file copy was executed, `False` otherwise. |
202 | 215 | """ |
203 | 216 | with manage_fs(src_fs, writeable=False) as _src_fs: |
204 | 217 | with manage_fs(dst_fs, create=True) as _dst_fs: |
205 | if _src_fs is _dst_fs: | |
206 | # Same filesystem, so we can do a potentially optimized | |
207 | # copy | |
208 | if _source_is_newer(_src_fs, src_path, _dst_fs, dst_path): | |
209 | _src_fs.copy(src_path, dst_path, overwrite=True) | |
210 | return True | |
211 | else: | |
212 | return False | |
213 | else: | |
214 | # Standard copy | |
215 | with _src_fs.lock(), _dst_fs.lock(): | |
216 | if _source_is_newer(_src_fs, src_path, _dst_fs, dst_path): | |
217 | copy_file_internal(_src_fs, src_path, _dst_fs, dst_path) | |
218 | return True | |
219 | else: | |
220 | return False | |
218 | do_copy = _copy_is_necessary( | |
219 | _src_fs, src_path, _dst_fs, dst_path, condition | |
220 | ) | |
221 | if do_copy: | |
222 | copy_file_internal( | |
223 | _src_fs, | |
224 | src_path, | |
225 | _dst_fs, | |
226 | dst_path, | |
227 | preserve_time=preserve_time, | |
228 | lock=True, | |
229 | ) | |
230 | return do_copy | |
231 | ||
232 | ||
def copy_file_internal(
    src_fs,  # type: FS
    src_path,  # type: Text
    dst_fs,  # type: FS
    dst_path,  # type: Text
    preserve_time=False,  # type: bool
    lock=False,  # type: bool
):
    # type: (...) -> None
    """Copy a file at low level, without calling `manage_fs` or locking.

    If the destination exists, and is a file, it will be first truncated.

    This method exists to optimize copying in loops. In general you
    should prefer `copy_file`.

    Arguments:
        src_fs (FS): Source filesystem.
        src_path (str): Path to a file on the source filesystem.
        dst_fs (FS): Destination filesystem.
        dst_path (str): Path to a file on the destination filesystem.
        preserve_time (bool): If `True`, try to preserve mtime of the
            resource (defaults to `False`).
        lock (bool): Lock both filesystems before copying.

    """
    if src_fs is dst_fs:
        # Same filesystem object: delegate to its own copy method,
        # which may implement an optimized same-FS copy.
        src_fs.copy(src_path, dst_path, overwrite=True, preserve_time=preserve_time)
        return

    def _transfer():
        # Prefer downloading into the destination when it is backed by
        # a real OS path; otherwise upload from the source stream.
        if dst_fs.hassyspath(dst_path):
            with dst_fs.openbin(dst_path, "w") as dst_file:
                src_fs.download(src_path, dst_file)
        else:
            with src_fs.openbin(src_path) as src_file:
                dst_fs.upload(dst_path, src_file)

        if preserve_time:
            copy_modified_time(src_fs, src_path, dst_fs, dst_path)

    if not lock:
        _transfer()
    else:
        with src_fs.lock(), dst_fs.lock():
            _transfer()
221 | 281 | |
222 | 282 | |
223 | 283 | def copy_structure( |
224 | 284 | src_fs, # type: Union[FS, Text] |
225 | 285 | dst_fs, # type: Union[FS, Text] |
226 | 286 | walker=None, # type: Optional[Walker] |
287 | src_root="/", # type: Text | |
288 | dst_root="/", # type: Text | |
227 | 289 | ): |
228 | 290 | # type: (...) -> None |
229 | 291 | """Copy directories (but not files) from ``src_fs`` to ``dst_fs``. |
234 | 296 | walker (~fs.walk.Walker, optional): A walker object that will be |
235 | 297 | used to scan for files in ``src_fs``. Set this if you only |
236 | 298 | want to consider a sub-set of the resources in ``src_fs``. |
299 | src_root (str): Path of the base directory to consider as the root | |
300 | of the tree structure to copy. | |
301 | dst_root (str): Path to the target root of the tree structure. | |
237 | 302 | |
238 | 303 | """ |
239 | 304 | walker = walker or Walker() |
240 | 305 | with manage_fs(src_fs) as _src_fs: |
241 | 306 | with manage_fs(dst_fs, create=True) as _dst_fs: |
242 | 307 | with _src_fs.lock(), _dst_fs.lock(): |
243 | for dir_path in walker.dirs(_src_fs): | |
244 | _dst_fs.makedir(dir_path, recreate=True) | |
308 | _dst_fs.makedirs(dst_root, recreate=True) | |
309 | for dir_path in walker.dirs(_src_fs, src_root): | |
310 | _dst_fs.makedir( | |
311 | combine(dst_root, frombase(src_root, dir_path)), recreate=True | |
312 | ) | |
245 | 313 | |
246 | 314 | |
247 | 315 | def copy_dir( |
252 | 320 | walker=None, # type: Optional[Walker] |
253 | 321 | on_copy=None, # type: Optional[_OnCopy] |
254 | 322 | workers=0, # type: int |
323 | preserve_time=False, # type: bool | |
255 | 324 | ): |
256 | 325 | # type: (...) -> None |
257 | 326 | """Copy a directory from one filesystem to another. |
269 | 338 | ``(src_fs, src_path, dst_fs, dst_path)``. |
270 | 339 | workers (int): Use ``worker`` threads to copy data, or ``0`` (default) for |
271 | 340 | a single-threaded copy. |
341 | preserve_time (bool): If `True`, try to preserve mtime of the | |
342 | resources (defaults to `False`). | |
343 | ||
344 | """ | |
345 | copy_dir_if( | |
346 | src_fs, | |
347 | src_path, | |
348 | dst_fs, | |
349 | dst_path, | |
350 | "always", | |
351 | walker, | |
352 | on_copy, | |
353 | workers, | |
354 | preserve_time=preserve_time, | |
355 | ) | |
356 | ||
357 | ||
def copy_dir_if_newer(
    src_fs,  # type: Union[FS, Text]
    src_path,  # type: Text
    dst_fs,  # type: Union[FS, Text]
    dst_path,  # type: Text
    walker=None,  # type: Optional[Walker]
    on_copy=None,  # type: Optional[_OnCopy]
    workers=0,  # type: int
    preserve_time=False,  # type: bool
):
    # type: (...) -> None
    """Copy a directory from one filesystem to another, checking times.

    .. deprecated:: 2.5.0
       Use `~fs.copy.copy_dir_if` with ``condition="newer"`` instead.

    """
    warnings.warn(
        "copy_dir_if_newer is deprecated. Use copy_dir_if instead.", DeprecationWarning
    )
    # Thin compatibility shim: forward everything to the generalized
    # implementation with the "newer" condition.
    copy_dir_if(
        src_fs,
        src_path,
        dst_fs,
        dst_path,
        "newer",
        walker=walker,
        on_copy=on_copy,
        workers=workers,
        preserve_time=preserve_time,
    )
389 | ||
390 | ||
391 | def copy_dir_if( | |
392 | src_fs, # type: Union[FS, Text] | |
393 | src_path, # type: Text | |
394 | dst_fs, # type: Union[FS, Text] | |
395 | dst_path, # type: Text | |
396 | condition, # type: Text | |
397 | walker=None, # type: Optional[Walker] | |
398 | on_copy=None, # type: Optional[_OnCopy] | |
399 | workers=0, # type: int | |
400 | preserve_time=False, # type: bool | |
401 | ): | |
402 | # type: (...) -> None | |
403 | """Copy a directory from one filesystem to another, depending on a condition. | |
404 | ||
405 | Arguments: | |
406 | src_fs (FS or str): Source filesystem (instance or URL). | |
407 | src_path (str): Path to a directory on the source filesystem. | |
408 | dst_fs (FS or str): Destination filesystem (instance or URL). | |
409 | dst_path (str): Path to a directory on the destination filesystem. | |
410 | condition (str): Name of the condition to check for each file. | |
411 | walker (~fs.walk.Walker, optional): A walker object that will be | |
412 | used to scan for files in ``src_fs``. Set this if you only want | |
413 | to consider a sub-set of the resources in ``src_fs``. | |
414 | on_copy (callable):A function callback called after a single file copy | |
415 | is executed. Expected signature is ``(src_fs, src_path, dst_fs, | |
416 | dst_path)``. | |
417 | workers (int): Use ``worker`` threads to copy data, or ``0`` (default) for | |
418 | a single-threaded copy. | |
419 | preserve_time (bool): If `True`, try to preserve mtime of the | |
420 | resources (defaults to `False`). | |
421 | ||
422 | See Also: | |
423 | `~fs.copy.copy_file_if` for the full list of supported values for the | |
424 | ``condition`` argument. | |
272 | 425 | |
273 | 426 | """ |
274 | 427 | on_copy = on_copy or (lambda *args: None) |
276 | 429 | _src_path = abspath(normpath(src_path)) |
277 | 430 | _dst_path = abspath(normpath(dst_path)) |
278 | 431 | |
279 | def src(): | |
280 | return manage_fs(src_fs, writeable=False) | |
281 | ||
282 | def dst(): | |
283 | return manage_fs(dst_fs, create=True) | |
284 | ||
285 | 432 | from ._bulk import Copier |
286 | 433 | |
287 | with src() as _src_fs, dst() as _dst_fs: | |
434 | copy_structure(src_fs, dst_fs, walker, src_path, dst_path) | |
435 | ||
436 | with manage_fs(src_fs, writeable=False) as _src_fs, manage_fs( | |
437 | dst_fs, create=True | |
438 | ) as _dst_fs: | |
288 | 439 | with _src_fs.lock(), _dst_fs.lock(): |
289 | 440 | _thread_safe = is_thread_safe(_src_fs, _dst_fs) |
290 | with Copier(num_workers=workers if _thread_safe else 0) as copier: | |
291 | _dst_fs.makedir(_dst_path, recreate=True) | |
292 | for dir_path, dirs, files in walker.walk(_src_fs, _src_path): | |
441 | with Copier( | |
442 | num_workers=workers if _thread_safe else 0, preserve_time=preserve_time | |
443 | ) as copier: | |
444 | for dir_path in walker.files(_src_fs, _src_path): | |
293 | 445 | copy_path = combine(_dst_path, frombase(_src_path, dir_path)) |
294 | for info in dirs: | |
295 | _dst_fs.makedir(info.make_path(copy_path), recreate=True) | |
296 | for info in files: | |
297 | src_path = info.make_path(dir_path) | |
298 | dst_path = info.make_path(copy_path) | |
299 | copier.copy(_src_fs, src_path, _dst_fs, dst_path) | |
300 | on_copy(_src_fs, src_path, _dst_fs, dst_path) | |
301 | ||
302 | ||
303 | def copy_dir_if_newer( | |
304 | src_fs, # type: Union[FS, Text] | |
305 | src_path, # type: Text | |
306 | dst_fs, # type: Union[FS, Text] | |
307 | dst_path, # type: Text | |
308 | walker=None, # type: Optional[Walker] | |
309 | on_copy=None, # type: Optional[_OnCopy] | |
310 | workers=0, # type: int | |
311 | ): | |
312 | # type: (...) -> None | |
313 | """Copy a directory from one filesystem to another, checking times. | |
314 | ||
315 | If both source and destination files exist, the copy is executed only | |
316 | if the source file is newer than the destination file. In case | |
317 | modification times of source or destination files are not available, | |
318 | copy is always executed. | |
446 | if _copy_is_necessary( | |
447 | _src_fs, dir_path, _dst_fs, copy_path, condition | |
448 | ): | |
449 | copier.copy(_src_fs, dir_path, _dst_fs, copy_path) | |
450 | on_copy(_src_fs, dir_path, _dst_fs, copy_path) | |
451 | ||
452 | ||
453 | def _copy_is_necessary( | |
454 | src_fs, # type: FS | |
455 | src_path, # type: Text | |
456 | dst_fs, # type: FS | |
457 | dst_path, # type: Text | |
458 | condition, # type: Text | |
459 | ): | |
460 | # type: (...) -> bool | |
461 | ||
462 | if condition == "always": | |
463 | return True | |
464 | ||
465 | elif condition == "newer": | |
466 | try: | |
467 | src_modified = src_fs.getmodified(src_path) | |
468 | dst_modified = dst_fs.getmodified(dst_path) | |
469 | except ResourceNotFound: | |
470 | return True | |
471 | else: | |
472 | return ( | |
473 | src_modified is None | |
474 | or dst_modified is None | |
475 | or src_modified > dst_modified | |
476 | ) | |
477 | ||
478 | elif condition == "older": | |
479 | try: | |
480 | src_modified = src_fs.getmodified(src_path) | |
481 | dst_modified = dst_fs.getmodified(dst_path) | |
482 | except ResourceNotFound: | |
483 | return True | |
484 | else: | |
485 | return ( | |
486 | src_modified is None | |
487 | or dst_modified is None | |
488 | or src_modified < dst_modified | |
489 | ) | |
490 | ||
491 | elif condition == "exists": | |
492 | return dst_fs.exists(dst_path) | |
493 | ||
494 | elif condition == "not_exists": | |
495 | return not dst_fs.exists(dst_path) | |
496 | ||
497 | else: | |
498 | raise ValueError("{} is not a valid copy condition.".format(condition)) | |
499 | ||
500 | ||
def copy_modified_time(
    src_fs,  # type: Union[FS, Text]
    src_path,  # type: Text
    dst_fs,  # type: Union[FS, Text]
    dst_path,  # type: Text
):
    # type: (...) -> None
    """Copy modified time metadata from one file to another.

    Arguments:
        src_fs (FS or str): Source filesystem (instance or URL).
        src_path (str): Path to a file on the source filesystem.
        dst_fs (FS or str): Destination filesystem (instance or URL).
        dst_path (str): Path to a file on the destination filesystem.

    """
    namespaces = ("details",)
    with manage_fs(src_fs, writeable=False) as _src_fs:
        with manage_fs(dst_fs, create=True) as _dst_fs:
            src_meta = _src_fs.getinfo(src_path, namespaces)
            src_details = src_meta.raw.get("details", {})
            # Only propagate the time fields; other "details" values
            # of the destination must not be overwritten.
            dst_details = {
                key: src_details[key]
                for key in ("metadata_changed", "modified")
                if key in src_details
            }
            _dst_fs.setinfo(dst_path, {"details": dst_details})
0 | 0 | """Enums used by PyFilesystem. |
1 | 1 | """ |
2 | 2 | |
3 | from __future__ import absolute_import | |
4 | from __future__ import unicode_literals | |
3 | from __future__ import absolute_import, unicode_literals | |
5 | 4 | |
6 | 5 | import os |
7 | 6 | from enum import IntEnum, unique |
0 | 0 | """Tools for managing OS errors. |
1 | 1 | """ |
2 | 2 | |
3 | from __future__ import print_function | |
4 | from __future__ import unicode_literals | |
3 | from __future__ import print_function, unicode_literals | |
4 | ||
5 | import sys | |
6 | import typing | |
5 | 7 | |
6 | 8 | import errno |
7 | 9 | import platform |
8 | import sys | |
9 | import typing | |
10 | 10 | from contextlib import contextmanager |
11 | ||
12 | 11 | from six import reraise |
13 | 12 | |
14 | 13 | from . import errors |
15 | 14 | |
16 | 15 | if typing.TYPE_CHECKING: |
16 | from typing import Iterator, Optional, Text, Type, Union | |
17 | ||
17 | 18 | from types import TracebackType |
18 | from typing import Iterator, Optional, Text, Type, Union | |
19 | 19 | |
20 | 20 | try: |
21 | 21 | from collections.abc import Mapping |
27 | 27 | |
28 | 28 | |
29 | 29 | class _ConvertOSErrors(object): |
30 | """Context manager to convert OSErrors in to FS Errors. | |
31 | """ | |
30 | """Context manager to convert OSErrors in to FS Errors.""" | |
32 | 31 | |
33 | 32 | FILE_ERRORS = { |
34 | 33 | 64: errors.RemoteConnectionError, # ENONET |
7 | 7 | |
8 | 8 | """ |
9 | 9 | |
10 | from __future__ import unicode_literals | |
11 | from __future__ import print_function | |
10 | from __future__ import print_function, unicode_literals | |
11 | ||
12 | import typing | |
12 | 13 | |
13 | 14 | import functools |
14 | import typing | |
15 | ||
16 | 15 | import six |
17 | 16 | from six import text_type |
18 | 17 | |
50 | 49 | "ResourceNotFound", |
51 | 50 | "ResourceReadOnly", |
52 | 51 | "Unsupported", |
52 | "UnsupportedHash", | |
53 | 53 | ] |
54 | 54 | |
55 | 55 | |
56 | 56 | class MissingInfoNamespace(AttributeError): |
57 | """An expected namespace is missing. | |
58 | """ | |
59 | ||
60 | def __init__(self, namespace): | |
57 | """An expected namespace is missing.""" | |
58 | ||
59 | def __init__(self, namespace): # noqa: D107 | |
61 | 60 | # type: (Text) -> None |
62 | 61 | self.namespace = namespace |
63 | 62 | msg = "namespace '{}' is required for this attribute" |
69 | 68 | |
70 | 69 | @six.python_2_unicode_compatible |
71 | 70 | class FSError(Exception): |
72 | """Base exception for the `fs` module. | |
73 | """ | |
71 | """Base exception for the `fs` module.""" | |
74 | 72 | |
75 | 73 | default_message = "Unspecified error" |
76 | 74 | |
77 | def __init__(self, msg=None): | |
75 | def __init__(self, msg=None): # noqa: D107 | |
78 | 76 | # type: (Optional[Text]) -> None |
79 | 77 | self._msg = msg or self.default_message |
80 | 78 | super(FSError, self).__init__() |
81 | 79 | |
82 | 80 | def __str__(self): |
83 | 81 | # type: () -> Text |
84 | """Return the error message. | |
85 | """ | |
82 | """Return the error message.""" | |
86 | 83 | msg = self._msg.format(**self.__dict__) |
87 | 84 | return msg |
88 | 85 | |
93 | 90 | |
94 | 91 | |
class FilesystemClosed(FSError):
    """Raised when a closed filesystem is used."""

    default_message = "attempt to use closed filesystem"
100 | 96 | |
104 | 100 | |
105 | 101 | default_message = "One or more copy operations failed (see errors attribute)" |
106 | 102 | |
107 | def __init__(self, errors): | |
103 | def __init__(self, errors): # noqa: D107 | |
108 | 104 | self.errors = errors |
109 | 105 | super(BulkCopyFailed, self).__init__() |
110 | 106 | |
111 | 107 | |
112 | 108 | class CreateFailed(FSError): |
113 | """Filesystem could not be created. | |
114 | """ | |
109 | """Filesystem could not be created.""" | |
115 | 110 | |
116 | 111 | default_message = "unable to create filesystem, {details}" |
117 | 112 | |
118 | def __init__(self, msg=None, exc=None): | |
113 | def __init__(self, msg=None, exc=None): # noqa: D107 | |
119 | 114 | # type: (Optional[Text], Optional[Exception]) -> None |
120 | 115 | self._msg = msg or self.default_message |
121 | 116 | self.details = "" if exc is None else text_type(exc) |
139 | 134 | |
140 | 135 | |
class PathError(FSError):
    """Base exception for errors concerning a path string."""

    default_message = "path '{path}' is invalid"

    def __init__(self, path, msg=None, exc=None):  # noqa: D107
        # type: (Text, Optional[Text], Optional[Exception]) -> None
        self.exc = exc
        self.path = path
        super(PathError, self).__init__(msg=msg)

    def __reduce__(self):
        # Support pickling by rebuilding from the constructor arguments.
        return type(self), (self.path, self._msg, self.exc)
155 | 150 | |
class NoSysPath(PathError):
    """Raised when the filesystem cannot provide a *sys path* for a resource."""

    default_message = "path '{path}' does not map to the local filesystem"
161 | 155 | |
162 | 156 | |
163 | 157 | class NoURL(PathError): |
164 | """The filesystem does not provide an URL for the resource. | |
165 | """ | |
158 | """The filesystem does not provide an URL for the resource.""" | |
166 | 159 | |
167 | 160 | default_message = "path '{path}' has no '{purpose}' URL" |
168 | 161 | |
169 | def __init__(self, path, purpose, msg=None): | |
162 | def __init__(self, path, purpose, msg=None): # noqa: D107 | |
170 | 163 | # type: (Text, Text, Optional[Text]) -> None |
171 | 164 | self.purpose = purpose |
172 | 165 | super(NoURL, self).__init__(path, msg=msg) |
176 | 169 | |
177 | 170 | |
class InvalidPath(PathError):
    """Raised when a path cannot be mapped onto the underlying filesystem."""

    default_message = "path '{path}' is invalid on this filesystem "
183 | 175 | |
184 | 176 | |
class InvalidCharsInPath(InvalidPath):
    """Raised when a path holds characters this filesystem forbids."""

    default_message = "path '{path}' contains invalid characters"
190 | 181 | |
191 | 182 | |
192 | 183 | class OperationFailed(FSError): |
193 | """A specific operation failed. | |
194 | """ | |
184 | """A specific operation failed.""" | |
195 | 185 | |
196 | 186 | default_message = "operation failed, {details}" |
197 | 187 | |
200 | 190 | path=None, # type: Optional[Text] |
201 | 191 | exc=None, # type: Optional[Exception] |
202 | 192 | msg=None, # type: Optional[Text] |
203 | ): | |
193 | ): # noqa: D107 | |
204 | 194 | # type: (...) -> None |
205 | 195 | self.path = path |
206 | 196 | self.exc = exc |
213 | 203 | |
214 | 204 | |
class Unsupported(OperationFailed):
    """Raised when the filesystem does not support an operation."""

    default_message = "not supported"
220 | 209 | |
221 | 210 | |
class RemoteConnectionError(OperationFailed):
    """Raised when an operation hits remote connection trouble."""

    default_message = "remote connection error"
227 | 215 | |
228 | 216 | |
class InsufficientStorage(OperationFailed):
    """Raised when there is not enough storage for the requested operation."""

    default_message = "insufficient storage space"
234 | 221 | |
235 | 222 | |
class PermissionDenied(OperationFailed):
    """Raised when permissions are insufficient for the operation."""

    default_message = "permission denied"
241 | 227 | |
242 | 228 | |
class OperationTimeout(OperationFailed):
    """Raised when the filesystem takes too long to respond."""

    default_message = "operation timed out"
248 | 233 | |
249 | 234 | |
class RemoveRootError(OperationFailed):
    """Raised on an attempt to remove the root directory."""

    default_message = "root directory may not be removed"
255 | 239 | |
256 | 240 | |
257 | 241 | class ResourceError(FSError): |
258 | """Base exception class for error associated with a specific resource. | |
259 | """ | |
242 | """Base exception class for error associated with a specific resource.""" | |
260 | 243 | |
261 | 244 | default_message = "failed on path {path}" |
262 | 245 | |
263 | def __init__(self, path, exc=None, msg=None): | |
246 | def __init__(self, path, exc=None, msg=None): # noqa: D107 | |
264 | 247 | # type: (Text, Optional[Exception], Optional[Text]) -> None |
265 | 248 | self.path = path |
266 | 249 | self.exc = exc |
271 | 254 | |
272 | 255 | |
class ResourceNotFound(ResourceError):
    """Raised when a required resource cannot be found."""

    default_message = "resource '{path}' not found"
278 | 260 | |
279 | 261 | |
class ResourceInvalid(ResourceError):
    """Raised when a resource has the wrong type for an operation."""

    default_message = "resource '{path}' is invalid for this operation"
285 | 266 | |
286 | 267 | |
class FileExists(ResourceError):
    """Raised when a file already exists."""

    default_message = "resource '{path}' exists"
292 | 272 | |
293 | 273 | |
class FileExpected(ResourceInvalid):
    """Raised when a file-only operation is given a non-file resource."""

    default_message = "path '{path}' should be a file"
299 | 278 | |
300 | 279 | |
class DirectoryExpected(ResourceInvalid):
    """Raised when a directory-only operation is given a non-directory resource."""

    default_message = "path '{path}' should be a directory"
306 | 284 | |
307 | 285 | |
class DestinationExists(ResourceError):
    """Raised when the target destination already exists."""

    default_message = "destination '{path}' exists"
313 | 290 | |
314 | 291 | |
class DirectoryExists(ResourceError):
    """Raised when a directory already exists."""

    default_message = "directory '{path}' exists"
320 | 296 | |
321 | 297 | |
class DirectoryNotEmpty(ResourceError):
    """Raised on an attempt to remove a directory that is not empty."""

    default_message = "directory '{path}' is not empty"
327 | 302 | |
328 | 303 | |
class ResourceLocked(ResourceError):
    """Raised on an attempt to use a locked resource."""

    default_message = "resource '{path}' is locked"
334 | 308 | |
335 | 309 | |
class ResourceReadOnly(ResourceError):
    """Raised when attempting to modify a read-only resource."""

    default_message = "resource '{path}' is read only"
341 | 314 | |
353 | 326 | |
354 | 327 | """ |
355 | 328 | |
356 | def __init__(self, path): | |
329 | def __init__(self, path): # noqa: D107 | |
357 | 330 | # type: (Text) -> None |
358 | 331 | self.path = path |
359 | 332 | msg = ("path '{path}' contains back-references outside of filesystem").format( |
10 | 10 | |
11 | 11 | """ |
12 | 12 | |
13 | from __future__ import division | |
14 | from __future__ import unicode_literals | |
13 | from __future__ import division, unicode_literals | |
15 | 14 | |
16 | 15 | import typing |
17 | 16 | |
35 | 34 | |
36 | 35 | # TODO (dargueta): Don't rely on unit or suffix being defined in the loop. |
37 | 36 | for i, suffix in enumerate(suffixes, 2): # noqa: B007 |
38 | unit = base ** i | |
37 | unit = base**i | |
39 | 38 | if size < unit: |
40 | 39 | break |
41 | 40 | return "{:,.1f} {}".format((base * size / unit), suffix) |
60 | 59 | `str`: A string containing an abbreviated file size and units. |
61 | 60 | |
62 | 61 | Example: |
63 | >>> filesize.traditional(30000) | |
62 | >>> fs.filesize.traditional(30000) | |
64 | 63 | '29.3 KB' |
65 | 64 | |
66 | 65 | """ |
86 | 85 | `str`: A string containing a abbreviated file size and units. |
87 | 86 | |
88 | 87 | Example: |
89 | >>> filesize.binary(30000) | |
88 | >>> fs.filesize.binary(30000) | |
90 | 89 | '29.3 KiB' |
91 | 90 | |
92 | 91 | """ |
111 | 110 | `str`: A string containing a abbreviated file size and units. |
112 | 111 | |
113 | 112 | Example: |
114 | >>> filesize.decimal(30000) | |
113 | >>> fs.filesize.decimal(30000) | |
115 | 114 | '30.0 kB' |
116 | 115 | |
117 | 116 | """ |
0 | 0 | """Manage filesystems on remote FTP servers. |
1 | 1 | """ |
2 | 2 | |
3 | from __future__ import print_function | |
4 | from __future__ import unicode_literals | |
5 | ||
3 | from __future__ import print_function, unicode_literals | |
4 | ||
5 | import typing | |
6 | ||
7 | import array | |
6 | 8 | import calendar |
9 | import datetime | |
7 | 10 | import io |
8 | 11 | import itertools |
9 | 12 | import socket |
10 | 13 | import threading |
11 | import typing | |
12 | 14 | from collections import OrderedDict |
13 | 15 | from contextlib import contextmanager |
14 | 16 | from ftplib import FTP |
15 | from ftplib import error_perm | |
16 | from ftplib import error_temp | |
17 | ||
18 | try: | |
19 | from ftplib import FTP_TLS | |
20 | except ImportError as err: | |
21 | FTP_TLS = err # type: ignore | |
17 | 22 | from typing import cast |
18 | 23 | |
19 | from six import PY2 | |
20 | from six import text_type | |
21 | ||
24 | from ftplib import error_perm, error_temp | |
25 | from six import PY2, raise_from, text_type | |
26 | ||
27 | from . import _ftp_parse as ftp_parse | |
22 | 28 | from . import errors |
23 | 29 | from .base import FS |
24 | 30 | from .constants import DEFAULT_CHUNK_SIZE |
25 | from .enums import ResourceType | |
26 | from .enums import Seek | |
31 | from .enums import ResourceType, Seek | |
27 | 32 | from .info import Info |
28 | 33 | from .iotools import line_iterator |
29 | 34 | from .mode import Mode |
30 | from .path import abspath | |
31 | from .path import dirname | |
32 | from .path import basename | |
33 | from .path import normpath | |
34 | from .path import split | |
35 | from . import _ftp_parse as ftp_parse | |
35 | from .path import abspath, basename, dirname, normpath, split | |
36 | from .time import epoch_to_datetime | |
36 | 37 | |
37 | 38 | if typing.TYPE_CHECKING: |
38 | import ftplib | |
39 | 39 | from typing import ( |
40 | 40 | Any, |
41 | 41 | BinaryIO, |
42 | 42 | ByteString, |
43 | Container, | |
43 | 44 | ContextManager, |
45 | Dict, | |
44 | 46 | Iterable, |
45 | 47 | Iterator, |
46 | Container, | |
47 | Dict, | |
48 | 48 | List, |
49 | 49 | Optional, |
50 | 50 | SupportsInt, |
52 | 52 | Tuple, |
53 | 53 | Union, |
54 | 54 | ) |
55 | ||
56 | import ftplib | |
57 | import mmap | |
58 | ||
55 | 59 | from .base import _OpendirFactory |
56 | 60 | from .info import RawInfo |
57 | 61 | from .permissions import Permissions |
121 | 125 | # type: (Union[Text, bytes], Text) -> Text |
122 | 126 | return st.decode(encoding, "replace") if isinstance(st, bytes) else st |
123 | 127 | |
124 | ||
125 | 128 | else: |
126 | 129 | |
127 | 130 | def _encode(st, _): |
235 | 238 | return b"".join(chunks) |
236 | 239 | |
237 | 240 | def readinto(self, buffer): |
238 | # type: (bytearray) -> int | |
241 | # type: (Union[bytearray, memoryview, array.array[Any], mmap.mmap]) -> int | |
239 | 242 | data = self.read(len(buffer)) |
240 | 243 | bytes_read = len(data) |
241 | buffer[:bytes_read] = data | |
244 | if isinstance(buffer, array.array): | |
245 | buffer[:bytes_read] = array.array(buffer.typecode, data) | |
246 | else: | |
247 | buffer[:bytes_read] = data # type: ignore | |
242 | 248 | return bytes_read |
243 | 249 | |
244 | def readline(self, size=-1): | |
245 | # type: (int) -> bytes | |
250 | def readline(self, size=None): | |
251 | # type: (Optional[int]) -> bytes | |
246 | 252 | return next(line_iterator(self, size)) # type: ignore |
247 | 253 | |
248 | 254 | def readlines(self, hint=-1): |
261 | 267 | return self.mode.writing |
262 | 268 | |
263 | 269 | def write(self, data): |
264 | # type: (bytes) -> int | |
270 | # type: (Union[bytes, memoryview, array.array[Any], mmap.mmap]) -> int | |
265 | 271 | if not self.mode.writing: |
266 | 272 | raise IOError("File not open for writing") |
273 | ||
274 | if isinstance(data, array.array): | |
275 | data = data.tobytes() | |
267 | 276 | |
268 | 277 | with self._lock: |
269 | 278 | conn = self.write_conn |
280 | 289 | return data_pos |
281 | 290 | |
282 | 291 | def writelines(self, lines): |
283 | # type: (Iterable[bytes]) -> None | |
284 | self.write(b"".join(lines)) | |
292 | # type: (Iterable[Union[bytes, memoryview, array.array[Any], mmap.mmap]]) -> None # noqa: E501 | |
293 | if not self.mode.writing: | |
294 | raise IOError("File not open for writing") | |
295 | data = bytearray() | |
296 | for line in lines: | |
297 | if isinstance(line, array.array): | |
298 | data.extend(line.tobytes()) | |
299 | else: | |
300 | data.extend(line) # type: ignore | |
301 | self.write(data) | |
285 | 302 | |
286 | 303 | def truncate(self, size=None): |
287 | 304 | # type: (Optional[int]) -> int |
331 | 348 | class FTPFS(FS): |
332 | 349 | """A FTP (File Transport Protocol) Filesystem. |
333 | 350 | |
334 | Arguments: | |
335 | host (str): A FTP host, e.g. ``'ftp.mirror.nl'``. | |
336 | user (str): A username (default is ``'anonymous'``). | |
337 | passwd (str): Password for the server, or `None` for anon. | |
338 | acct (str): FTP account. | |
339 | timeout (int): Timeout for contacting server (in seconds, | |
340 | defaults to 10). | |
341 | port (int): FTP port number (default 21). | |
342 | proxy (str, optional): An FTP proxy, or ``None`` (default) | |
343 | for no proxy. | |
351 | Optionally, the connection can be made securely via TLS. This is known as | |
352 | FTPS, or FTP Secure. TLS will be enabled when using the ftps:// protocol, | |
353 | or when setting the `tls` argument to True in the constructor. | |
354 | ||
355 | Examples: | |
356 | Create with the constructor:: | |
357 | ||
358 | >>> from fs.ftpfs import FTPFS | |
359 | >>> ftp_fs = FTPFS("demo.wftpserver.com") | |
360 | ||
361 | Or via an FS URL:: | |
362 | ||
363 | >>> ftp_fs = fs.open_fs('ftp://test.rebex.net') | |
364 | ||
365 | Or via an FS URL, using TLS:: | |
366 | ||
367 | >>> ftp_fs = fs.open_fs('ftps://demo.wftpserver.com') | |
368 | ||
369 | You can also use a non-anonymous username, and optionally a | |
370 | password, even within a FS URL:: | |
371 | ||
372 | >>> ftp_fs = FTPFS("test.rebex.net", user="demo", passwd="password") | |
373 | >>> ftp_fs = fs.open_fs('ftp://demo:password@test.rebex.net') | |
374 | ||
375 | Connecting via a proxy is supported. If using a FS URL, the proxy | |
376 | URL will need to be added as a URL parameter:: | |
377 | ||
378 | >>> ftp_fs = FTPFS("ftp.ebi.ac.uk", proxy="test.rebex.net") | |
379 | >>> ftp_fs = fs.open_fs('ftp://ftp.ebi.ac.uk/?proxy=test.rebex.net') | |
344 | 380 | |
345 | 381 | """ |
346 | 382 | |
362 | 398 | timeout=10, # type: int |
363 | 399 | port=21, # type: int |
364 | 400 | proxy=None, # type: Optional[Text] |
401 | tls=False, # type: bool | |
365 | 402 | ): |
366 | 403 | # type: (...) -> None |
404 | """Create a new `FTPFS` instance. | |
405 | ||
406 | Arguments: | |
407 | host (str): A FTP host, e.g. ``'ftp.mirror.nl'``. | |
408 | user (str): A username (default is ``'anonymous'``). | |
409 | passwd (str): Password for the server, or `None` for anon. | |
410 | acct (str): FTP account. | |
411 | timeout (int): Timeout for contacting server (in seconds, | |
412 | defaults to 10). | |
413 | port (int): FTP port number (default 21). | |
414 | proxy (str, optional): An FTP proxy, or ``None`` (default) | |
415 | for no proxy. | |
416 | tls (bool): Attempt to use FTP over TLS (FTPS) (default: False) | |
417 | ||
418 | """ | |
367 | 419 | super(FTPFS, self).__init__() |
368 | 420 | self._host = host |
369 | 421 | self._user = user |
372 | 424 | self.timeout = timeout |
373 | 425 | self.port = port |
374 | 426 | self.proxy = proxy |
427 | self.tls = tls | |
428 | ||
429 | if self.tls and isinstance(FTP_TLS, Exception): | |
430 | raise_from(errors.CreateFailed("FTP over TLS not supported"), FTP_TLS) | |
375 | 431 | |
376 | 432 | self.encoding = "latin-1" |
377 | 433 | self._ftp = None # type: Optional[FTP] |
402 | 458 | @classmethod |
403 | 459 | def _parse_features(cls, feat_response): |
404 | 460 | # type: (Text) -> Dict[Text, Text] |
405 | """Parse a dict of features from FTP feat response. | |
406 | """ | |
461 | """Parse a dict of features from FTP feat response.""" | |
407 | 462 | features = {} |
408 | 463 | if feat_response.split("-")[0] == "211": |
409 | 464 | for line in feat_response.splitlines(): |
414 | 469 | |
415 | 470 | def _open_ftp(self): |
416 | 471 | # type: () -> FTP |
417 | """Open a new ftp object. | |
418 | """ | |
419 | _ftp = FTP() | |
472 | """Open a new ftp object.""" | |
473 | _ftp = FTP_TLS() if self.tls else FTP() | |
420 | 474 | _ftp.set_debuglevel(0) |
421 | 475 | with ftp_errors(self): |
422 | 476 | _ftp.connect(self.host, self.port, self.timeout) |
423 | 477 | _ftp.login(self.user, self.passwd, self.acct) |
478 | try: | |
479 | _ftp.prot_p() # type: ignore | |
480 | except AttributeError: | |
481 | pass | |
424 | 482 | self._features = {} |
425 | 483 | try: |
426 | 484 | feat_response = _decode(_ftp.sendcmd("FEAT"), "latin-1") |
455 | 513 | _user_part = "" |
456 | 514 | else: |
457 | 515 | _user_part = "{}:{}@".format(self.user, self.passwd) |
458 | url = "ftp://{}{}".format(_user_part, _host_part) | |
516 | ||
517 | scheme = "ftps" if self.tls else "ftp" | |
518 | url = "{}://{}{}".format(scheme, _user_part, _host_part) | |
459 | 519 | return url |
460 | 520 | |
461 | 521 | @property |
462 | 522 | def ftp(self): |
463 | 523 | # type: () -> FTP |
464 | """~ftplib.FTP: the underlying FTP client. | |
465 | """ | |
524 | """~ftplib.FTP: the underlying FTP client.""" | |
466 | 525 | return self._get_ftp() |
467 | 526 | |
468 | 527 | def geturl(self, path, purpose="download"): |
480 | 539 | return self._ftp |
481 | 540 | |
482 | 541 | @property |
483 | def features(self): | |
542 | def features(self): # noqa: D401 | |
484 | 543 | # type: () -> Dict[Text, Text] |
485 | """dict: features of the remote FTP server. | |
486 | """ | |
544 | """`dict`: Features of the remote FTP server.""" | |
487 | 545 | self._get_ftp() |
488 | 546 | return self._features |
489 | 547 | |
505 | 563 | @property |
506 | 564 | def supports_mlst(self): |
507 | 565 | # type: () -> bool |
508 | """bool: whether the server supports MLST feature. | |
509 | """ | |
566 | """bool: whether the server supports MLST feature.""" | |
510 | 567 | return "MLST" in self.features |
568 | ||
569 | @property | |
570 | def supports_mdtm(self): | |
571 | # type: () -> bool | |
572 | """bool: whether the server supports the MDTM feature.""" | |
573 | return "MDTM" in self.features | |
511 | 574 | |
512 | 575 | def create(self, path, wipe=False): |
513 | 576 | # type: (Text, bool) -> bool |
524 | 587 | @classmethod |
525 | 588 | def _parse_ftp_time(cls, time_text): |
526 | 589 | # type: (Text) -> Optional[int] |
527 | """Parse a time from an ftp directory listing. | |
528 | """ | |
590 | """Parse a time from an ftp directory listing.""" | |
529 | 591 | try: |
530 | 592 | tm_year = int(time_text[0:4]) |
531 | 593 | tm_month = int(time_text[4:6]) |
630 | 692 | if namespace == "standard": |
631 | 693 | _meta = self._meta.copy() |
632 | 694 | _meta["unicode_paths"] = "UTF8" in self.features |
695 | _meta["supports_mtime"] = "MDTM" in self.features | |
633 | 696 | return _meta |
697 | ||
698 | def getmodified(self, path): | |
699 | # type: (Text) -> Optional[datetime.datetime] | |
700 | if self.supports_mdtm: | |
701 | _path = self.validatepath(path) | |
702 | with self._lock: | |
703 | with ftp_errors(self, path=path): | |
704 | cmd = "MDTM " + _encode(_path, self.ftp.encoding) | |
705 | response = self.ftp.sendcmd(cmd) | |
706 | mtime = self._parse_ftp_time(response.split()[1]) | |
707 | return epoch_to_datetime(mtime) | |
708 | return super(FTPFS, self).getmodified(path) | |
634 | 709 | |
635 | 710 | def listdir(self, path): |
636 | 711 | # type: (Text) -> List[Text] |
761 | 836 | # type: (Text, BinaryIO, Optional[int], **Any) -> None |
762 | 837 | _path = self.validatepath(path) |
763 | 838 | with self._lock: |
764 | with self._manage_ftp() as ftp: | |
765 | with ftp_errors(self, path): | |
766 | ftp.storbinary( | |
767 | str("STOR ") + _encode(_path, self.ftp.encoding), file | |
768 | ) | |
839 | with ftp_errors(self, path): | |
840 | self.ftp.storbinary( | |
841 | str("STOR ") + _encode(_path, self.ftp.encoding), file | |
842 | ) | |
769 | 843 | |
770 | 844 | def writebytes(self, path, contents): |
771 | 845 | # type: (Text, ByteString) -> None |
775 | 849 | |
776 | 850 | def setinfo(self, path, info): |
777 | 851 | # type: (Text, RawInfo) -> None |
778 | if not self.exists(path): | |
779 | raise errors.ResourceNotFound(path) | |
852 | use_mfmt = False | |
853 | if "MFMT" in self.features: | |
854 | info_details = None | |
855 | if "modified" in info: | |
856 | info_details = info["modified"] | |
857 | elif "details" in info: | |
858 | info_details = info["details"] | |
859 | if info_details and "modified" in info_details: | |
860 | use_mfmt = True | |
861 | mtime = cast(float, info_details["modified"]) | |
862 | ||
863 | if use_mfmt: | |
864 | with ftp_errors(self, path): | |
865 | cmd = ( | |
866 | "MFMT " | |
867 | + datetime.datetime.utcfromtimestamp(mtime).strftime("%Y%m%d%H%M%S") | |
868 | + " " | |
869 | + _encode(path, self.ftp.encoding) | |
870 | ) | |
871 | try: | |
872 | self.ftp.sendcmd(cmd) | |
873 | except error_perm: | |
874 | pass | |
875 | else: | |
876 | if not self.exists(path): | |
877 | raise errors.ResourceNotFound(path) | |
780 | 878 | |
781 | 879 | def readbytes(self, path): |
782 | 880 | # type: (Text) -> bytes |
0 | """Useful functions for working with glob patterns. | |
1 | """ | |
2 | ||
0 | 3 | from __future__ import unicode_literals |
1 | 4 | |
5 | import typing | |
6 | ||
7 | import re | |
2 | 8 | from collections import namedtuple |
3 | import re | |
4 | import typing | |
5 | ||
9 | ||
10 | from . import wildcard | |
11 | from ._repr import make_repr | |
6 | 12 | from .lrucache import LRUCache |
7 | from ._repr import make_repr | |
8 | 13 | from .path import iteratepath |
9 | from . import wildcard | |
10 | ||
11 | 14 | |
12 | 15 | GlobMatch = namedtuple("GlobMatch", ["path", "info"]) |
13 | 16 | Counts = namedtuple("Counts", ["files", "directories", "data"]) |
15 | 18 | |
16 | 19 | if typing.TYPE_CHECKING: |
17 | 20 | from typing import Iterator, List, Optional, Pattern, Text, Tuple |
21 | ||
18 | 22 | from .base import FS |
19 | 23 | |
20 | 24 | |
91 | 95 | |
92 | 96 | |
93 | 97 | class Globber(object): |
94 | """A generator of glob results. | |
95 | ||
96 | Arguments: | |
97 | fs (~fs.base.FS): A filesystem object | |
98 | pattern (str): A glob pattern, e.g. ``"**/*.py"`` | |
99 | path (str): A path to a directory in the filesystem. | |
100 | namespaces (list): A list of additional info namespaces. | |
101 | case_sensitive (bool): If ``True``, the path matching will be | |
102 | case *sensitive* i.e. ``"FOO.py"`` and ``"foo.py"`` will | |
103 | be different, otherwise path matching will be case *insensitive*. | |
104 | exclude_dirs (list): A list of patterns to exclude when searching, | |
105 | e.g. ``["*.git"]``. | |
106 | ||
107 | """ | |
98 | """A generator of glob results.""" | |
108 | 99 | |
109 | 100 | def __init__( |
110 | 101 | self, |
116 | 107 | exclude_dirs=None, |
117 | 108 | ): |
118 | 109 | # type: (FS, str, str, Optional[List[str]], bool, Optional[List[str]]) -> None |
110 | """Create a new Globber instance. | |
111 | ||
112 | Arguments: | |
113 | fs (~fs.base.FS): A filesystem object | |
114 | pattern (str): A glob pattern, e.g. ``"**/*.py"`` | |
115 | path (str): A path to a directory in the filesystem. | |
116 | namespaces (list): A list of additional info namespaces. | |
117 | case_sensitive (bool): If ``True``, the path matching will be | |
118 | case *sensitive* i.e. ``"FOO.py"`` and ``"foo.py"`` will be | |
119 | different, otherwise path matching will be case *insensitive*. | |
120 | exclude_dirs (list): A list of patterns to exclude when searching, | |
121 | e.g. ``["*.git"]``. | |
122 | ||
123 | """ | |
119 | 124 | self.fs = fs |
120 | 125 | self.pattern = pattern |
121 | 126 | self.path = path |
159 | 164 | |
160 | 165 | def __iter__(self): |
161 | 166 | # type: () -> Iterator[GlobMatch] |
162 | """An iterator of :class:`fs.glob.GlobMatch` objects.""" | |
167 | """Get an iterator of :class:`fs.glob.GlobMatch` objects.""" | |
163 | 168 | return self._make_iter() |
164 | 169 | |
165 | 170 | def count(self): |
167 | 172 | """Count files / directories / data in matched paths. |
168 | 173 | |
169 | 174 | Example: |
170 | >>> import fs | |
171 | >>> fs.open_fs('~/projects').glob('**/*.py').count() | |
172 | Counts(files=18519, directories=0, data=206690458) | |
175 | >>> my_fs.glob('**/*.py').count() | |
176 | Counts(files=2, directories=0, data=55) | |
173 | 177 | |
174 | 178 | Returns: |
175 | 179 | `~Counts`: A named tuple containing results. |
194 | 198 | `~LineCounts`: A named tuple containing line counts. |
195 | 199 | |
196 | 200 | Example: |
197 | >>> import fs | |
198 | >>> fs.open_fs('~/projects').glob('**/*.py').count_lines() | |
199 | LineCounts(lines=5767102, non_blank=4915110) | |
200 | ||
201 | """ | |
202 | ||
201 | >>> my_fs.glob('**/*.py').count_lines() | |
202 | LineCounts(lines=4, non_blank=3) | |
203 | ||
204 | """ | |
203 | 205 | lines = 0 |
204 | 206 | non_blank = 0 |
205 | 207 | for path, info in self._make_iter(): |
212 | 214 | |
213 | 215 | def remove(self): |
214 | 216 | # type: () -> int |
215 | """Removed all matched paths. | |
217 | """Remove all matched paths. | |
216 | 218 | |
217 | 219 | Returns: |
218 | 220 | int: Number of file and directories removed. |
219 | 221 | |
220 | 222 | Example: |
221 | >>> import fs | |
222 | >>> fs.open_fs('~/projects/my_project').glob('**/*.pyc').remove() | |
223 | 29 | |
223 | >>> my_fs.glob('**/*.pyc').remove() | |
224 | 2 | |
224 | 225 | |
225 | 226 | """ |
226 | 227 | removes = 0 |
234 | 235 | |
235 | 236 | |
236 | 237 | class BoundGlobber(object): |
237 | """A :class:`~Globber` object bound to a filesystem. | |
238 | """A `~fs.glob.Globber` object bound to a filesystem. | |
238 | 239 | |
239 | 240 | An instance of this object is available on every Filesystem object |
240 | as ``.glob``. | |
241 | ||
242 | Arguments: | |
243 | fs (FS): A filesystem object. | |
241 | as the `~fs.base.FS.glob` property. | |
244 | 242 | |
245 | 243 | """ |
246 | 244 | |
248 | 246 | |
249 | 247 | def __init__(self, fs): |
250 | 248 | # type: (FS) -> None |
249 | """Create a new bound Globber. | |
250 | ||
251 | Arguments: | |
252 | fs (FS): A filesystem object to bind to. | |
253 | ||
254 | """ | |
251 | 255 | self.fs = fs |
252 | 256 | |
253 | 257 | def __repr__(self): |
269 | 273 | e.g. ``["*.git"]``. |
270 | 274 | |
271 | 275 | Returns: |
272 | `~Globber`: | |
273 | An object that may be queried for the glob matches. | |
274 | ||
276 | `Globber`: An object that may be queried for the glob matches. | |
275 | 277 | |
276 | 278 | """ |
277 | 279 | return Globber( |
0 | 0 | """Container for filesystem resource informations. |
1 | 1 | """ |
2 | 2 | |
3 | from __future__ import absolute_import | |
4 | from __future__ import print_function | |
5 | from __future__ import unicode_literals | |
3 | from __future__ import absolute_import, print_function, unicode_literals | |
6 | 4 | |
7 | 5 | import typing |
8 | 6 | from typing import cast |
7 | ||
8 | import six | |
9 | 9 | from copy import deepcopy |
10 | 10 | |
11 | import six | |
12 | ||
13 | from .path import join | |
11 | from ._typing import Text, overload | |
14 | 12 | from .enums import ResourceType |
15 | 13 | from .errors import MissingInfoNamespace |
14 | from .path import join | |
16 | 15 | from .permissions import Permissions |
17 | 16 | from .time import epoch_to_datetime |
18 | from ._typing import overload, Text | |
19 | 17 | |
20 | 18 | if typing.TYPE_CHECKING: |
19 | from typing import Any, Callable, List, Mapping, Optional, Union | |
20 | ||
21 | 21 | from datetime import datetime |
22 | from typing import Any, Callable, List, Mapping, Optional, Union | |
23 | 22 | |
24 | 23 | RawInfo = Mapping[Text, Mapping[Text, object]] |
25 | 24 | ToDatetime = Callable[[int], datetime] |
40 | 39 | raw_info (dict): A dict containing resource info. |
41 | 40 | to_datetime (callable): A callable that converts an |
42 | 41 | epoch time to a datetime object. The default uses |
43 | :func:`~fs.time.epoch_to_datetime`. | |
42 | `~fs.time.epoch_to_datetime`. | |
44 | 43 | |
45 | 44 | """ |
46 | 45 | |
48 | 47 | |
49 | 48 | def __init__(self, raw_info, to_datetime=epoch_to_datetime): |
50 | 49 | # type: (RawInfo, ToDatetime) -> None |
51 | """Create a resource info object from a raw info dict. | |
52 | """ | |
50 | """Create a resource info object from a raw info dict.""" | |
53 | 51 | self.raw = raw_info |
54 | 52 | self._to_datetime = to_datetime |
55 | 53 | self.namespaces = frozenset(self.raw.keys()) |
72 | 70 | # type: (None) -> None |
73 | 71 | pass |
74 | 72 | |
75 | @overload # noqa: F811 | |
76 | def _make_datetime(self, t): | |
73 | @overload | |
74 | def _make_datetime(self, t): # noqa: F811 | |
77 | 75 | # type: (int) -> datetime |
78 | 76 | pass |
79 | 77 | |
90 | 88 | pass |
91 | 89 | |
92 | 90 | @overload # noqa: F811 |
93 | def get(self, namespace, key, default): | |
91 | def get(self, namespace, key, default): # noqa: F811 | |
94 | 92 | # type: (Text, Text, T) -> Union[Any, T] |
95 | 93 | pass |
96 | 94 | |
106 | 104 | is not found. |
107 | 105 | |
108 | 106 | Example: |
109 | >>> info.get('access', 'permissions') | |
110 | ['u_r', 'u_w', '_wx'] | |
107 | >>> info = my_fs.getinfo("foo.py", namespaces=["details"]) | |
108 | >>> info.get('details', 'type') | |
109 | 2 | |
111 | 110 | |
112 | 111 | """ |
113 | 112 | try: |
131 | 130 | # type: (Text, Text) -> bool |
132 | 131 | """Check if a given key in a namespace is writable. |
133 | 132 | |
134 | Uses `~fs.base.FS.setinfo`. | |
133 | When creating an `Info` object, you can add a ``_write`` key to | |
134 | each raw namespace that lists which keys are writable or not. | |
135 | ||
136 | In general, this means they are compatible with the `setinfo` | |
137 | function of filesystem objects. | |
135 | 138 | |
136 | 139 | Arguments: |
137 | 140 | namespace (str): A namespace identifier. |
140 | 143 | Returns: |
141 | 144 | bool: `True` if the key can be modified, `False` otherwise. |
142 | 145 | |
146 | Example: | |
147 | Create an `Info` object that marks only the ``modified`` key | |
148 | as writable in the ``details`` namespace:: | |
149 | ||
150 | >>> now = time.time() | |
151 | >>> info = Info({ | |
152 | ... "basic": {"name": "foo", "is_dir": False}, | |
153 | ... "details": { | |
154 | ... "modified": now, | |
155 | ... "created": now, | |
156 | ... "_write": ["modified"], | |
157 | ... } | |
158 | ... }) | |
159 | >>> info.is_writeable("details", "created") | |
160 | False | |
161 | >>> info.is_writeable("details", "modified") | |
162 | True | |
163 | ||
143 | 164 | """ |
144 | 165 | _writeable = self.get(namespace, "_write", ()) |
145 | 166 | return key in _writeable |
159 | 180 | |
160 | 181 | def copy(self, to_datetime=None): |
161 | 182 | # type: (Optional[ToDatetime]) -> Info |
162 | """Create a copy of this resource info object. | |
163 | """ | |
183 | """Create a copy of this resource info object.""" | |
164 | 184 | return Info(deepcopy(self.raw), to_datetime=to_datetime or self._to_datetime) |
165 | 185 | |
166 | 186 | def make_path(self, dir_path): |
179 | 199 | @property |
180 | 200 | def name(self): |
181 | 201 | # type: () -> Text |
182 | """`str`: the resource name. | |
183 | """ | |
202 | """`str`: the resource name.""" | |
184 | 203 | return cast(Text, self.get("basic", "name")) |
185 | 204 | |
186 | 205 | @property |
187 | 206 | def suffix(self): |
188 | 207 | # type: () -> Text |
189 | """`str`: the last component of the name (including dot), or an | |
190 | empty string if there is no suffix. | |
208 | """`str`: the last component of the name (with dot). | |
209 | ||
210 | In case there is no suffix, an empty string is returned. | |
191 | 211 | |
192 | 212 | Example: |
193 | >>> info | |
194 | <info 'foo.py'> | |
213 | >>> info = my_fs.getinfo("foo.py") | |
195 | 214 | >>> info.suffix |
196 | 215 | '.py' |
216 | >>> info2 = my_fs.getinfo("bar") | |
217 | >>> info2.suffix | |
218 | '' | |
219 | ||
197 | 220 | """ |
198 | 221 | name = self.get("basic", "name") |
199 | 222 | if name.startswith(".") and name.count(".") == 1: |
207 | 230 | """`List`: a list of any suffixes in the name. |
208 | 231 | |
209 | 232 | Example: |
210 | >>> info | |
211 | <info 'foo.tar.gz'> | |
233 | >>> info = my_fs.getinfo("foo.tar.gz") | |
212 | 234 | >>> info.suffixes |
213 | 235 | ['.tar', '.gz'] |
236 | ||
214 | 237 | """ |
215 | 238 | name = self.get("basic", "name") |
216 | 239 | if name.startswith(".") and name.count(".") == 1: |
223 | 246 | """`str`: the name minus any suffixes. |
224 | 247 | |
225 | 248 | Example: |
226 | >>> info | |
227 | <info 'foo.tar.gz'> | |
249 | >>> info = my_fs.getinfo("foo.tar.gz") | |
228 | 250 | >>> info.stem |
229 | 251 | 'foo' |
230 | 252 | |
237 | 259 | @property |
238 | 260 | def is_dir(self): |
239 | 261 | # type: () -> bool |
240 | """`bool`: `True` if the resource references a directory. | |
241 | """ | |
262 | """`bool`: `True` if the resource references a directory.""" | |
242 | 263 | return cast(bool, self.get("basic", "is_dir")) |
243 | 264 | |
244 | 265 | @property |
245 | 266 | def is_file(self): |
246 | 267 | # type: () -> bool |
247 | """`bool`: `True` if the resource references a file. | |
248 | """ | |
268 | """`bool`: `True` if the resource references a file.""" | |
249 | 269 | return not cast(bool, self.get("basic", "is_dir")) |
250 | 270 | |
251 | 271 | @property |
252 | 272 | def is_link(self): |
253 | 273 | # type: () -> bool |
254 | """`bool`: `True` if the resource is a symlink. | |
255 | """ | |
274 | """`bool`: `True` if the resource is a symlink.""" | |
256 | 275 | self._require_namespace("link") |
257 | 276 | return self.get("link", "target", None) is not None |
258 | 277 |
0 | 0 | """Compatibility tools between Python 2 and Python 3 I/O interfaces. |
1 | 1 | """ |
2 | 2 | |
3 | from __future__ import print_function | |
4 | from __future__ import unicode_literals | |
5 | ||
3 | from __future__ import print_function, unicode_literals | |
4 | ||
5 | import typing | |
6 | ||
7 | import array | |
6 | 8 | import io |
7 | import typing | |
8 | from io import SEEK_SET, SEEK_CUR | |
9 | from io import SEEK_CUR, SEEK_SET | |
9 | 10 | |
10 | 11 | from .mode import Mode |
11 | 12 | |
12 | 13 | if typing.TYPE_CHECKING: |
14 | from typing import IO, Any, Iterable, Iterator, List, Optional, Text, Union | |
15 | ||
16 | import mmap | |
13 | 17 | from io import RawIOBase |
14 | from typing import ( | |
15 | Any, | |
16 | Iterable, | |
17 | Iterator, | |
18 | IO, | |
19 | List, | |
20 | Optional, | |
21 | Text, | |
22 | Union, | |
23 | ) | |
24 | 18 | |
25 | 19 | |
26 | 20 | class RawWrapper(io.RawIOBase): |
27 | """Convert a Python 2 style file-like object in to a IO object. | |
28 | """ | |
29 | ||
30 | def __init__(self, f, mode=None, name=None): | |
21 | """Convert a Python 2 style file-like object in to a IO object.""" | |
22 | ||
23 | def __init__(self, f, mode=None, name=None): # noqa: D107 | |
31 | 24 | # type: (IO[bytes], Optional[Text], Optional[Text]) -> None |
32 | 25 | self._f = f |
33 | 26 | self.mode = mode or getattr(f, "mode", None) |
88 | 81 | return self._f.truncate(size) |
89 | 82 | |
90 | 83 | def write(self, data): |
91 | # type: (bytes) -> int | |
92 | count = self._f.write(data) | |
84 | # type: (Union[bytes, memoryview, array.array[Any], mmap.mmap]) -> int | |
85 | if isinstance(data, array.array): | |
86 | count = self._f.write(data.tobytes()) | |
87 | else: | |
88 | count = self._f.write(data) # type: ignore | |
93 | 89 | return len(data) if count is None else count |
94 | 90 | |
95 | 91 | @typing.no_type_check |
130 | 126 | b[:bytes_read] = data |
131 | 127 | return bytes_read |
132 | 128 | |
133 | def readline(self, limit=-1): | |
134 | # type: (int) -> bytes | |
135 | return self._f.readline(limit) | |
136 | ||
137 | def readlines(self, hint=-1): | |
138 | # type: (int) -> List[bytes] | |
139 | return self._f.readlines(hint) | |
140 | ||
141 | def writelines(self, sequence): | |
142 | # type: (Iterable[Union[bytes, bytearray]]) -> None | |
143 | return self._f.writelines(sequence) | |
129 | def readline(self, limit=None): | |
130 | # type: (Optional[int]) -> bytes | |
131 | return self._f.readline(-1 if limit is None else limit) | |
132 | ||
133 | def readlines(self, hint=None): | |
134 | # type: (Optional[int]) -> List[bytes] | |
135 | return self._f.readlines(-1 if hint is None else hint) | |
136 | ||
137 | def writelines(self, lines): | |
138 | # type: (Iterable[Union[bytes, memoryview, array.array[Any], mmap.mmap]]) -> None # noqa: E501 | |
139 | _lines = ( | |
140 | line.tobytes() if isinstance(line, array.array) else line for line in lines | |
141 | ) | |
142 | return self._f.writelines(typing.cast("Iterable[bytes]", _lines)) | |
144 | 143 | |
145 | 144 | def __iter__(self): |
146 | 145 | # type: () -> Iterator[bytes] |
160 | 159 | **kwargs # type: Any |
161 | 160 | ): |
162 | 161 | # type: (...) -> IO |
163 | """Take a Python 2.x binary file and return an IO Stream. | |
164 | """ | |
162 | """Take a Python 2.x binary file and return an IO Stream.""" | |
165 | 163 | reading = "r" in mode |
166 | 164 | writing = "w" in mode |
167 | 165 | appending = "a" in mode |
0 | 0 | """Least Recently Used cache mapping. |
1 | 1 | """ |
2 | 2 | |
3 | from __future__ import absolute_import | |
4 | from __future__ import unicode_literals | |
3 | from __future__ import absolute_import, unicode_literals | |
5 | 4 | |
6 | 5 | import typing |
6 | ||
7 | 7 | from collections import OrderedDict |
8 | ||
9 | 8 | |
10 | 9 | _K = typing.TypeVar("_K") |
11 | 10 | _V = typing.TypeVar("_V") |
21 | 20 | |
22 | 21 | def __init__(self, cache_size): |
23 | 22 | # type: (int) -> None |
23 | """Create a new LRUCache with the given size.""" | |
24 | 24 | self.cache_size = cache_size |
25 | 25 | super(LRUCache, self).__init__() |
26 | 26 | |
27 | 27 | def __setitem__(self, key, value): |
28 | 28 | # type: (_K, _V) -> None |
29 | """Store a new views, potentially discarding an old value. | |
30 | """ | |
29 | """Store a new views, potentially discarding an old value.""" | |
31 | 30 | if key not in self: |
32 | 31 | if len(self) >= self.cache_size: |
33 | 32 | self.popitem(last=False) |
35 | 34 | |
36 | 35 | def __getitem__(self, key): |
37 | 36 | # type: (_K) -> _V |
38 | """Get the item, but also makes it most recent. | |
39 | """ | |
37 | """Get the item, but also makes it most recent.""" | |
40 | 38 | _super = typing.cast(OrderedDict, super(LRUCache, self)) |
41 | 39 | value = _super.__getitem__(key) |
42 | 40 | _super.__delitem__(key) |
0 | 0 | """Manage a volatile in-memory filesystem. |
1 | 1 | """ |
2 | from __future__ import absolute_import | |
3 | from __future__ import unicode_literals | |
2 | from __future__ import absolute_import, unicode_literals | |
3 | ||
4 | import typing | |
4 | 5 | |
5 | 6 | import contextlib |
6 | 7 | import io |
7 | 8 | import os |
9 | import six | |
8 | 10 | import time |
9 | import typing | |
10 | 11 | from collections import OrderedDict |
11 | 12 | from threading import RLock |
12 | 13 | |
13 | import six | |
14 | ||
15 | 14 | from . import errors |
15 | from ._typing import overload | |
16 | 16 | from .base import FS |
17 | from .copy import copy_modified_time | |
17 | 18 | from .enums import ResourceType, Seek |
18 | 19 | from .info import Info |
19 | 20 | from .mode import Mode |
20 | from .path import iteratepath | |
21 | from .path import normpath | |
22 | from .path import split | |
23 | from ._typing import overload | |
21 | from .path import iteratepath, normpath, split | |
24 | 22 | |
25 | 23 | if typing.TYPE_CHECKING: |
26 | 24 | from typing import ( |
28 | 26 | BinaryIO, |
29 | 27 | Collection, |
30 | 28 | Dict, |
29 | Iterable, | |
31 | 30 | Iterator, |
32 | 31 | List, |
33 | 32 | Optional, |
34 | 33 | SupportsInt, |
34 | Text, | |
35 | Tuple, | |
35 | 36 | Union, |
36 | Text, | |
37 | 37 | ) |
38 | ||
39 | import array | |
40 | import mmap | |
41 | ||
38 | 42 | from .base import _OpendirFactory |
39 | 43 | from .info import RawInfo |
40 | 44 | from .permissions import Permissions |
89 | 93 | |
90 | 94 | def on_modify(self): # noqa: D401 |
91 | 95 | # type: () -> None |
92 | """Called when file data is modified. | |
93 | """ | |
96 | """Called when file data is modified.""" | |
94 | 97 | self._dir_entry.modified_time = self.modified_time = time.time() |
95 | 98 | |
96 | 99 | def on_access(self): # noqa: D401 |
97 | 100 | # type: () -> None |
98 | """Called when file is accessed. | |
99 | """ | |
101 | """Called when file is accessed.""" | |
100 | 102 | self._dir_entry.accessed_time = self.accessed_time = time.time() |
101 | 103 | |
102 | 104 | def flush(self): |
117 | 119 | |
118 | 120 | __next__ = next |
119 | 121 | |
120 | def readline(self, size=-1): | |
121 | # type: (int) -> bytes | |
122 | def readline(self, size=None): | |
123 | # type: (Optional[int]) -> bytes | |
122 | 124 | if not self._mode.reading: |
123 | 125 | raise IOError("File not open for reading") |
124 | 126 | with self._seek_lock(): |
132 | 134 | self._dir_entry.remove_open_file(self) |
133 | 135 | super(_MemoryFile, self).close() |
134 | 136 | |
135 | def read(self, size=-1): | |
137 | def read(self, size=None): | |
136 | 138 | # type: (Optional[int]) -> bytes |
137 | 139 | if not self._mode.reading: |
138 | 140 | raise IOError("File not open for reading") |
191 | 193 | return self._mode.writing |
192 | 194 | |
193 | 195 | def write(self, data): |
194 | # type: (bytes) -> int | |
196 | # type: (Union[bytes, memoryview, array.array[Any], mmap.mmap]) -> int | |
195 | 197 | if not self._mode.writing: |
196 | 198 | raise IOError("File not open for writing") |
197 | 199 | with self._seek_lock(): |
198 | 200 | self.on_modify() |
199 | 201 | return self._bytes_io.write(data) |
200 | 202 | |
201 | def writelines(self, sequence): # type: ignore | |
202 | # type: (List[bytes]) -> None | |
203 | # FIXME(@althonos): For some reason the stub for IOBase.writelines | |
204 | # is List[Any] ?! It should probably be Iterable[ByteString] | |
203 | def writelines(self, sequence): | |
204 | # type: (Iterable[Union[bytes, memoryview, array.array[Any], mmap.mmap]]) -> None # noqa: E501 | |
205 | 205 | with self._seek_lock(): |
206 | 206 | self.on_modify() |
207 | 207 | self._bytes_io.writelines(sequence) |
246 | 246 | _bytes_file.seek(0, os.SEEK_END) |
247 | 247 | return _bytes_file.tell() |
248 | 248 | |
249 | @overload # noqa: F811 | |
250 | def get_entry(self, name, default): | |
249 | @overload | |
250 | def get_entry(self, name, default): # noqa: F811 | |
251 | 251 | # type: (Text, _DirEntry) -> _DirEntry |
252 | 252 | pass |
253 | 253 | |
254 | @overload # noqa: F811 | |
255 | def get_entry(self, name): | |
254 | @overload | |
255 | def get_entry(self, name): # noqa: F811 | |
256 | 256 | # type: (Text) -> Optional[_DirEntry] |
257 | 257 | pass |
258 | 258 | |
259 | @overload # noqa: F811 | |
260 | def get_entry(self, name, default): | |
259 | @overload | |
260 | def get_entry(self, name, default): # noqa: F811 | |
261 | 261 | # type: (Text, None) -> Optional[_DirEntry] |
262 | 262 | pass |
263 | 263 | |
274 | 274 | # type: (Text) -> None |
275 | 275 | del self._dir[name] |
276 | 276 | |
277 | def clear(self): | |
278 | # type: () -> None | |
279 | self._dir.clear() | |
280 | ||
277 | 281 | def __contains__(self, name): |
278 | 282 | # type: (object) -> bool |
279 | 283 | return name in self._dir |
293 | 297 | def remove_open_file(self, memory_file): |
294 | 298 | # type: (_MemoryFile) -> None |
295 | 299 | self._open_files.remove(memory_file) |
300 | ||
301 | def to_info(self, namespaces=None): | |
302 | # type: (Optional[Collection[Text]]) -> Info | |
303 | namespaces = namespaces or () | |
304 | info = {"basic": {"name": self.name, "is_dir": self.is_dir}} | |
305 | if "details" in namespaces: | |
306 | info["details"] = { | |
307 | "_write": ["accessed", "modified"], | |
308 | "type": int(self.resource_type), | |
309 | "size": self.size, | |
310 | "accessed": self.accessed_time, | |
311 | "modified": self.modified_time, | |
312 | "created": self.created_time, | |
313 | } | |
314 | return Info(info) | |
296 | 315 | |
297 | 316 | |
298 | 317 | @six.python_2_unicode_compatible |
304 | 323 | fast, but non-permanent. The `MemoryFS` constructor takes no |
305 | 324 | arguments. |
306 | 325 | |
307 | Example: | |
308 | >>> mem_fs = MemoryFS() | |
309 | ||
310 | Or via an FS URL: | |
311 | >>> import fs | |
312 | >>> mem_fs = fs.open_fs('mem://') | |
326 | Examples: | |
327 | Create with the constructor:: | |
328 | ||
329 | >>> from fs.memoryfs import MemoryFS | |
330 | >>> mem_fs = MemoryFS() | |
331 | ||
332 | Or via an FS URL:: | |
333 | ||
334 | >>> import fs | |
335 | >>> mem_fs = fs.open_fs('mem://') | |
313 | 336 | |
314 | 337 | """ |
315 | 338 | |
325 | 348 | |
326 | 349 | def __init__(self): |
327 | 350 | # type: () -> None |
328 | """Create an in-memory filesystem. | |
329 | """ | |
351 | """Create an in-memory filesystem.""" | |
330 | 352 | self._meta = self._meta.copy() |
331 | 353 | self.root = self._make_dir_entry(ResourceType.directory, "") |
332 | 354 | super(MemoryFS, self).__init__() |
345 | 367 | |
346 | 368 | def _get_dir_entry(self, dir_path): |
347 | 369 | # type: (Text) -> Optional[_DirEntry] |
348 | """Get a directory entry, or `None` if one doesn't exist. | |
349 | """ | |
370 | """Get a directory entry, or `None` if one doesn't exist.""" | |
350 | 371 | with self._lock: |
351 | 372 | dir_path = normpath(dir_path) |
352 | 373 | current_entry = self.root # type: Optional[_DirEntry] |
366 | 387 | |
367 | 388 | def getinfo(self, path, namespaces=None): |
368 | 389 | # type: (Text, Optional[Collection[Text]]) -> Info |
369 | namespaces = namespaces or () | |
370 | 390 | _path = self.validatepath(path) |
371 | 391 | dir_entry = self._get_dir_entry(_path) |
372 | 392 | if dir_entry is None: |
373 | 393 | raise errors.ResourceNotFound(path) |
374 | info = {"basic": {"name": dir_entry.name, "is_dir": dir_entry.is_dir}} | |
375 | if "details" in namespaces: | |
376 | info["details"] = { | |
377 | "_write": ["accessed", "modified"], | |
378 | "type": int(dir_entry.resource_type), | |
379 | "size": dir_entry.size, | |
380 | "accessed": dir_entry.accessed_time, | |
381 | "modified": dir_entry.modified_time, | |
382 | "created": dir_entry.created_time, | |
383 | } | |
384 | return Info(info) | |
394 | return dir_entry.to_info(namespaces=namespaces) | |
385 | 395 | |
386 | 396 | def listdir(self, path): |
387 | 397 | # type: (Text) -> List[Text] |
388 | 398 | self.check() |
389 | 399 | _path = self.validatepath(path) |
390 | 400 | with self._lock: |
401 | # locate and validate the entry corresponding to the given path | |
391 | 402 | dir_entry = self._get_dir_entry(_path) |
392 | 403 | if dir_entry is None: |
393 | 404 | raise errors.ResourceNotFound(path) |
394 | 405 | if not dir_entry.is_dir: |
395 | 406 | raise errors.DirectoryExpected(path) |
407 | # return the filenames in the order they were created | |
396 | 408 | return dir_entry.list() |
397 | 409 | |
398 | 410 | if typing.TYPE_CHECKING: |
430 | 442 | new_dir = self._make_dir_entry(ResourceType.directory, dir_name) |
431 | 443 | parent_dir.set_entry(dir_name, new_dir) |
432 | 444 | return self.opendir(path) |
445 | ||
446 | def move(self, src_path, dst_path, overwrite=False, preserve_time=False): | |
447 | src_dir, src_name = split(self.validatepath(src_path)) | |
448 | dst_dir, dst_name = split(self.validatepath(dst_path)) | |
449 | ||
450 | with self._lock: | |
451 | src_dir_entry = self._get_dir_entry(src_dir) | |
452 | if src_dir_entry is None or src_name not in src_dir_entry: | |
453 | raise errors.ResourceNotFound(src_path) | |
454 | src_entry = src_dir_entry.get_entry(src_name) | |
455 | if src_entry.is_dir: | |
456 | raise errors.FileExpected(src_path) | |
457 | ||
458 | dst_dir_entry = self._get_dir_entry(dst_dir) | |
459 | if dst_dir_entry is None: | |
460 | raise errors.ResourceNotFound(dst_path) | |
461 | elif not overwrite and dst_name in dst_dir_entry: | |
462 | raise errors.DestinationExists(dst_path) | |
463 | ||
464 | # move the entry from the src folder to the dst folder | |
465 | dst_dir_entry.set_entry(dst_name, src_entry) | |
466 | src_dir_entry.remove_entry(src_name) | |
467 | # make sure to update the entry name itself (see #509) | |
468 | src_entry.name = dst_name | |
469 | ||
470 | if preserve_time: | |
471 | copy_modified_time(self, src_path, self, dst_path) | |
472 | ||
473 | def movedir(self, src_path, dst_path, create=False, preserve_time=False): | |
474 | src_dir, src_name = split(self.validatepath(src_path)) | |
475 | dst_dir, dst_name = split(self.validatepath(dst_path)) | |
476 | ||
477 | with self._lock: | |
478 | src_dir_entry = self._get_dir_entry(src_dir) | |
479 | if src_dir_entry is None or src_name not in src_dir_entry: | |
480 | raise errors.ResourceNotFound(src_path) | |
481 | src_entry = src_dir_entry.get_entry(src_name) | |
482 | if not src_entry.is_dir: | |
483 | raise errors.DirectoryExpected(src_path) | |
484 | ||
485 | # make sure the destination folder exists before moving | 
486 | dst_dir_entry = self._get_dir_entry(dst_dir) | |
487 | if dst_dir_entry is None or (not create and dst_name not in dst_dir_entry): | |
488 | raise errors.ResourceNotFound(dst_path) | |
489 | ||
490 | # move the entry from the src folder to the dst folder | |
491 | dst_dir_entry.set_entry(dst_name, src_entry) | |
492 | src_dir_entry.remove_entry(src_name) | |
493 | # make sure to update the entry name itself (see #509) | |
494 | src_entry.name = dst_name | |
495 | ||
496 | if preserve_time: | |
497 | copy_modified_time(self, src_path, self, dst_path) | |
433 | 498 | |
434 | 499 | def openbin(self, path, mode="r", buffering=-1, **options): |
435 | 500 | # type: (Text, Text, int, **Any) -> BinaryIO |
497 | 562 | |
498 | 563 | def removedir(self, path): |
499 | 564 | # type: (Text) -> None |
500 | _path = self.validatepath(path) | |
501 | ||
565 | # make sure we are not removing root | |
566 | _path = self.validatepath(path) | |
502 | 567 | if _path == "/": |
503 | 568 | raise errors.RemoveRootError() |
504 | ||
505 | with self._lock: | |
569 | # make sure the directory is empty | |
570 | if not self.isempty(path): | |
571 | raise errors.DirectoryNotEmpty(path) | |
572 | # we can now delegate to removetree since we confirmed that | |
573 | # * path exists (isempty) | |
574 | # * path is a folder (isempty) | |
575 | # * path is not root | |
576 | self.removetree(_path) | |
577 | ||
578 | def removetree(self, path): | |
579 | # type: (Text) -> None | |
580 | _path = self.validatepath(path) | |
581 | ||
582 | with self._lock: | |
583 | ||
584 | if _path == "/": | |
585 | self.root.clear() | |
586 | return | |
587 | ||
506 | 588 | dir_path, file_name = split(_path) |
507 | 589 | parent_dir_entry = self._get_dir_entry(dir_path) |
508 | 590 | |
513 | 595 | if not dir_dir_entry.is_dir: |
514 | 596 | raise errors.DirectoryExpected(path) |
515 | 597 | |
516 | if len(dir_dir_entry): | |
517 | raise errors.DirectoryNotEmpty(path) | |
518 | ||
519 | 598 | parent_dir_entry.remove_entry(file_name) |
599 | ||
600 | def scandir( | |
601 | self, | |
602 | path, # type: Text | |
603 | namespaces=None, # type: Optional[Collection[Text]] | |
604 | page=None, # type: Optional[Tuple[int, int]] | |
605 | ): | |
606 | # type: (...) -> Iterator[Info] | |
607 | self.check() | |
608 | _path = self.validatepath(path) | |
609 | with self._lock: | |
610 | # locate and validate the entry corresponding to the given path | |
611 | dir_entry = self._get_dir_entry(_path) | |
612 | if dir_entry is None: | |
613 | raise errors.ResourceNotFound(path) | |
614 | if not dir_entry.is_dir: | |
615 | raise errors.DirectoryExpected(path) | |
616 | # if paging was requested, slice the filenames | |
617 | filenames = dir_entry.list() | |
618 | if page is not None: | |
619 | start, end = page | |
620 | filenames = filenames[start:end] | |
621 | # yield info with the right namespaces | |
622 | for name in filenames: | |
623 | entry = typing.cast(_DirEntry, dir_entry.get_entry(name)) | |
624 | yield entry.to_info(namespaces=namespaces) | |
520 | 625 | |
521 | 626 | def setinfo(self, path, info): |
522 | 627 | # type: (Text, RawInfo) -> None |
15 | 15 | |
16 | 16 | """ |
17 | 17 | |
18 | from __future__ import print_function | |
19 | from __future__ import unicode_literals | |
18 | from __future__ import print_function, unicode_literals | |
20 | 19 | |
21 | 20 | import typing |
22 | 21 | |
29 | 28 | |
30 | 29 | if typing.TYPE_CHECKING: |
31 | 30 | from typing import Callable, Optional, Text, Union |
31 | ||
32 | 32 | from .base import FS |
33 | 33 | from .info import Info |
34 | 34 | |
56 | 56 | walker=None, # type: Optional[Walker] |
57 | 57 | copy_if_newer=True, # type: bool |
58 | 58 | workers=0, # type: int |
59 | preserve_time=False, # type: bool | |
59 | 60 | ): |
60 | 61 | # type: (...) -> None |
61 | 62 | """Mirror files / directories from one filesystem to another. |
72 | 73 | workers (int): Number of worker threads used |
73 | 74 | (0 for single threaded). Set to a relatively low number |
74 | 75 | for network filesystems, 4 would be a good start. |
76 | preserve_time (bool): If `True`, try to preserve mtime of the | |
77 | resources (defaults to `False`). | |
78 | ||
75 | 79 | """ |
76 | 80 | |
77 | 81 | def src(): |
81 | 85 | return manage_fs(dst_fs, create=True) |
82 | 86 | |
83 | 87 | with src() as _src_fs, dst() as _dst_fs: |
84 | with _src_fs.lock(), _dst_fs.lock(): | |
85 | _thread_safe = is_thread_safe(_src_fs, _dst_fs) | |
86 | with Copier(num_workers=workers if _thread_safe else 0) as copier: | |
88 | _thread_safe = is_thread_safe(_src_fs, _dst_fs) | |
89 | with Copier( | |
90 | num_workers=workers if _thread_safe else 0, preserve_time=preserve_time | |
91 | ) as copier: | |
92 | with _src_fs.lock(), _dst_fs.lock(): | |
87 | 93 | _mirror( |
88 | 94 | _src_fs, |
89 | 95 | _dst_fs, |
90 | 96 | walker=walker, |
91 | 97 | copy_if_newer=copy_if_newer, |
92 | 98 | copy_file=copier.copy, |
99 | preserve_time=preserve_time, | |
93 | 100 | ) |
94 | 101 | |
95 | 102 | |
96 | 103 | def _mirror( |
97 | src_fs, dst_fs, walker=None, copy_if_newer=True, copy_file=copy_file_internal | |
104 | src_fs, # type: FS | |
105 | dst_fs, # type: FS | |
106 | walker=None, # type: Optional[Walker] | |
107 | copy_if_newer=True, # type: bool | |
108 | copy_file=copy_file_internal, # type: Callable[[FS, str, FS, str, bool], None] | |
109 | preserve_time=False, # type: bool | |
98 | 110 | ): |
99 | # type: (FS, FS, Optional[Walker], bool, Callable[[FS, str, FS, str], None]) -> None | |
111 | # type: (...) -> None | |
100 | 112 | walker = walker or Walker() |
101 | 113 | walk = walker.walk(src_fs, namespaces=["details"]) |
102 | 114 | for path, dirs, files in walk: |
120 | 132 | # Compare file info |
121 | 133 | if copy_if_newer and not _compare(_file, dst_file): |
122 | 134 | continue |
123 | copy_file(src_fs, _path, dst_fs, _path) | |
135 | copy_file(src_fs, _path, dst_fs, _path, preserve_time) | |
124 | 136 | |
125 | 137 | # Make directories |
126 | 138 | for _dir in dirs: |
4 | 4 | |
5 | 5 | """ |
6 | 6 | |
7 | from __future__ import print_function | |
8 | from __future__ import unicode_literals | |
7 | from __future__ import print_function, unicode_literals | |
9 | 8 | |
10 | 9 | import typing |
11 | 10 | |
12 | 11 | import six |
13 | 12 | |
14 | 13 | from ._typing import Text |
15 | ||
16 | 14 | |
17 | 15 | if typing.TYPE_CHECKING: |
18 | 16 | from typing import FrozenSet, Set, Union |
29 | 27 | A mode object provides properties that can be used to interrogate the |
30 | 28 | `mode strings <https://docs.python.org/3/library/functions.html#open>`_ |
31 | 29 | used when opening files. |
32 | ||
33 | Arguments: | |
34 | mode (str): A *mode* string, as used by `io.open`. | |
35 | ||
36 | Raises: | |
37 | ValueError: If the mode string is invalid. | |
38 | 30 | |
39 | 31 | Example: |
40 | 32 | >>> mode = Mode('rb') |
51 | 43 | |
52 | 44 | def __init__(self, mode): |
53 | 45 | # type: (Text) -> None |
46 | """Create a new `Mode` instance. | |
47 | ||
48 | Arguments: | |
49 | mode (str): A *mode* string, as used by `io.open`. | |
50 | ||
51 | Raises: | |
52 | ValueError: If the mode string is invalid. | |
53 | ||
54 | """ | |
54 | 55 | self._mode = mode |
55 | 56 | self.validate() |
56 | 57 | |
64 | 65 | |
65 | 66 | def __contains__(self, character): |
66 | 67 | # type: (object) -> bool |
67 | """Check if a mode contains a given character. | |
68 | """ | |
68 | """Check if a mode contains a given character.""" | |
69 | 69 | assert isinstance(character, Text) |
70 | 70 | return character in self._mode |
71 | 71 | |
122 | 122 | @property |
123 | 123 | def create(self): |
124 | 124 | # type: () -> bool |
125 | """`bool`: `True` if the mode would create a file. | |
126 | """ | |
125 | """`bool`: `True` if the mode would create a file.""" | |
127 | 126 | return "a" in self or "w" in self or "x" in self |
128 | 127 | |
129 | 128 | @property |
130 | 129 | def reading(self): |
131 | 130 | # type: () -> bool |
132 | """`bool`: `True` if the mode permits reading. | |
133 | """ | |
131 | """`bool`: `True` if the mode permits reading.""" | |
134 | 132 | return "r" in self or "+" in self |
135 | 133 | |
136 | 134 | @property |
137 | 135 | def writing(self): |
138 | 136 | # type: () -> bool |
139 | """`bool`: `True` if the mode permits writing. | |
140 | """ | |
137 | """`bool`: `True` if the mode permits writing.""" | |
141 | 138 | return "w" in self or "a" in self or "+" in self or "x" in self |
142 | 139 | |
143 | 140 | @property |
144 | 141 | def appending(self): |
145 | 142 | # type: () -> bool |
146 | """`bool`: `True` if the mode permits appending. | |
147 | """ | |
143 | """`bool`: `True` if the mode permits appending.""" | |
148 | 144 | return "a" in self |
149 | 145 | |
150 | 146 | @property |
151 | 147 | def updating(self): |
152 | 148 | # type: () -> bool |
153 | """`bool`: `True` if the mode permits both reading and writing. | |
154 | """ | |
149 | """`bool`: `True` if the mode permits both reading and writing.""" | |
155 | 150 | return "+" in self |
156 | 151 | |
157 | 152 | @property |
158 | 153 | def truncate(self): |
159 | 154 | # type: () -> bool |
160 | """`bool`: `True` if the mode would truncate an existing file. | |
161 | """ | |
155 | """`bool`: `True` if the mode would truncate an existing file.""" | |
162 | 156 | return "w" in self or "x" in self |
163 | 157 | |
164 | 158 | @property |
165 | 159 | def exclusive(self): |
166 | 160 | # type: () -> bool |
167 | """`bool`: `True` if the mode require exclusive creation. | |
168 | """ | |
161 | """`bool`: `True` if the mode requires exclusive creation.""" | 
169 | 162 | return "x" in self |
170 | 163 | |
171 | 164 | @property |
172 | 165 | def binary(self): |
173 | 166 | # type: () -> bool |
174 | """`bool`: `True` if a mode specifies binary. | |
175 | """ | |
167 | """`bool`: `True` if a mode specifies binary.""" | |
176 | 168 | return "b" in self |
177 | 169 | |
178 | 170 | @property |
179 | 171 | def text(self): |
180 | 172 | # type: () -> bool |
181 | """`bool`: `True` if a mode specifies text. | |
182 | """ | |
173 | """`bool`: `True` if a mode specifies text.""" | |
183 | 174 | return "t" in self or "b" not in self |
184 | 175 | |
185 | 176 |
0 | 0 | """Manage other filesystems as a folder hierarchy. |
1 | 1 | """ |
2 | 2 | |
3 | from __future__ import absolute_import | |
4 | from __future__ import print_function | |
5 | from __future__ import unicode_literals | |
3 | from __future__ import absolute_import, print_function, unicode_literals | |
6 | 4 | |
7 | 5 | import typing |
8 | 6 | |
11 | 9 | from . import errors |
12 | 10 | from .base import FS |
13 | 11 | from .memoryfs import MemoryFS |
14 | from .path import abspath | |
15 | from .path import forcedir | |
16 | from .path import normpath | |
17 | from .mode import validate_open_mode | |
18 | from .mode import validate_openbin_mode | |
12 | from .mode import validate_open_mode, validate_openbin_mode | |
13 | from .path import abspath, forcedir, normpath | |
19 | 14 | |
20 | 15 | if typing.TYPE_CHECKING: |
21 | 16 | from typing import ( |
17 | IO, | |
22 | 18 | Any, |
23 | 19 | BinaryIO, |
24 | 20 | Collection, |
25 | 21 | Iterator, |
26 | IO, | |
27 | 22 | List, |
28 | 23 | MutableSequence, |
29 | 24 | Optional, |
31 | 26 | Tuple, |
32 | 27 | Union, |
33 | 28 | ) |
29 | ||
34 | 30 | from .enums import ResourceType |
35 | 31 | from .info import Info, RawInfo |
36 | 32 | from .permissions import Permissions |
40 | 36 | |
41 | 37 | |
42 | 38 | class MountError(Exception): |
43 | """Thrown when mounts conflict. | |
44 | """ | |
39 | """Thrown when mounts conflict.""" | |
45 | 40 | |
46 | 41 | |
47 | 42 | class MountFS(FS): |
48 | """A virtual filesystem that maps directories on to other file-systems. | |
49 | ||
50 | Arguments: | |
51 | auto_close (bool): If `True` (the default), the child | |
52 | filesystems will be closed when `MountFS` is closed. | |
53 | ||
54 | """ | |
43 | """A virtual filesystem that maps directories on to other file-systems.""" | |
55 | 44 | |
56 | 45 | _meta = { |
57 | 46 | "virtual": True, |
63 | 52 | |
64 | 53 | def __init__(self, auto_close=True): |
65 | 54 | # type: (bool) -> None |
55 | """Create a new `MountFS` instance. | |
56 | ||
57 | Arguments: | |
58 | auto_close (bool): If `True` (the default), the child | |
59 | filesystems will be closed when `MountFS` is closed. | |
60 | ||
61 | """ | |
66 | 62 | super(MountFS, self).__init__() |
67 | 63 | self.auto_close = auto_close |
68 | 64 | self.default_fs = MemoryFS() # type: FS |
0 | 0 | """Functions for moving files between filesystems. |
1 | 1 | """ |
2 | 2 | |
3 | from __future__ import print_function | |
4 | from __future__ import unicode_literals | |
3 | from __future__ import print_function, unicode_literals | |
5 | 4 | |
6 | 5 | import typing |
7 | 6 | |
8 | from .copy import copy_dir | |
9 | from .copy import copy_file | |
7 | from ._pathcompat import commonpath | |
8 | from .copy import copy_dir, copy_file | |
9 | from .errors import FSError | |
10 | 10 | from .opener import manage_fs |
11 | from .osfs import OSFS | |
12 | from .path import frombase | |
11 | 13 | |
12 | 14 | if typing.TYPE_CHECKING: |
13 | from .base import FS | |
14 | 15 | from typing import Text, Union |
15 | 16 | |
17 | from .base import FS | |
16 | 18 | |
17 | def move_fs(src_fs, dst_fs, workers=0): | |
18 | # type: (Union[Text, FS], Union[Text, FS], int) -> None | |
19 | ||
20 | def move_fs( | |
21 | src_fs, # type: Union[Text, FS] | |
22 | dst_fs, # type: Union[Text, FS] | 
23 | workers=0, # type: int | |
24 | preserve_time=False, # type: bool | |
25 | ): | |
26 | # type: (...) -> None | |
19 | 27 | """Move the contents of a filesystem to another filesystem. |
20 | 28 | |
21 | 29 | Arguments: |
23 | 31 | dst_fs (FS or str): Destination filesystem (instance or URL). |
24 | 32 | workers (int): Use `worker` threads to copy data, or ``0`` (default) for |
25 | 33 | a single-threaded copy. |
34 | preserve_time (bool): If `True`, try to preserve mtime of the | |
35 | resources (defaults to `False`). | |
26 | 36 | |
27 | 37 | """ |
28 | move_dir(src_fs, "/", dst_fs, "/", workers=workers) | |
38 | move_dir(src_fs, "/", dst_fs, "/", workers=workers, preserve_time=preserve_time) | |
29 | 39 | |
30 | 40 | |
31 | 41 | def move_file( |
33 | 43 | src_path, # type: Text |
34 | 44 | dst_fs, # type: Union[Text, FS] |
35 | 45 | dst_path, # type: Text |
46 | preserve_time=False, # type: bool | |
47 | cleanup_dst_on_error=True, # type: bool | |
36 | 48 | ): |
37 | 49 | # type: (...) -> None |
38 | 50 | """Move a file from one filesystem to another. |
40 | 52 | Arguments: |
41 | 53 | src_fs (FS or str): Source filesystem (instance or URL). |
42 | 54 | src_path (str): Path to a file on ``src_fs``. |
43 | dst_fs (FS or str); Destination filesystem (instance or URL). | |
55 | dst_fs (FS or str): Destination filesystem (instance or URL). | |
44 | 56 | dst_path (str): Path to a file on ``dst_fs``. |
57 | preserve_time (bool): If `True`, try to preserve mtime of the | |
58 | resources (defaults to `False`). | |
59 | cleanup_dst_on_error (bool): If `True`, tries to delete the file copied to | |
60 | ``dst_fs`` if deleting the file from ``src_fs`` fails (defaults to `True`). | |
45 | 61 | |
46 | 62 | """ |
47 | with manage_fs(src_fs) as _src_fs: | |
48 | with manage_fs(dst_fs, create=True) as _dst_fs: | |
63 | with manage_fs(src_fs, writeable=True) as _src_fs: | |
64 | with manage_fs(dst_fs, writeable=True, create=True) as _dst_fs: | |
49 | 65 | if _src_fs is _dst_fs: |
50 | 66 | # Same filesystem, may be optimized |
51 | _src_fs.move(src_path, dst_path, overwrite=True) | |
52 | else: | |
53 | # Standard copy and delete | |
54 | with _src_fs.lock(), _dst_fs.lock(): | |
55 | copy_file(_src_fs, src_path, _dst_fs, dst_path) | |
67 | _src_fs.move( | |
68 | src_path, dst_path, overwrite=True, preserve_time=preserve_time | |
69 | ) | |
70 | return | |
71 | ||
72 | if _src_fs.hassyspath(src_path) and _dst_fs.hassyspath(dst_path): | |
73 | # if both filesystems have a syspath we create a new OSFS from a | |
74 | # common parent folder and use it to move the file. | |
75 | try: | |
76 | src_syspath = _src_fs.getsyspath(src_path) | |
77 | dst_syspath = _dst_fs.getsyspath(dst_path) | |
78 | common = commonpath([src_syspath, dst_syspath]) | |
79 | if common: | |
80 | rel_src = frombase(common, src_syspath) | |
81 | rel_dst = frombase(common, dst_syspath) | |
82 | with _src_fs.lock(), _dst_fs.lock(): | |
83 | with OSFS(common) as base: | |
84 | base.move(rel_src, rel_dst, preserve_time=preserve_time) | |
85 | return # optimization worked, exit early | |
86 | except ValueError: | |
87 | # This is raised if we cannot find a common base folder. | |
88 | # In this case just fall through to the standard method. | |
89 | pass | |
90 | ||
91 | # Standard copy and delete | |
92 | with _src_fs.lock(), _dst_fs.lock(): | |
93 | copy_file( | |
94 | _src_fs, | |
95 | src_path, | |
96 | _dst_fs, | |
97 | dst_path, | |
98 | preserve_time=preserve_time, | |
99 | ) | |
100 | try: | |
56 | 101 | _src_fs.remove(src_path) |
102 | except FSError as e: | |
103 | # if the source cannot be removed we delete the copy on the | |
104 | # destination | |
105 | if cleanup_dst_on_error: | |
106 | _dst_fs.remove(dst_path) | |
107 | raise e | |
57 | 108 | |
58 | 109 | |
59 | 110 | def move_dir( |
62 | 113 | dst_fs, # type: Union[Text, FS] |
63 | 114 | dst_path, # type: Text |
64 | 115 | workers=0, # type: int |
116 | preserve_time=False, # type: bool | |
65 | 117 | ): |
66 | 118 | # type: (...) -> None |
67 | 119 | """Move a directory from one filesystem to another. |
71 | 123 | src_path (str): Path to a directory on ``src_fs`` |
72 | 124 | dst_fs (FS or str): Destination filesystem (instance or URL). |
73 | 125 | dst_path (str): Path to a directory on ``dst_fs``. |
74 | workers (int): Use `worker` threads to copy data, or ``0`` (default) for | |
75 | a single-threaded copy. | |
126 | workers (int): Use ``worker`` threads to copy data, or ``0`` | |
127 | (default) for a single-threaded copy. | |
128 | preserve_time (bool): If `True`, try to preserve mtime of the | |
129 | resources (defaults to `False`). | |
76 | 130 | |
77 | 131 | """ |
78 | ||
79 | def src(): | |
80 | return manage_fs(src_fs, writeable=False) | |
81 | ||
82 | def dst(): | |
83 | return manage_fs(dst_fs, create=True) | |
84 | ||
85 | with src() as _src_fs, dst() as _dst_fs: | |
86 | with _src_fs.lock(), _dst_fs.lock(): | |
87 | _dst_fs.makedir(dst_path, recreate=True) | |
88 | copy_dir(src_fs, src_path, dst_fs, dst_path, workers=workers) | |
89 | _src_fs.removetree(src_path) | |
132 | with manage_fs(src_fs, writeable=True) as _src_fs: | |
133 | with manage_fs(dst_fs, writeable=True, create=True) as _dst_fs: | |
134 | with _src_fs.lock(), _dst_fs.lock(): | |
135 | _dst_fs.makedir(dst_path, recreate=True) | |
136 | copy_dir( | |
137 | src_fs, | |
138 | src_path, | |
139 | dst_fs, | |
140 | dst_path, | |
141 | workers=workers, | |
142 | preserve_time=preserve_time, | |
143 | ) | |
144 | _src_fs.removetree(src_path) |
0 | 0 | """Manage several filesystems through a single view. |
1 | 1 | """ |
2 | 2 | |
3 | from __future__ import absolute_import | |
4 | from __future__ import unicode_literals | |
5 | from __future__ import print_function | |
3 | from __future__ import absolute_import, print_function, unicode_literals | |
6 | 4 | |
7 | 5 | import typing |
8 | from collections import namedtuple, OrderedDict | |
6 | ||
7 | from collections import OrderedDict, namedtuple | |
9 | 8 | from operator import itemgetter |
10 | ||
11 | 9 | from six import text_type |
12 | 10 | |
13 | 11 | from . import errors |
18 | 16 | |
19 | 17 | if typing.TYPE_CHECKING: |
20 | 18 | from typing import ( |
19 | IO, | |
21 | 20 | Any, |
22 | 21 | BinaryIO, |
23 | 22 | Collection, |
24 | 23 | Iterator, |
25 | IO, | |
24 | List, | |
26 | 25 | MutableMapping, |
27 | List, | |
28 | 26 | MutableSet, |
29 | 27 | Optional, |
30 | 28 | Text, |
31 | 29 | Tuple, |
32 | 30 | ) |
31 | ||
33 | 32 | from .enums import ResourceType |
34 | 33 | from .info import Info, RawInfo |
35 | 34 | from .permissions import Permissions |
54 | 53 | |
55 | 54 | def __init__(self, auto_close=True): |
56 | 55 | # type: (bool) -> None |
56 | """Create a new MultiFS. | |
57 | ||
58 | Arguments: | |
59 | auto_close (bool): If `True` (the default), the child | |
60 | filesystems will be closed when `MultiFS` is closed. | |
61 | ||
62 | """ | |
57 | 63 | super(MultiFS, self).__init__() |
58 | 64 | |
59 | 65 | self._auto_close = auto_close |
126 | 132 | |
127 | 133 | def _resort(self): |
128 | 134 | # type: () -> None |
129 | """Force `iterate_fs` to re-sort on next reference. | |
130 | """ | |
135 | """Force `iterate_fs` to re-sort on next reference.""" | |
131 | 136 | self._fs_sequence = None |
132 | 137 | |
133 | 138 | def iterate_fs(self): |
134 | 139 | # type: () -> Iterator[Tuple[Text, FS]] |
135 | """Get iterator that returns (name, fs) in priority order. | |
136 | """ | |
140 | """Get iterator that returns (name, fs) in priority order.""" | |
137 | 141 | if self._fs_sequence is None: |
138 | 142 | self._fs_sequence = [ |
139 | 143 | (name, fs) |
145 | 149 | |
146 | 150 | def _delegate(self, path): |
147 | 151 | # type: (Text) -> Optional[FS] |
148 | """Get a filesystem which has a given path. | |
149 | """ | |
152 | """Get a filesystem which has a given path.""" | |
150 | 153 | for _name, fs in self.iterate_fs(): |
151 | 154 | if fs.exists(path): |
152 | 155 | return fs |
154 | 157 | |
155 | 158 | def _delegate_required(self, path): |
156 | 159 | # type: (Text) -> FS |
157 | """Check that there is a filesystem with the given ``path``. | |
158 | """ | |
160 | """Check that there is a filesystem with the given ``path``.""" | |
159 | 161 | fs = self._delegate(path) |
160 | 162 | if fs is None: |
161 | 163 | raise errors.ResourceNotFound(path) |
163 | 165 | |
164 | 166 | def _writable_required(self, path): |
165 | 167 | # type: (Text) -> FS |
166 | """Check that ``path`` is writeable. | |
167 | """ | |
168 | """Check that ``path`` is writeable.""" | |
168 | 169 | if self.write_fs is None: |
169 | 170 | raise errors.ResourceReadOnly(path) |
170 | 171 | return self.write_fs |
4 | 4 | # Declare fs.opener as a namespace package |
5 | 5 | __import__("pkg_resources").declare_namespace(__name__) # type: ignore |
6 | 6 | |
7 | # Import opener modules so that `registry.install` is called on each opener | 
8 | from . import appfs, ftpfs, memoryfs, osfs, tarfs, tempfs, zipfs | |
9 | ||
7 | 10 | # Import objects into fs.opener namespace |
8 | 11 | from .base import Opener |
9 | 12 | from .parse import parse_fs_url as parse |
10 | 13 | from .registry import registry |
11 | ||
12 | # Import opener modules so that `registry.install` if called on each opener | |
13 | from . import appfs, ftpfs, memoryfs, osfs, tarfs, tempfs, zipfs | |
14 | 14 | |
15 | 15 | # Alias functions defined as Registry methods |
16 | 16 | open_fs = registry.open_fs |
1 | 1 | """``AppFS`` opener definition. |
2 | 2 | """ |
3 | 3 | |
4 | from __future__ import absolute_import | |
5 | from __future__ import print_function | |
6 | from __future__ import unicode_literals | |
4 | from __future__ import absolute_import, print_function, unicode_literals | |
7 | 5 | |
8 | 6 | import typing |
9 | 7 | |
10 | 8 | from .base import Opener |
9 | from .errors import OpenerError | |
11 | 10 | from .registry import registry |
12 | from .errors import OpenerError | |
13 | 11 | |
14 | 12 | if typing.TYPE_CHECKING: |
15 | 13 | from typing import Text, Union |
16 | from .parse import ParseResult | |
14 | ||
17 | 15 | from ..appfs import _AppFS |
18 | 16 | from ..subfs import SubFS |
17 | from .parse import ParseResult | |
19 | 18 | |
20 | 19 | |
21 | 20 | @registry.install |
22 | 21 | class AppFSOpener(Opener): |
23 | """``AppFS`` opener. | |
24 | """ | |
22 | """``AppFS`` opener.""" | |
25 | 23 | |
26 | 24 | protocols = ["userdata", "userconf", "sitedata", "siteconf", "usercache", "userlog"] |
27 | 25 | _protocol_mapping = None |
36 | 34 | ): |
37 | 35 | # type: (...) -> Union[_AppFS, SubFS[_AppFS]] |
38 | 36 | |
37 | from .. import appfs | |
39 | 38 | from ..subfs import ClosingSubFS |
40 | from .. import appfs | |
41 | 39 | |
42 | 40 | if self._protocol_mapping is None: |
43 | 41 | self._protocol_mapping = { |
1 | 1 | """`Opener` abstract base class. |
2 | 2 | """ |
3 | 3 | |
4 | import abc | |
5 | 4 | import typing |
6 | 5 | |
6 | import abc | |
7 | 7 | import six |
8 | 8 | |
9 | 9 | if typing.TYPE_CHECKING: |
10 | 10 | from typing import List, Text |
11 | ||
11 | 12 | from ..base import FS |
12 | 13 | from .parse import ParseResult |
13 | 14 |
3 | 3 | |
4 | 4 | |
5 | 5 | class ParseError(ValueError): |
6 | """Attempt to parse an invalid FS URL. | |
7 | """ | |
6 | """Attempt to parse an invalid FS URL.""" | |
8 | 7 | |
9 | 8 | |
10 | 9 | class OpenerError(Exception): |
11 | """Base exception for opener related errors. | |
12 | """ | |
10 | """Base exception for opener related errors.""" | |
13 | 11 | |
14 | 12 | |
15 | 13 | class UnsupportedProtocol(OpenerError): |
16 | """No opener found for the given protocol. | |
17 | """ | |
14 | """No opener found for the given protocol.""" | |
18 | 15 | |
19 | 16 | |
20 | 17 | class EntryPointError(OpenerError): |
21 | """An entry point could not be loaded. | |
22 | """ | |
18 | """An entry point could not be loaded.""" | |
23 | 19 | |
24 | 20 | |
25 | 21 | class NotWriteable(OpenerError): |
26 | """A writable FS could not be created. | |
27 | """ | |
22 | """A writable FS could not be created.""" |
1 | 1 | """`FTPFS` opener definition. |
2 | 2 | """ |
3 | 3 | |
4 | from __future__ import absolute_import | |
5 | from __future__ import print_function | |
6 | from __future__ import unicode_literals | |
4 | from __future__ import absolute_import, print_function, unicode_literals | |
7 | 5 | |
8 | 6 | import typing |
9 | 7 | |
8 | from ..errors import CreateFailed | |
10 | 9 | from .base import Opener |
11 | 10 | from .registry import registry |
12 | from ..errors import CreateFailed | |
13 | 11 | |
14 | 12 | if typing.TYPE_CHECKING: |
15 | 13 | from typing import Text, Union |
14 | ||
16 | 15 | from ..ftpfs import FTPFS # noqa: F401 |
17 | 16 | from ..subfs import SubFS |
18 | 17 | from .parse import ParseResult |
20 | 19 | |
21 | 20 | @registry.install |
22 | 21 | class FTPOpener(Opener): |
23 | """`FTPFS` opener. | |
24 | """ | |
22 | """`FTPFS` opener.""" | |
25 | 23 | |
26 | protocols = ["ftp"] | |
24 | protocols = ["ftp", "ftps"] | |
27 | 25 | |
28 | 26 | @CreateFailed.catch_all |
29 | 27 | def open_fs( |
48 | 46 | passwd=parse_result.password, |
49 | 47 | proxy=parse_result.params.get("proxy"), |
50 | 48 | timeout=int(parse_result.params.get("timeout", "10")), |
49 | tls=bool(parse_result.protocol == "ftps"), | |
51 | 50 | ) |
52 | 51 | if dir_path: |
53 | 52 | if create: |
1 | 1 | """`MemoryFS` opener definition. |
2 | 2 | """ |
3 | 3 | |
4 | from __future__ import absolute_import | |
5 | from __future__ import print_function | |
6 | from __future__ import unicode_literals | |
4 | from __future__ import absolute_import, print_function, unicode_literals | |
7 | 5 | |
8 | 6 | import typing |
9 | 7 | |
12 | 10 | |
13 | 11 | if typing.TYPE_CHECKING: |
14 | 12 | from typing import Text |
13 | ||
14 | from ..memoryfs import MemoryFS # noqa: F401 | |
15 | 15 | from .parse import ParseResult |
16 | from ..memoryfs import MemoryFS # noqa: F401 | |
17 | 16 | |
18 | 17 | |
19 | 18 | @registry.install |
20 | 19 | class MemOpener(Opener): |
21 | """`MemoryFS` opener. | |
22 | """ | |
20 | """`MemoryFS` opener.""" | |
23 | 21 | |
24 | 22 | protocols = ["mem"] |
25 | 23 |
1 | 1 | """`OSFS` opener definition. |
2 | 2 | """ |
3 | 3 | |
4 | from __future__ import absolute_import | |
5 | from __future__ import print_function | |
6 | from __future__ import unicode_literals | |
4 | from __future__ import absolute_import, print_function, unicode_literals | |
7 | 5 | |
8 | 6 | import typing |
9 | 7 | |
12 | 10 | |
13 | 11 | if typing.TYPE_CHECKING: |
14 | 12 | from typing import Text |
13 | ||
14 | from ..osfs import OSFS # noqa: F401 | |
15 | 15 | from .parse import ParseResult |
16 | from ..osfs import OSFS # noqa: F401 | |
17 | 16 | |
18 | 17 | |
19 | 18 | @registry.install |
20 | 19 | class OSFSOpener(Opener): |
21 | """`OSFS` opener. | |
22 | """ | |
20 | """`OSFS` opener.""" | |
23 | 21 | |
24 | 22 | protocols = ["file", "osfs"] |
25 | 23 | |
32 | 30 | cwd, # type: Text |
33 | 31 | ): |
34 | 32 | # type: (...) -> OSFS |
33 | from os.path import abspath, expanduser, join, normpath | |
34 | ||
35 | 35 | from ..osfs import OSFS |
36 | from os.path import abspath, expanduser, normpath, join | |
37 | 36 | |
38 | 37 | _path = abspath(join(cwd, expanduser(parse_result.resource))) |
39 | 38 | path = normpath(_path) |
0 | 0 | """Function to parse FS URLs in to their constituent parts. |
1 | 1 | """ |
2 | 2 | |
3 | from __future__ import absolute_import | |
4 | from __future__ import print_function | |
5 | from __future__ import unicode_literals | |
3 | from __future__ import absolute_import, print_function, unicode_literals | |
4 | ||
5 | import typing | |
6 | 6 | |
7 | 7 | import collections |
8 | 8 | import re |
9 | import typing | |
10 | ||
11 | 9 | import six |
12 | 10 | from six.moves.urllib.parse import parse_qs, unquote |
13 | 11 | |
17 | 15 | from typing import Optional, Text |
18 | 16 | |
19 | 17 | |
20 | _ParseResult = collections.namedtuple( | |
21 | "ParseResult", ["protocol", "username", "password", "resource", "params", "path"] | |
22 | ) | |
23 | ||
24 | ||
25 | class ParseResult(_ParseResult): | |
18 | class ParseResult( | |
19 | collections.namedtuple( | |
20 | "ParseResult", | |
21 | ["protocol", "username", "password", "resource", "params", "path"], | |
22 | ) | |
23 | ): | |
26 | 24 | """A named tuple containing fields of a parsed FS URL. |
27 | 25 | |
28 | 26 | Attributes: |
1 | 1 | """`Registry` class mapping protocols and FS URLs to their `Opener`. |
2 | 2 | """ |
3 | 3 | |
4 | from __future__ import absolute_import | |
5 | from __future__ import print_function | |
6 | from __future__ import unicode_literals | |
4 | from __future__ import absolute_import, print_function, unicode_literals | |
5 | ||
6 | import typing | |
7 | 7 | |
8 | 8 | import collections |
9 | 9 | import contextlib |
10 | import typing | |
11 | ||
12 | 10 | import pkg_resources |
13 | 11 | |
12 | from ..errors import ResourceReadOnly | |
14 | 13 | from .base import Opener |
15 | from .errors import UnsupportedProtocol, EntryPointError | |
14 | from .errors import EntryPointError, UnsupportedProtocol | |
16 | 15 | from .parse import parse_fs_url |
17 | 16 | |
18 | 17 | if typing.TYPE_CHECKING: |
19 | from typing import ( | |
20 | Callable, | |
21 | Dict, | |
22 | Iterator, | |
23 | List, | |
24 | Text, | |
25 | Type, | |
26 | Tuple, | |
27 | Union, | |
28 | ) | |
18 | from typing import Callable, Dict, Iterator, List, Text, Tuple, Type, Union | |
19 | ||
29 | 20 | from ..base import FS |
30 | 21 | |
31 | 22 | |
32 | 23 | class Registry(object): |
33 | """A registry for `Opener` instances. | |
34 | """ | |
24 | """A registry for `Opener` instances.""" | |
35 | 25 | |
36 | 26 | def __init__(self, default_opener="osfs", load_extern=False): |
37 | 27 | # type: (Text, bool) -> None |
63 | 53 | |
64 | 54 | Note: |
65 | 55 | May be used as a class decorator. For example:: |
56 | ||
66 | 57 | registry = Registry() |
67 | 58 | @registry.install |
68 | 59 | class ArchiveOpener(Opener): |
69 | 60 | protocols = ['zip', 'tar'] |
61 | ||
70 | 62 | """ |
71 | 63 | _opener = opener if isinstance(opener, Opener) else opener() |
72 | 64 | assert isinstance(_opener, Opener), "Opener instance required" |
78 | 70 | @property |
79 | 71 | def protocols(self): |
80 | 72 | # type: () -> List[Text] |
81 | """`list`: the list of supported protocols. | |
82 | """ | |
83 | ||
73 | """`list`: the list of supported protocols.""" | |
84 | 74 | _protocols = list(self._protocols) |
85 | 75 | if self.load_extern: |
86 | 76 | _protocols.extend( |
198 | 188 | """Open a filesystem from a FS URL (ignoring the path component). |
199 | 189 | |
200 | 190 | Arguments: |
201 | fs_url (str): A filesystem URL. | |
191 | fs_url (str): A filesystem URL. If a filesystem instance is | |
192 | given instead, it will be returned transparently. | |
202 | 193 | writeable (bool, optional): `True` if the filesystem must |
203 | 194 | be writeable. |
204 | 195 | create (bool, optional): `True` if the filesystem should be |
210 | 201 | |
211 | 202 | Returns: |
212 | 203 | ~fs.base.FS: A filesystem instance. |
204 | ||
205 | Caution: | |
206 | The ``writeable`` parameter only controls whether the | |
207 | filesystem *needs* to be writable, which is relevant for | |
208 | some archive filesystems. Passing ``writeable=False`` will | |
209 | **not** make the return filesystem read-only. For this, | |
210 | consider using `fs.wrap.read_only` to wrap the returned | |
211 | instance. | |
213 | 212 | |
214 | 213 | """ |
215 | 214 | from ..base import FS |
251 | 250 | required logic for that. |
252 | 251 | |
253 | 252 | Example: |
254 | >>> def print_ls(list_fs): | |
255 | ... '''List a directory.''' | |
256 | ... with manage_fs(list_fs) as fs: | |
257 | ... print(' '.join(fs.listdir())) | |
253 | The `~Registry.manage_fs` method can be used to define a small | |
254 | utility function:: | |
255 | ||
256 | >>> def print_ls(list_fs): | |
257 | ... '''List a directory.''' | |
258 | ... with manage_fs(list_fs) as fs: | |
259 | ... print(' '.join(fs.listdir())) | |
258 | 260 | |
259 | 261 | This function may be used in two ways. You may either pass |
260 | 262 | a ``str``, as follows:: |
270 | 272 | """ |
271 | 273 | from ..base import FS |
272 | 274 | |
275 | def assert_writeable(fs): | |
276 | if fs.getmeta().get("read_only", True): | |
277 | raise ResourceReadOnly(path="/") | |
278 | ||
273 | 279 | if isinstance(fs_url, FS): |
280 | if writeable: | |
281 | assert_writeable(fs_url) | |
274 | 282 | yield fs_url |
275 | 283 | else: |
276 | 284 | _fs = self.open_fs(fs_url, create=create, writeable=writeable, cwd=cwd) |
285 | if writeable: | |
286 | assert_writeable(_fs) | |
277 | 287 | try: |
278 | 288 | yield _fs |
279 | 289 | finally: |
1 | 1 | """`TarFS` opener definition. |
2 | 2 | """ |
3 | 3 | |
4 | from __future__ import absolute_import | |
5 | from __future__ import print_function | |
6 | from __future__ import unicode_literals | |
4 | from __future__ import absolute_import, print_function, unicode_literals | |
7 | 5 | |
8 | 6 | import typing |
9 | 7 | |
10 | 8 | from .base import Opener |
9 | from .errors import NotWriteable | |
11 | 10 | from .registry import registry |
12 | from .errors import NotWriteable | |
13 | 11 | |
14 | 12 | if typing.TYPE_CHECKING: |
15 | 13 | from typing import Text |
14 | ||
15 | from ..tarfs import TarFS # noqa: F401 | |
16 | 16 | from .parse import ParseResult |
17 | from ..tarfs import TarFS # noqa: F401 | |
18 | 17 | |
19 | 18 | |
20 | 19 | @registry.install |
21 | 20 | class TarOpener(Opener): |
22 | """`TarFS` opener. | |
23 | """ | |
21 | """`TarFS` opener.""" | |
24 | 22 | |
25 | 23 | protocols = ["tar"] |
26 | 24 |
1 | 1 | """`TempFS` opener definition. |
2 | 2 | """ |
3 | 3 | |
4 | from __future__ import absolute_import | |
5 | from __future__ import print_function | |
6 | from __future__ import unicode_literals | |
4 | from __future__ import absolute_import, print_function, unicode_literals | |
7 | 5 | |
8 | 6 | import typing |
9 | 7 | |
12 | 10 | |
13 | 11 | if typing.TYPE_CHECKING: |
14 | 12 | from typing import Text |
13 | ||
14 | from ..tempfs import TempFS # noqa: F401 | |
15 | 15 | from .parse import ParseResult |
16 | from ..tempfs import TempFS # noqa: F401 | |
17 | 16 | |
18 | 17 | |
19 | 18 | @registry.install |
20 | 19 | class TempOpener(Opener): |
21 | """`TempFS` opener. | |
22 | """ | |
20 | """`TempFS` opener.""" | |
23 | 21 | |
24 | 22 | protocols = ["temp"] |
25 | 23 |
1 | 1 | """`ZipFS` opener definition. |
2 | 2 | """ |
3 | 3 | |
4 | from __future__ import absolute_import | |
5 | from __future__ import print_function | |
6 | from __future__ import unicode_literals | |
4 | from __future__ import absolute_import, print_function, unicode_literals | |
7 | 5 | |
8 | 6 | import typing |
9 | 7 | |
10 | 8 | from .base import Opener |
9 | from .errors import NotWriteable | |
11 | 10 | from .registry import registry |
12 | from .errors import NotWriteable | |
13 | 11 | |
14 | 12 | if typing.TYPE_CHECKING: |
15 | 13 | from typing import Text |
14 | ||
15 | from ..zipfs import ZipFS # noqa: F401 | |
16 | 16 | from .parse import ParseResult |
17 | from ..zipfs import ZipFS # noqa: F401 | |
18 | 17 | |
19 | 18 | |
20 | 19 | @registry.install |
21 | 20 | class ZipOpener(Opener): |
22 | """`ZipFS` opener. | |
23 | """ | |
21 | """`ZipFS` opener.""" | |
24 | 22 | |
25 | 23 | protocols = ["zip"] |
26 | 24 |
3 | 3 | of the Python standard library. |
4 | 4 | """ |
5 | 5 | |
6 | from __future__ import absolute_import | |
7 | from __future__ import print_function | |
8 | from __future__ import unicode_literals | |
6 | from __future__ import absolute_import, print_function, unicode_literals | |
7 | ||
8 | import sys | |
9 | import typing | |
9 | 10 | |
10 | 11 | import errno |
11 | 12 | import io |
14 | 15 | import os |
15 | 16 | import platform |
16 | 17 | import shutil |
18 | import six | |
17 | 19 | import stat |
18 | import sys | |
19 | 20 | import tempfile |
20 | import typing | |
21 | ||
22 | import six | |
23 | 21 | |
24 | 22 | try: |
25 | 23 | from os import scandir |
38 | 36 | sendfile = None # type: ignore # pragma: no cover |
39 | 37 | |
40 | 38 | from . import errors |
39 | from ._fscompat import fsdecode, fsencode, fspath | |
40 | from ._url_tools import url_quote | |
41 | 41 | from .base import FS |
42 | from .copy import copy_modified_time | |
42 | 43 | from .enums import ResourceType |
43 | from ._fscompat import fsencode, fsdecode, fspath | |
44 | from .error_tools import convert_os_errors | |
45 | from .errors import FileExpected, NoURL | |
44 | 46 | from .info import Info |
47 | from .mode import Mode, validate_open_mode | |
45 | 48 | from .path import basename, dirname |
46 | 49 | from .permissions import Permissions |
47 | from .error_tools import convert_os_errors | |
48 | from .mode import Mode, validate_open_mode | |
49 | from .errors import FileExpected, NoURL | |
50 | from ._url_tools import url_quote | |
51 | 50 | |
52 | 51 | if typing.TYPE_CHECKING: |
53 | 52 | from typing import ( |
53 | IO, | |
54 | 54 | Any, |
55 | 55 | BinaryIO, |
56 | 56 | Collection, |
57 | 57 | Dict, |
58 | 58 | Iterator, |
59 | IO, | |
60 | 59 | List, |
61 | 60 | Optional, |
62 | 61 | SupportsInt, |
63 | 62 | Text, |
64 | 63 | Tuple, |
65 | 64 | ) |
65 | ||
66 | 66 | from .base import _OpendirFactory |
67 | 67 | from .info import RawInfo |
68 | 68 | from .subfs import SubFS |
79 | 79 | @six.python_2_unicode_compatible |
80 | 80 | class OSFS(FS): |
81 | 81 | """Create an OSFS. |
82 | ||
83 | Arguments: | |
84 | root_path (str or ~os.PathLike): An OS path or path-like object to | |
85 | the location on your HD you wish to manage. | |
86 | create (bool): Set to `True` to create the root directory if it | |
87 | does not already exist, otherwise the directory should exist | |
88 | prior to creating the ``OSFS`` instance (defaults to `False`). | |
89 | create_mode (int): The permissions that will be used to create | |
90 | the directory if ``create`` is `True` and the path doesn't | |
91 | exist, defaults to ``0o777``. | |
92 | expand_vars(bool): If `True` (the default) environment variables of | |
93 | the form $name or ${name} will be expanded. | |
94 | ||
95 | Raises: | |
96 | `fs.errors.CreateFailed`: If ``root_path`` does not | |
97 | exist, or could not be created. | |
98 | 82 | |
99 | 83 | Examples: |
100 | 84 | >>> current_directory_fs = OSFS('.') |
112 | 96 | ): |
113 | 97 | # type: (...) -> None |
114 | 98 | """Create an OSFS instance. |
99 | ||
100 | Arguments: | |
101 | root_path (str or ~os.PathLike): An OS path or path-like object | |
102 | to the location on your HD you wish to manage. | |
103 | create (bool): Set to `True` to create the root directory if it | |
104 | does not already exist, otherwise the directory should exist | |
105 | prior to creating the ``OSFS`` instance (defaults to `False`). | |
106 | create_mode (int): The permissions that will be used to create | |
107 | the directory if ``create`` is `True` and the path doesn't | |
108 | exist, defaults to ``0o777``. | |
109 | expand_vars(bool): If `True` (the default) environment variables | |
110 | of the form ``~``, ``$name`` or ``${name}`` will be expanded. | |
111 | ||
112 | Raises: | |
113 | `fs.errors.CreateFailed`: If ``root_path`` does not | |
114 | exist, or could not be created. | |
115 | ||
115 | 116 | """ |
116 | 117 | super(OSFS, self).__init__() |
117 | 118 | if isinstance(root_path, bytes): |
187 | 188 | |
188 | 189 | def _to_sys_path(self, path): |
189 | 190 | # type: (Text) -> bytes |
190 | """Convert a FS path to a path on the OS. | |
191 | """ | |
191 | """Convert a FS path to a path on the OS.""" | |
192 | 192 | sys_path = fsencode( |
193 | 193 | os.path.join(self._root_path, path.lstrip("/").replace("/", os.sep)) |
194 | 194 | ) |
197 | 197 | @classmethod |
198 | 198 | def _make_details_from_stat(cls, stat_result): |
199 | 199 | # type: (os.stat_result) -> Dict[Text, object] |
200 | """Make a *details* info dict from an `os.stat_result` object. | |
201 | """ | |
200 | """Make a *details* info dict from an `os.stat_result` object.""" | |
202 | 201 | details = { |
203 | 202 | "_write": ["accessed", "modified"], |
204 | 203 | "accessed": stat_result.st_atime, |
217 | 216 | @classmethod |
218 | 217 | def _make_access_from_stat(cls, stat_result): |
219 | 218 | # type: (os.stat_result) -> Dict[Text, object] |
220 | """Make an *access* info dict from an `os.stat_result` object. | |
221 | """ | |
219 | """Make an *access* info dict from an `os.stat_result` object.""" | |
222 | 220 | access = {} # type: Dict[Text, object] |
223 | 221 | access["permissions"] = Permissions(mode=stat_result.st_mode).dump() |
224 | 222 | access["gid"] = gid = stat_result.st_gid |
251 | 249 | @classmethod |
252 | 250 | def _get_type_from_stat(cls, _stat): |
253 | 251 | # type: (os.stat_result) -> ResourceType |
254 | """Get the resource type from an `os.stat_result` object. | |
255 | """ | |
252 | """Get the resource type from an `os.stat_result` object.""" | |
256 | 253 | st_mode = _stat.st_mode |
257 | 254 | st_type = stat.S_IFMT(st_mode) |
258 | 255 | return cls.STAT_TO_RESOURCE_TYPE.get(st_type, ResourceType.unknown) |
433 | 430 | if hasattr(errno, "ENOTSUP"): |
434 | 431 | _sendfile_error_codes.add(errno.ENOTSUP) |
435 | 432 | |
436 | def copy(self, src_path, dst_path, overwrite=False): | |
437 | # type: (Text, Text, bool) -> None | |
433 | def copy(self, src_path, dst_path, overwrite=False, preserve_time=False): | |
434 | # type: (Text, Text, bool, bool) -> None | |
438 | 435 | with self._lock: |
439 | 436 | # validate and canonicalise paths |
440 | 437 | _src_path, _dst_path = self._check_copy(src_path, dst_path, overwrite) |
454 | 451 | while sent > 0: |
455 | 452 | sent = sendfile(fd_dst, fd_src, offset, maxsize) |
456 | 453 | offset += sent |
454 | if preserve_time: | |
455 | copy_modified_time(self, src_path, self, dst_path) | |
457 | 456 | except OSError as e: |
458 | 457 | # the error is not a simple "sendfile not supported" error |
459 | 458 | if e.errno not in self._sendfile_error_codes: |
463 | 462 | |
464 | 463 | else: |
465 | 464 | |
466 | def copy(self, src_path, dst_path, overwrite=False): | |
467 | # type: (Text, Text, bool) -> None | |
465 | def copy(self, src_path, dst_path, overwrite=False, preserve_time=False): | |
466 | # type: (Text, Text, bool, bool) -> None | |
468 | 467 | with self._lock: |
469 | 468 | _src_path, _dst_path = self._check_copy(src_path, dst_path, overwrite) |
470 | 469 | shutil.copy2(self.getsyspath(_src_path), self.getsyspath(_dst_path)) |
477 | 476 | # type: (Text, Optional[Collection[Text]]) -> Iterator[Info] |
478 | 477 | self.check() |
479 | 478 | namespaces = namespaces or () |
479 | requires_stat = not {"details", "stat", "access"}.isdisjoint(namespaces) | |
480 | 480 | _path = self.validatepath(path) |
481 | 481 | if _WINDOWS_PLATFORM: |
482 | 482 | sys_path = os.path.join( |
485 | 485 | else: |
486 | 486 | sys_path = self._to_sys_path(_path) # type: ignore |
487 | 487 | with convert_os_errors("scandir", path, directory=True): |
488 | for dir_entry in scandir(sys_path): | |
489 | info = { | |
490 | "basic": { | |
491 | "name": fsdecode(dir_entry.name), | |
492 | "is_dir": dir_entry.is_dir(), | |
488 | scandir_iter = scandir(sys_path) | |
489 | try: | |
490 | for dir_entry in scandir_iter: | |
491 | info = { | |
492 | "basic": { | |
493 | "name": fsdecode(dir_entry.name), | |
494 | "is_dir": dir_entry.is_dir(), | |
495 | } | |
493 | 496 | } |
494 | } | |
495 | if "details" in namespaces: | |
496 | stat_result = dir_entry.stat() | |
497 | info["details"] = self._make_details_from_stat(stat_result) | |
498 | if "stat" in namespaces: | |
499 | stat_result = dir_entry.stat() | |
500 | info["stat"] = { | |
501 | k: getattr(stat_result, k) | |
502 | for k in dir(stat_result) | |
503 | if k.startswith("st_") | |
504 | } | |
505 | if "lstat" in namespaces: | |
506 | lstat_result = dir_entry.stat(follow_symlinks=False) | |
507 | info["lstat"] = { | |
508 | k: getattr(lstat_result, k) | |
509 | for k in dir(lstat_result) | |
510 | if k.startswith("st_") | |
511 | } | |
512 | if "link" in namespaces: | |
513 | info["link"] = self._make_link_info( | |
514 | os.path.join(sys_path, dir_entry.name) | |
515 | ) | |
516 | if "access" in namespaces: | |
517 | stat_result = dir_entry.stat() | |
518 | info["access"] = self._make_access_from_stat(stat_result) | |
519 | ||
520 | yield Info(info) | |
497 | if requires_stat: | |
498 | stat_result = dir_entry.stat() | |
499 | if "details" in namespaces: | |
500 | info["details"] = self._make_details_from_stat( | |
501 | stat_result | |
502 | ) | |
503 | if "stat" in namespaces: | |
504 | info["stat"] = { | |
505 | k: getattr(stat_result, k) | |
506 | for k in dir(stat_result) | |
507 | if k.startswith("st_") | |
508 | } | |
509 | if "access" in namespaces: | |
510 | info["access"] = self._make_access_from_stat( | |
511 | stat_result | |
512 | ) | |
513 | if "lstat" in namespaces: | |
514 | lstat_result = dir_entry.stat(follow_symlinks=False) | |
515 | info["lstat"] = { | |
516 | k: getattr(lstat_result, k) | |
517 | for k in dir(lstat_result) | |
518 | if k.startswith("st_") | |
519 | } | |
520 | if "link" in namespaces: | |
521 | info["link"] = self._make_link_info( | |
522 | os.path.join(sys_path, dir_entry.name) | |
523 | ) | |
524 | ||
525 | yield Info(info) | |
526 | finally: | |
527 | if sys.version_info >= (3, 6): | |
528 | scandir_iter.close() | |
521 | 529 | |
522 | 530 | else: |
523 | 531 | |
655 | 663 | if "details" in info: |
656 | 664 | details = info["details"] |
657 | 665 | if "accessed" in details or "modified" in details: |
658 | _accessed = typing.cast(int, details.get("accessed")) | |
659 | _modified = typing.cast(int, details.get("modified", _accessed)) | |
660 | accessed = int(_modified if _accessed is None else _accessed) | |
661 | modified = int(_modified) | |
666 | _accessed = typing.cast(float, details.get("accessed")) | |
667 | _modified = typing.cast(float, details.get("modified", _accessed)) | |
668 | accessed = float(_modified if _accessed is None else _accessed) | |
669 | modified = float(_modified) | |
662 | 670 | if accessed is not None or modified is not None: |
663 | 671 | with convert_os_errors("setinfo", path): |
664 | 672 | os.utime(sys_path, (accessed, modified)) |
672 | 680 | raise errors.InvalidCharsInPath( |
673 | 681 | path, |
674 | 682 | msg="path '{path}' could not be encoded for the filesystem (check LANG" |
675 | " env var); {error}".format(path=path, error=error), | |
683 | " env var); {error}".format(path=path, error=error), | |
676 | 684 | ) |
677 | 685 | return super(OSFS, self).validatepath(path) |
7 | 7 | |
8 | 8 | """ |
9 | 9 | |
10 | from __future__ import print_function | |
11 | from __future__ import unicode_literals | |
10 | from __future__ import print_function, unicode_literals | |
11 | ||
12 | import typing | |
12 | 13 | |
13 | 14 | import re |
14 | import typing | |
15 | 15 | |
16 | 16 | from .errors import IllegalBackReference |
17 | 17 | |
63 | 63 | >>> normpath("/foo//bar/frob/../baz") |
64 | 64 | '/foo/bar/baz' |
65 | 65 | >>> normpath("foo/../../bar") |
66 | Traceback (most recent call last) | |
66 | Traceback (most recent call last): | |
67 | 67 | ... |
68 | IllegalBackReference: path 'foo/../../bar' contains back-references outside of filesystem" | |
68 | fs.errors.IllegalBackReference: path 'foo/../../bar' contains back-references outside of filesystem | |
69 | 69 | |
70 | 70 | """ # noqa: E501 |
71 | 71 | if path in "/": |
85 | 85 | else: |
86 | 86 | components.append(component) |
87 | 87 | except IndexError: |
88 | # FIXME (@althonos): should be raised from the IndexError | |
88 | 89 | raise IllegalBackReference(path) |
89 | 90 | return prefix + "/".join(components) |
90 | 91 |
0 | 0 | """Abstract permissions container. |
1 | 1 | """ |
2 | 2 | |
3 | from __future__ import print_function | |
4 | from __future__ import unicode_literals | |
3 | from __future__ import print_function, unicode_literals | |
5 | 4 | |
6 | 5 | import typing |
7 | 6 | from typing import Iterable |
10 | 9 | |
11 | 10 | from ._typing import Text |
12 | 11 | |
13 | ||
14 | 12 | if typing.TYPE_CHECKING: |
15 | 13 | from typing import Iterator, List, Optional, Tuple, Type, Union |
16 | 14 | |
17 | 15 | |
18 | 16 | def make_mode(init): |
19 | 17 | # type: (Union[int, Iterable[Text], None]) -> int |
20 | """Make a mode integer from an initial value. | |
21 | """ | |
18 | """Make a mode integer from an initial value.""" | |
22 | 19 | return Permissions.get_mode(init) |
23 | 20 | |
24 | 21 | |
25 | 22 | class _PermProperty(object): |
26 | """Creates simple properties to get/set permissions. | |
27 | """ | |
23 | """Creates simple properties to get/set permissions.""" | |
28 | 24 | |
29 | 25 | def __init__(self, name): |
30 | 26 | # type: (Text) -> None |
50 | 46 | Permissions objects store information regarding the permissions |
51 | 47 | on a resource. It supports Linux permissions, but is generic enough |
52 | 48 | to manage permission information from almost any filesystem. |
53 | ||
54 | Arguments: | |
55 | names (list, optional): A list of permissions. | |
56 | mode (int, optional): A mode integer. | |
57 | user (str, optional): A triplet of *user* permissions, e.g. | |
58 | ``"rwx"`` or ``"r--"`` | |
59 | group (str, optional): A triplet of *group* permissions, e.g. | |
60 | ``"rwx"`` or ``"r--"`` | |
61 | other (str, optional): A triplet of *other* permissions, e.g. | |
62 | ``"rwx"`` or ``"r--"`` | |
63 | sticky (bool, optional): A boolean for the *sticky* bit. | |
64 | setuid (bool, optional): A boolean for the *setuid* bit. | |
65 | setguid (bool, optional): A boolean for the *setguid* bit. | |
66 | 49 | |
67 | 50 | Example: |
68 | 51 | >>> from fs.permissions import Permissions |
72 | 55 | >>> p.mode |
73 | 56 | 500 |
74 | 57 | >>> oct(p.mode) |
75 | '0764' | |
58 | '0o764' | |
76 | 59 | |
77 | 60 | """ |
78 | 61 | |
104 | 87 | setguid=None, # type: Optional[bool] |
105 | 88 | ): |
106 | 89 | # type: (...) -> None |
90 | """Create a new `Permissions` instance. | |
91 | ||
92 | Arguments: | |
93 | names (list, optional): A list of permissions. | |
94 | mode (int, optional): A mode integer. | |
95 | user (str, optional): A triplet of *user* permissions, e.g. | |
96 | ``"rwx"`` or ``"r--"`` | |
97 | group (str, optional): A triplet of *group* permissions, e.g. | |
98 | ``"rwx"`` or ``"r--"`` | |
99 | other (str, optional): A triplet of *other* permissions, e.g. | |
100 | ``"rwx"`` or ``"r--"`` | |
101 | sticky (bool, optional): A boolean for the *sticky* bit. | |
102 | setuid (bool, optional): A boolean for the *setuid* bit. | |
103 | setguid (bool, optional): A boolean for the *setguid* bit. | |
104 | ||
105 | """ | |
107 | 106 | if names is not None: |
108 | 107 | self._perms = set(names) |
109 | 108 | elif mode is not None: |
173 | 172 | @classmethod |
174 | 173 | def parse(cls, ls): |
175 | 174 | # type: (Text) -> Permissions |
176 | """Parse permissions in Linux notation. | |
177 | """ | |
175 | """Parse permissions in Linux notation.""" | |
178 | 176 | user = ls[:3] |
179 | 177 | group = ls[3:6] |
180 | 178 | other = ls[6:9] |
183 | 181 | @classmethod |
184 | 182 | def load(cls, permissions): |
185 | 183 | # type: (List[Text]) -> Permissions |
186 | """Load a serialized permissions object. | |
187 | """ | |
184 | """Load a serialized permissions object.""" | |
188 | 185 | return cls(names=permissions) |
189 | 186 | |
190 | 187 | @classmethod |
221 | 218 | @classmethod |
222 | 219 | def get_mode(cls, init): |
223 | 220 | # type: (Union[int, Iterable[Text], None]) -> int |
224 | """Convert an initial value to a mode integer. | |
225 | """ | |
221 | """Convert an initial value to a mode integer.""" | |
226 | 222 | return cls.create(init).mode |
227 | 223 | |
228 | 224 | def copy(self): |
229 | 225 | # type: () -> Permissions |
230 | """Make a copy of this permissions object. | |
231 | """ | |
226 | """Make a copy of this permissions object.""" | |
232 | 227 | return Permissions(names=list(self._perms)) |
233 | 228 | |
234 | 229 | def dump(self): |
235 | 230 | # type: () -> List[Text] |
236 | """Get a list suitable for serialization. | |
237 | """ | |
231 | """Get a list suitable for serialization.""" | |
238 | 232 | return sorted(self._perms) |
239 | 233 | |
240 | 234 | def as_str(self): |
241 | 235 | # type: () -> Text |
242 | """Get a Linux-style string representation of permissions. | |
243 | """ | |
236 | """Get a Linux-style string representation of permissions.""" | |
244 | 237 | perms = [ |
245 | 238 | c if name in self._perms else "-" |
246 | 239 | for name, c in zip(self._LINUX_PERMS_NAMES[-9:], "rwxrwxrwx") |
258 | 251 | @property |
259 | 252 | def mode(self): |
260 | 253 | # type: () -> int |
261 | """`int`: mode integer. | |
262 | """ | |
254 | """`int`: mode integer.""" | |
263 | 255 | mode = 0 |
264 | 256 | for name, mask in self._LINUX_PERMS: |
265 | 257 | if name in self._perms: |
0 | 0 | """Manage a directory in a *parent* filesystem. |
1 | 1 | """ |
2 | 2 | |
3 | from __future__ import print_function | |
4 | from __future__ import unicode_literals | |
3 | from __future__ import print_function, unicode_literals | |
5 | 4 | |
6 | 5 | import typing |
7 | 6 | |
8 | 7 | import six |
9 | 8 | |
9 | from .path import abspath, join, normpath, relpath | |
10 | 10 | from .wrapfs import WrapFS |
11 | from .path import abspath, join, normpath, relpath | |
12 | 11 | |
13 | 12 | if typing.TYPE_CHECKING: |
13 | from typing import Text, Tuple | |
14 | ||
14 | 15 | from .base import FS # noqa: F401 |
15 | from typing import Text, Tuple | |
16 | 16 | |
17 | 17 | |
18 | 18 | _F = typing.TypeVar("_F", bound="FS", covariant=True) |
20 | 20 | |
21 | 21 | @six.python_2_unicode_compatible |
22 | 22 | class SubFS(WrapFS[_F], typing.Generic[_F]): |
23 | """A sub-directory on another filesystem. | |
23 | """A sub-directory on a parent filesystem. | |
24 | 24 | |
25 | 25 | A SubFS is a filesystem object that maps to a sub-directory of |
26 | 26 | another filesystem. This is the object that is returned by |
28 | 28 | |
29 | 29 | """ |
30 | 30 | |
31 | def __init__(self, parent_fs, path): | |
31 | def __init__(self, parent_fs, path): # noqa: D107 | |
32 | 32 | # type: (_F, Text) -> None |
33 | 33 | super(SubFS, self).__init__(parent_fs) |
34 | 34 | self._sub_dir = abspath(normpath(path)) |
54 | 54 | |
55 | 55 | |
56 | 56 | class ClosingSubFS(SubFS[_F], typing.Generic[_F]): |
57 | """A version of `SubFS` which closes its parent when closed. | |
58 | """ | |
57 | """A version of `SubFS` which closes its parent when closed.""" | |
59 | 58 | |
60 | 59 | def close(self): |
61 | 60 | # type: () -> None |
0 | 0 | """Manage the filesystem in a Tar archive. |
1 | 1 | """ |
2 | 2 | |
3 | from __future__ import print_function | |
4 | from __future__ import unicode_literals | |
3 | from __future__ import print_function, unicode_literals | |
4 | ||
5 | import typing | |
6 | from typing import IO, cast | |
5 | 7 | |
6 | 8 | import os |
9 | import six | |
7 | 10 | import tarfile |
8 | import typing | |
9 | 11 | from collections import OrderedDict |
10 | from typing import cast, IO | |
11 | ||
12 | import six | |
13 | 12 | |
14 | 13 | from . import errors |
14 | from ._url_tools import url_quote | |
15 | 15 | from .base import FS |
16 | 16 | from .compress import write_tar |
17 | 17 | from .enums import ResourceType |
19 | 19 | from .info import Info |
20 | 20 | from .iotools import RawWrapper |
21 | 21 | from .opener import open_fs |
22 | from .path import basename, frombase, isbase, normpath, parts, relpath | |
22 | 23 | from .permissions import Permissions |
23 | from ._url_tools import url_quote | |
24 | from .path import relpath, basename, isbase, normpath, parts, frombase | |
25 | 24 | from .wrapfs import WrapFS |
26 | 25 | |
27 | 26 | if typing.TYPE_CHECKING: |
28 | from tarfile import TarInfo | |
29 | 27 | from typing import ( |
30 | 28 | Any, |
31 | 29 | BinaryIO, |
37 | 35 | Tuple, |
38 | 36 | Union, |
39 | 37 | ) |
38 | ||
39 | from tarfile import TarInfo | |
40 | ||
40 | 41 | from .info import RawInfo |
41 | 42 | from .subfs import SubFS |
42 | 43 | |
51 | 52 | def _get_member_info(member, encoding): |
52 | 53 | # type: (TarInfo, Text) -> Dict[Text, object] |
53 | 54 | return member.get_info(encoding, None) |
54 | ||
55 | 55 | |
56 | 56 | else: |
57 | 57 | |
65 | 65 | class TarFS(WrapFS): |
66 | 66 | """Read and write tar files. |
67 | 67 | |
68 | There are two ways to open a TarFS for the use cases of reading | |
68 | There are two ways to open a `TarFS` for the use cases of reading | |
69 | 69 | a tar file, and creating a new one. |
70 | 70 | |
71 | If you open the TarFS with ``write`` set to `False` (the | |
71 | If you open the `TarFS` with ``write`` set to `False` (the | |
72 | 72 | default), then the filesystem will be a read only filesystem which |
73 | 73 | maps to the files and directories within the tar file. Files are |
74 | 74 | decompressed on the fly when you open them. |
78 | 78 | with TarFS('foo.tar.gz') as tar_fs: |
79 | 79 | readme = tar_fs.readtext('readme.txt') |
80 | 80 | |
81 | If you open the TarFS with ``write`` set to `True`, then the TarFS | |
81 | If you open the TarFS with ``write`` set to `True`, then the `TarFS` | |
82 | 82 | will be an empty temporary filesystem. Any files / directories you
83 | create in the TarFS will be written in to a tar file when the TarFS | |
83 | create in the `TarFS` will be written into a tar file when the `TarFS`
84 | 84 | is closed. The compression is set from the new file name but may be |
85 | 85 | set manually with the ``compression`` argument. |
86 | 86 | |
99 | 99 | use default (`False`) to read an existing tar file. |
100 | 100 | compression (str, optional): Compression to use (one of the formats |
101 | 101 | supported by `tarfile`: ``xz``, ``gz``, ``bz2``, or `None`). |
102 | temp_fs (str): An FS URL for the temporary filesystem | |
103 | used to store data prior to tarring. | |
102 | temp_fs (str): An FS URL or an FS instance to use to store | |
103 | data prior to tarring. Defaults to creating a new | |
104 | `~fs.tempfs.TempFS`. | |
104 | 105 | |
105 | 106 | """ |
106 | 107 | |
117 | 118 | write=False, # type: bool |
118 | 119 | compression=None, # type: Optional[Text] |
119 | 120 | encoding="utf-8", # type: Text |
120 | temp_fs="temp://__tartemp__", # type: Text | |
121 | temp_fs="temp://__tartemp__", # type: Union[Text, FS] | |
121 | 122 | ): |
122 | 123 | # type: (...) -> FS |
123 | 124 | if isinstance(file, (six.text_type, six.binary_type)): |
149 | 150 | compression=None, # type: Optional[Text] |
150 | 151 | encoding="utf-8", # type: Text |
151 | 152 | temp_fs="temp://__tartemp__", # type: Text |
152 | ): | |
153 | ): # noqa: D107 | |
153 | 154 | # type: (...) -> None |
154 | 155 | pass |
155 | 156 | |
156 | 157 | |
157 | 158 | @six.python_2_unicode_compatible |
158 | 159 | class WriteTarFS(WrapFS): |
159 | """A writable tar file. | |
160 | """ | |
160 | """A writable tar file.""" | |
161 | 161 | |
162 | 162 | def __init__( |
163 | 163 | self, |
164 | 164 | file, # type: Union[Text, BinaryIO] |
165 | 165 | compression=None, # type: Optional[Text] |
166 | 166 | encoding="utf-8", # type: Text |
167 | temp_fs="temp://__tartemp__", # type: Text | |
168 | ): | |
167 | temp_fs="temp://__tartemp__", # type: Union[Text, FS] | |
168 | ): # noqa: D107 | |
169 | 169 | # type: (...) -> None |
170 | 170 | self._file = file # type: Union[Text, BinaryIO] |
171 | 171 | self.compression = compression |
221 | 221 | |
222 | 222 | Note: |
223 | 223 | This is called automatically when the TarFS is closed. |
224 | ||
224 | 225 | """ |
225 | 226 | if not self.isclosed(): |
226 | 227 | write_tar( |
233 | 234 | |
234 | 235 | @six.python_2_unicode_compatible |
235 | 236 | class ReadTarFS(FS): |
236 | """A readable tar file. | |
237 | """ | |
237 | """A readable tar file.""" | |
238 | 238 | |
239 | 239 | _meta = { |
240 | 240 | "case_insensitive": True, |
259 | 259 | } |
260 | 260 | |
261 | 261 | @errors.CreateFailed.catch_all |
262 | def __init__(self, file, encoding="utf-8"): | |
262 | def __init__(self, file, encoding="utf-8"): # noqa: D107 | |
263 | 263 | # type: (Union[Text, BinaryIO], Text) -> None |
264 | 264 | super(ReadTarFS, self).__init__() |
265 | 265 | self._file = file |
8 | 8 | |
9 | 9 | """ |
10 | 10 | |
11 | from __future__ import print_function | |
12 | from __future__ import unicode_literals | |
11 | from __future__ import print_function, unicode_literals | |
12 | ||
13 | import typing | |
13 | 14 | |
14 | 15 | import shutil |
16 | import six | |
15 | 17 | import tempfile |
16 | import typing | |
17 | ||
18 | import six | |
19 | 18 | |
20 | 19 | from . import errors |
21 | 20 | from .osfs import OSFS |
28 | 27 | class TempFS(OSFS): |
29 | 28 | """A temporary filesystem on the OS. |
30 | 29 | |
31 | Arguments: | |
32 | identifier (str): A string to distinguish the directory within | |
33 | the OS temp location, used as part of the directory name. | |
34 | temp_dir (str, optional): An OS path to your temp directory | |
35 | (leave as `None` to auto-detect) | |
36 | auto_clean (bool): If `True` (the default), the directory | |
37 | contents will be wiped on close. | |
38 | ignore_clean_errors (bool): If `True` (the default), any errors | |
39 | in the clean process will be suppressed. If `False`, they | |
40 | will be raised. | |
30 | Temporary filesystems are created using the `tempfile.mkdtemp` | |
31 | function to obtain a temporary folder in an OS-specific location. | |
32 | You can provide an alternative location with the ``temp_dir`` | |
33 | argument of the constructor. | |
34 | ||
35 | Examples: | |
36 | Create with the constructor:: | |
37 | ||
38 | >>> from fs.tempfs import TempFS | |
39 | >>> tmp_fs = TempFS() | |
40 | ||
41 | Or via an FS URL:: | |
42 | ||
43 | >>> import fs | |
44 | >>> tmp_fs = fs.open_fs("temp://") | |
45 | ||
46 | Use a specific identifier for the temporary folder to better | |
47 | illustrate its purpose:: | |
48 | ||
49 | >>> named_tmp_fs = fs.open_fs("temp://local_copy") | |
50 | >>> named_tmp_fs = TempFS(identifier="local_copy") | |
41 | 51 | |
42 | 52 | """ |
43 | 53 | |
49 | 59 | ignore_clean_errors=True, # type: bool |
50 | 60 | ): |
51 | 61 | # type: (...) -> None |
62 | """Create a new `TempFS` instance. | |
63 | ||
64 | Arguments: | |
65 | identifier (str): A string to distinguish the directory within | |
66 | the OS temp location, used as part of the directory name. | |
67 | temp_dir (str, optional): An OS path to your temp directory | |
68 | (leave as `None` to auto-detect). | |
69 | auto_clean (bool): If `True` (the default), the directory | |
70 | contents will be wiped on close. | |
71 | ignore_clean_errors (bool): If `True` (the default), any errors | |
72 | in the clean process will be suppressed. If `False`, they | |
73 | will be raised. | |
74 | ||
75 | """ | |
52 | 76 | self.identifier = identifier |
53 | 77 | self._auto_clean = auto_clean |
54 | 78 | self._ignore_clean_errors = ignore_clean_errors |
69 | 93 | |
70 | 94 | def close(self): |
71 | 95 | # type: () -> None |
96 | """Close the filesystem and release any resources. | |
97 | ||
98 | It is important to call this method when you have finished | |
99 | working with the filesystem. Some filesystems may not finalize | |
100 | changes until they are closed (archives for example). You may | |
101 | call this method explicitly (it is safe to call close multiple | |
102 | times), or you can use the filesystem as a context manager to | |
103 | automatically close. | |
104 | ||
105 | Hint: | |
106 | Depending on the value of ``auto_clean`` passed when creating | |
107 | the `TempFS`, the underlying temporary folder may be removed | |
108 | or not. | |
109 | ||
110 | Example: | |
111 | >>> tmp_fs = TempFS(auto_clean=False) | |
112 | >>> syspath = tmp_fs.getsyspath("/") | |
113 | >>> tmp_fs.close() | |
114 | >>> os.path.exists(syspath) | |
115 | True | |
116 | ||
117 | """ | |
72 | 118 | if self._auto_clean: |
73 | 119 | self.clean() |
74 | 120 | super(TempFS, self).close() |
75 | 121 | |
76 | 122 | def clean(self): |
77 | 123 | # type: () -> None |
78 | """Clean (delete) temporary files created by this filesystem. | |
79 | """ | |
124 | """Clean (delete) temporary files created by this filesystem.""" | |
80 | 125 | if self._cleaned: |
81 | 126 | return |
82 | 127 |
4 | 4 | |
5 | 5 | """ |
6 | 6 | |
7 | from __future__ import absolute_import | |
8 | from __future__ import unicode_literals | |
9 | ||
10 | from datetime import datetime | |
7 | from __future__ import absolute_import, unicode_literals | |
8 | ||
11 | 9 | import io |
12 | 10 | import itertools |
13 | 11 | import json |
14 | import math | |
15 | 12 | import os |
13 | import six | |
16 | 14 | import time |
17 | 15 | import unittest |
16 | import warnings | |
17 | from datetime import datetime | |
18 | from six import text_type | |
18 | 19 | |
19 | 20 | import fs.copy |
20 | 21 | import fs.move |
21 | from fs import ResourceType, Seek | |
22 | from fs import errors | |
23 | from fs import walk | |
24 | from fs import glob | |
22 | from fs import ResourceType, Seek, errors, glob, walk | |
25 | 23 | from fs.opener import open_fs |
26 | 24 | from fs.subfs import ClosingSubFS, SubFS |
27 | ||
28 | import pytz | |
29 | import six | |
30 | from six import text_type | |
31 | 25 | |
32 | 26 | if six.PY2: |
33 | 27 | import collections as collections_abc |
34 | 28 | else: |
35 | 29 | import collections.abc as collections_abc |
30 | ||
31 | try: | |
32 | from datetime import timezone | |
33 | except ImportError: | |
34 | from ._tzcompat import timezone # type: ignore | |
36 | 35 | |
37 | 36 | |
38 | 37 | UNICODE_TEXT = """ |
244 | 243 | |
245 | 244 | |
246 | 245 | class FSTestCases(object): |
247 | """Basic FS tests. | |
248 | """ | |
246 | """Basic FS tests.""" | |
247 | ||
248 | data1 = b"foo" * 256 * 1024 | |
249 | data2 = b"bar" * 2 * 256 * 1024 | |
250 | data3 = b"baz" * 3 * 256 * 1024 | |
251 | data4 = b"egg" * 7 * 256 * 1024 | |
249 | 252 | |
250 | 253 | def make_fs(self): |
251 | """Return an FS instance. | |
252 | ||
253 | """ | |
254 | """Return an FS instance.""" | |
254 | 255 | raise NotImplementedError("implement me") |
255 | 256 | |
256 | 257 | def destroy_fs(self, fs): |
287 | 288 | |
288 | 289 | """ |
289 | 290 | self.assertFalse(self.fs.exists(path)) |
291 | ||
292 | def assert_isempty(self, path): | |
293 | """Assert a path is an empty directory. | |
294 | ||
295 | Arguments: | |
296 | path (str): A path on the filesystem. | |
297 | ||
298 | """ | |
299 | self.assertTrue(self.fs.isempty(path)) | |
290 | 300 | |
291 | 301 | def assert_isfile(self, path): |
292 | 302 | """Assert a path is a file. |
429 | 439 | self.fs.hasurl("a/b/c/foo/bar") |
430 | 440 | |
431 | 441 | def test_geturl_purpose(self): |
432 | """Check an unknown purpose raises a NoURL error. | |
433 | """ | |
442 | """Check an unknown purpose raises a NoURL error.""" | |
434 | 443 | self.fs.create("foo") |
435 | 444 | with self.assertRaises(errors.NoURL): |
436 | 445 | self.fs.geturl("foo", purpose="__nosuchpurpose__") |
437 | 446 | |
438 | 447 | def test_validatepath(self): |
439 | """Check validatepath returns an absolute path. | |
440 | """ | |
448 | """Check validatepath returns an absolute path.""" | |
441 | 449 | path = self.fs.validatepath("foo") |
442 | 450 | self.assertEqual(path, "/foo") |
443 | 451 | |
455 | 463 | root_info = self.fs.getinfo("/") |
456 | 464 | self.assertEqual(root_info.name, "") |
457 | 465 | self.assertTrue(root_info.is_dir) |
466 | self.assertIn("basic", root_info.namespaces) | |
458 | 467 | |
459 | 468 | # Make a file of known size |
460 | 469 | self.fs.writebytes("foo", b"bar") |
462 | 471 | |
463 | 472 | # Check basic namespace |
464 | 473 | info = self.fs.getinfo("foo").raw |
474 | self.assertIn("basic", info) | |
465 | 475 | self.assertIsInstance(info["basic"]["name"], text_type) |
466 | 476 | self.assertEqual(info["basic"]["name"], "foo") |
467 | 477 | self.assertFalse(info["basic"]["is_dir"]) |
468 | 478 | |
469 | 479 | # Check basic namespace dir |
470 | 480 | info = self.fs.getinfo("dir").raw |
481 | self.assertIn("basic", info) | |
471 | 482 | self.assertEqual(info["basic"]["name"], "dir") |
472 | 483 | self.assertTrue(info["basic"]["is_dir"]) |
473 | 484 | |
474 | 485 | # Get the info |
475 | 486 | info = self.fs.getinfo("foo", namespaces=["details"]).raw |
487 | self.assertIn("basic", info) | |
476 | 488 | self.assertIsInstance(info, dict) |
477 | 489 | self.assertEqual(info["details"]["size"], 3) |
478 | 490 | self.assertEqual(info["details"]["type"], int(ResourceType.file)) |
883 | 895 | self.assertFalse(f.closed) |
884 | 896 | self.assertTrue(f.closed) |
885 | 897 | |
886 | iter_lines = iter(self.fs.open("text")) | |
887 | self.assertEqual(next(iter_lines), "Hello\n") | |
898 | with self.fs.open("text") as f: | |
899 | iter_lines = iter(f) | |
900 | self.assertEqual(next(iter_lines), "Hello\n") | |
888 | 901 | |
889 | 902 | with self.fs.open("unicode", "w") as f: |
890 | 903 | self.assertEqual(12, f.write("Héllo\nWörld\n")) |
1098 | 1111 | self.fs.removedir("foo/bar") |
1099 | 1112 | |
1100 | 1113 | def test_removetree(self): |
1114 | self.fs.makedirs("spam") | |
1101 | 1115 | self.fs.makedirs("foo/bar/baz") |
1102 | 1116 | self.fs.makedirs("foo/egg") |
1103 | 1117 | self.fs.makedirs("foo/a/b/c/d/e") |
1113 | 1127 | |
1114 | 1128 | self.fs.removetree("foo") |
1115 | 1129 | self.assert_not_exists("foo") |
1130 | self.assert_exists("spam") | |
1131 | ||
1132 | # Errors on files | |
1133 | self.fs.create("bar") | |
1134 | with self.assertRaises(errors.DirectoryExpected): | |
1135 | self.fs.removetree("bar") | |
1136 | ||
1137 | # Errors on non-existing path | |
1138 | with self.assertRaises(errors.ResourceNotFound): | |
1139 | self.fs.removetree("foofoo") | |
1140 | ||
1141 | def test_removetree_root(self): | |
1142 | self.fs.makedirs("foo/bar/baz") | |
1143 | self.fs.makedirs("foo/egg") | |
1144 | self.fs.makedirs("foo/a/b/c/d/e") | |
1145 | self.fs.create("foo/egg.txt") | |
1146 | self.fs.create("foo/bar/egg.bin") | |
1147 | self.fs.create("foo/a/b/c/1.txt") | |
1148 | self.fs.create("foo/a/b/c/2.txt") | |
1149 | self.fs.create("foo/a/b/c/3.txt") | |
1150 | ||
1151 | self.assert_exists("foo/egg.txt") | |
1152 | self.assert_exists("foo/bar/egg.bin") | |
1153 | ||
1154 | # removetree("/") removes the contents, | |
1155 | # but not the root folder itself | |
1156 | self.fs.removetree("/") | |
1157 | self.assert_exists("/") | |
1158 | self.assert_isempty("/") | |
1159 | ||
1160 | # we check we can create a file after | |
1161 | # to catch potential issues with the | |
1162 | # root folder being deleted on faulty | |
1163 | # implementations | |
1164 | self.fs.create("egg") | |
1165 | self.fs.makedir("yolk") | |
1166 | self.assert_exists("egg") | |
1167 | self.assert_exists("yolk") | |
1116 | 1168 | |
1117 | 1169 | def test_setinfo(self): |
1118 | 1170 | self.fs.create("birthday.txt") |
1119 | now = math.floor(time.time()) | |
1171 | now = time.time() | |
1120 | 1172 | |
1121 | 1173 | change_info = {"details": {"accessed": now + 60, "modified": now + 60 * 60}} |
1122 | 1174 | self.fs.setinfo("birthday.txt", change_info) |
1123 | new_info = self.fs.getinfo("birthday.txt", namespaces=["details"]).raw | |
1124 | if "accessed" in new_info.get("_write", []): | |
1125 | self.assertEqual(new_info["details"]["accessed"], now + 60) | |
1126 | if "modified" in new_info.get("_write", []): | |
1127 | self.assertEqual(new_info["details"]["modified"], now + 60 * 60) | |
1175 | new_info = self.fs.getinfo("birthday.txt", namespaces=["details"]) | |
1176 | can_write_acccess = new_info.is_writeable("details", "accessed") | |
1177 | can_write_modified = new_info.is_writeable("details", "modified") | |
1178 | if can_write_acccess: | |
1179 | self.assertAlmostEqual( | |
1180 | new_info.get("details", "accessed"), now + 60, places=4 | |
1181 | ) | |
1182 | if can_write_modified: | |
1183 | self.assertAlmostEqual( | |
1184 | new_info.get("details", "modified"), now + 60 * 60, places=4 | |
1185 | ) | |
1128 | 1186 | |
1129 | 1187 | with self.assertRaises(errors.ResourceNotFound): |
1130 | 1188 | self.fs.setinfo("nothing", {}) |
1133 | 1191 | self.fs.create("birthday.txt") |
1134 | 1192 | self.fs.settimes("birthday.txt", accessed=datetime(2016, 7, 5)) |
1135 | 1193 | info = self.fs.getinfo("birthday.txt", namespaces=["details"]) |
1136 | writeable = info.get("details", "_write", []) | |
1137 | if "accessed" in writeable: | |
1138 | self.assertEqual(info.accessed, datetime(2016, 7, 5, tzinfo=pytz.UTC)) | |
1139 | if "modified" in writeable: | |
1140 | self.assertEqual(info.modified, datetime(2016, 7, 5, tzinfo=pytz.UTC)) | |
1194 | can_write_acccess = info.is_writeable("details", "accessed") | |
1195 | can_write_modified = info.is_writeable("details", "modified") | |
1196 | if can_write_acccess: | |
1197 | self.assertEqual(info.accessed, datetime(2016, 7, 5, tzinfo=timezone.utc)) | |
1198 | if can_write_modified: | |
1199 | self.assertEqual(info.modified, datetime(2016, 7, 5, tzinfo=timezone.utc)) | |
1141 | 1200 | |
1142 | 1201 | def test_touch(self): |
1143 | 1202 | self.fs.touch("new.txt") |
1145 | 1204 | self.fs.settimes("new.txt", datetime(2016, 7, 5)) |
1146 | 1205 | info = self.fs.getinfo("new.txt", namespaces=["details"]) |
1147 | 1206 | if info.is_writeable("details", "accessed"): |
1148 | self.assertEqual(info.accessed, datetime(2016, 7, 5, tzinfo=pytz.UTC)) | |
1207 | self.assertEqual(info.accessed, datetime(2016, 7, 5, tzinfo=timezone.utc)) | |
1149 | 1208 | now = time.time() |
1150 | 1209 | self.fs.touch("new.txt") |
1151 | 1210 | accessed = self.fs.getinfo("new.txt", namespaces=["details"]).raw[ |
1195 | 1254 | |
1196 | 1255 | def _test_upload(self, workers): |
1197 | 1256 | """Test fs.copy with varying number of worker threads.""" |
1198 | data1 = b"foo" * 256 * 1024 | |
1199 | data2 = b"bar" * 2 * 256 * 1024 | |
1200 | data3 = b"baz" * 3 * 256 * 1024 | |
1201 | data4 = b"egg" * 7 * 256 * 1024 | |
1202 | ||
1203 | 1257 | with open_fs("temp://") as src_fs: |
1204 | src_fs.writebytes("foo", data1) | |
1205 | src_fs.writebytes("bar", data2) | |
1206 | src_fs.makedir("dir1").writebytes("baz", data3) | |
1207 | src_fs.makedirs("dir2/dir3").writebytes("egg", data4) | |
1258 | src_fs.writebytes("foo", self.data1) | |
1259 | src_fs.writebytes("bar", self.data2) | |
1260 | src_fs.makedir("dir1").writebytes("baz", self.data3) | |
1261 | src_fs.makedirs("dir2/dir3").writebytes("egg", self.data4) | |
1208 | 1262 | dst_fs = self.fs |
1209 | 1263 | fs.copy.copy_fs(src_fs, dst_fs, workers=workers) |
1210 | self.assertEqual(dst_fs.readbytes("foo"), data1) | |
1211 | self.assertEqual(dst_fs.readbytes("bar"), data2) | |
1212 | self.assertEqual(dst_fs.readbytes("dir1/baz"), data3) | |
1213 | self.assertEqual(dst_fs.readbytes("dir2/dir3/egg"), data4) | |
1264 | self.assertEqual(dst_fs.readbytes("foo"), self.data1) | |
1265 | self.assertEqual(dst_fs.readbytes("bar"), self.data2) | |
1266 | self.assertEqual(dst_fs.readbytes("dir1/baz"), self.data3) | |
1267 | self.assertEqual(dst_fs.readbytes("dir2/dir3/egg"), self.data4) | |
1214 | 1268 | |
1215 | 1269 | def test_upload_0(self): |
1216 | 1270 | self._test_upload(0) |
1226 | 1280 | |
1227 | 1281 | def _test_download(self, workers): |
1228 | 1282 | """Test fs.copy with varying number of worker threads.""" |
1229 | data1 = b"foo" * 256 * 1024 | |
1230 | data2 = b"bar" * 2 * 256 * 1024 | |
1231 | data3 = b"baz" * 3 * 256 * 1024 | |
1232 | data4 = b"egg" * 7 * 256 * 1024 | |
1233 | 1283 | src_fs = self.fs |
1234 | 1284 | with open_fs("temp://") as dst_fs: |
1235 | src_fs.writebytes("foo", data1) | |
1236 | src_fs.writebytes("bar", data2) | |
1237 | src_fs.makedir("dir1").writebytes("baz", data3) | |
1238 | src_fs.makedirs("dir2/dir3").writebytes("egg", data4) | |
1285 | src_fs.writebytes("foo", self.data1) | |
1286 | src_fs.writebytes("bar", self.data2) | |
1287 | src_fs.makedir("dir1").writebytes("baz", self.data3) | |
1288 | src_fs.makedirs("dir2/dir3").writebytes("egg", self.data4) | |
1239 | 1289 | fs.copy.copy_fs(src_fs, dst_fs, workers=workers) |
1240 | self.assertEqual(dst_fs.readbytes("foo"), data1) | |
1241 | self.assertEqual(dst_fs.readbytes("bar"), data2) | |
1242 | self.assertEqual(dst_fs.readbytes("dir1/baz"), data3) | |
1243 | self.assertEqual(dst_fs.readbytes("dir2/dir3/egg"), data4) | |
1290 | self.assertEqual(dst_fs.readbytes("foo"), self.data1) | |
1291 | self.assertEqual(dst_fs.readbytes("bar"), self.data2) | |
1292 | self.assertEqual(dst_fs.readbytes("dir1/baz"), self.data3) | |
1293 | self.assertEqual(dst_fs.readbytes("dir2/dir3/egg"), self.data4) | |
1244 | 1294 | |
1245 | 1295 | def test_download_0(self): |
1246 | 1296 | self._test_download(0) |
1493 | 1543 | with self.fs.open("foo", "rb") as f: |
1494 | 1544 | data = f.read() |
1495 | 1545 | self.assertEqual(data, b"bar") |
1546 | ||
1547 | # upload to non-existing path (/spam/eggs) | |
1548 | with self.assertRaises(errors.ResourceNotFound): | |
1549 | self.fs.upload("/spam/eggs", bytes_file) | |
1496 | 1550 | |
1497 | 1551 | def test_upload_chunk_size(self): |
1498 | 1552 | test_data = b"bar" * 128 |
1589 | 1643 | self.assert_bytes("foo2", b"help") |
1590 | 1644 | |
1591 | 1645 | # Test __del__ doesn't throw traceback |
1592 | f = self.fs.open("foo2", "r") | |
1593 | del f | |
1646 | with warnings.catch_warnings(): | |
1647 | warnings.simplefilter("ignore") | |
1648 | f = self.fs.open("foo2", "r") | |
1649 | del f | |
1594 | 1650 | |
1595 | 1651 | with self.assertRaises(IOError): |
1596 | 1652 | with self.fs.open("foo2", "r") as f: |
1680 | 1736 | self._test_copy_dir("temp://") |
1681 | 1737 | self._test_copy_dir_write("temp://") |
1682 | 1738 | |
1739 | def test_move_dir_same_fs(self): | |
1740 | self.fs.makedirs("foo/bar/baz") | |
1741 | self.fs.makedir("egg") | |
1742 | self.fs.writetext("top.txt", "Hello, World") | |
1743 | self.fs.writetext("/foo/bar/baz/test.txt", "Goodbye, World") | |
1744 | ||
1745 | fs.move.move_dir(self.fs, "foo", self.fs, "foo2") | |
1746 | ||
1747 | expected = {"/egg", "/foo2", "/foo2/bar", "/foo2/bar/baz"} | |
1748 | self.assertEqual(set(walk.walk_dirs(self.fs)), expected) | |
1749 | self.assert_text("top.txt", "Hello, World") | |
1750 | self.assert_text("/foo2/bar/baz/test.txt", "Goodbye, World") | |
1751 | ||
1752 | self.assertEqual(sorted(self.fs.listdir("/")), ["egg", "foo2", "top.txt"]) | |
1753 | self.assertEqual( | |
1754 | sorted(x.name for x in self.fs.scandir("/")), ["egg", "foo2", "top.txt"] | |
1755 | ) | |
1756 | ||
1683 | 1757 | def _test_move_dir_write(self, protocol): |
1684 | 1758 | # Test moving to this filesystem from another. |
1685 | 1759 | other_fs = open_fs(protocol) |
1702 | 1776 | def test_move_dir_temp(self): |
1703 | 1777 | self._test_move_dir_write("temp://") |
1704 | 1778 | |
1705 | def test_move_same_fs(self): | |
1706 | self.fs.makedirs("foo/bar/baz") | |
1707 | self.fs.makedir("egg") | |
1708 | self.fs.writetext("top.txt", "Hello, World") | |
1709 | self.fs.writetext("/foo/bar/baz/test.txt", "Goodbye, World") | |
1710 | ||
1711 | fs.move.move_dir(self.fs, "foo", self.fs, "foo2") | |
1712 | ||
1713 | expected = {"/egg", "/foo2", "/foo2/bar", "/foo2/bar/baz"} | |
1714 | self.assertEqual(set(walk.walk_dirs(self.fs)), expected) | |
1715 | self.assert_text("top.txt", "Hello, World") | |
1716 | self.assert_text("/foo2/bar/baz/test.txt", "Goodbye, World") | |
1717 | ||
1718 | 1779 | def test_move_file_same_fs(self): |
1719 | 1780 | text = "Hello, World" |
1720 | 1781 | self.fs.makedir("foo").writetext("test.txt", text) |
1723 | 1784 | fs.move.move_file(self.fs, "foo/test.txt", self.fs, "foo/test2.txt") |
1724 | 1785 | self.assert_not_exists("foo/test.txt") |
1725 | 1786 | self.assert_text("foo/test2.txt", text) |
1787 | ||
1788 | self.assertEqual(self.fs.listdir("foo"), ["test2.txt"]) | |
1789 | self.assertEqual(next(self.fs.scandir("foo")).name, "test2.txt") | |
1726 | 1790 | |
1727 | 1791 | def _test_move_file(self, protocol): |
1728 | 1792 | other_fs = open_fs(protocol) |
0 | 0 | """Time related tools. |
1 | 1 | """ |
2 | 2 | |
3 | from __future__ import print_function | |
4 | from __future__ import unicode_literals | |
3 | from __future__ import print_function, unicode_literals | |
4 | ||
5 | import typing | |
5 | 6 | |
6 | 7 | from calendar import timegm |
7 | 8 | from datetime import datetime |
8 | from pytz import UTC, timezone | |
9 | 9 | |
10 | try: | |
11 | from datetime import timezone | |
12 | except ImportError: | |
13 | from ._tzcompat import timezone # type: ignore | |
10 | 14 | |
11 | utcfromtimestamp = datetime.utcfromtimestamp | |
12 | utclocalize = UTC.localize | |
13 | GMT = timezone("GMT") | |
15 | if typing.TYPE_CHECKING: | |
16 | from typing import Optional | |
14 | 17 | |
15 | 18 | |
16 | 19 | def datetime_to_epoch(d): |
17 | 20 | # type: (datetime) -> int |
18 | """Convert datetime to epoch. | |
19 | """ | |
21 | """Convert datetime to epoch.""" | |
20 | 22 | return timegm(d.utctimetuple()) |
21 | 23 | |
22 | 24 | |
25 | @typing.overload | |
26 | def epoch_to_datetime(t): # noqa: D103 | |
27 | # type: (None) -> None | |
28 | pass | |
29 | ||
30 | ||
31 | @typing.overload | |
32 | def epoch_to_datetime(t): # noqa: D103 | |
33 | # type: (int) -> datetime | |
34 | pass | |
35 | ||
36 | ||
23 | 37 | def epoch_to_datetime(t): |
24 | # type: (int) -> datetime | |
25 | """Convert epoch time to a UTC datetime. | |
26 | """ | |
27 | return utclocalize(utcfromtimestamp(t)) if t is not None else None | |
38 | # type: (Optional[int]) -> Optional[datetime] | |
39 | """Convert epoch time to a UTC datetime.""" | |
40 | if t is None: | |
41 | return None | |
42 | return datetime.fromtimestamp(t, tz=timezone.utc) |
0 | 0 | """Miscellaneous tools for operating on filesystems. |
1 | 1 | """ |
2 | 2 | |
3 | from __future__ import print_function | |
4 | from __future__ import unicode_literals | |
3 | from __future__ import print_function, unicode_literals | |
5 | 4 | |
6 | 5 | import typing |
7 | 6 | |
8 | 7 | from . import errors |
9 | from .errors import DirectoryNotEmpty | |
10 | from .errors import ResourceNotFound | |
11 | from .path import abspath | |
12 | from .path import dirname | |
13 | from .path import normpath | |
14 | from .path import recursepath | |
8 | from .errors import DirectoryNotEmpty, ResourceNotFound | |
9 | from .path import abspath, dirname, normpath, recursepath | |
15 | 10 | |
16 | 11 | if typing.TYPE_CHECKING: |
17 | 12 | from typing import IO, List, Optional, Text, Union |
13 | ||
18 | 14 | from .base import FS |
19 | 15 | |
20 | 16 |
3 | 3 | Color is supported on UNIX terminals. |
4 | 4 | """ |
5 | 5 | |
6 | from __future__ import print_function | |
7 | from __future__ import unicode_literals | |
6 | from __future__ import print_function, unicode_literals | |
8 | 7 | |
9 | 8 | import sys |
10 | 9 | import typing |
13 | 12 | |
14 | 13 | if typing.TYPE_CHECKING: |
15 | 14 | from typing import List, Optional, Text, TextIO, Tuple |
15 | ||
16 | 16 | from .base import FS |
17 | 17 | from .info import Info |
18 | 18 | |
78 | 78 | |
79 | 79 | def write(line): |
80 | 80 | # type: (Text) -> None |
81 | """Write a line to the output. | |
82 | """ | |
81 | """Write a line to the output.""" | |
83 | 82 | print(line, file=file) |
84 | 83 | |
85 | 84 | # FIXME(@althonos): define functions using `with_color` and |
87 | 86 | |
88 | 87 | def format_prefix(prefix): |
89 | 88 | # type: (Text) -> Text |
90 | """Format the prefix lines. | |
91 | """ | |
89 | """Format the prefix lines.""" | |
92 | 90 | if not with_color: |
93 | 91 | return prefix |
94 | 92 | return "\x1b[32m%s\x1b[0m" % prefix |
95 | 93 | |
96 | 94 | def format_dirname(dirname): |
97 | 95 | # type: (Text) -> Text |
98 | """Format a directory name. | |
99 | """ | |
96 | """Format a directory name.""" | |
100 | 97 | if not with_color: |
101 | 98 | return dirname |
102 | 99 | return "\x1b[1;34m%s\x1b[0m" % dirname |
103 | 100 | |
104 | 101 | def format_error(msg): |
105 | 102 | # type: (Text) -> Text |
106 | """Format an error. | |
107 | """ | |
103 | """Format an error.""" | |
108 | 104 | if not with_color: |
109 | 105 | return msg |
110 | 106 | return "\x1b[31m%s\x1b[0m" % msg |
111 | 107 | |
112 | 108 | def format_filename(fname): |
113 | 109 | # type: (Text) -> Text |
114 | """Format a filename. | |
115 | """ | |
110 | """Format a filename.""" | |
116 | 111 | if not with_color: |
117 | 112 | return fname |
118 | 113 | if fname.startswith("."): |
121 | 116 | |
122 | 117 | def sort_key_dirs_first(info): |
123 | 118 | # type: (Info) -> Tuple[bool, Text] |
124 | """Get the info sort function with directories first. | |
125 | """ | |
119 | """Get the info sort function with directories first.""" | |
126 | 120 | return (not info.is_dir, info.name.lower()) |
127 | 121 | |
128 | 122 | def sort_key(info): |
129 | 123 | # type: (Info) -> Text |
130 | """Get the default info sort function using resource name. | |
131 | """ | |
124 | """Get the default info sort function using resource name.""" | |
132 | 125 | return info.name.lower() |
133 | 126 | |
134 | 127 | counts = {"dirs": 0, "files": 0} |
135 | 128 | |
136 | 129 | def format_directory(path, levels): |
137 | 130 | # type: (Text, List[bool]) -> None |
138 | """Recursive directory function. | |
139 | """ | |
131 | """Recursive directory function.""" | |
140 | 132 | try: |
141 | 133 | directory = sorted( |
142 | 134 | fs.filterdir(path, exclude_dirs=exclude, files=filter), |
143 | key=sort_key_dirs_first if dirs_first else sort_key, | |
135 | key=sort_key_dirs_first if dirs_first else sort_key, # type: ignore | |
144 | 136 | ) |
145 | 137 | except Exception as error: |
146 | 138 | prefix = ( |
7 | 7 | from __future__ import unicode_literals |
8 | 8 | |
9 | 9 | import typing |
10 | from collections import defaultdict | |
11 | from collections import deque | |
12 | from collections import namedtuple | |
10 | ||
11 | from collections import defaultdict, deque, namedtuple | |
13 | 12 | |
14 | 13 | from ._repr import make_repr |
15 | 14 | from .errors import FSError |
16 | from .path import abspath | |
17 | from .path import combine | |
18 | from .path import normpath | |
15 | from .path import abspath, combine, normpath | |
19 | 16 | |
20 | 17 | if typing.TYPE_CHECKING: |
21 | 18 | from typing import ( |
24 | 21 | Collection, |
25 | 22 | Iterator, |
26 | 23 | List, |
24 | MutableMapping, | |
27 | 25 | Optional, |
28 | MutableMapping, | |
29 | 26 | Text, |
30 | 27 | Tuple, |
31 | 28 | Type, |
32 | 29 | ) |
30 | ||
33 | 31 | from .base import FS |
34 | 32 | from .info import Info |
35 | 33 | |
49 | 47 | |
50 | 48 | |
51 | 49 | class Walker(object): |
52 | """A walker object recursively lists directories in a filesystem. | |
53 | ||
54 | Arguments: | |
55 | ignore_errors (bool): If `True`, any errors reading a | |
56 | directory will be ignored, otherwise exceptions will | |
57 | be raised. | |
58 | on_error (callable, optional): If ``ignore_errors`` is `False`, | |
59 | then this callable will be invoked for a path and the exception | |
60 | object. It should return `True` to ignore the error, or `False` | |
61 | to re-raise it. | |
62 | search (str): If ``'breadth'`` then the directory will be | |
63 | walked *top down*. Set to ``'depth'`` to walk *bottom up*. | |
64 | filter (list, optional): If supplied, this parameter should be | |
65 | a list of filename patterns, e.g. ``['*.py']``. Files will | |
66 | only be returned if the final component matches one of the | |
67 | patterns. | |
68 | exclude (list, optional): If supplied, this parameter should be | |
69 | a list of filename patterns, e.g. ``['~*']``. Files matching | |
70 | any of these patterns will be removed from the walk. | |
71 | filter_dirs (list, optional): A list of patterns that will be used | |
72 | to match directories paths. The walk will only open directories | |
73 | that match at least one of these patterns. | |
74 | exclude_dirs (list, optional): A list of patterns that will be | |
75 | used to filter out directories from the walk. e.g. | |
76 | ``['*.svn', '*.git']``. | |
77 | max_depth (int, optional): Maximum directory depth to walk. | |
78 | ||
79 | """ | |
50 | """A walker object recursively lists directories in a filesystem.""" | |
80 | 51 | |
81 | 52 | def __init__( |
82 | 53 | self, |
90 | 61 | max_depth=None, # type: Optional[int] |
91 | 62 | ): |
92 | 63 | # type: (...) -> None |
64 | """Create a new `Walker` instance. | |
65 | ||
66 | Arguments: | |
67 | ignore_errors (bool): If `True`, any errors reading a | |
68 | directory will be ignored, otherwise exceptions will | |
69 | be raised. | |
70 | on_error (callable, optional): If ``ignore_errors`` is `False`, | |
71 | then this callable will be invoked for a path and the | |
72 | exception object. It should return `True` to ignore the error, | |
73 | or `False` to re-raise it. | |
74 | search (str): If ``"breadth"`` then the directory will be | |
75 | walked *top down*. Set to ``"depth"`` to walk *bottom up*. | |
76 | filter (list, optional): If supplied, this parameter should be | |
77 | a list of filename patterns, e.g. ``["*.py"]``. Files will | |
78 | only be returned if the final component matches one of the | |
79 | patterns. | |
80 | exclude (list, optional): If supplied, this parameter should be | |
81 | a list of filename patterns, e.g. ``["~*"]``. Files matching | |
82 | any of these patterns will be removed from the walk. | |
83 | filter_dirs (list, optional): A list of patterns that will be used | |
84 | to match directory paths. The walk will only open directories | 
85 | that match at least one of these patterns. | |
86 | exclude_dirs (list, optional): A list of patterns that will be | |
87 | used to filter out directories from the walk. e.g. | |
88 | ``['*.svn', '*.git']``. | |
89 | max_depth (int, optional): Maximum directory depth to walk. | |
90 | ||
91 | """ | |
93 | 92 | if search not in ("breadth", "depth"): |
94 | 93 | raise ValueError("search must be 'breadth' or 'depth'") |
95 | 94 | self.ignore_errors = ignore_errors |
113 | 112 | @classmethod |
114 | 113 | def _ignore_errors(cls, path, error): |
115 | 114 | # type: (Text, Exception) -> bool |
116 | """Default on_error callback.""" | |
115 | """Ignore dir scan errors when called.""" | |
117 | 116 | return True |
118 | 117 | |
119 | 118 | @classmethod |
120 | 119 | def _raise_errors(cls, path, error): |
121 | 120 | # type: (Text, Exception) -> bool |
122 | """Callback to re-raise dir scan errors.""" | |
121 | """Re-raise dir scan errors when called.""" | |
123 | 122 | return False |
124 | 123 | |
125 | 124 | @classmethod |
126 | 125 | def _calculate_depth(cls, path): |
127 | 126 | # type: (Text) -> int |
128 | """Calculate the 'depth' of a directory path (number of | |
129 | components). | |
130 | """ | |
127 | """Calculate the 'depth' of a directory path (i.e. count components).""" | |
131 | 128 | _path = path.strip("/") |
132 | 129 | return _path.count("/") + 1 if _path else 0 |
133 | 130 | |
146 | 143 | Returns: |
147 | 144 | ~fs.walk.BoundWalker: a bound walker. |
148 | 145 | |
149 | Example: | |
150 | >>> from fs import open_fs | |
151 | >>> from fs.walk import Walker | |
152 | >>> home_fs = open_fs('~/') | |
153 | >>> walker = Walker.bind(home_fs) | |
154 | >>> for path in walker.files(filter=['*.py']): | |
155 | ... print(path) | |
156 | ||
157 | Unless you have written a customized walker class, you will be | |
158 | unlikely to need to call this explicitly, as filesystem objects | |
159 | already have a ``walk`` attribute which is a bound walker | |
160 | object. | |
161 | ||
162 | Example: | |
163 | >>> from fs import open_fs | |
164 | >>> home_fs = open_fs('~/') | |
165 | >>> for path in home_fs.walk.files(filter=['*.py']): | |
166 | ... print(path) | |
146 | Examples: | |
147 | Use this method to explicitly bind a filesystem instance:: | |
148 | ||
149 | >>> walker = Walker.bind(my_fs) | |
150 | >>> for path in walker.files(filter=['*.py']): | |
151 | ... print(path) | |
152 | /foo.py | |
153 | /bar.py | |
154 | ||
155 | Unless you have written a customized walker class, you will | |
156 | be unlikely to need to call this explicitly, as filesystem | |
157 | objects already have a ``walk`` attribute which is a bound | |
158 | walker object:: | |
159 | ||
160 | >>> for path in my_fs.walk.files(filter=['*.py']): | |
161 | ... print(path) | |
162 | /foo.py | |
163 | /bar.py | |
167 | 164 | |
168 | 165 | """ |
169 | 166 | return BoundWalker(fs) |
197 | 194 | |
198 | 195 | def _check_open_dir(self, fs, path, info): |
199 | 196 | # type: (FS, Text, Info) -> bool |
200 | """Check if a directory should be considered in the walk. | |
201 | """ | |
197 | """Check if a directory should be considered in the walk.""" | |
202 | 198 | if self.exclude_dirs is not None and fs.match(self.exclude_dirs, info.name): |
203 | 199 | return False |
204 | 200 | if self.filter_dirs is not None and not fs.match(self.filter_dirs, info.name): |
262 | 258 | bool: `True` if the file should be included. |
263 | 259 | |
264 | 260 | """ |
265 | ||
266 | 261 | if self.exclude is not None and fs.match(self.exclude, info.name): |
267 | 262 | return False |
268 | 263 | return fs.match(self.filter, info.name) |
318 | 313 | `~fs.info.Info` objects for directories and files in ``<path>``. |
319 | 314 | |
320 | 315 | Example: |
321 | >>> home_fs = open_fs('~/') | |
322 | 316 | >>> walker = Walker(filter=['*.py']) |
323 | >>> namespaces = ['details'] | |
324 | >>> for path, dirs, files in walker.walk(home_fs, namespaces) | |
317 | >>> for path, dirs, files in walker.walk(my_fs, namespaces=["details"]): | |
325 | 318 | ... print("[{}]".format(path)) |
326 | 319 | ... print("{} directories".format(len(dirs))) |
327 | 320 | ... total = sum(info.size for info in files) |
328 | ... print("{} bytes {}".format(total)) | |
321 | ... print("{} bytes".format(total)) | |
322 | [/] | |
323 | 2 directories | |
324 | 55 bytes | |
325 | ... | |
329 | 326 | |
330 | 327 | """ |
331 | 328 | _path = abspath(normpath(path)) |
410 | 407 | namespaces=None, # type: Optional[Collection[Text]] |
411 | 408 | ): |
412 | 409 | # type: (...) -> Iterator[Tuple[Text, Optional[Info]]] |
413 | """Walk files using a *breadth first* search. | |
414 | """ | |
410 | """Walk files using a *breadth first* search.""" | |
415 | 411 | queue = deque([path]) |
416 | 412 | push = queue.appendleft |
417 | 413 | pop = queue.pop |
446 | 442 | namespaces=None, # type: Optional[Collection[Text]] |
447 | 443 | ): |
448 | 444 | # type: (...) -> Iterator[Tuple[Text, Optional[Info]]] |
449 | """Walk files using a *depth first* search. | |
450 | """ | |
445 | """Walk files using a *depth first* search.""" | |
451 | 446 | # No recursion! |
452 | 447 | |
453 | 448 | _combine = combine |
494 | 489 | class BoundWalker(typing.Generic[_F]): |
495 | 490 | """A class that binds a `Walker` instance to a `FS` instance. |
496 | 491 | |
497 | Arguments: | |
498 | fs (FS): A filesystem instance. | |
499 | walker_class (type): A `~fs.walk.WalkerBase` | |
500 | sub-class. The default uses `~fs.walk.Walker`. | |
501 | ||
502 | 492 | You will typically not need to create instances of this class |
503 | 493 | explicitly. Filesystems have a `~FS.walk` property which returns a |
504 | 494 | `BoundWalker` object. |
505 | 495 | |
506 | 496 | Example: |
507 | >>> import fs | |
508 | >>> home_fs = fs.open_fs('~/') | |
509 | >>> home_fs.walk | |
510 | BoundWalker(OSFS('/Users/will', encoding='utf-8')) | |
511 | ||
512 | A `BoundWalker` is callable. Calling it is an alias for | |
513 | `~fs.walk.BoundWalker.walk`. | |
497 | >>> tmp_fs = fs.tempfs.TempFS() | |
498 | >>> tmp_fs.walk | |
499 | BoundWalker(TempFS()) | |
500 | ||
501 | A `BoundWalker` is callable. Calling it is an alias for the | |
502 | `~fs.walk.BoundWalker.walk` method. | |
514 | 503 | |
515 | 504 | """ |
516 | 505 | |
517 | 506 | def __init__(self, fs, walker_class=Walker): |
518 | 507 | # type: (_F, Type[Walker]) -> None |
508 | """Create a new walker bound to the given filesystem. | |
509 | ||
510 | Arguments: | |
511 | fs (FS): A filesystem instance. | |
512 | walker_class (type): A `~fs.walk.WalkerBase` | |
513 | sub-class. The default uses `~fs.walk.Walker`. | |
514 | ||
515 | """ | |
519 | 516 | self.fs = fs |
520 | 517 | self.walker_class = walker_class |
521 | 518 | |
525 | 522 | |
526 | 523 | def _make_walker(self, *args, **kwargs): |
527 | 524 | # type: (*Any, **Any) -> Walker |
528 | """Create a walker instance. | |
529 | """ | |
525 | """Create a walker instance.""" | |
530 | 526 | walker = self.walker_class(*args, **kwargs) |
531 | 527 | return walker |
532 | 528 | |
577 | 573 | `~fs.info.Info` objects for directories and files in ``<path>``. |
578 | 574 | |
579 | 575 | Example: |
580 | >>> home_fs = open_fs('~/') | |
581 | 576 | >>> walker = Walker(filter=['*.py']) |
582 | >>> for path, dirs, files in walker.walk(home_fs, namespaces=['details']): | |
577 | >>> for path, dirs, files in walker.walk(my_fs, namespaces=['details']): | |
583 | 578 | ... print("[{}]".format(path)) |
584 | 579 | ... print("{} directories".format(len(dirs))) |
585 | 580 | ... total = sum(info.size for info in files) |
586 | ... print("{} bytes {}".format(total)) | |
581 | ... print("{} bytes".format(total)) | |
582 | [/] | |
583 | 2 directories | |
584 | 55 bytes | |
585 | ... | |
587 | 586 | |
588 | 587 | This method invokes `Walker.walk` with bound `FS` object. |
589 | 588 |
1 | 1 | """ |
2 | 2 | # Adapted from https://hg.python.org/cpython/file/2.7/Lib/fnmatch.py |
3 | 3 | |
4 | from __future__ import unicode_literals, print_function | |
4 | from __future__ import print_function, unicode_literals | |
5 | ||
6 | import typing | |
5 | 7 | |
6 | 8 | import re |
7 | import typing | |
8 | 9 | from functools import partial |
9 | 10 | |
10 | 11 | from .lrucache import LRUCache |
11 | 12 | |
12 | 13 | if typing.TYPE_CHECKING: |
13 | from typing import Callable, Iterable, Text, Tuple, Pattern | |
14 | from typing import Callable, Iterable, Pattern, Text, Tuple | |
14 | 15 | |
15 | 16 | |
16 | 17 | _PATTERN_CACHE = LRUCache(1000) # type: LRUCache[Tuple[Text, bool], Pattern] |
31 | 32 | try: |
32 | 33 | re_pat = _PATTERN_CACHE[(pattern, True)] |
33 | 34 | except KeyError: |
34 | res = "(?ms)" + _translate(pattern) + r'\Z' | |
35 | res = "(?ms)" + _translate(pattern) + r"\Z" | |
35 | 36 | _PATTERN_CACHE[(pattern, True)] = re_pat = re.compile(res) |
36 | 37 | return re_pat.match(name) is not None |
37 | 38 | |
51 | 52 | try: |
52 | 53 | re_pat = _PATTERN_CACHE[(pattern, False)] |
53 | 54 | except KeyError: |
54 | res = "(?ms)" + _translate(pattern, case_sensitive=False) + r'\Z' | |
55 | res = "(?ms)" + _translate(pattern, case_sensitive=False) + r"\Z" | |
55 | 56 | _PATTERN_CACHE[(pattern, False)] = re_pat = re.compile(res, re.IGNORECASE) |
56 | 57 | return re_pat.match(name) is not None |
57 | 58 |
1 | 1 | |
2 | 2 | Here's an example that opens a filesystem then makes it *read only*:: |
3 | 3 | |
4 | >>> from fs import open_fs | |
5 | >>> from fs.wrap import read_only | |
6 | >>> projects_fs = open_fs('~/projects') | |
7 | >>> read_only_projects_fs = read_only(projects_fs) | |
8 | >>> read_only_projects_fs.remove('__init__.py') | |
4 | >>> home_fs = fs.open_fs('~') | |
5 | >>> read_only_home_fs = fs.wrap.read_only(home_fs) | |
6 | >>> read_only_home_fs.removedir('Desktop') | |
9 | 7 | Traceback (most recent call last): |
10 | 8 | ... |
11 | fs.errors.ResourceReadOnly: resource '__init__.py' is read only | |
9 | fs.errors.ResourceReadOnly: resource 'Desktop' is read only | |
12 | 10 | |
13 | 11 | """ |
14 | 12 | |
15 | from __future__ import print_function | |
16 | from __future__ import unicode_literals | |
13 | from __future__ import print_function, unicode_literals | |
17 | 14 | |
18 | 15 | import typing |
19 | 16 | |
20 | from .wrapfs import WrapFS | |
21 | from .path import abspath, normpath, split | |
22 | from .errors import ResourceReadOnly, ResourceNotFound | |
17 | from .errors import ResourceNotFound, ResourceReadOnly | |
23 | 18 | from .info import Info |
24 | 19 | from .mode import check_writable |
20 | from .path import abspath, normpath, split | |
21 | from .wrapfs import WrapFS | |
25 | 22 | |
26 | 23 | if typing.TYPE_CHECKING: |
27 | from datetime import datetime | |
28 | 24 | from typing import ( |
25 | IO, | |
29 | 26 | Any, |
30 | 27 | BinaryIO, |
31 | 28 | Collection, |
32 | 29 | Dict, |
33 | 30 | Iterator, |
34 | IO, | |
31 | Mapping, | |
35 | 32 | Optional, |
36 | 33 | Text, |
37 | 34 | Tuple, |
38 | 35 | ) |
36 | ||
37 | from datetime import datetime | |
38 | ||
39 | 39 | from .base import FS # noqa: F401 |
40 | 40 | from .info import RawInfo |
41 | from .permissions import Permissions | |
41 | 42 | from .subfs import SubFS |
42 | from .permissions import Permissions | |
43 | 43 | |
44 | 44 | |
45 | 45 | _W = typing.TypeVar("_W", bound="WrapFS") |
91 | 91 | |
92 | 92 | """ |
93 | 93 | |
94 | # FIXME (@althonos): The caching data structure can very likely be | |
95 | # improved. With the current implementation, if `scandir` result was | |
96 | # cached for `namespaces=["details", "access"]`, calling `scandir` | |
97 | # again only with `namespaces=["details"]` will miss the cache, even though | 
98 | # we are already storing the totality of the required metadata. | |
99 | # | |
100 | # A possible solution would be to replace the cache with a | 
101 | # Dict[Text, Dict[Text, Dict[Text, Info]]] | |
102 | # ^ ^ ^ ^-- the actual info object | |
103 | # | | \-- the path of the directory entry | |
104 | # | \-- the namespace of the info | |
105 | # \-- the cached directory entry | |
106 | # | |
107 | # Furthermore, `listdir` and `filterdir` calls should be cached as well, | |
108 | # since they can be written as wrappers of `scandir`. | |
109 | ||
94 | 110 | wrap_name = "cached-dir" |
95 | 111 | |
96 | def __init__(self, wrap_fs): | |
112 | def __init__(self, wrap_fs): # noqa: D107 | |
97 | 113 | # type: (_F) -> None |
98 | 114 | super(WrapCachedDir, self).__init__(wrap_fs) |
99 | 115 | self._cache = {} # type: Dict[Tuple[Text, frozenset], Dict[Text, Info]] |
134 | 150 | |
135 | 151 | def isdir(self, path): |
136 | 152 | # type: (Text) -> bool |
137 | # FIXME(@althonos): this raises an error on non-existing file ! | |
138 | return self.getinfo(path).is_dir | |
153 | try: | |
154 | return self.getinfo(path).is_dir | |
155 | except ResourceNotFound: | |
156 | return False | |
139 | 157 | |
140 | 158 | def isfile(self, path): |
141 | 159 | # type: (Text) -> bool |
142 | # FIXME(@althonos): this raises an error on non-existing file ! | |
143 | return not self.getinfo(path).is_dir | |
160 | try: | |
161 | return not self.getinfo(path).is_dir | |
162 | except ResourceNotFound: | |
163 | return False | |
144 | 164 | |
145 | 165 | |
146 | 166 | class WrapReadOnly(WrapFS[_F], typing.Generic[_F]): |
180 | 200 | self.check() |
181 | 201 | raise ResourceReadOnly(path) |
182 | 202 | |
183 | def move(self, src_path, dst_path, overwrite=False): | |
184 | # type: (Text, Text, bool) -> None | |
203 | def move(self, src_path, dst_path, overwrite=False, preserve_time=False): | |
204 | # type: (Text, Text, bool, bool) -> None | |
185 | 205 | self.check() |
186 | 206 | raise ResourceReadOnly(dst_path) |
187 | 207 | |
198 | 218 | raise ResourceReadOnly(path) |
199 | 219 | |
200 | 220 | def removedir(self, path): |
221 | # type: (Text) -> None | |
222 | self.check() | |
223 | raise ResourceReadOnly(path) | |
224 | ||
225 | def removetree(self, path): | |
201 | 226 | # type: (Text) -> None |
202 | 227 | self.check() |
203 | 228 | raise ResourceReadOnly(path) |
224 | 249 | self.check() |
225 | 250 | raise ResourceReadOnly(path) |
226 | 251 | |
227 | def copy(self, src_path, dst_path, overwrite=False): | |
228 | # type: (Text, Text, bool) -> None | |
252 | def copy(self, src_path, dst_path, overwrite=False, preserve_time=False): | |
253 | # type: (Text, Text, bool, bool) -> None | |
229 | 254 | self.check() |
230 | 255 | raise ResourceReadOnly(dst_path) |
231 | 256 | |
296 | 321 | # type: (Text) -> None |
297 | 322 | self.check() |
298 | 323 | raise ResourceReadOnly(path) |
324 | ||
325 | def getmeta(self, namespace="standard"): | |
326 | # type: (Text) -> Mapping[Text, object] | |
327 | self.check() | |
328 | meta = dict(self.delegate_fs().getmeta(namespace=namespace)) | |
329 | meta.update(read_only=True, supports_rename=False) | |
330 | return meta |
8 | 8 | |
9 | 9 | from . import errors |
10 | 10 | from .base import FS |
11 | from .copy import copy_file, copy_dir | |
11 | from .copy import copy_dir, copy_file | |
12 | from .error_tools import unwrap_errors | |
12 | 13 | from .info import Info |
13 | from .move import move_file, move_dir | |
14 | from .path import abspath, normpath | |
15 | from .error_tools import unwrap_errors | |
14 | from .path import abspath, join, normpath | |
16 | 15 | |
17 | 16 | if typing.TYPE_CHECKING: |
18 | from datetime import datetime | |
19 | from threading import RLock | |
20 | 17 | from typing import ( |
18 | IO, | |
21 | 19 | Any, |
22 | 20 | AnyStr, |
23 | 21 | BinaryIO, |
24 | 22 | Callable, |
25 | 23 | Collection, |
24 | Iterable, | |
26 | 25 | Iterator, |
27 | Iterable, | |
28 | IO, | |
29 | 26 | List, |
30 | 27 | Mapping, |
31 | 28 | Optional, |
33 | 30 | Tuple, |
34 | 31 | Union, |
35 | 32 | ) |
33 | ||
34 | from datetime import datetime | |
35 | from threading import RLock | |
36 | ||
36 | 37 | from .enums import ResourceType |
37 | 38 | from .info import RawInfo |
38 | 39 | from .permissions import Permissions |
59 | 60 | |
60 | 61 | wrap_name = None # type: Optional[Text] |
61 | 62 | |
62 | def __init__(self, wrap_fs): | |
63 | def __init__(self, wrap_fs): # noqa: D107 | |
63 | 64 | # type: (_F) -> None |
64 | 65 | self._wrap_fs = wrap_fs |
65 | 66 | super(WrapFS, self).__init__() |
166 | 167 | with unwrap_errors(path): |
167 | 168 | return _fs.makedir(_path, permissions=permissions, recreate=recreate) |
168 | 169 | |
169 | def move(self, src_path, dst_path, overwrite=False): | |
170 | # type: (Text, Text, bool) -> None | |
171 | # A custom move permits a potentially optimized code path | |
170 | def move(self, src_path, dst_path, overwrite=False, preserve_time=False): | |
171 | # type: (Text, Text, bool, bool) -> None | |
172 | _fs, _src_path = self.delegate_path(src_path) | |
173 | _, _dst_path = self.delegate_path(dst_path) | |
174 | with unwrap_errors({_src_path: src_path, _dst_path: dst_path}): | |
175 | _fs.move( | |
176 | _src_path, _dst_path, overwrite=overwrite, preserve_time=preserve_time | |
177 | ) | |
178 | ||
179 | def movedir(self, src_path, dst_path, create=False, preserve_time=False): | |
180 | # type: (Text, Text, bool, bool) -> None | |
181 | _fs, _src_path = self.delegate_path(src_path) | |
182 | _, _dst_path = self.delegate_path(dst_path) | |
183 | with unwrap_errors({_src_path: src_path, _dst_path: dst_path}): | |
184 | _fs.movedir( | |
185 | _src_path, _dst_path, create=create, preserve_time=preserve_time | |
186 | ) | |
187 | ||
188 | def openbin(self, path, mode="r", buffering=-1, **options): | |
189 | # type: (Text, Text, int, **Any) -> BinaryIO | |
190 | self.check() | |
191 | _fs, _path = self.delegate_path(path) | |
192 | with unwrap_errors(path): | |
193 | bin_file = _fs.openbin(_path, mode=mode, buffering=-1, **options) | |
194 | return bin_file | |
195 | ||
196 | def remove(self, path): | |
197 | # type: (Text) -> None | |
198 | self.check() | |
199 | _fs, _path = self.delegate_path(path) | |
200 | with unwrap_errors(path): | |
201 | _fs.remove(_path) | |
202 | ||
203 | def removedir(self, path): | |
204 | # type: (Text) -> None | |
205 | self.check() | |
206 | _path = abspath(normpath(path)) | |
207 | if _path == "/": | |
208 | raise errors.RemoveRootError() | |
209 | _fs, _path = self.delegate_path(path) | |
210 | with unwrap_errors(path): | |
211 | _fs.removedir(_path) | |
212 | ||
213 | def removetree(self, dir_path): | |
214 | # type: (Text) -> None | |
215 | self.check() | |
216 | _path = abspath(normpath(dir_path)) | |
217 | _delegate_fs, _delegate_path = self.delegate_path(dir_path) | |
218 | with unwrap_errors(dir_path): | |
219 | if _path == "/": | |
220 | # with root path, we must remove the contents but | |
221 | # not the directory itself, so we can't just directly | |
222 | # delegate | |
223 | for info in _delegate_fs.scandir(_delegate_path): | |
224 | info_path = join(_delegate_path, info.name) | |
225 | if info.is_dir: | |
226 | _delegate_fs.removetree(info_path) | |
227 | else: | |
228 | _delegate_fs.remove(info_path) | |
229 | else: | |
230 | _delegate_fs.removetree(_delegate_path) | |
231 | ||
232 | def scandir( | |
233 | self, | |
234 | path, # type: Text | |
235 | namespaces=None, # type: Optional[Collection[Text]] | |
236 | page=None, # type: Optional[Tuple[int, int]] | |
237 | ): | |
238 | # type: (...) -> Iterator[Info] | |
239 | self.check() | |
240 | _fs, _path = self.delegate_path(path) | |
241 | with unwrap_errors(path): | |
242 | for info in _fs.scandir(_path, namespaces=namespaces, page=page): | |
243 | yield info | |
244 | ||
245 | def setinfo(self, path, info): | |
246 | # type: (Text, RawInfo) -> None | |
247 | self.check() | |
248 | _fs, _path = self.delegate_path(path) | |
249 | return _fs.setinfo(_path, info) | |
250 | ||
251 | def settimes(self, path, accessed=None, modified=None): | |
252 | # type: (Text, Optional[datetime], Optional[datetime]) -> None | |
253 | self.check() | |
254 | _fs, _path = self.delegate_path(path) | |
255 | with unwrap_errors(path): | |
256 | _fs.settimes(_path, accessed=accessed, modified=modified) | |
257 | ||
258 | def touch(self, path): | |
259 | # type: (Text) -> None | |
260 | self.check() | |
261 | _fs, _path = self.delegate_path(path) | |
262 | with unwrap_errors(path): | |
263 | _fs.touch(_path) | |
264 | ||
265 | def copy(self, src_path, dst_path, overwrite=False, preserve_time=False): | |
266 | # type: (Text, Text, bool, bool) -> None | |
172 | 267 | src_fs, _src_path = self.delegate_path(src_path) |
173 | 268 | dst_fs, _dst_path = self.delegate_path(dst_path) |
174 | 269 | with unwrap_errors({_src_path: src_path, _dst_path: dst_path}): |
175 | 270 | if not overwrite and dst_fs.exists(_dst_path): |
176 | 271 | raise errors.DestinationExists(_dst_path) |
177 | move_file(src_fs, _src_path, dst_fs, _dst_path) | |
178 | ||
179 | def movedir(self, src_path, dst_path, create=False): | |
180 | # type: (Text, Text, bool) -> None | |
272 | copy_file(src_fs, _src_path, dst_fs, _dst_path, preserve_time=preserve_time) | |
273 | ||
274 | def copydir(self, src_path, dst_path, create=False, preserve_time=False): | |
275 | # type: (Text, Text, bool, bool) -> None | |
181 | 276 | src_fs, _src_path = self.delegate_path(src_path) |
182 | 277 | dst_fs, _dst_path = self.delegate_path(dst_path) |
183 | 278 | with unwrap_errors({_src_path: src_path, _dst_path: dst_path}): |
185 | 280 | raise errors.ResourceNotFound(dst_path) |
186 | 281 | if not src_fs.getinfo(_src_path).is_dir: |
187 | 282 | raise errors.DirectoryExpected(src_path) |
188 | move_dir(src_fs, _src_path, dst_fs, _dst_path) | |
189 | ||
190 | def openbin(self, path, mode="r", buffering=-1, **options): | |
191 | # type: (Text, Text, int, **Any) -> BinaryIO | |
192 | self.check() | |
193 | _fs, _path = self.delegate_path(path) | |
194 | with unwrap_errors(path): | |
195 | bin_file = _fs.openbin(_path, mode=mode, buffering=-1, **options) | |
196 | return bin_file | |
197 | ||
198 | def remove(self, path): | |
199 | # type: (Text) -> None | |
200 | self.check() | |
201 | _fs, _path = self.delegate_path(path) | |
202 | with unwrap_errors(path): | |
203 | _fs.remove(_path) | |
204 | ||
205 | def removedir(self, path): | |
206 | # type: (Text) -> None | |
207 | self.check() | |
208 | _path = abspath(normpath(path)) | |
209 | if _path == "/": | |
210 | raise errors.RemoveRootError() | |
211 | _fs, _path = self.delegate_path(path) | |
212 | with unwrap_errors(path): | |
213 | _fs.removedir(_path) | |
214 | ||
215 | def removetree(self, dir_path): | |
216 | # type: (Text) -> None | |
217 | self.check() | |
218 | _path = abspath(normpath(dir_path)) | |
219 | if _path == "/": | |
220 | raise errors.RemoveRootError() | |
221 | _fs, _path = self.delegate_path(dir_path) | |
222 | with unwrap_errors(dir_path): | |
223 | _fs.removetree(_path) | |
224 | ||
225 | def scandir( | |
226 | self, | |
227 | path, # type: Text | |
228 | namespaces=None, # type: Optional[Collection[Text]] | |
229 | page=None, # type: Optional[Tuple[int, int]] | |
230 | ): | |
231 | # type: (...) -> Iterator[Info] | |
232 | self.check() | |
233 | _fs, _path = self.delegate_path(path) | |
234 | with unwrap_errors(path): | |
235 | for info in _fs.scandir(_path, namespaces=namespaces, page=page): | |
236 | yield info | |
237 | ||
238 | def setinfo(self, path, info): | |
239 | # type: (Text, RawInfo) -> None | |
240 | self.check() | |
241 | _fs, _path = self.delegate_path(path) | |
242 | return _fs.setinfo(_path, info) | |
243 | ||
244 | def settimes(self, path, accessed=None, modified=None): | |
245 | # type: (Text, Optional[datetime], Optional[datetime]) -> None | |
246 | self.check() | |
247 | _fs, _path = self.delegate_path(path) | |
248 | with unwrap_errors(path): | |
249 | _fs.settimes(_path, accessed=accessed, modified=modified) | |
250 | ||
251 | def touch(self, path): | |
252 | # type: (Text) -> None | |
253 | self.check() | |
254 | _fs, _path = self.delegate_path(path) | |
255 | with unwrap_errors(path): | |
256 | _fs.touch(_path) | |
257 | ||
258 | def copy(self, src_path, dst_path, overwrite=False): | |
259 | # type: (Text, Text, bool) -> None | |
260 | src_fs, _src_path = self.delegate_path(src_path) | |
261 | dst_fs, _dst_path = self.delegate_path(dst_path) | |
262 | with unwrap_errors({_src_path: src_path, _dst_path: dst_path}): | |
263 | if not overwrite and dst_fs.exists(_dst_path): | |
264 | raise errors.DestinationExists(_dst_path) | |
265 | copy_file(src_fs, _src_path, dst_fs, _dst_path) | |
266 | ||
267 | def copydir(self, src_path, dst_path, create=False): | |
268 | # type: (Text, Text, bool) -> None | |
269 | src_fs, _src_path = self.delegate_path(src_path) | |
270 | dst_fs, _dst_path = self.delegate_path(dst_path) | |
271 | with unwrap_errors({_src_path: src_path, _dst_path: dst_path}): | |
272 | if not create and not dst_fs.exists(_dst_path): | |
273 | raise errors.ResourceNotFound(dst_path) | |
274 | if not src_fs.getinfo(_src_path).is_dir: | |
275 | raise errors.DirectoryExpected(src_path) | |
276 | copy_dir(src_fs, _src_path, dst_fs, _dst_path) | |
283 | copy_dir(src_fs, _src_path, dst_fs, _dst_path, preserve_time=preserve_time) | |
277 | 284 | |
278 | 285 | def create(self, path, wipe=False): |
279 | 286 | # type: (Text, bool) -> bool |
0 | 0 | """Manage the filesystem in a Zip archive. |
1 | 1 | """ |
2 | 2 | |
3 | from __future__ import print_function | |
4 | from __future__ import unicode_literals | |
5 | ||
3 | from __future__ import print_function, unicode_literals | |
4 | ||
5 | import sys | |
6 | 6 | import typing |
7 | ||
8 | import six | |
7 | 9 | import zipfile |
8 | 10 | from datetime import datetime |
9 | 11 | |
10 | import six | |
11 | ||
12 | 12 | from . import errors |
13 | from ._url_tools import url_quote | |
13 | 14 | from .base import FS |
14 | 15 | from .compress import write_zip |
15 | 16 | from .enums import ResourceType, Seek |
16 | 17 | from .info import Info |
17 | 18 | from .iotools import RawWrapper |
18 | from .permissions import Permissions | |
19 | 19 | from .memoryfs import MemoryFS |
20 | 20 | from .opener import open_fs |
21 | 21 | from .path import dirname, forcedir, normpath, relpath |
22 | from .permissions import Permissions | |
22 | 23 | from .time import datetime_to_epoch |
23 | 24 | from .wrapfs import WrapFS |
24 | from ._url_tools import url_quote | |
25 | 25 | |
26 | 26 | if typing.TYPE_CHECKING: |
27 | 27 | from typing import ( |
36 | 36 | Tuple, |
37 | 37 | Union, |
38 | 38 | ) |
39 | ||
39 | 40 | from .info import RawInfo |
40 | 41 | from .subfs import SubFS |
41 | 42 | |
43 | 44 | |
44 | 45 | |
45 | 46 | class _ZipExtFile(RawWrapper): |
46 | def __init__(self, fs, name): | |
47 | def __init__(self, fs, name): # noqa: D107 | |
47 | 48 | # type: (ReadZipFS, Text) -> None |
48 | 49 | self._zip = _zip = fs._zip |
49 | 50 | self._end = _zip.getinfo(name).file_size |
50 | 51 | self._pos = 0 |
51 | 52 | super(_ZipExtFile, self).__init__(_zip.open(name), "r", name) |
52 | 53 | |
53 | def read(self, size=-1): | |
54 | # type: (int) -> bytes | |
55 | buf = self._f.read(-1 if size is None else size) | |
56 | self._pos += len(buf) | |
57 | return buf | |
58 | ||
59 | def read1(self, size=-1): | |
60 | # type: (int) -> bytes | |
61 | buf = self._f.read1(-1 if size is None else size) # type: ignore | |
62 | self._pos += len(buf) | |
63 | return buf | |
64 | ||
65 | def seek(self, offset, whence=Seek.set): | |
66 | # type: (int, SupportsInt) -> int | |
67 | """Change stream position. | |
68 | ||
69 | Change the stream position to the given byte offset. The | |
70 | offset is interpreted relative to the position indicated by | |
71 | ``whence``. | |
72 | ||
73 | Arguments: | |
74 | offset (int): the offset to the new position, in bytes. | |
75 | whence (int): the position reference. Possible values are: | |
76 | * `Seek.set`: start of stream (the default). | |
77 | * `Seek.current`: current position; offset may be negative. | |
78 | * `Seek.end`: end of stream; offset must be negative. | |
79 | ||
80 | Returns: | |
81 | int: the new absolute position. | |
82 | ||
83 | Raises: | |
84 | ValueError: when ``whence`` is not known, or ``offset`` | |
85 | is invalid. | |
86 | ||
87 | Note: | |
88 | Zip compression does not support seeking, so the seeking | |
89 | is emulated. Seeking somewhere else than the current position | |
90 | will need to either: | |
91 | * reopen the file and restart decompression | |
92 | * read and discard data to advance in the file | |
93 | ||
94 | """ | |
95 | _whence = int(whence) | |
96 | if _whence == Seek.current: | |
97 | offset += self._pos | |
98 | if _whence == Seek.current or _whence == Seek.set: | |
99 | if offset < 0: | |
100 | raise ValueError("Negative seek position {}".format(offset)) | |
101 | elif _whence == Seek.end: | |
102 | if offset > 0: | |
103 | raise ValueError("Positive seek position {}".format(offset)) | |
104 | offset += self._end | |
105 | else: | |
106 | raise ValueError( | |
107 | "Invalid whence ({}, should be {}, {} or {})".format( | |
108 | _whence, Seek.set, Seek.current, Seek.end | |
54 | # NOTE(@althonos): Starting from Python 3.7, files inside a Zip archive are | |
55 | # seekable provided they were opened from a seekable file | |
56 | # handle. Before that, we can emulate a seek using the | |
57 | # read method, although it adds a ton of overhead and is | |
58 | # way less efficient than extracting once to a BytesIO. | |
59 | if sys.version_info < (3, 7): | |
60 | ||
61 | def read(self, size=-1): | |
62 | # type: (int) -> bytes | |
63 | buf = self._f.read(-1 if size is None else size) | |
64 | self._pos += len(buf) | |
65 | return buf | |
66 | ||
67 | def read1(self, size=-1): | |
68 | # type: (int) -> bytes | |
69 | buf = self._f.read1(-1 if size is None else size) # type: ignore | |
70 | self._pos += len(buf) | |
71 | return buf | |
72 | ||
73 | def tell(self): | |
74 | # type: () -> int | |
75 | return self._pos | |
76 | ||
77 | def seekable(self): | |
78 | return True | |
79 | ||
def seek(self, offset, whence=Seek.set):
    # type: (int, SupportsInt) -> int
    """Change stream position.

    Change the stream position to the given byte offset. The
    offset is interpreted relative to the position indicated by
    ``whence``.

    Arguments:
        offset (int): the offset to the new position, in bytes.
        whence (int): the position reference. Possible values are:
            * `Seek.set`: start of stream (the default).
            * `Seek.current`: current position; offset may be negative.
            * `Seek.end`: end of stream; offset must be negative.

    Returns:
        int: the new absolute position.

    Raises:
        ValueError: when ``whence`` is not known, or ``offset``
            is invalid.

    Note:
        Zip compression does not support seeking, so the seeking
        is emulated. Seeking somewhere else than the current position
        will need to either:
            * reopen the file and restart decompression
            * read and discard data to advance in the file

    """
    _whence = int(whence)
    if _whence == Seek.current:
        offset += self._pos
    if _whence == Seek.current or _whence == Seek.set:
        if offset < 0:
            raise ValueError("Negative seek position {}".format(offset))
    elif _whence == Seek.end:
        if offset > 0:
            raise ValueError("Positive seek position {}".format(offset))
        offset += self._end
    else:
        raise ValueError(
            "Invalid whence ({}, should be {}, {} or {})".format(
                _whence, Seek.set, Seek.current, Seek.end
            )
        )

    if offset < self._pos:
        # Compressed streams cannot rewind: reopen the archive member
        # and decompress from the start again.
        self._f = self._zip.open(self.name)  # type: ignore
        self._pos = 0
    # Advance by reading and discarding until the target offset.
    self.read(offset - self._pos)
    return self._pos
132 | ||
133 | else: | |
134 | ||
def seek(self, offset, whence=Seek.set):
    # type: (int, SupportsInt) -> int
    """Change stream position.

    Move the stream to the byte offset given, relative to the
    reference selected by ``whence``.

    Arguments:
        offset (int): the offset to the new position, in bytes.
        whence (int): the position reference. Possible values are:
            * `Seek.set`: start of stream (the default).
            * `Seek.current`: current position; offset may be negative.
            * `Seek.end`: end of stream; offset must be negative.

    Returns:
        int: the new absolute position.

    Raises:
        ValueError: when ``whence`` is not known, or ``offset``
            is invalid.

    """
    _whence = int(whence)
    position = self.tell()
    if _whence == Seek.set:
        if offset < 0:
            raise ValueError("Negative seek position {}".format(offset))
    elif _whence == Seek.current:
        if position + offset < 0:
            raise ValueError("Negative seek position {}".format(offset))
    elif _whence == Seek.end:
        if offset > 0:
            raise ValueError("Positive seek position {}".format(offset))
    else:
        raise ValueError(
            "Invalid whence ({}, should be {}, {} or {})".format(
                _whence, Seek.set, Seek.current, Seek.end
            )
        )
    # Archive members are natively seekable here, so delegate directly.
    return self._f.seek(offset, _whence)
121 | 177 | |
122 | 178 | |
123 | 179 | class ZipFS(WrapFS): |
124 | 180 | """Read and write zip files. |
125 | 181 | |
126 | There are two ways to open a ZipFS for the use cases of reading | |
182 | There are two ways to open a `ZipFS` for the use cases of reading | |
127 | 183 | a zip file, and creating a new one. |
128 | 184 | |
129 | If you open the ZipFS with ``write`` set to `False` (the default) | |
130 | then the filesystem will be a read only filesystem which maps to | |
185 | If you open the `ZipFS` with ``write`` set to `False` (the default) | |
186 | then the filesystem will be a read-only filesystem which maps to | |
131 | 187 | the files and directories within the zip file. Files are |
132 | 188 | decompressed on the fly when you open them. |
133 | 189 | |
136 | 192 | with ZipFS('foo.zip') as zip_fs: |
137 | 193 | readme = zip_fs.readtext('readme.txt') |
138 | 194 | |
139 | If you open the ZipFS with ``write`` set to `True`, then the ZipFS | |
140 | will be a empty temporary filesystem. Any files / directories you | |
141 | create in the ZipFS will be written in to a zip file when the ZipFS | |
195 | If you open the `ZipFS` with ``write`` set to `True`, then the `ZipFS` | |
196 | will be an empty temporary filesystem. Any files / directories you | |
197 | create in the `ZipFS` will be written in to a zip file when the `ZipFS` | |
142 | 198 | is closed. |
143 | 199 | |
144 | Here's how you might write a new zip file containing a readme.txt | |
200 | Here's how you might write a new zip file containing a ``readme.txt`` | |
145 | 201 | file:: |
146 | 202 | |
147 | 203 | with ZipFS('foo.zip', write=True) as new_zip: |
157 | 213 | (default) to read an existing zip file. |
158 | 214 | compression (int): Compression to use (one of the constants |
159 | 215 | defined in the `zipfile` module in the stdlib). |
160 | temp_fs (str): An FS URL for the temporary filesystem used to | |
161 | store data prior to zipping. | |
216 | temp_fs (str or FS): An FS URL or an FS instance to use to | |
217 | store data prior to zipping. Defaults to creating a new | |
218 | `~fs.tempfs.TempFS`. | |
162 | 219 | |
163 | 220 | """ |
164 | 221 | |
169 | 226 | write=False, # type: bool |
170 | 227 | compression=zipfile.ZIP_DEFLATED, # type: int |
171 | 228 | encoding="utf-8", # type: Text |
172 | temp_fs="temp://__ziptemp__", # type: Text | |
229 | temp_fs="temp://__ziptemp__", # type: Union[Text, FS] | |
173 | 230 | ): |
174 | 231 | # type: (...) -> FS |
175 | 232 | # This magic returns a different class instance based on the |
190 | 247 | compression=zipfile.ZIP_DEFLATED, # type: int |
191 | 248 | encoding="utf-8", # type: Text |
192 | 249 | temp_fs="temp://__ziptemp__", # type: Text |
193 | ): | |
250 | ): # noqa: D107 | |
194 | 251 | # type: (...) -> None |
195 | 252 | pass |
196 | 253 | |
197 | 254 | |
198 | 255 | @six.python_2_unicode_compatible |
199 | 256 | class WriteZipFS(WrapFS): |
200 | """A writable zip file. | |
201 | """ | |
257 | """A writable zip file.""" | |
202 | 258 | |
203 | 259 | def __init__( |
204 | 260 | self, |
205 | 261 | file, # type: Union[Text, BinaryIO] |
206 | 262 | compression=zipfile.ZIP_DEFLATED, # type: int |
207 | 263 | encoding="utf-8", # type: Text |
208 | temp_fs="temp://__ziptemp__", # type: Text | |
209 | ): | |
264 | temp_fs="temp://__ziptemp__", # type: Union[Text, FS] | |
265 | ): # noqa: D107 | |
210 | 266 | # type: (...) -> None |
211 | 267 | self._file = file |
212 | 268 | self.compression = compression |
275 | 331 | |
276 | 332 | @six.python_2_unicode_compatible |
277 | 333 | class ReadZipFS(FS): |
278 | """A readable zip file. | |
279 | """ | |
334 | """A readable zip file.""" | |
280 | 335 | |
281 | 336 | _meta = { |
282 | "case_insensitive": True, | |
337 | "case_insensitive": False, | |
283 | 338 | "network": False, |
284 | 339 | "read_only": True, |
285 | 340 | "supports_rename": False, |
289 | 344 | } |
290 | 345 | |
291 | 346 | @errors.CreateFailed.catch_all |
292 | def __init__(self, file, encoding="utf-8"): | |
347 | def __init__(self, file, encoding="utf-8"): # noqa: D107 | |
293 | 348 | # type: (Union[BinaryIO, Text], Text) -> None |
294 | 349 | super(ReadZipFS, self).__init__() |
295 | 350 | self._file = file |
307 | 362 | |
308 | 363 | def _path_to_zip_name(self, path): |
309 | 364 | # type: (Text) -> str |
310 | """Convert a path to a zip file name. | |
311 | """ | |
365 | """Convert a path to a zip file name.""" | |
312 | 366 | path = relpath(normpath(path)) |
313 | 367 | if self._directory.isdir(path): |
314 | 368 | path = forcedir(path) |
319 | 373 | @property |
320 | 374 | def _directory(self): |
321 | 375 | # type: () -> MemoryFS |
322 | """`MemoryFS`: a filesystem with the same folder hierarchy as the zip. | |
323 | """ | |
376 | """`MemoryFS`: a filesystem with the same folder hierarchy as the zip.""" | |
324 | 377 | self.check() |
325 | 378 | with self._lock: |
326 | 379 | if self._directory_fs is None: |
0 | [bdist_wheel] | |
1 | universal = 1 | |
0 | # --- Project configuration ------------------------------------------------- | |
2 | 1 | |
3 | 2 | [metadata] |
4 | 3 | version = attr: fs._version.__version__ |
21 | 20 | Operating System :: OS Independent |
22 | 21 | Programming Language :: Python |
23 | 22 | Programming Language :: Python :: 2.7 |
24 | Programming Language :: Python :: 3.4 | |
25 | 23 | Programming Language :: Python :: 3.5 |
26 | 24 | Programming Language :: Python :: 3.6 |
27 | 25 | Programming Language :: Python :: 3.7 |
28 | 26 | Programming Language :: Python :: 3.8 |
29 | 27 | Programming Language :: Python :: 3.9 |
28 | Programming Language :: Python :: 3.10 | |
30 | 29 | Programming Language :: Python :: Implementation :: CPython |
31 | 30 | Programming Language :: Python :: Implementation :: PyPy |
32 | 31 | Topic :: System :: Filesystems |
32 | Typing :: Typed | |
33 | 33 | project_urls = |
34 | 34 | Bug Reports = https://github.com/PyFilesystem/pyfilesystem2/issues |
35 | 35 | Documentation = https://pyfilesystem2.readthedocs.io/en/latest/ |
42 | 42 | setuptools >=38.3.0 |
43 | 43 | install_requires = |
44 | 44 | appdirs~=1.4.3 |
45 | pytz | |
46 | 45 | setuptools |
47 | 46 | six ~=1.10 |
48 | 47 | enum34 ~=1.1.6 ; python_version < '3.4' |
58 | 57 | |
59 | 58 | [options.package_data] |
60 | 59 | fs = py.typed |
60 | ||
61 | [bdist_wheel] | |
62 | universal = 1 | |
63 | ||
64 | # --- Individual linter configuration --------------------------------------- | |
61 | 65 | |
62 | 66 | [pydocstyle] |
63 | 67 | inherit = false |
82 | 86 | [mypy-fs.test] |
83 | 87 | disallow_untyped_defs = false |
84 | 88 | |
85 | [coverage:run] | |
86 | branch = true | |
87 | omit = fs/test.py | |
88 | source = fs | |
89 | ||
90 | [coverage:report] | |
91 | show_missing = true | |
92 | skip_covered = true | |
93 | exclude_lines = | |
94 | pragma: no cover | |
95 | if False: | |
96 | @typing.overload | |
97 | @overload | |
98 | ||
99 | [tool:pytest] | |
100 | markers = | |
101 | slow: marks tests as slow (deselect with '-m "not slow"') | |
102 | ||
103 | 89 | [flake8] |
104 | 90 | extend-ignore = E203,E402,W503 |
105 | 91 | max-line-length = 88 |
109 | 95 | tests/*:E501 |
110 | 96 | fs/opener/*:F811 |
111 | 97 | fs/_fscompat.py:F401 |
98 | fs/_pathcompat.py:C401 | |
112 | 99 | |
113 | 100 | [isort] |
114 | default_section = THIRD_PARTY | |
101 | default_section = THIRDPARTY | |
115 | 102 | known_first_party = fs |
116 | known_standard_library = typing | |
103 | known_standard_library = sys, typing | |
117 | 104 | line_length = 88 |
105 | profile = black | |
106 | skip_gitignore = true | |
107 | ||
108 | # --- Test and coverage configuration ------------------------------------------ | |
109 | ||
110 | [coverage:run] | |
111 | branch = true | |
112 | omit = fs/test.py | |
113 | source = fs | |
114 | relative_files = true | |
115 | parallel = true | |
116 | ||
117 | [coverage:report] | |
118 | show_missing = true | |
119 | skip_covered = true | |
120 | exclude_lines = | |
121 | pragma: no cover | |
122 | if False: | |
123 | if typing.TYPE_CHECKING: | 
124 | @typing.overload | |
125 | @overload | |
126 | ||
127 | [tool:pytest] | |
128 | markers = | |
129 | slow: marks tests as slow (deselect with '-m "not slow"') | |
130 | ||
131 | # --- Tox automation configuration --------------------------------------------- | |
132 | ||
133 | [tox:tox] | |
134 | envlist = py{27,34}{,-scandir}, py{35,36,37,38,39,310}, pypy{27,36,37}, typecheck, codestyle, docstyle, codeformat | |
135 | sitepackages = false | |
136 | skip_missing_interpreters = true | |
137 | requires = | |
138 | setuptools >=38.3.0 | |
139 | ||
140 | [testenv] | |
141 | commands = python -m coverage run --rcfile {toxinidir}/setup.cfg -m pytest {posargs} {toxinidir}/tests | |
142 | deps = | |
143 | -rtests/requirements.txt | |
144 | coverage~=5.0 | |
145 | py{35,36,37,38,39,310,py36,py37}: pytest~=6.0 | |
146 | py{27,34,py27}: pytest~=4.6 | |
147 | py{35,36,37,38,39,310,py36,py37}: pytest-randomly~=3.5 | |
148 | py{27,34,py27}: pytest-randomly~=1.2 | |
149 | scandir: .[scandir] | |
150 | !scandir: . | |
151 | ||
152 | [testenv:typecheck] | |
153 | commands = mypy --config-file {toxinidir}/setup.cfg {toxinidir}/fs | |
154 | deps = | |
155 | . | |
156 | mypy==0.800 | |
157 | ||
158 | [testenv:codestyle] | |
159 | commands = flake8 --config={toxinidir}/setup.cfg {toxinidir}/fs {toxinidir}/tests | |
160 | deps = | |
161 | flake8==3.7.9 | |
162 | #flake8-builtins==1.5.3 | |
163 | flake8-bugbear==19.8.0 | |
164 | flake8-comprehensions==3.1.4 | |
165 | flake8-mutable==1.2.0 | |
166 | flake8-tuple==0.4.0 | |
167 | ||
168 | [testenv:codeformat] | |
169 | commands = black --check {toxinidir}/fs | |
170 | deps = | |
171 | black==22.3.0 | |
172 | ||
173 | [testenv:docstyle] | |
174 | commands = pydocstyle --config={toxinidir}/setup.cfg {toxinidir}/fs | |
175 | deps = | |
176 | pydocstyle==5.1.1 | |
177 | ||
178 | [gh-actions] | |
179 | python = | |
180 | 2.7: py27, py27-scandir | |
181 | 3.4: py34, py34-scandir | |
182 | 3.5: py35 | |
183 | 3.6: py36 | |
184 | 3.7: py37 | |
185 | 3.8: py38 | |
186 | 3.9: py39 | |
187 | 3.10: py310 | |
188 | pypy-2.7: pypy27 | |
189 | pypy-3.6: pypy36 | |
190 | pypy-3.7: pypy37 |
0 | pytest==4.6.5 | |
1 | pytest-cov==2.7.1 | |
2 | pytest-randomly==1.2.3 ; python_version<"3.5" | |
3 | pytest-randomly==3.0.0 ; python_version>="3.5" | |
4 | mock==3.0.5 ; python_version<"3.3" | |
5 | pyftpdlib==1.5.5 | |
6 | ||
7 | # Not directly required. `pyftpdlib` appears to need these but doesn't list them | |
8 | # as requirements. | |
9 | psutil | |
10 | pysendfile |
0 | import pytest | |
1 | ||
2 | try: | |
3 | from unittest import mock | |
4 | except ImportError: | |
5 | import mock | |
6 | ||
7 | ||
@pytest.fixture
@mock.patch("appdirs.user_data_dir", autospec=True, spec_set=True)
@mock.patch("appdirs.site_data_dir", autospec=True, spec_set=True)
@mock.patch("appdirs.user_config_dir", autospec=True, spec_set=True)
@mock.patch("appdirs.site_config_dir", autospec=True, spec_set=True)
@mock.patch("appdirs.user_cache_dir", autospec=True, spec_set=True)
@mock.patch("appdirs.user_state_dir", autospec=True, spec_set=True)
@mock.patch("appdirs.user_log_dir", autospec=True, spec_set=True)
def mock_appdir_directories(
    # NOTE: mock.patch decorators inject mocks bottom-up, so the
    # innermost (last-listed) patch becomes the first parameter.
    user_log_dir_mock,
    user_state_dir_mock,
    user_cache_dir_mock,
    site_config_dir_mock,
    user_config_dir_mock,
    site_data_dir_mock,
    user_data_dir_mock,
    tmpdir
):
    """Mock out every single AppDir directory so tests can't access real ones."""
    # Redirect every appdirs lookup to a freshly created subdirectory of
    # pytest's per-test ``tmpdir``.
    user_log_dir_mock.return_value = str(tmpdir.join("user_log").mkdir())
    user_state_dir_mock.return_value = str(tmpdir.join("user_state").mkdir())
    user_cache_dir_mock.return_value = str(tmpdir.join("user_cache").mkdir())
    site_config_dir_mock.return_value = str(tmpdir.join("site_config").mkdir())
    user_config_dir_mock.return_value = str(tmpdir.join("user_config").mkdir())
    site_data_dir_mock.return_value = str(tmpdir.join("site_data").mkdir())
    user_data_dir_mock.return_value = str(tmpdir.join("user_data").mkdir())
0 | # the bare requirements for running tests | |
1 | ||
2 | # pyftpdlib is needed to spawn a FTP server for the | |
3 | # FTPFS test suite | |
4 | pyftpdlib ~=1.5 | |
5 | ||
6 | # these are optional dependencies for pyftpdlib that | |
7 | # are not explicitly listed, we need to install these | |
8 | # ourselves | |
9 | psutil ~=5.0 | |
10 | pysendfile ~=2.0 ; python_version <= "3.3" | |
11 | ||
12 | # mock is only available from Python 3.3 onward, and | |
13 | # mock v4+ doesn't support Python 2.7 anymore | |
14 | mock ~=3.0 ; python_version < "3.3" | |
15 | ||
16 | # parameterized to prevent code duplication in tests. | 
17 | parameterized ~=0.8⏎ |
0 | 0 | from __future__ import unicode_literals |
1 | 1 | |
2 | import pytest | |
2 | import shutil | |
3 | 3 | import six |
4 | import tempfile | |
5 | import unittest | |
4 | 6 | |
7 | try: | |
8 | from unittest import mock | |
9 | except ImportError: | |
10 | import mock | |
11 | ||
12 | import fs.test | |
5 | 13 | from fs import appfs |
6 | 14 | |
7 | 15 | |
8 | @pytest.fixture | |
9 | def fs(mock_appdir_directories): | |
10 | """Create a UserDataFS but strictly using a temporary directory.""" | |
11 | return appfs.UserDataFS("fstest", "willmcgugan", "1.0") | |
class _TestAppFS(fs.test.FSTestCases):
    """Shared base running the standard FS test suite against an AppFS class.

    Concrete subclasses set ``AppFS``; the leading underscore keeps this
    base from being collected as a test case itself.
    """

    # Overridden by subclasses with the `fs.appfs` class under test.
    AppFS = None

    @classmethod
    def setUpClass(cls):
        super(_TestAppFS, cls).setUpClass()
        # Parent directory shared by every filesystem built in make_fs.
        cls.tmpdir = tempfile.mkdtemp()

    @classmethod
    def tearDownClass(cls):
        shutil.rmtree(cls.tmpdir)

    def make_fs(self):
        # Patch the appdirs function backing this AppFS so it is rooted in
        # a fresh sandbox directory instead of a real user/site directory.
        with mock.patch(
            "appdirs.{}".format(self.AppFS.app_dir),
            autospec=True,
            spec_set=True,
            return_value=tempfile.mkdtemp(dir=self.tmpdir),
        ):
            return self.AppFS("fstest", "willmcgugan", "1.0")

    # repr() of text differs between Python 2 (u'...') and 3 ('...'), so
    # the matching expectation is chosen at class-definition time.
    if six.PY2:

        def test_repr(self):
            self.assertEqual(
                repr(self.fs),
                "{}(u'fstest', author=u'willmcgugan', version=u'1.0')".format(
                    self.AppFS.__name__
                ),
            )

    else:

        def test_repr(self):
            self.assertEqual(
                repr(self.fs),
                "{}('fstest', author='willmcgugan', version='1.0')".format(
                    self.AppFS.__name__
                ),
            )

    def test_str(self):
        self.assertEqual(
            str(self.fs), "<{} 'fstest'>".format(self.AppFS.__name__.lower())
        )
12 | 62 | |
13 | 63 | |
14 | @pytest.mark.skipif(six.PY2, reason="Test requires Python 3 repr") | |
15 | def test_user_data_repr_py3(fs): | |
16 | assert repr(fs) == "UserDataFS('fstest', author='willmcgugan', version='1.0')" | |
17 | assert str(fs) == "<userdatafs 'fstest'>" | |
class TestUserDataFS(_TestAppFS, unittest.TestCase):
    # Run the shared AppFS suite against the user data filesystem.
    AppFS = appfs.UserDataFS
18 | 66 | |
19 | 67 | |
20 | @pytest.mark.skipif(not six.PY2, reason="Test requires Python 2 repr") | |
21 | def test_user_data_repr_py2(fs): | |
22 | assert repr(fs) == "UserDataFS(u'fstest', author=u'willmcgugan', version=u'1.0')" | |
23 | assert str(fs) == "<userdatafs 'fstest'>" | |
class TestUserConfigFS(_TestAppFS, unittest.TestCase):
    # Run the shared AppFS suite against the user config filesystem.
    AppFS = appfs.UserConfigFS
70 | ||
71 | ||
class TestUserCacheFS(_TestAppFS, unittest.TestCase):
    # Run the shared AppFS suite against the user cache filesystem.
    AppFS = appfs.UserCacheFS
74 | ||
75 | ||
class TestSiteDataFS(_TestAppFS, unittest.TestCase):
    # Run the shared AppFS suite against the site data filesystem.
    AppFS = appfs.SiteDataFS
78 | ||
79 | ||
class TestSiteConfigFS(_TestAppFS, unittest.TestCase):
    # Run the shared AppFS suite against the site config filesystem.
    AppFS = appfs.SiteConfigFS
82 | ||
83 | ||
class TestUserLogFS(_TestAppFS, unittest.TestCase):
    # Run the shared AppFS suite against the user log filesystem.
    AppFS = appfs.UserLogFS
2 | 2 | |
3 | 3 | import os |
4 | 4 | import stat |
5 | ||
6 | 5 | from six import text_type |
7 | 6 | |
7 | from fs import errors, walk | |
8 | from fs.enums import ResourceType | |
8 | 9 | from fs.opener import open_fs |
9 | from fs.enums import ResourceType | |
10 | from fs import walk | |
11 | from fs import errors | |
12 | 10 | from fs.test import UNICODE_TEXT |
13 | 11 | |
14 | 12 |
3 | 3 | |
4 | 4 | import unittest |
5 | 5 | |
6 | from fs import errors | |
6 | 7 | from fs.base import FS |
7 | from fs import errors | |
8 | 8 | |
9 | 9 | |
10 | 10 | class DummyFS(FS): |
0 | 0 | from __future__ import unicode_literals |
1 | 1 | |
2 | import calendar | |
3 | import datetime | |
2 | 4 | import errno |
3 | import datetime | |
4 | 5 | import os |
6 | import shutil | |
7 | import tempfile | |
5 | 8 | import unittest |
6 | import tempfile | |
7 | import shutil | |
8 | import calendar | |
9 | from parameterized import parameterized | |
9 | 10 | |
10 | 11 | import fs.copy |
11 | 12 | from fs import open_fs |
12 | 13 | |
13 | 14 | |
14 | class TestCopy(unittest.TestCase): | |
15 | def test_copy_fs(self): | |
16 | for workers in (0, 1, 2, 4): | |
17 | src_fs = open_fs("mem://") | |
18 | src_fs.makedirs("foo/bar") | |
19 | src_fs.makedirs("foo/empty") | |
20 | src_fs.touch("test.txt") | |
21 | src_fs.touch("foo/bar/baz.txt") | |
22 | ||
23 | dst_fs = open_fs("mem://") | |
24 | fs.copy.copy_fs(src_fs, dst_fs, workers=workers) | |
25 | ||
26 | self.assertTrue(dst_fs.isdir("foo/empty")) | |
27 | self.assertTrue(dst_fs.isdir("foo/bar")) | |
28 | self.assertTrue(dst_fs.isfile("test.txt")) | |
15 | def _create_sandbox_dir(prefix="pyfilesystem2_sandbox_", home=None): | |
16 | if home is None: | |
17 | return tempfile.mkdtemp(prefix=prefix) | |
18 | else: | |
19 | sandbox_path = os.path.join(home, prefix) | |
20 | mkdirp(sandbox_path) | |
21 | return sandbox_path | |
22 | ||
23 | ||
def _touch(root, filepath):
    """Create (or refresh the mtime of) ``filepath`` under ``root``.

    Missing parent directories are created; returns the absolute path
    of the touched file.
    """
    abs_filepath = os.path.join(root, filepath)
    # Ensure the parent directory exists before touching the file.
    mkdirp(os.path.dirname(abs_filepath))
    # Opening in append mode creates the file if needed; utime refreshes
    # the mtime when it already exists (same semantics as `touch`).
    with open(abs_filepath, "a"):
        os.utime(abs_filepath, None)
    return abs_filepath
37 | ||
38 | ||
39 | def _write_file(filepath, write_chars=1024): | |
40 | with open(filepath, "w") as f: | |
41 | f.write("1" * write_chars) | |
42 | return filepath | |
43 | ||
44 | ||
45 | def _delay_file_utime(filepath, delta_sec): | |
46 | utcnow = datetime.datetime.utcnow() | |
47 | unix_timestamp = calendar.timegm(utcnow.timetuple()) | |
48 | times = unix_timestamp + delta_sec, unix_timestamp + delta_sec | |
49 | os.utime(filepath, times) | |
50 | ||
51 | ||
def mkdirp(path):
    """Create ``path`` and any missing parents; no-op when it already exists.

    Equivalent to ``os.makedirs(path, exist_ok=True)``, which is not
    available on every Python version this suite supports.
    """
    try:
        os.makedirs(path)
    except OSError as exc:
        # Only swallow "already exists and is a directory"; anything
        # else (permissions, a file in the way, ...) is a real error.
        if not (exc.errno == errno.EEXIST and os.path.isdir(path)):
            raise
61 | ||
62 | ||
63 | class TestCopySimple(unittest.TestCase): | |
64 | @parameterized.expand([(0,), (1,), (2,), (4,)]) | |
65 | def test_copy_fs(self, workers): | |
66 | namespaces = ("details", "modified") | |
67 | ||
68 | src_fs = open_fs("mem://") | |
69 | src_fs.makedirs("foo/bar") | |
70 | src_fs.makedirs("foo/empty") | |
71 | src_fs.touch("test.txt") | |
72 | src_fs.touch("foo/bar/baz.txt") | |
73 | src_file1_info = src_fs.getinfo("test.txt", namespaces) | |
74 | src_file2_info = src_fs.getinfo("foo/bar/baz.txt", namespaces) | |
75 | ||
76 | dst_fs = open_fs("mem://") | |
77 | fs.copy.copy_fs(src_fs, dst_fs, workers=workers, preserve_time=True) | |
78 | ||
79 | self.assertTrue(dst_fs.isdir("foo/empty")) | |
80 | self.assertTrue(dst_fs.isdir("foo/bar")) | |
81 | self.assertTrue(dst_fs.isfile("test.txt")) | |
82 | ||
83 | dst_file1_info = dst_fs.getinfo("test.txt", namespaces) | |
84 | dst_file2_info = dst_fs.getinfo("foo/bar/baz.txt", namespaces) | |
85 | self.assertEqual(dst_file1_info.modified, src_file1_info.modified) | |
86 | self.assertEqual(dst_file2_info.modified, src_file2_info.modified) | |
29 | 87 | |
30 | 88 | def test_copy_value_error(self): |
31 | 89 | src_fs = open_fs("mem://") |
33 | 91 | with self.assertRaises(ValueError): |
34 | 92 | fs.copy.copy_fs(src_fs, dst_fs, workers=-1) |
35 | 93 | |
36 | def test_copy_dir(self): | |
94 | def test_copy_dir0(self): | |
95 | namespaces = ("details", "modified") | |
96 | ||
37 | 97 | src_fs = open_fs("mem://") |
38 | 98 | src_fs.makedirs("foo/bar") |
39 | 99 | src_fs.makedirs("foo/empty") |
40 | 100 | src_fs.touch("test.txt") |
41 | 101 | src_fs.touch("foo/bar/baz.txt") |
42 | for workers in (0, 1, 2, 4): | |
43 | with open_fs("mem://") as dst_fs: | |
44 | fs.copy.copy_dir(src_fs, "/foo", dst_fs, "/", workers=workers) | |
45 | self.assertTrue(dst_fs.isdir("bar")) | |
46 | self.assertTrue(dst_fs.isdir("empty")) | |
47 | self.assertTrue(dst_fs.isfile("bar/baz.txt")) | |
102 | src_file2_info = src_fs.getinfo("foo/bar/baz.txt", namespaces) | |
103 | ||
104 | with open_fs("mem://") as dst_fs: | |
105 | fs.copy.copy_dir(src_fs, "/foo", dst_fs, "/", workers=0, preserve_time=True) | |
106 | self.assertTrue(dst_fs.isdir("bar")) | |
107 | self.assertTrue(dst_fs.isdir("empty")) | |
108 | self.assertTrue(dst_fs.isfile("bar/baz.txt")) | |
109 | ||
110 | dst_file2_info = dst_fs.getinfo("bar/baz.txt", namespaces) | |
111 | self.assertEqual(dst_file2_info.modified, src_file2_info.modified) | |
112 | ||
113 | @parameterized.expand([(0,), (1,), (2,), (4,)]) | |
114 | def test_copy_dir(self, workers): | |
115 | namespaces = ("details", "modified") | |
116 | ||
117 | src_fs = open_fs("mem://") | |
118 | src_fs.makedirs("foo/bar") | |
119 | src_fs.makedirs("foo/empty") | |
120 | src_fs.touch("test.txt") | |
121 | src_fs.touch("foo/bar/baz.txt") | |
122 | src_file2_info = src_fs.getinfo("foo/bar/baz.txt", namespaces) | |
123 | ||
124 | with open_fs("mem://") as dst_fs: | |
125 | fs.copy.copy_dir( | |
126 | src_fs, "/foo", dst_fs, "/", workers=workers, preserve_time=True | |
127 | ) | |
128 | self.assertTrue(dst_fs.isdir("bar")) | |
129 | self.assertTrue(dst_fs.isdir("empty")) | |
130 | self.assertTrue(dst_fs.isfile("bar/baz.txt")) | |
131 | ||
132 | dst_file2_info = dst_fs.getinfo("bar/baz.txt", namespaces) | |
133 | self.assertEqual(dst_file2_info.modified, src_file2_info.modified) | |
48 | 134 | |
49 | 135 | def test_copy_large(self): |
50 | 136 | data1 = b"foo" * 512 * 1024 |
77 | 163 | fs.copy.copy_dir(src_fs, "/", dst_fs, "/", on_copy=on_copy) |
78 | 164 | self.assertEqual(on_copy_calls, [(src_fs, "/baz.txt", dst_fs, "/baz.txt")]) |
79 | 165 | |
80 | def mkdirp(self, path): | |
81 | # os.makedirs(path, exist_ok=True) only for python3.? | |
82 | try: | |
83 | os.makedirs(path) | |
84 | except OSError as exc: | |
85 | if exc.errno == errno.EEXIST and os.path.isdir(path): | |
86 | pass | |
87 | else: | |
88 | raise | |
89 | ||
90 | def _create_sandbox_dir(self, prefix="pyfilesystem2_sandbox_", home=None): | |
91 | if home is None: | |
92 | return tempfile.mkdtemp(prefix=prefix) | |
93 | else: | |
94 | sandbox_path = os.path.join(home, prefix) | |
95 | self.mkdirp(sandbox_path) | |
96 | return sandbox_path | |
97 | ||
98 | def _touch(self, root, filepath): | |
99 | # create abs filename | |
100 | abs_filepath = os.path.join(root, filepath) | |
101 | # create dir | |
102 | dirname = os.path.dirname(abs_filepath) | |
103 | self.mkdirp(dirname) | |
104 | # touch file | |
105 | with open(abs_filepath, "a"): | |
106 | os.utime( | |
107 | abs_filepath, None | |
108 | ) # update the mtime in case the file exists, same as touch | |
109 | ||
110 | return abs_filepath | |
111 | ||
112 | def _write_file(self, filepath, write_chars=1024): | |
113 | with open(filepath, "w") as f: | |
114 | f.write("1" * write_chars) | |
115 | return filepath | |
116 | ||
117 | def _delay_file_utime(self, filepath, delta_sec): | |
118 | utcnow = datetime.datetime.utcnow() | |
119 | unix_timestamp = calendar.timegm(utcnow.timetuple()) | |
120 | times = unix_timestamp + delta_sec, unix_timestamp + delta_sec | |
121 | os.utime(filepath, times) | |
122 | ||
123 | def test_copy_file_if_newer_same_fs(self): | |
166 | ||
167 | class TestCopyIfNewer(unittest.TestCase): | |
168 | copy_if_condition = "newer" | |
169 | ||
170 | def test_copy_file_if_same_fs(self): | |
124 | 171 | src_fs = open_fs("mem://") |
125 | 172 | src_fs.makedir("foo2").touch("exists") |
126 | 173 | src_fs.makedir("foo1").touch("test1.txt") |
128 | 175 | "foo2/exists", datetime.datetime.utcnow() + datetime.timedelta(hours=1) |
129 | 176 | ) |
130 | 177 | self.assertTrue( |
131 | fs.copy.copy_file_if_newer( | |
132 | src_fs, "foo1/test1.txt", src_fs, "foo2/test1.txt.copy" | |
178 | fs.copy.copy_file_if( | |
179 | src_fs, | |
180 | "foo1/test1.txt", | |
181 | src_fs, | |
182 | "foo2/test1.txt.copy", | |
183 | self.copy_if_condition, | |
133 | 184 | ) |
134 | 185 | ) |
135 | 186 | self.assertFalse( |
136 | fs.copy.copy_file_if_newer(src_fs, "foo1/test1.txt", src_fs, "foo2/exists") | |
187 | fs.copy.copy_file_if( | |
188 | src_fs, "foo1/test1.txt", src_fs, "foo2/exists", self.copy_if_condition | |
189 | ) | |
137 | 190 | ) |
138 | 191 | self.assertTrue(src_fs.exists("foo2/test1.txt.copy")) |
139 | 192 | |
140 | def test_copy_file_if_newer_dst_older(self): | |
193 | def test_copy_file_if_dst_is_older(self): | |
141 | 194 | try: |
142 | 195 | # create first dst ==> dst is older the src ==> file should be copied |
143 | dst_dir = self._create_sandbox_dir() | |
144 | dst_file1 = self._touch(dst_dir, "file1.txt") | |
145 | self._write_file(dst_file1) | |
146 | ||
147 | src_dir = self._create_sandbox_dir() | |
148 | src_file1 = self._touch(src_dir, "file1.txt") | |
149 | self._write_file(src_file1) | |
196 | dst_dir = _create_sandbox_dir() | |
197 | dst_file1 = _touch(dst_dir, "file1.txt") | |
198 | _write_file(dst_file1) | |
199 | ||
200 | src_dir = _create_sandbox_dir() | |
201 | src_file1 = _touch(src_dir, "file1.txt") | |
202 | _write_file(src_file1) | |
203 | ||
150 | 204 | # ensure src file is newer than dst, changing its modification time |
151 | self._delay_file_utime(src_file1, delta_sec=60) | |
152 | ||
153 | src_fs = open_fs("osfs://" + src_dir) | |
154 | dst_fs = open_fs("osfs://" + dst_dir) | |
155 | ||
156 | self.assertTrue(dst_fs.exists("/file1.txt")) | |
157 | ||
158 | copied = fs.copy.copy_file_if_newer( | |
159 | src_fs, "/file1.txt", dst_fs, "/file1.txt" | |
205 | _delay_file_utime(src_file1, delta_sec=60) | |
206 | ||
207 | src_fs = open_fs("osfs://" + src_dir) | |
208 | dst_fs = open_fs("osfs://" + dst_dir) | |
209 | ||
210 | self.assertTrue(dst_fs.exists("/file1.txt")) | |
211 | ||
212 | copied = fs.copy.copy_file_if( | |
213 | src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition | |
160 | 214 | ) |
161 | 215 | |
162 | 216 | self.assertTrue(copied) |
165 | 219 | shutil.rmtree(src_dir) |
166 | 220 | shutil.rmtree(dst_dir) |
167 | 221 | |
168 | def test_copy_file_if_newer_dst_doesnt_exists(self): | |
169 | try: | |
170 | src_dir = self._create_sandbox_dir() | |
171 | src_file1 = self._touch(src_dir, "file1.txt") | |
172 | self._write_file(src_file1) | |
173 | ||
174 | dst_dir = self._create_sandbox_dir() | |
175 | ||
176 | src_fs = open_fs("osfs://" + src_dir) | |
177 | dst_fs = open_fs("osfs://" + dst_dir) | |
178 | ||
179 | copied = fs.copy.copy_file_if_newer( | |
180 | src_fs, "/file1.txt", dst_fs, "/file1.txt" | |
222 | def test_copy_file_if_dst_doesnt_exists(self): | |
223 | try: | |
224 | src_dir = _create_sandbox_dir() | |
225 | src_file1 = _touch(src_dir, "file1.txt") | |
226 | _write_file(src_file1) | |
227 | ||
228 | dst_dir = _create_sandbox_dir() | |
229 | ||
230 | src_fs = open_fs("osfs://" + src_dir) | |
231 | dst_fs = open_fs("osfs://" + dst_dir) | |
232 | ||
233 | copied = fs.copy.copy_file_if( | |
234 | src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition | |
181 | 235 | ) |
182 | 236 | |
183 | 237 | self.assertTrue(copied) |
186 | 240 | shutil.rmtree(src_dir) |
187 | 241 | shutil.rmtree(dst_dir) |
188 | 242 | |
189 | def test_copy_file_if_newer_dst_is_newer(self): | |
190 | try: | |
191 | src_dir = self._create_sandbox_dir() | |
192 | src_file1 = self._touch(src_dir, "file1.txt") | |
193 | self._write_file(src_file1) | |
194 | ||
195 | dst_dir = self._create_sandbox_dir() | |
196 | dst_file1 = self._touch(dst_dir, "file1.txt") | |
197 | self._write_file(dst_file1) | |
198 | ||
199 | src_fs = open_fs("osfs://" + src_dir) | |
200 | dst_fs = open_fs("osfs://" + dst_dir) | |
201 | ||
202 | self.assertTrue(dst_fs.exists("/file1.txt")) | |
203 | ||
204 | copied = fs.copy.copy_file_if_newer( | |
205 | src_fs, "/file1.txt", dst_fs, "/file1.txt" | |
206 | ) | |
207 | ||
208 | self.assertEqual(copied, False) | |
209 | finally: | |
210 | shutil.rmtree(src_dir) | |
211 | shutil.rmtree(dst_dir) | |
212 | ||
213 | def test_copy_fs_if_newer_dst_older(self): | |
214 | try: | |
215 | # create first dst ==> dst is older the src ==> file should be copied | |
216 | dst_dir = self._create_sandbox_dir() | |
217 | dst_file1 = self._touch(dst_dir, "file1.txt") | |
218 | self._write_file(dst_file1) | |
219 | ||
220 | src_dir = self._create_sandbox_dir() | |
221 | src_file1 = self._touch(src_dir, "file1.txt") | |
222 | self._write_file(src_file1) | |
223 | # ensure src file is newer than dst, changing its modification time | |
224 | self._delay_file_utime(src_file1, delta_sec=60) | |
225 | ||
226 | src_fs = open_fs("osfs://" + src_dir) | |
227 | dst_fs = open_fs("osfs://" + dst_dir) | |
228 | ||
229 | self.assertTrue(dst_fs.exists("/file1.txt")) | |
230 | ||
231 | copied = [] | |
232 | ||
233 | def on_copy(src_fs, src_path, dst_fs, dst_path): | |
234 | copied.append(dst_path) | |
235 | ||
236 | fs.copy.copy_fs_if_newer(src_fs, dst_fs, on_copy=on_copy) | |
237 | ||
238 | self.assertEqual(copied, ["/file1.txt"]) | |
239 | self.assertTrue(dst_fs.exists("/file1.txt")) | |
243 | def test_copy_file_if_dst_is_newer(self): | |
244 | try: | |
245 | src_dir = _create_sandbox_dir() | |
246 | src_file1 = _touch(src_dir, "file1.txt") | |
247 | _write_file(src_file1) | |
248 | ||
249 | dst_dir = _create_sandbox_dir() | |
250 | dst_file1 = _touch(dst_dir, "file1.txt") | |
251 | _write_file(dst_file1) | |
252 | ||
253 | # ensure dst file is newer than src, changing its modification time | |
254 | _delay_file_utime(dst_file1, delta_sec=60) | |
255 | ||
256 | src_fs = open_fs("osfs://" + src_dir) | |
257 | dst_fs = open_fs("osfs://" + dst_dir) | |
258 | ||
259 | self.assertTrue(dst_fs.exists("/file1.txt")) | |
260 | ||
261 | copied = fs.copy.copy_file_if( | |
262 | src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition | |
263 | ) | |
264 | ||
265 | self.assertFalse(copied) | |
266 | finally: | |
267 | shutil.rmtree(src_dir) | |
268 | shutil.rmtree(dst_dir) | |
269 | ||
270 | def test_copy_fs_if(self): | |
271 | try: | |
272 | dst_dir = _create_sandbox_dir() | |
273 | dst_file1 = _touch(dst_dir, "file1.txt") | |
274 | dst_file2 = _touch(dst_dir, "file2.txt") | |
275 | _write_file(dst_file1) | |
276 | _write_file(dst_file2) | |
277 | ||
278 | src_dir = _create_sandbox_dir() | |
279 | src_file1 = _touch(src_dir, "file1.txt") | |
280 | src_file2 = _touch(src_dir, "file2.txt") | |
281 | src_file3 = _touch(src_dir, "file3.txt") | |
282 | _write_file(src_file1) | |
283 | _write_file(src_file2) | |
284 | _write_file(src_file3) | |
285 | ||
286 | # ensure src_file1 is newer than dst_file1, changing its modification time | |
287 | # ensure dst_file2 is newer than src_file2, changing its modification time | |
288 | _delay_file_utime(src_file1, delta_sec=60) | |
289 | _delay_file_utime(dst_file2, delta_sec=60) | |
290 | ||
291 | src_fs = open_fs("osfs://" + src_dir) | |
292 | dst_fs = open_fs("osfs://" + dst_dir) | |
293 | ||
294 | self.assertTrue(dst_fs.exists("/file1.txt")) | |
295 | self.assertTrue(dst_fs.exists("/file2.txt")) | |
296 | ||
297 | copied = [] | |
298 | ||
299 | def on_copy(src_fs, src_path, dst_fs, dst_path): | |
300 | copied.append(dst_path) | |
301 | ||
302 | fs.copy.copy_fs_if( | |
303 | src_fs, dst_fs, on_copy=on_copy, condition=self.copy_if_condition | |
304 | ) | |
305 | ||
306 | self.assertTrue("/file1.txt" in copied) | |
307 | self.assertTrue("/file2.txt" not in copied) | |
308 | self.assertTrue("/file3.txt" in copied) | |
309 | self.assertTrue(dst_fs.exists("/file1.txt")) | |
310 | self.assertTrue(dst_fs.exists("/file2.txt")) | |
311 | self.assertTrue(dst_fs.exists("/file3.txt")) | |
240 | 312 | |
241 | 313 | src_fs.close() |
242 | 314 | dst_fs.close() |
245 | 317 | shutil.rmtree(src_dir) |
246 | 318 | shutil.rmtree(dst_dir) |
247 | 319 | |
248 | def test_copy_fs_if_newer_when_dst_doesnt_exists(self): | |
249 | try: | |
250 | src_dir = self._create_sandbox_dir() | |
251 | src_file1 = self._touch(src_dir, "file1.txt") | |
252 | self._write_file(src_file1) | |
253 | ||
254 | src_file2 = self._touch(src_dir, "one_level_down" + os.sep + "file2.txt") | |
255 | self._write_file(src_file2) | |
256 | ||
257 | dst_dir = self._create_sandbox_dir() | |
258 | ||
259 | src_fs = open_fs("osfs://" + src_dir) | |
260 | dst_fs = open_fs("osfs://" + dst_dir) | |
261 | ||
262 | copied = [] | |
263 | ||
264 | def on_copy(src_fs, src_path, dst_fs, dst_path): | |
265 | copied.append(dst_path) | |
266 | ||
267 | fs.copy.copy_fs_if_newer(src_fs, dst_fs, on_copy=on_copy) | |
268 | ||
269 | self.assertEqual(copied, ["/file1.txt", "/one_level_down/file2.txt"]) | |
270 | self.assertTrue(dst_fs.exists("/file1.txt")) | |
271 | self.assertTrue(dst_fs.exists("/one_level_down/file2.txt")) | |
320 | def test_copy_dir_if(self): | |
321 | try: | |
322 | src_dir = _create_sandbox_dir() | |
323 | src_file1 = _touch(src_dir, "file1.txt") | |
324 | _write_file(src_file1) | |
325 | ||
326 | src_file2 = _touch(src_dir, os.path.join("one_level_down", "file2.txt")) | |
327 | _write_file(src_file2) | |
328 | ||
329 | dst_dir = _create_sandbox_dir() | |
330 | mkdirp(os.path.join(dst_dir, "target_dir")) | |
331 | dst_file1 = _touch(dst_dir, os.path.join("target_dir", "file1.txt")) | |
332 | _write_file(dst_file1) | |
333 | ||
334 | # ensure dst file is newer than src, changing its modification time | |
335 | _delay_file_utime(dst_file1, delta_sec=60) | |
336 | ||
337 | src_fs = open_fs("osfs://" + src_dir) | |
338 | dst_fs = open_fs("osfs://" + dst_dir) | |
339 | ||
340 | copied = [] | |
341 | ||
342 | def on_copy(src_fs, src_path, dst_fs, dst_path): | |
343 | copied.append(dst_path) | |
344 | ||
345 | fs.copy.copy_dir_if( | |
346 | src_fs, | |
347 | "/", | |
348 | dst_fs, | |
349 | "/target_dir/", | |
350 | on_copy=on_copy, | |
351 | condition=self.copy_if_condition, | |
352 | ) | |
353 | ||
354 | self.assertEqual(copied, ["/target_dir/one_level_down/file2.txt"]) | |
355 | self.assertTrue(dst_fs.exists("/target_dir/one_level_down/file2.txt")) | |
272 | 356 | |
273 | 357 | src_fs.close() |
274 | 358 | dst_fs.close() |
275 | ||
276 | finally: | |
277 | shutil.rmtree(src_dir) | |
278 | shutil.rmtree(dst_dir) | |
279 | ||
280 | def test_copy_fs_if_newer_dont_copy_when_dst_exists(self): | |
281 | try: | |
282 | # src is older than dst => no copy should be necessary | |
283 | src_dir = self._create_sandbox_dir() | |
284 | src_file1 = self._touch(src_dir, "file1.txt") | |
285 | self._write_file(src_file1) | |
286 | ||
287 | dst_dir = self._create_sandbox_dir() | |
288 | dst_file1 = self._touch(dst_dir, "file1.txt") | |
289 | self._write_file(dst_file1) | |
290 | # ensure dst file is newer than src, changing its modification time | |
291 | self._delay_file_utime(dst_file1, delta_sec=60) | |
292 | ||
293 | src_fs = open_fs("osfs://" + src_dir) | |
294 | dst_fs = open_fs("osfs://" + dst_dir) | |
295 | ||
296 | self.assertTrue(dst_fs.exists("/file1.txt")) | |
297 | ||
298 | copied = [] | |
299 | ||
300 | def on_copy(src_fs, src_path, dst_fs, dst_path): | |
301 | copied.append(dst_path) | |
302 | ||
303 | fs.copy.copy_fs_if_newer(src_fs, dst_fs, on_copy=on_copy) | |
304 | ||
305 | self.assertEqual(copied, []) | |
306 | self.assertTrue(dst_fs.exists("/file1.txt")) | |
307 | ||
308 | src_fs.close() | |
309 | dst_fs.close() | |
310 | ||
311 | finally: | |
312 | shutil.rmtree(src_dir) | |
313 | shutil.rmtree(dst_dir) | |
314 | ||
315 | def test_copy_dir_if_newer_one_dst_doesnt_exist(self): | |
316 | try: | |
317 | ||
318 | src_dir = self._create_sandbox_dir() | |
319 | src_file1 = self._touch(src_dir, "file1.txt") | |
320 | self._write_file(src_file1) | |
321 | ||
322 | src_file2 = self._touch(src_dir, "one_level_down" + os.sep + "file2.txt") | |
323 | self._write_file(src_file2) | |
324 | ||
325 | dst_dir = self._create_sandbox_dir() | |
326 | dst_file1 = self._touch(dst_dir, "file1.txt") | |
327 | self._write_file(dst_file1) | |
328 | # ensure dst file is newer than src, changing its modification time | |
329 | self._delay_file_utime(dst_file1, delta_sec=60) | |
330 | ||
331 | src_fs = open_fs("osfs://" + src_dir) | |
332 | dst_fs = open_fs("osfs://" + dst_dir) | |
333 | ||
334 | copied = [] | |
335 | ||
336 | def on_copy(src_fs, src_path, dst_fs, dst_path): | |
337 | copied.append(dst_path) | |
338 | ||
339 | fs.copy.copy_dir_if_newer(src_fs, "/", dst_fs, "/", on_copy=on_copy) | |
340 | ||
341 | self.assertEqual(copied, ["/one_level_down/file2.txt"]) | |
342 | self.assertTrue(dst_fs.exists("/one_level_down/file2.txt")) | |
343 | ||
344 | src_fs.close() | |
345 | dst_fs.close() | |
346 | finally: | |
347 | shutil.rmtree(src_dir) | |
348 | shutil.rmtree(dst_dir) | |
349 | ||
350 | def test_copy_dir_if_newer_same_fs(self): | |
351 | try: | |
352 | src_dir = self._create_sandbox_dir() | |
353 | src_file1 = self._touch(src_dir, "src" + os.sep + "file1.txt") | |
354 | self._write_file(src_file1) | |
355 | ||
356 | self._create_sandbox_dir(home=src_dir) | |
357 | ||
358 | src_fs = open_fs("osfs://" + src_dir) | |
359 | ||
360 | copied = [] | |
361 | ||
362 | def on_copy(src_fs, src_path, dst_fs, dst_path): | |
363 | copied.append(dst_path) | |
364 | ||
365 | fs.copy.copy_dir_if_newer(src_fs, "/src", src_fs, "/dst", on_copy=on_copy) | |
359 | finally: | |
360 | shutil.rmtree(src_dir) | |
361 | shutil.rmtree(dst_dir) | |
362 | ||
363 | def test_copy_dir_if_same_fs(self): | |
364 | try: | |
365 | src_dir = _create_sandbox_dir() | |
366 | src_file1 = _touch(src_dir, "src" + os.sep + "file1.txt") | |
367 | _write_file(src_file1) | |
368 | ||
369 | _create_sandbox_dir(home=src_dir) | |
370 | ||
371 | src_fs = open_fs("osfs://" + src_dir) | |
372 | ||
373 | copied = [] | |
374 | ||
375 | def on_copy(src_fs, src_path, dst_fs, dst_path): | |
376 | copied.append(dst_path) | |
377 | ||
378 | fs.copy.copy_dir_if( | |
379 | src_fs, "/src", src_fs, "/dst", on_copy=on_copy, condition="newer" | |
380 | ) | |
366 | 381 | |
367 | 382 | self.assertEqual(copied, ["/dst/file1.txt"]) |
368 | 383 | self.assertTrue(src_fs.exists("/dst/file1.txt")) |
372 | 387 | finally: |
373 | 388 | shutil.rmtree(src_dir) |
374 | 389 | |
375 | def test_copy_dir_if_newer_multiple_files(self): | |
376 | try: | |
377 | src_dir = self._create_sandbox_dir() | |
390 | def test_copy_dir_if_multiple_files(self): | |
391 | try: | |
392 | src_dir = _create_sandbox_dir() | |
378 | 393 | src_fs = open_fs("osfs://" + src_dir) |
379 | 394 | src_fs.makedirs("foo/bar") |
380 | 395 | src_fs.makedirs("foo/empty") |
381 | 396 | src_fs.touch("test.txt") |
382 | 397 | src_fs.touch("foo/bar/baz.txt") |
383 | 398 | |
384 | dst_dir = self._create_sandbox_dir() | |
385 | dst_fs = open_fs("osfs://" + dst_dir) | |
386 | ||
387 | fs.copy.copy_dir_if_newer(src_fs, "/foo", dst_fs, "/") | |
399 | dst_dir = _create_sandbox_dir() | |
400 | dst_fs = open_fs("osfs://" + dst_dir) | |
401 | ||
402 | fs.copy.copy_dir_if(src_fs, "/foo", dst_fs, "/", condition="newer") | |
388 | 403 | |
389 | 404 | self.assertTrue(dst_fs.isdir("bar")) |
390 | 405 | self.assertTrue(dst_fs.isdir("empty")) |
394 | 409 | shutil.rmtree(dst_dir) |
395 | 410 | |
396 | 411 | |
class TestCopyIfOlder(unittest.TestCase):
    """Exercise the ``copy_*_if`` helpers with the ``"older"`` condition.

    Under ``"older"`` a file is copied when the destination does not exist,
    or when the source file is older than the destination file (as shown by
    the file-level tests below).
    """

    # Condition string handed to fs.copy.copy_file_if / copy_fs_if / copy_dir_if.
    copy_if_condition = "older"

    def test_copy_file_if_same_fs(self):
        """Copy to a missing path succeeds; a not-newer destination is skipped."""
        src_fs = open_fs("mem://")
        src_fs.makedir("foo2").touch("exists")
        src_fs.makedir("foo1").touch("test1.txt")
        # Backdate "foo2/exists" so the source is NOT older than it.
        src_fs.settimes(
            "foo2/exists", datetime.datetime.utcnow() - datetime.timedelta(hours=1)
        )
        self.assertTrue(
            fs.copy.copy_file_if(
                src_fs,
                "foo1/test1.txt",
                src_fs,
                "foo2/test1.txt.copy",
                self.copy_if_condition,
            )
        )
        self.assertFalse(
            fs.copy.copy_file_if(
                src_fs, "foo1/test1.txt", src_fs, "foo2/exists", self.copy_if_condition
            )
        )
        self.assertTrue(src_fs.exists("foo2/test1.txt.copy"))

    def test_copy_file_if_dst_is_older(self):
        """No copy when the source file is newer than the destination."""
        try:
            # dst is created first ==> dst is older than src ==> with the
            # "older" condition the file must NOT be copied.
            # (The original comment claimed the opposite of the assertion.)
            dst_dir = _create_sandbox_dir()
            dst_file1 = _touch(dst_dir, "file1.txt")
            _write_file(dst_file1)

            src_dir = _create_sandbox_dir()
            src_file1 = _touch(src_dir, "file1.txt")
            _write_file(src_file1)

            # ensure src file is newer than dst, changing its modification time
            _delay_file_utime(src_file1, delta_sec=60)

            src_fs = open_fs("osfs://" + src_dir)
            dst_fs = open_fs("osfs://" + dst_dir)

            self.assertTrue(dst_fs.exists("/file1.txt"))

            copied = fs.copy.copy_file_if(
                src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
            )

            self.assertFalse(copied)
            self.assertTrue(dst_fs.exists("/file1.txt"))
        finally:
            shutil.rmtree(src_dir)
            shutil.rmtree(dst_dir)

    def test_copy_file_if_dst_doesnt_exists(self):
        """A missing destination is always copied under "older"."""
        try:
            src_dir = _create_sandbox_dir()
            src_file1 = _touch(src_dir, "file1.txt")
            _write_file(src_file1)

            dst_dir = _create_sandbox_dir()

            src_fs = open_fs("osfs://" + src_dir)
            dst_fs = open_fs("osfs://" + dst_dir)

            copied = fs.copy.copy_file_if(
                src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
            )

            self.assertTrue(copied)
            self.assertTrue(dst_fs.exists("/file1.txt"))
        finally:
            shutil.rmtree(src_dir)
            shutil.rmtree(dst_dir)

    def test_copy_file_if_dst_is_newer(self):
        """The file IS copied when the source is older than the destination."""
        try:
            src_dir = _create_sandbox_dir()
            src_file1 = _touch(src_dir, "file1.txt")
            _write_file(src_file1)

            dst_dir = _create_sandbox_dir()
            dst_file1 = _touch(dst_dir, "file1.txt")
            _write_file(dst_file1)

            # ensure dst file is newer than src, changing its modification time
            _delay_file_utime(dst_file1, delta_sec=60)

            src_fs = open_fs("osfs://" + src_dir)
            dst_fs = open_fs("osfs://" + dst_dir)

            self.assertTrue(dst_fs.exists("/file1.txt"))

            copied = fs.copy.copy_file_if(
                src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
            )

            self.assertTrue(copied)
        finally:
            shutil.rmtree(src_dir)
            shutil.rmtree(dst_dir)

    def test_copy_fs_if(self):
        """copy_fs_if copies only the files whose source is older (or dst missing)."""
        try:
            dst_dir = _create_sandbox_dir()
            dst_file1 = _touch(dst_dir, "file1.txt")
            dst_file2 = _touch(dst_dir, "file2.txt")
            _write_file(dst_file1)
            _write_file(dst_file2)

            src_dir = _create_sandbox_dir()
            src_file1 = _touch(src_dir, "file1.txt")
            src_file2 = _touch(src_dir, "file2.txt")
            src_file3 = _touch(src_dir, "file3.txt")
            _write_file(src_file1)
            _write_file(src_file2)
            _write_file(src_file3)

            # ensure src_file1 is newer than dst_file1, changing its modification time
            # ensure dst_file2 is newer than src_file2, changing its modification time
            _delay_file_utime(src_file1, delta_sec=60)
            _delay_file_utime(dst_file2, delta_sec=60)

            src_fs = open_fs("osfs://" + src_dir)
            dst_fs = open_fs("osfs://" + dst_dir)

            self.assertTrue(dst_fs.exists("/file1.txt"))
            self.assertTrue(dst_fs.exists("/file2.txt"))

            copied = []

            def on_copy(src_fs, src_path, dst_fs, dst_path):
                copied.append(dst_path)

            fs.copy.copy_fs_if(
                src_fs, dst_fs, on_copy=on_copy, condition=self.copy_if_condition
            )

            # file1: src newer -> skipped; file2: src older -> copied;
            # file3: dst missing -> copied.
            self.assertTrue("/file1.txt" not in copied)
            self.assertTrue("/file2.txt" in copied)
            self.assertTrue("/file3.txt" in copied)
            self.assertTrue(dst_fs.exists("/file1.txt"))
            self.assertTrue(dst_fs.exists("/file2.txt"))
            self.assertTrue(dst_fs.exists("/file3.txt"))

            src_fs.close()
            dst_fs.close()

        finally:
            shutil.rmtree(src_dir)
            shutil.rmtree(dst_dir)

    def test_copy_dir_if(self):
        """copy_dir_if skips a file whose source copy is the newer one."""
        try:
            src_dir = _create_sandbox_dir()
            src_file1 = _touch(src_dir, "file1.txt")
            _write_file(src_file1)

            src_file2 = _touch(src_dir, os.path.join("one_level_down", "file2.txt"))
            _write_file(src_file2)

            dst_dir = _create_sandbox_dir()
            mkdirp(os.path.join(dst_dir, "target_dir"))
            dst_file1 = _touch(dst_dir, os.path.join("target_dir", "file1.txt"))
            _write_file(dst_file1)

            # ensure src file is newer than dst, changing its modification time
            _delay_file_utime(src_file1, delta_sec=60)

            src_fs = open_fs("osfs://" + src_dir)
            dst_fs = open_fs("osfs://" + dst_dir)

            copied = []

            def on_copy(src_fs, src_path, dst_fs, dst_path):
                copied.append(dst_path)

            fs.copy.copy_dir_if(
                src_fs,
                "/",
                dst_fs,
                "/target_dir/",
                on_copy=on_copy,
                condition=self.copy_if_condition,
            )

            self.assertEqual(copied, ["/target_dir/one_level_down/file2.txt"])
            self.assertTrue(dst_fs.exists("/target_dir/one_level_down/file2.txt"))

            src_fs.close()
            dst_fs.close()
        finally:
            shutil.rmtree(src_dir)
            shutil.rmtree(dst_dir)

    def test_copy_dir_if_same_fs(self):
        """Copy a directory within one filesystem using the class condition."""
        try:
            src_dir = _create_sandbox_dir()
            src_file1 = _touch(src_dir, "src" + os.sep + "file1.txt")
            _write_file(src_file1)

            _create_sandbox_dir(home=src_dir)

            src_fs = open_fs("osfs://" + src_dir)

            copied = []

            def on_copy(src_fs, src_path, dst_fs, dst_path):
                copied.append(dst_path)

            # Fixed: was hard-coded to condition="newer", which re-tested a
            # different condition; "/dst" does not exist so "older" copies too.
            fs.copy.copy_dir_if(
                src_fs,
                "/src",
                src_fs,
                "/dst",
                on_copy=on_copy,
                condition=self.copy_if_condition,
            )

            self.assertEqual(copied, ["/dst/file1.txt"])
            self.assertTrue(src_fs.exists("/dst/file1.txt"))

            src_fs.close()

        finally:
            shutil.rmtree(src_dir)

    def test_copy_dir_if_multiple_files(self):
        """Directory structure (including empty dirs) is reproduced on copy."""
        try:
            src_dir = _create_sandbox_dir()
            src_fs = open_fs("osfs://" + src_dir)
            src_fs.makedirs("foo/bar")
            src_fs.makedirs("foo/empty")
            src_fs.touch("test.txt")
            src_fs.touch("foo/bar/baz.txt")

            dst_dir = _create_sandbox_dir()
            dst_fs = open_fs("osfs://" + dst_dir)

            # Fixed: was hard-coded to condition="newer"; the destination is
            # empty so every copy happens under "older" as well.
            fs.copy.copy_dir_if(
                src_fs, "/foo", dst_fs, "/", condition=self.copy_if_condition
            )

            self.assertTrue(dst_fs.isdir("bar"))
            self.assertTrue(dst_fs.isdir("empty"))
            self.assertTrue(dst_fs.isfile("bar/baz.txt"))
        finally:
            shutil.rmtree(src_dir)
            shutil.rmtree(dst_dir)
655 | ||
656 | ||
class TestCopyIfExists(unittest.TestCase):
    """Exercise the ``copy_*_if`` helpers with the ``"exists"`` condition.

    Under ``"exists"`` a file is copied only when the destination path is
    already present (as demonstrated by the file-level tests below).
    """

    copy_if_condition = "exists"

    def test_copy_file_if_same_fs(self):
        """Only the already-existing destination path receives a copy."""
        mem_fs = open_fs("mem://")
        mem_fs.makedir("foo2").touch("exists")
        mem_fs.makedir("foo1").touch("test1.txt")
        to_missing = fs.copy.copy_file_if(
            mem_fs,
            "foo1/test1.txt",
            mem_fs,
            "foo2/test1.txt.copy",
            self.copy_if_condition,
        )
        self.assertFalse(to_missing)
        to_existing = fs.copy.copy_file_if(
            mem_fs, "foo1/test1.txt", mem_fs, "foo2/exists", self.copy_if_condition
        )
        self.assertTrue(to_existing)
        self.assertFalse(mem_fs.exists("foo2/test1.txt.copy"))

    def test_copy_file_if_dst_doesnt_exists(self):
        """A missing destination means the copy is skipped entirely."""
        try:
            source_root = _create_sandbox_dir()
            _write_file(_touch(source_root, "file1.txt"))

            dest_root = _create_sandbox_dir()

            source_fs = open_fs("osfs://" + source_root)
            dest_fs = open_fs("osfs://" + dest_root)

            was_copied = fs.copy.copy_file_if(
                source_fs, "/file1.txt", dest_fs, "/file1.txt", self.copy_if_condition
            )

            self.assertFalse(was_copied)
            self.assertFalse(dest_fs.exists("/file1.txt"))
        finally:
            shutil.rmtree(source_root)
            shutil.rmtree(dest_root)

    def test_copy_file_if_dst_exists(self):
        """An existing destination is overwritten and reported as copied."""
        try:
            source_root = _create_sandbox_dir()
            _write_file(_touch(source_root, "file1.txt"))

            dest_root = _create_sandbox_dir()
            _write_file(_touch(dest_root, "file1.txt"))

            source_fs = open_fs("osfs://" + source_root)
            dest_fs = open_fs("osfs://" + dest_root)

            self.assertTrue(dest_fs.exists("/file1.txt"))

            was_copied = fs.copy.copy_file_if(
                source_fs, "/file1.txt", dest_fs, "/file1.txt", self.copy_if_condition
            )

            self.assertTrue(was_copied)
        finally:
            shutil.rmtree(source_root)
            shutil.rmtree(dest_root)

    def test_copy_fs_if(self):
        """copy_fs_if touches only files already present on the destination."""
        try:
            dest_root = _create_sandbox_dir()
            _write_file(_touch(dest_root, "file1.txt"))

            source_root = _create_sandbox_dir()
            _write_file(_touch(source_root, "file1.txt"))
            _write_file(_touch(source_root, "file2.txt"))

            source_fs = open_fs("osfs://" + source_root)
            dest_fs = open_fs("osfs://" + dest_root)

            self.assertTrue(dest_fs.exists("/file1.txt"))

            copied_paths = []

            def record_copy(src_fs, src_path, dst_fs, dst_path):
                copied_paths.append(dst_path)

            fs.copy.copy_fs_if(
                source_fs,
                dest_fs,
                on_copy=record_copy,
                condition=self.copy_if_condition,
            )

            # file1 existed on dst -> copied; file2 did not -> skipped.
            self.assertEqual(copied_paths, ["/file1.txt"])
            self.assertTrue(dest_fs.exists("/file1.txt"))
            self.assertFalse(dest_fs.exists("/file2.txt"))

            source_fs.close()
            dest_fs.close()

        finally:
            shutil.rmtree(source_root)
            shutil.rmtree(dest_root)

    def test_copy_dir_if(self):
        """copy_dir_if with "exists" skips paths missing from the target tree."""
        try:
            source_root = _create_sandbox_dir()
            _write_file(_touch(source_root, "file1.txt"))
            _write_file(
                _touch(source_root, os.path.join("one_level_down", "file2.txt"))
            )

            dest_root = _create_sandbox_dir()
            mkdirp(os.path.join(dest_root, "target_dir"))
            _write_file(_touch(dest_root, os.path.join("target_dir", "file1.txt")))

            source_fs = open_fs("osfs://" + source_root)
            dest_fs = open_fs("osfs://" + dest_root)

            copied_paths = []

            def record_copy(src_fs, src_path, dst_fs, dst_path):
                copied_paths.append(dst_path)

            fs.copy.copy_dir_if(
                source_fs,
                "/",
                dest_fs,
                "/target_dir/",
                on_copy=record_copy,
                condition=self.copy_if_condition,
            )

            self.assertEqual(copied_paths, ["/target_dir/file1.txt"])
            self.assertFalse(dest_fs.exists("/target_dir/one_level_down/file2.txt"))

            source_fs.close()
            dest_fs.close()
        finally:
            shutil.rmtree(source_root)
            shutil.rmtree(dest_root)

    def test_copy_dir_if_same_fs(self):
        # NOTE(review): this test passes condition="newer" rather than
        # self.copy_if_condition — looks like a copy-paste leftover, but with
        # "exists" nothing would be copied (the destination is missing) and
        # the assertions below would fail; confirm intent before changing it.
        try:
            source_root = _create_sandbox_dir()
            _write_file(_touch(source_root, "src" + os.sep + "file1.txt"))

            _create_sandbox_dir(home=source_root)

            source_fs = open_fs("osfs://" + source_root)

            copied_paths = []

            def record_copy(src_fs, src_path, dst_fs, dst_path):
                copied_paths.append(dst_path)

            fs.copy.copy_dir_if(
                source_fs,
                "/src",
                source_fs,
                "/dst",
                on_copy=record_copy,
                condition="newer",
            )

            self.assertEqual(copied_paths, ["/dst/file1.txt"])
            self.assertTrue(source_fs.exists("/dst/file1.txt"))

            source_fs.close()

        finally:
            shutil.rmtree(source_root)

    def test_copy_dir_if_multiple_files(self):
        # NOTE(review): condition="newer" here as well — same caveat as in
        # test_copy_dir_if_same_fs above.
        try:
            source_root = _create_sandbox_dir()
            source_fs = open_fs("osfs://" + source_root)
            source_fs.makedirs("foo/bar")
            source_fs.makedirs("foo/empty")
            source_fs.touch("test.txt")
            source_fs.touch("foo/bar/baz.txt")

            dest_root = _create_sandbox_dir()
            dest_fs = open_fs("osfs://" + dest_root)

            fs.copy.copy_dir_if(source_fs, "/foo", dest_fs, "/", condition="newer")

            self.assertTrue(dest_fs.isdir("bar"))
            self.assertTrue(dest_fs.isdir("empty"))
            self.assertTrue(dest_fs.isfile("bar/baz.txt"))
        finally:
            shutil.rmtree(source_root)
            shutil.rmtree(dest_root)
849 | ||
850 | ||
class TestCopyIfNotExists(unittest.TestCase):
    """Exercise the ``copy_*_if`` helpers with the ``"not_exists"`` condition.

    Under ``"not_exists"`` a file is copied only when the destination path
    is absent; an existing destination is never overwritten (as shown by
    the file-level tests below).
    """

    # Condition string handed to fs.copy.copy_file_if / copy_fs_if / copy_dir_if.
    copy_if_condition = "not_exists"

    def test_copy_file_if_same_fs(self):
        """Copy to a missing path succeeds; an existing destination is skipped."""
        src_fs = open_fs("mem://")
        src_fs.makedir("foo2").touch("exists")
        src_fs.makedir("foo1").touch("test1.txt")
        self.assertTrue(
            fs.copy.copy_file_if(
                src_fs,
                "foo1/test1.txt",
                src_fs,
                "foo2/test1.txt.copy",
                self.copy_if_condition,
            )
        )
        self.assertFalse(
            fs.copy.copy_file_if(
                src_fs, "foo1/test1.txt", src_fs, "foo2/exists", self.copy_if_condition
            )
        )
        self.assertTrue(src_fs.exists("foo2/test1.txt.copy"))

    def test_copy_file_if_dst_doesnt_exists(self):
        """A missing destination is copied."""
        try:
            src_dir = _create_sandbox_dir()
            src_file1 = _touch(src_dir, "file1.txt")
            _write_file(src_file1)

            dst_dir = _create_sandbox_dir()

            src_fs = open_fs("osfs://" + src_dir)
            dst_fs = open_fs("osfs://" + dst_dir)

            copied = fs.copy.copy_file_if(
                src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
            )

            self.assertTrue(copied)
            self.assertTrue(dst_fs.exists("/file1.txt"))
        finally:
            shutil.rmtree(src_dir)
            shutil.rmtree(dst_dir)

    def test_copy_file_if_dst_exists(self):
        """An existing destination is left alone and no copy is reported."""
        try:
            src_dir = _create_sandbox_dir()
            src_file1 = _touch(src_dir, "file1.txt")
            _write_file(src_file1)

            dst_dir = _create_sandbox_dir()
            dst_file1 = _touch(dst_dir, "file1.txt")
            _write_file(dst_file1)

            src_fs = open_fs("osfs://" + src_dir)
            dst_fs = open_fs("osfs://" + dst_dir)

            self.assertTrue(dst_fs.exists("/file1.txt"))

            copied = fs.copy.copy_file_if(
                src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
            )

            self.assertFalse(copied)
            self.assertTrue(dst_fs.exists("/file1.txt"))
        finally:
            shutil.rmtree(src_dir)
            shutil.rmtree(dst_dir)

    def test_copy_fs_if(self):
        """copy_fs_if copies only the files absent from the destination."""
        try:
            dst_dir = _create_sandbox_dir()
            dst_file1 = _touch(dst_dir, "file1.txt")
            _write_file(dst_file1)

            src_dir = _create_sandbox_dir()
            src_file1 = _touch(src_dir, "file1.txt")
            src_file2 = _touch(src_dir, "file2.txt")
            _write_file(src_file1)
            _write_file(src_file2)

            src_fs = open_fs("osfs://" + src_dir)
            dst_fs = open_fs("osfs://" + dst_dir)

            self.assertTrue(dst_fs.exists("/file1.txt"))

            copied = []

            def on_copy(src_fs, src_path, dst_fs, dst_path):
                copied.append(dst_path)

            fs.copy.copy_fs_if(
                src_fs, dst_fs, on_copy=on_copy, condition=self.copy_if_condition
            )

            # file1 existed on dst -> skipped; file2 did not -> copied.
            self.assertEqual(copied, ["/file2.txt"])
            self.assertTrue(dst_fs.exists("/file1.txt"))
            self.assertTrue(dst_fs.exists("/file2.txt"))

            src_fs.close()
            dst_fs.close()

        finally:
            shutil.rmtree(src_dir)
            shutil.rmtree(dst_dir)

    def test_copy_dir_if(self):
        """copy_dir_if with "not_exists" copies only missing target paths."""
        try:
            src_dir = _create_sandbox_dir()
            src_file1 = _touch(src_dir, "file1.txt")
            _write_file(src_file1)

            src_file2 = _touch(src_dir, os.path.join("one_level_down", "file2.txt"))
            _write_file(src_file2)

            dst_dir = _create_sandbox_dir()
            mkdirp(os.path.join(dst_dir, "target_dir"))
            dst_file1 = _touch(dst_dir, os.path.join("target_dir", "file1.txt"))
            _write_file(dst_file1)

            src_fs = open_fs("osfs://" + src_dir)
            dst_fs = open_fs("osfs://" + dst_dir)

            copied = []

            def on_copy(src_fs, src_path, dst_fs, dst_path):
                copied.append(dst_path)

            fs.copy.copy_dir_if(
                src_fs,
                "/",
                dst_fs,
                "/target_dir/",
                on_copy=on_copy,
                condition=self.copy_if_condition,
            )

            self.assertEqual(copied, ["/target_dir/one_level_down/file2.txt"])
            self.assertTrue(dst_fs.exists("/target_dir/file1.txt"))
            self.assertTrue(dst_fs.exists("/target_dir/one_level_down/file2.txt"))

            src_fs.close()
            dst_fs.close()
        finally:
            shutil.rmtree(src_dir)
            shutil.rmtree(dst_dir)

    def test_copy_dir_if_same_fs(self):
        """Copy a directory within one filesystem using the class condition."""
        try:
            src_dir = _create_sandbox_dir()
            src_file1 = _touch(src_dir, "src" + os.sep + "file1.txt")
            _write_file(src_file1)

            _create_sandbox_dir(home=src_dir)

            src_fs = open_fs("osfs://" + src_dir)

            copied = []

            def on_copy(src_fs, src_path, dst_fs, dst_path):
                copied.append(dst_path)

            # Fixed: was hard-coded to condition="newer", which re-tested a
            # different condition; "/dst" is absent so "not_exists" copies too.
            fs.copy.copy_dir_if(
                src_fs,
                "/src",
                src_fs,
                "/dst",
                on_copy=on_copy,
                condition=self.copy_if_condition,
            )

            self.assertEqual(copied, ["/dst/file1.txt"])
            self.assertTrue(src_fs.exists("/dst/file1.txt"))

            src_fs.close()

        finally:
            shutil.rmtree(src_dir)

    def test_copy_dir_if_multiple_files(self):
        """Directory structure (including empty dirs) is reproduced on copy."""
        try:
            src_dir = _create_sandbox_dir()
            src_fs = open_fs("osfs://" + src_dir)
            src_fs.makedirs("foo/bar")
            src_fs.makedirs("foo/empty")
            src_fs.touch("test.txt")
            src_fs.touch("foo/bar/baz.txt")

            dst_dir = _create_sandbox_dir()
            dst_fs = open_fs("osfs://" + dst_dir)

            # Fixed: was hard-coded to condition="newer"; the destination is
            # empty so every copy happens under "not_exists" as well.
            fs.copy.copy_dir_if(
                src_fs, "/foo", dst_fs, "/", condition=self.copy_if_condition
            )

            self.assertTrue(dst_fs.isdir("bar"))
            self.assertTrue(dst_fs.isdir("empty"))
            self.assertTrue(dst_fs.isfile("bar/baz.txt"))
        finally:
            shutil.rmtree(src_dir)
            shutil.rmtree(dst_dir)
1045 | ||
1046 | ||
397 | 1047 | if __name__ == "__main__": |
398 | 1048 | unittest.main() |
0 | # coding: utf-8 | |
1 | """Test doctest contained tests in every file of the module. | |
2 | """ | |
3 | import doctest | |
4 | import importlib | |
5 | import os | |
6 | import pkgutil | |
7 | import tempfile | |
8 | import time | |
9 | import types | |
10 | import unittest | |
11 | import warnings | |
12 | from pprint import pprint | |
13 | ||
14 | try: | |
15 | from unittest import mock | |
16 | except ImportError: | |
17 | import mock | |
18 | ||
19 | import six | |
20 | ||
21 | import fs | |
22 | import fs.opener.parse | |
23 | from fs.memoryfs import MemoryFS | |
24 | from fs.subfs import ClosingSubFS | |
25 | ||
26 | # --- Mocks ------------------------------------------------------------------ | |
27 | ||
28 | ||
29 | def _home_fs(): | |
30 | """Create a mock filesystem that matches the XDG user-dirs spec.""" | |
31 | home_fs = MemoryFS() | |
32 | home_fs.makedir("Desktop") | |
33 | home_fs.makedir("Documents") | |
34 | home_fs.makedir("Downloads") | |
35 | home_fs.makedir("Music") | |
36 | home_fs.makedir("Pictures") | |
37 | home_fs.makedir("Public") | |
38 | home_fs.makedir("Templates") | |
39 | home_fs.makedir("Videos") | |
40 | return home_fs | |
41 | ||
42 | ||
43 | def _open_fs(path): | |
44 | """A mock `open_fs` that avoids side effects when running doctests.""" | |
45 | if "://" not in path: | |
46 | path = "osfs://{}".format(path) | |
47 | parse_result = fs.opener.parse(path) | |
48 | if parse_result.protocol == "osfs" and parse_result.resource == "~": | |
49 | home_fs = _home_fs() | |
50 | if parse_result.path is not None: | |
51 | home_fs = home_fs.opendir(parse_result.path, factory=ClosingSubFS) | |
52 | return home_fs | |
53 | elif parse_result.protocol in {"ftp", "ftps", "mem", "temp"}: | |
54 | return MemoryFS() | |
55 | else: | |
56 | raise RuntimeError("not allowed in doctests: {}".format(path)) | |
57 | ||
58 | ||
59 | def _my_fs(module): | |
60 | """Create a mock filesystem to be used in examples.""" | |
61 | my_fs = MemoryFS() | |
62 | if module == "fs.base": | |
63 | my_fs.makedir("Desktop") | |
64 | my_fs.makedir("Videos") | |
65 | my_fs.touch("Videos/starwars.mov") | |
66 | my_fs.touch("file.txt") | |
67 | elif module == "fs.info": | |
68 | my_fs.touch("foo.tar.gz") | |
69 | my_fs.settext("foo.py", "print('Hello, world!')") | |
70 | my_fs.makedir("bar") | |
71 | elif module in {"fs.walk", "fs.glob"}: | |
72 | my_fs.makedir("dir1") | |
73 | my_fs.makedir("dir2") | |
74 | my_fs.settext("foo.py", "print('Hello, world!')") | |
75 | my_fs.touch("foo.pyc") | |
76 | my_fs.settext("bar.py", "print('ok')\n\n# this is a comment\n") | |
77 | my_fs.touch("bar.pyc") | |
78 | return my_fs | |
79 | ||
80 | ||
81 | def _open(filename, mode="r"): | |
82 | """A mock `open` that actually opens a temporary file.""" | |
83 | return tempfile.NamedTemporaryFile(mode="r+" if mode == "r" else mode) | |
84 | ||
85 | ||
86 | # --- Loader protocol -------------------------------------------------------- | |
87 | ||
88 | ||
89 | def _load_tests_from_module(tests, module, globs, setUp=None, tearDown=None): | |
90 | """Load tests from module, iterating through submodules.""" | |
91 | for attr in (getattr(module, x) for x in dir(module) if not x.startswith("_")): | |
92 | if isinstance(attr, types.ModuleType): | |
93 | suite = doctest.DocTestSuite( | |
94 | attr, | |
95 | globs, | |
96 | setUp=setUp, | |
97 | tearDown=tearDown, | |
98 | optionflags=+doctest.ELLIPSIS, | |
99 | ) | |
100 | tests.addTests(suite) | |
101 | return tests | |
102 | ||
103 | ||
104 | def _load_tests(loader, tests, ignore): | |
105 | """`load_test` function used by unittest to find the doctests.""" | |
106 | ||
107 | # NB (@althonos): we only test docstrings on Python 3 because it's | |
108 | # extremely hard to maintain compatibility for both versions without | |
109 | # extensively hacking `doctest` and `unittest`. | |
110 | if six.PY2: | |
111 | return tests | |
112 | ||
113 | def setUp(self): | |
114 | warnings.simplefilter("ignore") | |
115 | self._open_fs_mock = mock.patch.object(fs, "open_fs", new=_open_fs) | |
116 | self._open_fs_mock.__enter__() | |
117 | self._ftpfs_mock = mock.patch.object(fs.ftpfs, "FTPFS") | |
118 | self._ftpfs_mock.__enter__() | |
119 | ||
120 | def tearDown(self): | |
121 | self._open_fs_mock.__exit__(None, None, None) | |
122 | self._ftpfs_mock.__exit__(None, None, None) | |
123 | warnings.simplefilter(warnings.defaultaction) | |
124 | ||
125 | # recursively traverse all library submodules and load tests from them | |
126 | packages = [None, fs] | |
127 | for pkg in iter(packages.pop, None): | |
128 | for (_, subpkgname, subispkg) in pkgutil.walk_packages(pkg.__path__): | |
129 | # import the submodule and add it to the tests | |
130 | module = importlib.import_module(".".join([pkg.__name__, subpkgname])) | |
131 | ||
132 | # load some useful modules / classes / mocks to the | |
133 | # globals so that we don't need to explicitly import | |
134 | # them in the doctests | |
135 | globs = dict(**module.__dict__) | |
136 | globs.update( | |
137 | os=os, | |
138 | fs=fs, | |
139 | my_fs=_my_fs(module.__name__), | |
140 | open=_open, | |
141 | # NB (@althonos): This allows using OSFS in some examples, | |
142 | # while not actually opening the real filesystem | |
143 | OSFS=lambda path: MemoryFS(), | |
144 | # NB (@althonos): This is for compatibility in `fs.registry` | |
145 | print_list=lambda path: None, | |
146 | pprint=pprint, | |
147 | time=time, | |
148 | ) | |
149 | ||
150 | # load the doctests into the unittest test suite | |
151 | tests.addTests( | |
152 | doctest.DocTestSuite( | |
153 | module, | |
154 | globs=globs, | |
155 | setUp=setUp, | |
156 | tearDown=tearDown, | |
157 | optionflags=+doctest.ELLIPSIS, | |
158 | ) | |
159 | ) | |
160 | ||
161 | # if the submodule is a package, we need to process its submodules | |
162 | # as well, so we add it to the package queue | |
163 | if subispkg: | |
164 | packages.append(module) | |
165 | ||
166 | return tests | |
167 | ||
168 | ||
169 | # --- Unit test wrapper ------------------------------------------------------ | |
170 | # | |
171 | # NB (@althonos): Since pytest doesn't support the `load_tests` protocol | |
172 | # above, we manually build a `unittest.TestCase` using a dedicated test | |
173 | # method for each doctest. This should be safe to remove when pytest | |
174 | # supports it, or if we move away from pytest to run tests. | |
175 | ||
176 | ||
177 | class TestDoctest(unittest.TestCase): | |
178 | pass | |
179 | ||
180 | ||
181 | def make_wrapper(x): | |
182 | def _test_wrapper(self): | |
183 | x.setUp() | |
184 | try: | |
185 | x.runTest() | |
186 | finally: | |
187 | x.tearDown() | |
188 | ||
189 | return _test_wrapper | |
190 | ||
191 | ||
192 | for x in _load_tests(None, unittest.TestSuite(), False): | |
193 | setattr(TestDoctest, "test_{}".format(x.id().replace(".", "_")), make_wrapper(x)) |
2 | 2 | import os |
3 | 3 | import platform |
4 | 4 | import shutil |
5 | import six | |
5 | 6 | import tempfile |
6 | 7 | import unittest |
7 | ||
8 | import pytest | |
9 | ||
10 | import six | |
11 | 8 | |
12 | 9 | import fs |
13 | 10 | from fs.osfs import OSFS |
14 | 11 | |
15 | ||
16 | 12 | if platform.system() != "Windows": |
17 | 13 | |
18 | @pytest.mark.skipif( | |
19 | platform.system() == "Darwin", reason="Bad unicode not possible on OSX" | |
20 | ) | |
14 | @unittest.skipIf(platform.system() == "Darwin", "Bad unicode not possible on OSX") | |
21 | 15 | class TestEncoding(unittest.TestCase): |
22 | 16 | |
23 | 17 | TEST_FILENAME = b"foo\xb1bar" |
0 | 0 | import os |
1 | import unittest | |
1 | 2 | |
2 | 3 | from fs import enums |
3 | ||
4 | import unittest | |
5 | 4 | |
6 | 5 | |
7 | 6 | class TestEnums(unittest.TestCase): |
2 | 2 | import errno |
3 | 3 | import unittest |
4 | 4 | |
5 | import fs.errors | |
5 | 6 | from fs.error_tools import convert_os_errors |
6 | from fs import errors as fserrors | |
7 | 7 | |
8 | 8 | |
9 | 9 | class TestErrorTools(unittest.TestCase): |
10 | def assert_convert_os_errors(self): | |
10 | def test_convert_enoent(self): | |
11 | exception = OSError(errno.ENOENT, "resource not found") | |
12 | with self.assertRaises(fs.errors.ResourceNotFound) as ctx: | |
13 | with convert_os_errors("stat", "/tmp/test"): | |
14 | raise exception | |
15 | self.assertEqual(ctx.exception.exc, exception) | |
16 | self.assertEqual(ctx.exception.path, "/tmp/test") | |
11 | 17 | |
12 | with self.assertRaises(fserrors.ResourceNotFound): | |
13 | with convert_os_errors("foo", "test"): | |
14 | raise OSError(errno.ENOENT) | |
18 | def test_convert_enametoolong(self): | |
19 | exception = OSError(errno.ENAMETOOLONG, "File name too long: test") | |
20 | with self.assertRaises(fs.errors.PathError) as ctx: | |
21 | with convert_os_errors("stat", "/tmp/test"): | |
22 | raise exception | |
23 | self.assertEqual(ctx.exception.exc, exception) | |
24 | self.assertEqual(ctx.exception.path, "/tmp/test") |
1 | 1 | |
2 | 2 | import multiprocessing |
3 | 3 | import unittest |
4 | ||
5 | 4 | from six import text_type |
6 | 5 | |
7 | 6 | from fs import errors |
29 | 28 | [errors.NoURL, "some_path", "some_purpose"], |
30 | 29 | [errors.Unsupported], |
31 | 30 | [errors.IllegalBackReference, "path"], |
32 | [errors.MissingInfoNamespace, "path"] | |
31 | [errors.MissingInfoNamespace, "path"], | |
33 | 32 | ] |
34 | 33 | try: |
35 | 34 | pool = multiprocessing.Pool(1) |
0 | 0 | from __future__ import unicode_literals |
1 | 1 | |
2 | import unittest | |
3 | ||
2 | 4 | from fs import filesize |
3 | ||
4 | import unittest | |
5 | 5 | |
6 | 6 | |
7 | 7 | class TestFilesize(unittest.TestCase): |
0 | 0 | from __future__ import unicode_literals |
1 | 1 | |
2 | import six | |
2 | 3 | import unittest |
3 | 4 | |
4 | import six | |
5 | ||
6 | from fs._fscompat import fsencode, fsdecode, fspath | |
5 | from fs._fscompat import fsdecode, fsencode, fspath | |
7 | 6 | |
8 | 7 | |
9 | 8 | class PathMock(object): |
0 | 0 | from __future__ import unicode_literals |
1 | 1 | |
2 | import textwrap | |
2 | 3 | import time |
3 | 4 | import unittest |
4 | 5 | |
32 | 33 | self.assertEqual(ftp_parse._parse_time("notadate", formats=["%b %d %Y"]), None) |
33 | 34 | |
34 | 35 | def test_parse(self): |
35 | self.assertEqual(ftp_parse.parse([""]), []) | |
36 | self.assertListEqual(ftp_parse.parse([""]), []) | |
36 | 37 | |
37 | 38 | def test_parse_line(self): |
38 | 39 | self.assertIs(ftp_parse.parse_line("not a dir"), None) |
40 | 41 | @mock.patch("time.localtime") |
41 | 42 | def test_decode_linux(self, mock_localtime): |
42 | 43 | mock_localtime.return_value = time2017 |
43 | directory = """\ | |
44 | lrwxrwxrwx 1 0 0 19 Jan 18 2006 debian -> ./pub/mirror/debian | |
45 | drwxr-xr-x 10 0 0 4096 Aug 03 09:21 debian-archive | |
46 | lrwxrwxrwx 1 0 0 27 Nov 30 2015 debian-backports -> pub/mirror/debian-backports | |
47 | drwxr-xr-x 12 0 0 4096 Sep 29 13:13 pub | |
48 | -rw-r--r-- 1 0 0 26 Mar 04 2010 robots.txt | |
49 | drwxr-xr-x 8 foo bar 4096 Oct 4 09:05 test | |
50 | drwxr-xr-x 2 foo-user foo-group 0 Jan 5 11:59 240485 | |
51 | """ | |
44 | directory = textwrap.dedent( | |
45 | """ | |
46 | lrwxrwxrwx 1 0 0 19 Jan 18 2006 debian -> ./pub/mirror/debian | |
47 | drwxr-xr-x 10 0 0 4096 Aug 03 09:21 debian-archive | |
48 | lrwxrwxrwx 1 0 0 27 Nov 30 2015 debian-backports -> pub/mirror/debian-backports | |
49 | drwxr-xr-x 12 0 0 4096 Sep 29 13:13 pub | |
50 | -rw-r--r-- 1 0 0 26 Mar 04 2010 robots.txt | |
51 | drwxr-xr-x 8 foo bar 4096 Oct 4 09:05 test | |
52 | drwxr-xr-x 8 f b 4096 Oct 4 09:05 test | |
53 | drwxr-xr-x 2 foo-user foo-group 0 Jan 5 11:59 240485 | |
54 | drwxr-xr-x 2 foo.user$ foo@group_ 0 Jan 5 11:59 240485 | |
55 | """ | |
56 | ) | |
52 | 57 | |
53 | 58 | expected = [ |
54 | 59 | { |
145 | 150 | }, |
146 | 151 | { |
147 | 152 | "access": { |
153 | "group": "b", | |
154 | "permissions": ["g_r", "g_x", "o_r", "o_x", "u_r", "u_w", "u_x"], | |
155 | "user": "f", | |
156 | }, | |
157 | "basic": {"is_dir": True, "name": "test"}, | |
158 | "details": {"modified": 1507107900.0, "size": 4096, "type": 1}, | |
159 | "ftp": { | |
160 | "ls": "drwxr-xr-x 8 f b 4096 Oct 4 09:05 test" | |
161 | }, | |
162 | }, | |
163 | { | |
164 | "access": { | |
148 | 165 | "group": "foo-group", |
149 | 166 | "permissions": ["g_r", "g_x", "o_r", "o_x", "u_r", "u_w", "u_x"], |
150 | 167 | "user": "foo-user", |
155 | 172 | "ls": "drwxr-xr-x 2 foo-user foo-group 0 Jan 5 11:59 240485" |
156 | 173 | }, |
157 | 174 | }, |
175 | { | |
176 | "access": { | |
177 | "group": "foo@group_", | |
178 | "permissions": ["g_r", "g_x", "o_r", "o_x", "u_r", "u_w", "u_x"], | |
179 | "user": "foo.user$", | |
180 | }, | |
181 | "basic": {"is_dir": True, "name": "240485"}, | |
182 | "details": {"modified": 1483617540.0, "size": 0, "type": 1}, | |
183 | "ftp": { | |
184 | "ls": "drwxr-xr-x 2 foo.user$ foo@group_ 0 Jan 5 11:59 240485" | |
185 | }, | |
186 | }, | |
158 | 187 | ] |
159 | 188 | |
160 | parsed = ftp_parse.parse(directory.splitlines()) | |
161 | self.assertEqual(parsed, expected) | |
189 | parsed = ftp_parse.parse(directory.strip().splitlines()) | |
190 | self.assertListEqual(parsed, expected) | |
162 | 191 | |
163 | 192 | @mock.patch("time.localtime") |
164 | 193 | def test_decode_windowsnt(self, mock_localtime): |
165 | 194 | mock_localtime.return_value = time2017 |
166 | directory = """\ | |
167 | unparsable line | |
168 | 11-02-17 02:00AM <DIR> docs | |
169 | 11-02-17 02:12PM <DIR> images | |
170 | 11-02-17 02:12PM <DIR> AM to PM | |
171 | 11-02-17 03:33PM 9276 logo.gif | |
172 | 05-11-20 22:11 <DIR> src | |
173 | 11-02-17 01:23 1 12 | |
174 | 11-02-17 4:54 0 icon.bmp | |
175 | 11-02-17 4:54AM 0 icon.gif | |
176 | 11-02-17 4:54PM 0 icon.png | |
177 | 11-02-17 16:54 0 icon.jpg | |
178 | """ | |
195 | directory = textwrap.dedent( | |
196 | """ | |
197 | unparsable line | |
198 | 11-02-17 02:00AM <DIR> docs | |
199 | 11-02-17 02:12PM <DIR> images | |
200 | 11-02-17 02:12PM <DIR> AM to PM | |
201 | 11-02-17 03:33PM 9276 logo.gif | |
202 | 05-11-20 22:11 <DIR> src | |
203 | 11-02-17 01:23 1 12 | |
204 | 11-02-17 4:54 0 icon.bmp | |
205 | 11-02-17 4:54AM 0 icon.gif | |
206 | 11-02-17 4:54PM 0 icon.png | |
207 | 11-02-17 16:54 0 icon.jpg | |
208 | """ | |
209 | ) | |
179 | 210 | expected = [ |
180 | 211 | { |
181 | 212 | "basic": {"is_dir": True, "name": "docs"}, |
229 | 260 | }, |
230 | 261 | ] |
231 | 262 | |
232 | parsed = ftp_parse.parse(directory.splitlines()) | |
263 | parsed = ftp_parse.parse(directory.strip().splitlines()) | |
233 | 264 | self.assertEqual(parsed, expected) |
265 | ||
266 | @mock.patch("time.localtime") | |
267 | def test_decode_linux_suid(self, mock_localtime): | |
268 | # reported in #451 | |
269 | mock_localtime.return_value = time2017 | |
270 | directory = textwrap.dedent( | |
271 | """ | |
272 | drwxr-sr-x 66 ftp ftp 8192 Mar 16 17:54 pub | |
273 | -rw-r--r-- 1 ftp ftp 25 Mar 18 19:34 robots.txt | |
274 | """ | |
275 | ) | |
276 | expected = [ | |
277 | { | |
278 | "access": { | |
279 | "group": "ftp", | |
280 | "permissions": [ | |
281 | "g_r", | |
282 | "g_s", | |
283 | "o_r", | |
284 | "o_x", | |
285 | "u_r", | |
286 | "u_w", | |
287 | "u_x", | |
288 | ], | |
289 | "user": "ftp", | |
290 | }, | |
291 | "basic": {"is_dir": True, "name": "pub"}, | |
292 | "details": {"modified": 1489686840.0, "size": 8192, "type": 1}, | |
293 | "ftp": { | |
294 | "ls": "drwxr-sr-x 66 ftp ftp 8192 Mar 16 17:54 pub" | |
295 | }, | |
296 | }, | |
297 | { | |
298 | "access": { | |
299 | "group": "ftp", | |
300 | "permissions": [ | |
301 | "g_r", | |
302 | "o_r", | |
303 | "u_r", | |
304 | "u_w", | |
305 | ], | |
306 | "user": "ftp", | |
307 | }, | |
308 | "basic": {"is_dir": False, "name": "robots.txt"}, | |
309 | "details": {"modified": 1489865640.0, "size": 25, "type": 2}, | |
310 | "ftp": { | |
311 | "ls": "-rw-r--r-- 1 ftp ftp 25 Mar 18 19:34 robots.txt" | |
312 | }, | |
313 | }, | |
314 | ] | |
315 | ||
316 | parsed = ftp_parse.parse(directory.strip().splitlines()) | |
317 | self.assertListEqual(parsed, expected) | |
318 | ||
319 | @mock.patch("time.localtime") | |
320 | def test_decode_linux_sticky(self, mock_localtime): | |
321 | # reported in #451 | |
322 | mock_localtime.return_value = time2017 | |
323 | directory = textwrap.dedent( | |
324 | """ | |
325 | drwxr-xr-t 66 ftp ftp 8192 Mar 16 17:54 pub | |
326 | """ | |
327 | ) | |
328 | expected = [ | |
329 | { | |
330 | "access": { | |
331 | "group": "ftp", | |
332 | "permissions": [ | |
333 | "g_r", | |
334 | "g_x", | |
335 | "o_r", | |
336 | "o_t", | |
337 | "u_r", | |
338 | "u_w", | |
339 | "u_x", | |
340 | ], | |
341 | "user": "ftp", | |
342 | }, | |
343 | "basic": {"is_dir": True, "name": "pub"}, | |
344 | "details": {"modified": 1489686840.0, "size": 8192, "type": 1}, | |
345 | "ftp": { | |
346 | "ls": "drwxr-xr-t 66 ftp ftp 8192 Mar 16 17:54 pub" | |
347 | }, | |
348 | }, | |
349 | ] | |
350 | ||
351 | self.maxDiff = None | |
352 | parsed = ftp_parse.parse(directory.strip().splitlines()) | |
353 | self.assertListEqual(parsed, expected) |
0 | 0 | # coding: utf-8 |
1 | from __future__ import absolute_import | |
2 | from __future__ import print_function | |
3 | from __future__ import unicode_literals | |
4 | ||
5 | import socket | |
1 | from __future__ import absolute_import, print_function, unicode_literals | |
2 | ||
3 | import calendar | |
4 | import datetime | |
6 | 5 | import os |
7 | 6 | import platform |
8 | 7 | import shutil |
8 | import socket | |
9 | 9 | import tempfile |
10 | 10 | import time |
11 | 11 | import unittest |
12 | 12 | import uuid |
13 | 13 | |
14 | import pytest | |
15 | from six import text_type | |
16 | ||
17 | from ftplib import error_perm | |
18 | from ftplib import error_temp | |
19 | ||
14 | try: | |
15 | from unittest import mock | |
16 | except ImportError: | |
17 | import mock | |
18 | ||
19 | from ftplib import error_perm, error_temp | |
20 | 20 | from pyftpdlib.authorizers import DummyAuthorizer |
21 | from six import BytesIO, text_type | |
21 | 22 | |
22 | 23 | from fs import errors |
24 | from fs.ftpfs import FTPFS, ftp_errors | |
23 | 25 | from fs.opener import open_fs |
24 | from fs.ftpfs import FTPFS, ftp_errors | |
25 | 26 | from fs.path import join |
26 | 27 | from fs.subfs import SubFS |
27 | 28 | from fs.test import FSTestCases |
28 | 29 | |
30 | try: | |
31 | from pytest import mark | |
32 | except ImportError: | |
33 | from . import mark | |
29 | 34 | |
30 | 35 | # Prevent socket timeouts from slowing tests too much |
31 | 36 | socket.setdefaulttimeout(1) |
84 | 89 | self.assertIsInstance(ftp_fs, FTPFS) |
85 | 90 | self.assertEqual(ftp_fs.host, "ftp.example.org") |
86 | 91 | |
92 | ftps_fs = open_fs("ftps://will:wfc@ftp.example.org") | |
93 | self.assertIsInstance(ftps_fs, FTPFS) | |
94 | self.assertTrue(ftps_fs.tls) | |
95 | ||
87 | 96 | |
88 | 97 | class TestFTPErrors(unittest.TestCase): |
89 | 98 | """Test the ftp_errors context manager.""" |
128 | 137 | ) |
129 | 138 | |
130 | 139 | |
131 | @pytest.mark.slow | |
140 | @mark.slow | |
141 | @unittest.skipIf(platform.python_implementation() == "PyPy", "ftp unreliable with PyPy") | |
132 | 142 | class TestFTPFS(FSTestCases, unittest.TestCase): |
133 | ||
134 | 143 | user = "user" |
135 | 144 | pasw = "1234" |
136 | 145 | |
148 | 157 | cls.server.shutdown_after = -1 |
149 | 158 | cls.server.handler.authorizer = DummyAuthorizer() |
150 | 159 | cls.server.handler.authorizer.add_user( |
151 | cls.user, cls.pasw, cls._temp_path, perm="elradfmw" | |
160 | cls.user, cls.pasw, cls._temp_path, perm="elradfmwT" | |
152 | 161 | ) |
153 | 162 | cls.server.handler.authorizer.add_anonymous(cls._temp_path) |
154 | 163 | cls.server.start() |
209 | 218 | ), |
210 | 219 | ) |
211 | 220 | |
221 | def test_setinfo(self): | |
222 | # TODO: temporary test, since FSTestCases.test_setinfo is broken. | |
223 | self.fs.create("bar") | |
224 | original_modified = self.fs.getinfo("bar", ("details",)).modified | |
225 | new_modified = original_modified - datetime.timedelta(hours=1) | |
226 | new_modified_stamp = calendar.timegm(new_modified.timetuple()) | |
227 | self.fs.setinfo("bar", {"details": {"modified": new_modified_stamp}}) | |
228 | new_modified_get = self.fs.getinfo("bar", ("details",)).modified | |
229 | if original_modified.microsecond == 0 or new_modified_get.microsecond == 0: | |
230 | original_modified = original_modified.replace(microsecond=0) | |
231 | new_modified_get = new_modified_get.replace(microsecond=0) | |
232 | if original_modified.second == 0 or new_modified_get.second == 0: | |
233 | original_modified = original_modified.replace(second=0) | |
234 | new_modified_get = new_modified_get.replace(second=0) | |
235 | new_modified_get = new_modified_get + datetime.timedelta(hours=1) | |
236 | self.assertEqual(original_modified, new_modified_get) | |
237 | ||
212 | 238 | def test_host(self): |
213 | 239 | self.assertEqual(self.fs.host, self.server.host) |
214 | 240 | |
229 | 255 | del self.fs.features["UTF8"] |
230 | 256 | self.assertFalse(self.fs.getmeta().get("unicode_paths")) |
231 | 257 | |
258 | def test_getinfo_modified(self): | |
259 | self.assertIn("MDTM", self.fs.features) | |
260 | self.fs.create("bar") | |
261 | mtime_detail = self.fs.getinfo("bar", ("basic", "details")).modified | |
262 | mtime_modified = self.fs.getmodified("bar") | |
263 | # Microsecond and seconds might not actually be supported by all | |
264 | # FTP commands, so we strip them before comparing if it looks | |
265 | # like at least one of the two values does not contain them. | |
266 | replacement = {} | |
267 | if mtime_detail.microsecond == 0 or mtime_modified.microsecond == 0: | |
268 | replacement["microsecond"] = 0 | |
269 | if mtime_detail.second == 0 or mtime_modified.second == 0: | |
270 | replacement["second"] = 0 | |
271 | self.assertEqual( | |
272 | mtime_detail.replace(**replacement), mtime_modified.replace(**replacement) | |
273 | ) | |
274 | ||
232 | 275 | def test_opener_path(self): |
233 | 276 | self.fs.makedir("foo") |
234 | 277 | self.fs.writetext("foo/bar", "baz") |
265 | 308 | # Open with create and check this does fail |
266 | 309 | with open_fs(url, create=True) as ftp_fs: |
267 | 310 | self.assertTrue(ftp_fs.isfile("foo")) |
311 | ||
312 | def test_upload_connection(self): | |
313 | with mock.patch.object(self.fs, "_manage_ftp") as _manage_ftp: | |
314 | self.fs.upload("foo", BytesIO(b"hello")) | |
315 | self.assertEqual(self.fs.gettext("foo"), "hello") | |
316 | _manage_ftp.assert_not_called() | |
268 | 317 | |
269 | 318 | |
270 | 319 | class TestFTPFSNoMLSD(TestFTPFS): |
278 | 327 | pass |
279 | 328 | |
280 | 329 | |
281 | @pytest.mark.slow | |
330 | @mark.slow | |
331 | @unittest.skipIf(platform.python_implementation() == "PyPy", "ftp unreliable with PyPy") | |
282 | 332 | class TestAnonFTPFS(FSTestCases, unittest.TestCase): |
283 | ||
284 | 333 | user = "anonymous" |
285 | 334 | pasw = "" |
286 | 335 |
1 | 1 | |
2 | 2 | import unittest |
3 | 3 | |
4 | from fs import glob | |
5 | from fs import open_fs | |
4 | from fs import glob, open_fs | |
6 | 5 | |
7 | 6 | |
8 | 7 | class TestGlob(unittest.TestCase): |
0 | ||
1 | 0 | from __future__ import unicode_literals |
2 | 1 | |
3 | import datetime | |
4 | 2 | import unittest |
5 | ||
6 | import pytz | |
3 | from datetime import datetime | |
7 | 4 | |
8 | 5 | from fs.enums import ResourceType |
9 | 6 | from fs.info import Info |
10 | 7 | from fs.permissions import Permissions |
11 | 8 | from fs.time import datetime_to_epoch |
9 | ||
10 | try: | |
11 | from datetime import timezone | |
12 | except ImportError: | |
13 | from fs._tzcompat import timezone # type: ignore | |
12 | 14 | |
13 | 15 | |
14 | 16 | class TestInfo(unittest.TestCase): |
71 | 73 | |
72 | 74 | def test_details(self): |
73 | 75 | dates = [ |
74 | datetime.datetime(2016, 7, 5, tzinfo=pytz.UTC), | |
75 | datetime.datetime(2016, 7, 6, tzinfo=pytz.UTC), | |
76 | datetime.datetime(2016, 7, 7, tzinfo=pytz.UTC), | |
77 | datetime.datetime(2016, 7, 8, tzinfo=pytz.UTC), | |
76 | datetime(2016, 7, 5, tzinfo=timezone.utc), | |
77 | datetime(2016, 7, 6, tzinfo=timezone.utc), | |
78 | datetime(2016, 7, 7, tzinfo=timezone.utc), | |
79 | datetime(2016, 7, 8, tzinfo=timezone.utc), | |
78 | 80 | ] |
79 | 81 | epochs = [datetime_to_epoch(d) for d in dates] |
80 | 82 |
0 | 0 | from __future__ import unicode_literals |
1 | 1 | |
2 | 2 | import io |
3 | import six | |
3 | 4 | import unittest |
4 | 5 | |
5 | import six | |
6 | ||
7 | from fs import iotools | |
8 | from fs import tempfs | |
9 | ||
6 | from fs import iotools, tempfs | |
10 | 7 | from fs.test import UNICODE_TEXT |
11 | 8 | |
12 | 9 |
2 | 2 | import posixpath |
3 | 3 | import unittest |
4 | 4 | |
5 | import pytest | |
6 | ||
7 | 5 | from fs import memoryfs |
8 | from fs.test import FSTestCases | |
9 | from fs.test import UNICODE_TEXT | |
6 | from fs.test import UNICODE_TEXT, FSTestCases | |
10 | 7 | |
11 | 8 | try: |
12 | 9 | # Only supported on Python 3.4+ |
29 | 26 | posixpath.join(parent_dir, str(file_id)), UNICODE_TEXT |
30 | 27 | ) |
31 | 28 | |
32 | @pytest.mark.skipif( | |
33 | not tracemalloc, reason="`tracemalloc` isn't supported on this Python version." | |
29 | @unittest.skipUnless( | |
30 | tracemalloc, reason="`tracemalloc` isn't supported on this Python version." | |
34 | 31 | ) |
35 | 32 | def test_close_mem_free(self): |
36 | 33 | """Ensure all file memory is freed when calling close(). |
67 | 64 | "Memory usage increased after closing the file system; diff is %0.2f KiB." |
68 | 65 | % (diff_close.size_diff / 1024.0), |
69 | 66 | ) |
67 | ||
68 | def test_copy_preserve_time(self): | |
69 | self.fs.makedir("foo") | |
70 | self.fs.makedir("bar") | |
71 | self.fs.touch("foo/file.txt") | |
72 | ||
73 | src_datetime = self.fs.getmodified("foo/file.txt") | |
74 | ||
75 | self.fs.copy("foo/file.txt", "bar/file.txt", preserve_time=True) | |
76 | self.assertTrue(self.fs.exists("bar/file.txt")) | |
77 | ||
78 | dst_datetime = self.fs.getmodified("bar/file.txt") | |
79 | self.assertEqual(dst_datetime, src_datetime) | |
80 | ||
81 | ||
82 | class TestMemoryFile(unittest.TestCase): | |
83 | def setUp(self): | |
84 | self.fs = memoryfs.MemoryFS() | |
85 | ||
86 | def tearDown(self): | |
87 | self.fs.close() | |
88 | ||
89 | def test_readline_writing(self): | |
90 | with self.fs.openbin("test.txt", "w") as f: | |
91 | self.assertRaises(IOError, f.readline) | |
92 | ||
93 | def test_readinto_writing(self): | |
94 | with self.fs.openbin("test.txt", "w") as f: | |
95 | self.assertRaises(IOError, f.readinto, bytearray(10)) |
0 | 0 | from __future__ import unicode_literals |
1 | 1 | |
2 | 2 | import unittest |
3 | from parameterized import parameterized_class | |
3 | 4 | |
5 | from fs import open_fs | |
4 | 6 | from fs.mirror import mirror |
5 | from fs import open_fs | |
6 | 7 | |
7 | 8 | |
9 | @parameterized_class(("WORKERS",), [(0,), (1,), (2,), (4,)]) | |
8 | 10 | class TestMirror(unittest.TestCase): |
9 | WORKERS = 0 # Single threaded | |
10 | ||
11 | 11 | def _contents(self, fs): |
12 | 12 | """Extract an FS in to a simple data structure.""" |
13 | namespaces = ("details", "metadata_changed", "modified") | |
13 | 14 | contents = [] |
14 | 15 | for path, dirs, files in fs.walk(): |
15 | 16 | for info in dirs: |
17 | 18 | contents.append((_path, "dir", b"")) |
18 | 19 | for info in files: |
19 | 20 | _path = info.make_path(path) |
20 | contents.append((_path, "file", fs.readbytes(_path))) | |
21 | _bytes = fs.readbytes(_path) | |
22 | _info = fs.getinfo(_path, namespaces) | |
23 | contents.append( | |
24 | ( | |
25 | _path, | |
26 | "file", | |
27 | _bytes, | |
28 | _info.modified, | |
29 | _info.metadata_changed, | |
30 | ) | |
31 | ) | |
21 | 32 | return sorted(contents) |
22 | 33 | |
23 | 34 | def assert_compare_fs(self, fs1, fs2): |
27 | 38 | def test_empty_mirror(self): |
28 | 39 | m1 = open_fs("mem://") |
29 | 40 | m2 = open_fs("mem://") |
30 | mirror(m1, m2, workers=self.WORKERS) | |
41 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) | |
31 | 42 | self.assert_compare_fs(m1, m2) |
32 | 43 | |
33 | 44 | def test_mirror_one_file(self): |
34 | 45 | m1 = open_fs("mem://") |
35 | 46 | m1.writetext("foo", "hello") |
36 | 47 | m2 = open_fs("mem://") |
37 | mirror(m1, m2, workers=self.WORKERS) | |
48 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) | |
38 | 49 | self.assert_compare_fs(m1, m2) |
39 | 50 | |
40 | 51 | def test_mirror_one_file_one_dir(self): |
42 | 53 | m1.writetext("foo", "hello") |
43 | 54 | m1.makedir("bar") |
44 | 55 | m2 = open_fs("mem://") |
45 | mirror(m1, m2, workers=self.WORKERS) | |
56 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) | |
46 | 57 | self.assert_compare_fs(m1, m2) |
47 | 58 | |
48 | 59 | def test_mirror_delete_replace(self): |
50 | 61 | m1.writetext("foo", "hello") |
51 | 62 | m1.makedir("bar") |
52 | 63 | m2 = open_fs("mem://") |
53 | mirror(m1, m2, workers=self.WORKERS) | |
64 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) | |
54 | 65 | self.assert_compare_fs(m1, m2) |
55 | 66 | m2.remove("foo") |
56 | mirror(m1, m2, workers=self.WORKERS) | |
67 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) | |
57 | 68 | self.assert_compare_fs(m1, m2) |
58 | 69 | m2.removedir("bar") |
59 | mirror(m1, m2, workers=self.WORKERS) | |
70 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) | |
60 | 71 | self.assert_compare_fs(m1, m2) |
61 | 72 | |
62 | 73 | def test_mirror_extra_dir(self): |
65 | 76 | m1.makedir("bar") |
66 | 77 | m2 = open_fs("mem://") |
67 | 78 | m2.makedir("baz") |
68 | mirror(m1, m2, workers=self.WORKERS) | |
79 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) | |
69 | 80 | self.assert_compare_fs(m1, m2) |
70 | 81 | |
71 | 82 | def test_mirror_extra_file(self): |
75 | 86 | m2 = open_fs("mem://") |
76 | 87 | m2.makedir("baz") |
77 | 88 | m2.touch("egg") |
78 | mirror(m1, m2, workers=self.WORKERS) | |
89 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) | |
79 | 90 | self.assert_compare_fs(m1, m2) |
80 | 91 | |
81 | 92 | def test_mirror_wrong_type(self): |
85 | 96 | m2 = open_fs("mem://") |
86 | 97 | m2.makedir("foo") |
87 | 98 | m2.touch("bar") |
88 | mirror(m1, m2, workers=self.WORKERS) | |
99 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) | |
89 | 100 | self.assert_compare_fs(m1, m2) |
90 | 101 | |
91 | 102 | def test_mirror_update(self): |
93 | 104 | m1.writetext("foo", "hello") |
94 | 105 | m1.makedir("bar") |
95 | 106 | m2 = open_fs("mem://") |
96 | mirror(m1, m2, workers=self.WORKERS) | |
107 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) | |
97 | 108 | self.assert_compare_fs(m1, m2) |
98 | 109 | m2.appendtext("foo", " world!") |
99 | mirror(m1, m2, workers=self.WORKERS) | |
110 | mirror(m1, m2, workers=self.WORKERS, preserve_time=True) | |
100 | 111 | self.assert_compare_fs(m1, m2) |
101 | ||
102 | ||
103 | class TestMirrorWorkers1(TestMirror): | |
104 | WORKERS = 1 | |
105 | ||
106 | ||
107 | class TestMirrorWorkers2(TestMirror): | |
108 | WORKERS = 2 | |
109 | ||
110 | ||
111 | class TestMirrorWorkers4(TestMirror): | |
112 | WORKERS = 4 |
0 | 0 | from __future__ import unicode_literals |
1 | 1 | |
2 | 2 | import unittest |
3 | ||
4 | 3 | from six import text_type |
5 | 4 | |
6 | from fs.mode import check_readable, check_writable, Mode | |
5 | from fs.mode import Mode, check_readable, check_writable | |
7 | 6 | |
8 | 7 | |
9 | 8 | class TestMode(unittest.TestCase): |
1 | 1 | |
2 | 2 | import unittest |
3 | 3 | |
4 | from fs.memoryfs import MemoryFS | |
4 | 5 | from fs.mountfs import MountError, MountFS |
5 | from fs.memoryfs import MemoryFS | |
6 | 6 | from fs.tempfs import TempFS |
7 | 7 | from fs.test import FSTestCases |
8 | 8 |
0 | ||
1 | 0 | from __future__ import unicode_literals |
2 | 1 | |
3 | 2 | import unittest |
4 | 3 | |
4 | try: | |
5 | from unittest import mock | |
6 | except ImportError: | |
7 | import mock | |
8 | ||
9 | from parameterized import parameterized, parameterized_class | |
10 | ||
5 | 11 | import fs.move |
6 | 12 | from fs import open_fs |
13 | from fs.errors import FSError, ResourceReadOnly | |
14 | from fs.path import join | |
15 | from fs.wrap import read_only | |
7 | 16 | |
8 | 17 | |
9 | class TestMove(unittest.TestCase): | |
18 | @parameterized_class(("preserve_time",), [(True,), (False,)]) | |
19 | class TestMoveCheckTime(unittest.TestCase): | |
10 | 20 | def test_move_fs(self): |
21 | namespaces = ("details", "modified") | |
22 | ||
11 | 23 | src_fs = open_fs("mem://") |
12 | 24 | src_fs.makedirs("foo/bar") |
13 | 25 | src_fs.touch("test.txt") |
14 | 26 | src_fs.touch("foo/bar/baz.txt") |
27 | src_file1_info = src_fs.getinfo("test.txt", namespaces) | |
28 | src_file2_info = src_fs.getinfo("foo/bar/baz.txt", namespaces) | |
15 | 29 | |
16 | 30 | dst_fs = open_fs("mem://") |
17 | fs.move.move_fs(src_fs, dst_fs) | |
31 | dst_fs.create("test.txt") | |
32 | dst_fs.setinfo("test.txt", {"details": {"modified": 1000000}}) | |
18 | 33 | |
34 | fs.move.move_fs(src_fs, dst_fs, preserve_time=self.preserve_time) | |
35 | ||
36 | self.assertTrue(src_fs.isempty("/")) | |
19 | 37 | self.assertTrue(dst_fs.isdir("foo/bar")) |
20 | 38 | self.assertTrue(dst_fs.isfile("test.txt")) |
21 | self.assertTrue(src_fs.isempty("/")) | |
39 | self.assertTrue(dst_fs.isfile("foo/bar/baz.txt")) | |
22 | 40 | |
23 | def test_copy_dir(self): | |
41 | if self.preserve_time: | |
42 | dst_file1_info = dst_fs.getinfo("test.txt", namespaces) | |
43 | dst_file2_info = dst_fs.getinfo("foo/bar/baz.txt", namespaces) | |
44 | self.assertEqual(dst_file1_info.modified, src_file1_info.modified) | |
45 | self.assertEqual(dst_file2_info.modified, src_file2_info.modified) | |
46 | ||
47 | def test_move_file(self): | |
48 | namespaces = ("details", "modified") | |
49 | with open_fs("mem://") as src_fs, open_fs("mem://") as dst_fs: | |
50 | src_fs.writetext("source.txt", "Source") | |
51 | src_fs_file_info = src_fs.getinfo("source.txt", namespaces) | |
52 | fs.move.move_file( | |
53 | src_fs, | |
54 | "source.txt", | |
55 | dst_fs, | |
56 | "dest.txt", | |
57 | preserve_time=self.preserve_time, | |
58 | ) | |
59 | self.assertFalse(src_fs.exists("source.txt")) | |
60 | self.assertEqual(dst_fs.readtext("dest.txt"), "Source") | |
61 | ||
62 | if self.preserve_time: | |
63 | dst_fs_file_info = dst_fs.getinfo("dest.txt", namespaces) | |
64 | self.assertEqual(src_fs_file_info.modified, dst_fs_file_info.modified) | |
65 | ||
66 | def test_move_dir(self): | |
67 | namespaces = ("details", "modified") | |
68 | ||
24 | 69 | src_fs = open_fs("mem://") |
25 | 70 | src_fs.makedirs("foo/bar") |
26 | 71 | src_fs.touch("test.txt") |
27 | 72 | src_fs.touch("foo/bar/baz.txt") |
73 | src_file2_info = src_fs.getinfo("foo/bar/baz.txt", namespaces) | |
28 | 74 | |
29 | 75 | dst_fs = open_fs("mem://") |
30 | fs.move.move_dir(src_fs, "/foo", dst_fs, "/") | |
76 | dst_fs.create("test.txt") | |
77 | dst_fs.setinfo("test.txt", {"details": {"modified": 1000000}}) | |
31 | 78 | |
79 | fs.move.move_dir(src_fs, "/foo", dst_fs, "/", preserve_time=self.preserve_time) | |
80 | ||
81 | self.assertFalse(src_fs.exists("foo")) | |
82 | self.assertTrue(src_fs.isfile("test.txt")) | |
32 | 83 | self.assertTrue(dst_fs.isdir("bar")) |
33 | 84 | self.assertTrue(dst_fs.isfile("bar/baz.txt")) |
34 | self.assertFalse(src_fs.exists("foo")) | |
85 | ||
86 | if self.preserve_time: | |
87 | dst_file2_info = dst_fs.getinfo("bar/baz.txt", namespaces) | |
88 | self.assertEqual(dst_file2_info.modified, src_file2_info.modified) | |
89 | ||
90 | ||
91 | class TestMove(unittest.TestCase): | |
92 | def test_move_file_tempfs(self): | |
93 | with open_fs("temp://") as src, open_fs("temp://") as dst: | |
94 | src_dir = src.makedir("Some subfolder") | |
95 | src_dir.writetext("file.txt", "Content") | |
96 | dst_dir = dst.makedir("dest dir") | |
97 | fs.move.move_file(src_dir, "file.txt", dst_dir, "target.txt") | |
98 | self.assertFalse(src.exists("Some subfolder/file.txt")) | |
99 | self.assertEqual(dst.readtext("dest dir/target.txt"), "Content") | |
100 | ||
101 | def test_move_file_fs_urls(self): | |
102 | # create a temp dir to work on | |
103 | with open_fs("temp://") as tmp: | |
104 | path = tmp.getsyspath("/") | |
105 | tmp.makedir("subdir_src") | |
106 | tmp.writetext("subdir_src/file.txt", "Content") | |
107 | tmp.makedir("subdir_dst") | |
108 | fs.move.move_file( | |
109 | "osfs://" + join(path, "subdir_src"), | |
110 | "file.txt", | |
111 | "osfs://" + join(path, "subdir_dst"), | |
112 | "target.txt", | |
113 | ) | |
114 | self.assertFalse(tmp.exists("subdir_src/file.txt")) | |
115 | self.assertEqual(tmp.readtext("subdir_dst/target.txt"), "Content") | |
116 | ||
117 | def test_move_file_same_fs_read_only_source(self): | |
118 | with open_fs("temp://") as tmp: | |
119 | path = tmp.getsyspath("/") | |
120 | tmp.writetext("file.txt", "Content") | |
121 | src = read_only(open_fs(path)) | |
122 | dst = tmp.makedir("sub") | |
123 | with self.assertRaises(ResourceReadOnly): | |
124 | fs.move.move_file(src, "file.txt", dst, "target_file.txt") | |
125 | self.assertTrue(src.exists("file.txt")) | |
126 | self.assertFalse( | |
127 | dst.exists("target_file.txt"), "file should not have been copied over" | |
128 | ) | |
129 | ||
130 | def test_move_file_read_only_mem_source(self): | |
131 | with open_fs("mem://") as src, open_fs("mem://") as dst: | |
132 | src.writetext("file.txt", "Content") | |
133 | dst_sub = dst.makedir("sub") | |
134 | src_ro = read_only(src) | |
135 | with self.assertRaises(ResourceReadOnly): | |
136 | fs.move.move_file(src_ro, "file.txt", dst_sub, "target.txt") | |
137 | self.assertTrue(src.exists("file.txt")) | |
138 | self.assertFalse( | |
139 | dst_sub.exists("target.txt"), "file should not have been copied over" | |
140 | ) | |
141 | ||
142 | def test_move_file_read_only_mem_dest(self): | |
143 | with open_fs("mem://") as src, open_fs("mem://") as dst: | |
144 | src.writetext("file.txt", "Content") | |
145 | dst_ro = read_only(dst) | |
146 | with self.assertRaises(ResourceReadOnly): | |
147 | fs.move.move_file(src, "file.txt", dst_ro, "target.txt") | |
148 | self.assertTrue(src.exists("file.txt")) | |
149 | self.assertFalse( | |
150 | dst_ro.exists("target.txt"), "file should not have been copied over" | |
151 | ) | |
152 | ||
153 | @parameterized.expand([(True,), (False,)]) | |
154 | def test_move_file_cleanup_on_error(self, cleanup): | |
155 | with open_fs("mem://") as src, open_fs("mem://") as dst: | |
156 | src.writetext("file.txt", "Content") | |
157 | with mock.patch.object(src, "remove") as mck: | |
158 | mck.side_effect = FSError | |
159 | with self.assertRaises(FSError): | |
160 | fs.move.move_file( | |
161 | src, | |
162 | "file.txt", | |
163 | dst, | |
164 | "target.txt", | |
165 | cleanup_dst_on_error=cleanup, | |
166 | ) | |
167 | self.assertTrue(src.exists("file.txt")) | |
168 | self.assertEqual(not dst.exists("target.txt"), cleanup) |
1 | 1 | |
2 | 2 | import unittest |
3 | 3 | |
4 | from fs import errors | |
5 | from fs.memoryfs import MemoryFS | |
4 | 6 | from fs.multifs import MultiFS |
5 | from fs.memoryfs import MemoryFS | |
6 | from fs import errors | |
7 | ||
8 | 7 | from fs.test import FSTestCases |
9 | 8 | |
10 | 9 |
0 | 0 | from __future__ import unicode_literals |
1 | 1 | |
2 | import sys | |
3 | ||
2 | 4 | import os |
3 | import sys | |
5 | import pkg_resources | |
6 | import shutil | |
4 | 7 | import tempfile |
5 | 8 | import unittest |
6 | import pkg_resources | |
7 | ||
8 | import pytest | |
9 | 9 | |
10 | 10 | from fs import open_fs, opener |
11 | from fs.osfs import OSFS | |
12 | from fs.opener import registry, errors | |
11 | from fs.appfs import UserDataFS | |
13 | 12 | from fs.memoryfs import MemoryFS |
14 | from fs.appfs import UserDataFS | |
13 | from fs.opener import errors, registry | |
15 | 14 | from fs.opener.parse import ParseResult |
16 | 15 | from fs.opener.registry import Registry |
16 | from fs.osfs import OSFS | |
17 | 17 | |
18 | 18 | try: |
19 | 19 | from unittest import mock |
207 | 207 | self.assertTrue(mem_fs.isclosed()) |
208 | 208 | |
209 | 209 | |
210 | @pytest.mark.usefixtures("mock_appdir_directories") | |
211 | 210 | class TestOpeners(unittest.TestCase): |
211 | def setUp(self): | |
212 | self.tmpdir = tempfile.mkdtemp() | |
213 | ||
214 | def tearDown(self): | |
215 | shutil.rmtree(self.tmpdir) | |
216 | ||
212 | 217 | def test_repr(self): |
213 | 218 | # Check __repr__ works |
214 | 219 | for entry_point in pkg_resources.iter_entry_points("fs.opener"): |
259 | 264 | mem_fs_2 = opener.open_fs(mem_fs) |
260 | 265 | self.assertEqual(mem_fs, mem_fs_2) |
261 | 266 | |
262 | def test_open_userdata(self): | |
267 | @mock.patch("appdirs.{}".format(UserDataFS.app_dir), autospec=True, spec_set=True) | |
268 | def test_open_userdata(self, app_dir): | |
269 | app_dir.return_value = self.tmpdir | |
270 | ||
263 | 271 | with self.assertRaises(errors.OpenerError): |
264 | 272 | opener.open_fs("userdata://foo:bar:baz:egg") |
265 | 273 | |
268 | 276 | self.assertEqual(app_fs.app_dirs.appauthor, "willmcgugan") |
269 | 277 | self.assertEqual(app_fs.app_dirs.version, "1.0") |
270 | 278 | |
271 | def test_open_userdata_no_version(self): | |
279 | @mock.patch("appdirs.{}".format(UserDataFS.app_dir), autospec=True, spec_set=True) | |
280 | def test_open_userdata_no_version(self, app_dir): | |
281 | app_dir.return_value = self.tmpdir | |
282 | ||
272 | 283 | app_fs = opener.open_fs("userdata://fstest:willmcgugan", create=True) |
273 | 284 | self.assertEqual(app_fs.app_dirs.appname, "fstest") |
274 | 285 | self.assertEqual(app_fs.app_dirs.appauthor, "willmcgugan") |
275 | 286 | self.assertEqual(app_fs.app_dirs.version, None) |
276 | 287 | |
277 | def test_user_data_opener(self): | |
288 | @mock.patch("appdirs.{}".format(UserDataFS.app_dir), autospec=True, spec_set=True) | |
289 | def test_user_data_opener(self, app_dir): | |
290 | app_dir.return_value = self.tmpdir | |
291 | ||
278 | 292 | user_data_fs = open_fs("userdata://fstest:willmcgugan:1.0", create=True) |
279 | 293 | self.assertIsInstance(user_data_fs, UserDataFS) |
280 | 294 | user_data_fs.makedir("foo", recreate=True) |
286 | 300 | def test_open_ftp(self, mock_FTPFS): |
287 | 301 | open_fs("ftp://foo:bar@ftp.example.org") |
288 | 302 | mock_FTPFS.assert_called_once_with( |
289 | "ftp.example.org", passwd="bar", port=21, user="foo", proxy=None, timeout=10 | |
303 | "ftp.example.org", | |
304 | passwd="bar", | |
305 | port=21, | |
306 | user="foo", | |
307 | proxy=None, | |
308 | timeout=10, | |
309 | tls=False, | |
310 | ) | |
311 | ||
312 | @mock.patch("fs.ftpfs.FTPFS") | |
313 | def test_open_ftps(self, mock_FTPFS): | |
314 | open_fs("ftps://foo:bar@ftp.example.org") | |
315 | mock_FTPFS.assert_called_once_with( | |
316 | "ftp.example.org", | |
317 | passwd="bar", | |
318 | port=21, | |
319 | user="foo", | |
320 | proxy=None, | |
321 | timeout=10, | |
322 | tls=True, | |
290 | 323 | ) |
291 | 324 | |
292 | 325 | @mock.patch("fs.ftpfs.FTPFS") |
299 | 332 | user="foo", |
300 | 333 | proxy="ftp.proxy.org", |
301 | 334 | timeout=10, |
302 | ) | |
335 | tls=False, | |
336 | ) |
0 | 0 | # coding: utf-8 |
1 | 1 | from __future__ import unicode_literals |
2 | ||
3 | import sys | |
2 | 4 | |
3 | 5 | import errno |
4 | 6 | import io |
5 | 7 | import os |
6 | 8 | import shutil |
7 | 9 | import tempfile |
8 | import sys | |
10 | import time | |
9 | 11 | import unittest |
10 | import pytest | |
11 | ||
12 | from fs import osfs, open_fs | |
13 | from fs.path import relpath, dirname | |
14 | from fs import errors | |
12 | import warnings | |
13 | from six import text_type | |
14 | ||
15 | from fs import errors, open_fs, osfs | |
16 | from fs.path import dirname, relpath | |
15 | 17 | from fs.test import FSTestCases |
16 | ||
17 | from six import text_type | |
18 | 18 | |
19 | 19 | try: |
20 | 20 | from unittest import mock |
24 | 24 | |
25 | 25 | class TestOSFS(FSTestCases, unittest.TestCase): |
26 | 26 | """Test OSFS implementation.""" |
27 | ||
28 | @classmethod | |
29 | def setUpClass(cls): | |
30 | warnings.simplefilter("error") | |
31 | ||
32 | @classmethod | |
33 | def tearDownClass(cls): | |
34 | warnings.simplefilter(warnings.defaultaction) | |
27 | 35 | |
28 | 36 | def make_fs(self): |
29 | 37 | temp_dir = tempfile.mkdtemp("fstestosfs") |
87 | 95 | self.assertIn("TYRIONLANISTER", fs1.getsyspath("/")) |
88 | 96 | self.assertNotIn("TYRIONLANISTER", fs2.getsyspath("/")) |
89 | 97 | |
90 | @pytest.mark.skipif(osfs.sendfile is None, reason="sendfile not supported") | |
91 | @pytest.mark.skipif( | |
98 | def test_copy_preserve_time(self): | |
99 | self.fs.makedir("foo") | |
100 | self.fs.makedir("bar") | |
101 | self.fs.create("foo/file.txt") | |
102 | raw_info = {"details": {"modified": time.time() - 10000}} | |
103 | self.fs.setinfo("foo/file.txt", raw_info) | |
104 | ||
105 | namespaces = ("details", "modified") | |
106 | src_info = self.fs.getinfo("foo/file.txt", namespaces) | |
107 | ||
108 | self.fs.copy("foo/file.txt", "bar/file.txt", preserve_time=True) | |
109 | self.assertTrue(self.fs.exists("bar/file.txt")) | |
110 | ||
111 | dst_info = self.fs.getinfo("bar/file.txt", namespaces) | |
112 | delta = dst_info.modified - src_info.modified | |
113 | self.assertAlmostEqual(delta.total_seconds(), 0, places=2) | |
114 | ||
115 | @unittest.skipUnless(osfs.sendfile, "sendfile not supported") | |
116 | @unittest.skipIf( | |
92 | 117 | sys.version_info >= (3, 8), |
93 | reason="the copy function uses sendfile in Python 3.8+, " | |
118 | "the copy function uses sendfile in Python 3.8+, " | |
94 | 119 | "making the patched implementation irrelevant", |
95 | 120 | ) |
96 | 121 | def test_copy_sendfile(self): |
138 | 163 | finally: |
139 | 164 | shutil.rmtree(dir_path) |
140 | 165 | |
141 | @pytest.mark.skipif(not hasattr(os, "symlink"), reason="No symlink support") | |
166 | @unittest.skipUnless(hasattr(os, "symlink"), "No symlink support") | |
142 | 167 | def test_symlinks(self): |
143 | 168 | with open(self._get_real_path("foo"), "wb") as f: |
144 | 169 | f.write(b"foobar") |
0 | from __future__ import absolute_import, unicode_literals, print_function | |
0 | from __future__ import absolute_import, print_function, unicode_literals | |
1 | 1 | |
2 | 2 | """ |
3 | 3 | fstests.test_path: testcases for the fs path functions |
0 | from __future__ import unicode_literals | |
1 | from __future__ import print_function | |
0 | from __future__ import print_function, unicode_literals | |
2 | 1 | |
3 | 2 | import unittest |
4 | ||
5 | 3 | from six import text_type |
6 | 4 | |
7 | from fs.permissions import make_mode, Permissions | |
5 | from fs.permissions import Permissions, make_mode | |
8 | 6 | |
9 | 7 | |
10 | 8 | class TestPermissions(unittest.TestCase): |
5 | 5 | import unittest |
6 | 6 | |
7 | 7 | from fs import osfs |
8 | from fs.subfs import SubFS | |
9 | 8 | from fs.memoryfs import MemoryFS |
10 | 9 | from fs.path import relpath |
10 | from fs.subfs import SubFS | |
11 | ||
11 | 12 | from .test_osfs import TestOSFS |
12 | 13 | |
13 | 14 |
6 | 6 | import tarfile |
7 | 7 | import tempfile |
8 | 8 | import unittest |
9 | import pytest | |
10 | 9 | |
11 | 10 | from fs import tarfs |
11 | from fs.compress import write_tar | |
12 | 12 | from fs.enums import ResourceType |
13 | from fs.compress import write_tar | |
13 | from fs.errors import NoURL | |
14 | 14 | from fs.opener import open_fs |
15 | 15 | from fs.opener.errors import NotWriteable |
16 | from fs.errors import NoURL | |
17 | 16 | from fs.test import FSTestCases |
18 | 17 | |
19 | 18 | from .test_archives import ArchiveTestCases |
19 | ||
20 | try: | |
21 | from pytest import mark | |
22 | except ImportError: | |
23 | from . import mark | |
20 | 24 | |
21 | 25 | |
22 | 26 | class TestWriteReadTarFS(unittest.TestCase): |
93 | 97 | del fs._tar_file |
94 | 98 | |
95 | 99 | |
96 | @pytest.mark.skipif(six.PY2, reason="Python2 does not support LZMA") | |
100 | @mark.slow | |
101 | @unittest.skipIf(six.PY2, "Python2 does not support LZMA") | |
97 | 102 | class TestWriteXZippedTarFS(FSTestCases, unittest.TestCase): |
98 | 103 | def make_fs(self): |
99 | 104 | fh, _tar_file = tempfile.mkstemp() |
118 | 123 | tarfile.open(fs._tar_file, "r:{}".format(other_comps)) |
119 | 124 | |
120 | 125 | |
126 | @mark.slow | |
121 | 127 | class TestWriteBZippedTarFS(FSTestCases, unittest.TestCase): |
122 | 128 | def make_fs(self): |
123 | 129 | fh, _tar_file = tempfile.mkstemp() |
236 | 242 | |
237 | 243 | |
238 | 244 | class TestImplicitDirectories(unittest.TestCase): |
239 | """Regression tests for #160. | |
240 | """ | |
245 | """Regression tests for #160.""" | |
241 | 246 | |
242 | 247 | @classmethod |
243 | 248 | def setUpClass(cls): |
1 | 1 | |
2 | 2 | import os |
3 | 3 | |
4 | from fs import errors | |
4 | 5 | from fs.tempfs import TempFS |
5 | from fs import errors | |
6 | 6 | |
7 | 7 | from .test_osfs import TestOSFS |
8 | 8 |
0 | from __future__ import unicode_literals, print_function | |
0 | from __future__ import print_function, unicode_literals | |
1 | 1 | |
2 | import unittest | |
2 | 3 | from datetime import datetime |
3 | import unittest | |
4 | ||
5 | import pytz | |
6 | 4 | |
7 | 5 | from fs.time import datetime_to_epoch, epoch_to_datetime |
6 | ||
7 | try: | |
8 | from datetime import timezone | |
9 | except ImportError: | |
10 | from fs._tzcompat import timezone # type: ignore | |
8 | 11 | |
9 | 12 | |
10 | 13 | class TestEpoch(unittest.TestCase): |
11 | 14 | def test_epoch_to_datetime(self): |
12 | 15 | self.assertEqual( |
13 | epoch_to_datetime(142214400), datetime(1974, 7, 5, tzinfo=pytz.UTC) | |
16 | epoch_to_datetime(142214400), datetime(1974, 7, 5, tzinfo=timezone.utc) | |
14 | 17 | ) |
15 | 18 | |
16 | 19 | def test_datetime_to_epoch(self): |
17 | 20 | self.assertEqual( |
18 | datetime_to_epoch(datetime(1974, 7, 5, tzinfo=pytz.UTC)), 142214400 | |
21 | datetime_to_epoch(datetime(1974, 7, 5, tzinfo=timezone.utc)), 142214400 | |
19 | 22 | ) |
1 | 1 | |
2 | 2 | import unittest |
3 | 3 | |
4 | from fs.mode import validate_open_mode | |
5 | from fs.mode import validate_openbin_mode | |
6 | 4 | from fs import tools |
5 | from fs.mode import validate_open_mode, validate_openbin_mode | |
7 | 6 | from fs.opener import open_fs |
8 | 7 | |
9 | 8 |
0 | from __future__ import print_function | |
1 | from __future__ import unicode_literals | |
0 | from __future__ import print_function, unicode_literals | |
2 | 1 | |
3 | 2 | import io |
4 | 3 | import unittest |
0 | 0 | from __future__ import unicode_literals |
1 | 1 | |
2 | import six | |
2 | 3 | import unittest |
3 | 4 | |
5 | from fs import walk | |
4 | 6 | from fs.errors import FSError |
5 | 7 | from fs.memoryfs import MemoryFS |
6 | from fs import walk | |
7 | 8 | from fs.wrap import read_only |
8 | import six | |
9 | 9 | |
10 | 10 | |
11 | 11 | class TestWalker(unittest.TestCase): |
0 | 0 | from __future__ import unicode_literals |
1 | 1 | |
2 | import operator | |
2 | 3 | import unittest |
3 | 4 | |
4 | from fs import errors | |
5 | try: | |
6 | from unittest import mock | |
7 | except ImportError: | |
8 | import mock | |
9 | ||
10 | import six | |
11 | ||
12 | import fs.copy | |
13 | import fs.errors | |
14 | import fs.mirror | |
15 | import fs.move | |
16 | import fs.wrap | |
5 | 17 | from fs import open_fs |
6 | from fs import wrap | |
7 | ||
8 | ||
9 | class TestWrap(unittest.TestCase): | |
10 | def test_readonly(self): | |
11 | mem_fs = open_fs("mem://") | |
12 | fs = wrap.read_only(mem_fs) | |
13 | ||
14 | with self.assertRaises(errors.ResourceReadOnly): | |
15 | fs.open("foo", "w") | |
16 | ||
17 | with self.assertRaises(errors.ResourceReadOnly): | |
18 | fs.appendtext("foo", "bar") | |
19 | ||
20 | with self.assertRaises(errors.ResourceReadOnly): | |
21 | fs.appendbytes("foo", b"bar") | |
22 | ||
23 | with self.assertRaises(errors.ResourceReadOnly): | |
24 | fs.makedir("foo") | |
25 | ||
26 | with self.assertRaises(errors.ResourceReadOnly): | |
27 | fs.move("foo", "bar") | |
28 | ||
29 | with self.assertRaises(errors.ResourceReadOnly): | |
30 | fs.openbin("foo", "w") | |
31 | ||
32 | with self.assertRaises(errors.ResourceReadOnly): | |
33 | fs.remove("foo") | |
34 | ||
35 | with self.assertRaises(errors.ResourceReadOnly): | |
36 | fs.removedir("foo") | |
37 | ||
38 | with self.assertRaises(errors.ResourceReadOnly): | |
39 | fs.setinfo("foo", {}) | |
40 | ||
41 | with self.assertRaises(errors.ResourceReadOnly): | |
42 | fs.settimes("foo", {}) | |
43 | ||
44 | with self.assertRaises(errors.ResourceReadOnly): | |
45 | fs.copy("foo", "bar") | |
46 | ||
47 | with self.assertRaises(errors.ResourceReadOnly): | |
48 | fs.create("foo") | |
49 | ||
50 | with self.assertRaises(errors.ResourceReadOnly): | |
51 | fs.writetext("foo", "bar") | |
52 | ||
53 | with self.assertRaises(errors.ResourceReadOnly): | |
54 | fs.writebytes("foo", b"bar") | |
55 | ||
56 | with self.assertRaises(errors.ResourceReadOnly): | |
57 | fs.makedirs("foo/bar") | |
58 | ||
59 | with self.assertRaises(errors.ResourceReadOnly): | |
60 | fs.touch("foo") | |
61 | ||
62 | with self.assertRaises(errors.ResourceReadOnly): | |
63 | fs.upload("foo", None) | |
64 | ||
65 | with self.assertRaises(errors.ResourceReadOnly): | |
66 | fs.writefile("foo", None) | |
67 | ||
68 | self.assertTrue(mem_fs.isempty("/")) | |
69 | mem_fs.writebytes("file", b"read me") | |
70 | with fs.openbin("file") as read_file: | |
18 | from fs.info import Info | |
19 | ||
20 | ||
21 | class TestWrapReadOnly(unittest.TestCase): | |
22 | def setUp(self): | |
23 | self.fs = open_fs("mem://") | |
24 | self.ro = fs.wrap.read_only(self.fs) | |
25 | ||
26 | def tearDown(self): | |
27 | self.fs.close() | |
28 | ||
29 | def assertReadOnly(self, func, *args, **kwargs): | |
30 | self.assertRaises(fs.errors.ResourceReadOnly, func, *args, **kwargs) | |
31 | ||
32 | def test_open_w(self): | |
33 | self.assertReadOnly(self.ro.open, "foo", "w") | |
34 | ||
35 | def test_appendtext(self): | |
36 | self.assertReadOnly(self.ro.appendtext, "foo", "bar") | |
37 | ||
38 | def test_appendbytes(self): | |
39 | self.assertReadOnly(self.ro.appendbytes, "foo", b"bar") | |
40 | ||
41 | def test_makedir(self): | |
42 | self.assertReadOnly(self.ro.makedir, "foo") | |
43 | ||
44 | def test_move(self): | |
45 | self.assertReadOnly(self.ro.move, "foo", "bar") | |
46 | ||
47 | def test_openbin_w(self): | |
48 | self.assertReadOnly(self.ro.openbin, "foo", "w") | |
49 | ||
50 | def test_remove(self): | |
51 | self.assertReadOnly(self.ro.remove, "foo") | |
52 | ||
53 | def test_removedir(self): | |
54 | self.assertReadOnly(self.ro.removedir, "foo") | |
55 | ||
56 | def test_removetree(self): | |
57 | self.assertReadOnly(self.ro.removetree, "foo") | |
58 | ||
59 | def test_setinfo(self): | |
60 | self.assertReadOnly(self.ro.setinfo, "foo", {}) | |
61 | ||
62 | def test_settimes(self): | |
63 | self.assertReadOnly(self.ro.settimes, "foo", {}) | |
64 | ||
65 | def test_copy(self): | |
66 | self.assertReadOnly(self.ro.copy, "foo", "bar") | |
67 | ||
68 | def test_create(self): | |
69 | self.assertReadOnly(self.ro.create, "foo") | |
70 | ||
71 | def test_writetext(self): | |
72 | self.assertReadOnly(self.ro.writetext, "foo", "bar") | |
73 | ||
74 | def test_writebytes(self): | |
75 | self.assertReadOnly(self.ro.writebytes, "foo", b"bar") | |
76 | ||
77 | def test_makedirs(self): | |
78 | self.assertReadOnly(self.ro.makedirs, "foo/bar") | |
79 | ||
80 | def test_touch(self): | |
81 | self.assertReadOnly(self.ro.touch, "foo") | |
82 | ||
83 | def test_upload(self): | |
84 | self.assertReadOnly(self.ro.upload, "foo", six.BytesIO()) | |
85 | ||
86 | def test_writefile(self): | |
87 | self.assertReadOnly(self.ro.writefile, "foo", six.StringIO()) | |
88 | ||
89 | def test_openbin_r(self): | |
90 | self.fs.writebytes("file", b"read me") | |
91 | with self.ro.openbin("file") as read_file: | |
71 | 92 | self.assertEqual(read_file.read(), b"read me") |
72 | 93 | |
73 | with fs.open("file", "rb") as read_file: | |
94 | def test_open_r(self): | |
95 | self.fs.writebytes("file", b"read me") | |
96 | with self.ro.open("file", "rb") as read_file: | |
74 | 97 | self.assertEqual(read_file.read(), b"read me") |
75 | 98 | |
76 | def test_cachedir(self): | |
77 | mem_fs = open_fs("mem://") | |
78 | mem_fs.makedirs("foo/bar/baz") | |
79 | mem_fs.touch("egg") | |
80 | ||
81 | fs = wrap.cache_directory(mem_fs) | |
82 | self.assertEqual(sorted(fs.listdir("/")), ["egg", "foo"]) | |
83 | self.assertEqual(sorted(fs.listdir("/")), ["egg", "foo"]) | |
84 | self.assertTrue(fs.isdir("foo")) | |
85 | self.assertTrue(fs.isdir("foo")) | |
86 | self.assertTrue(fs.isfile("egg")) | |
87 | self.assertTrue(fs.isfile("egg")) | |
88 | ||
89 | self.assertEqual(fs.getinfo("foo"), mem_fs.getinfo("foo")) | |
90 | self.assertEqual(fs.getinfo("foo"), mem_fs.getinfo("foo")) | |
91 | ||
92 | self.assertEqual(fs.getinfo("/"), mem_fs.getinfo("/")) | |
93 | self.assertEqual(fs.getinfo("/"), mem_fs.getinfo("/")) | |
94 | ||
95 | with self.assertRaises(errors.ResourceNotFound): | |
96 | fs.getinfo("/foofoo") | |
99 | ||
100 | class TestWrapReadOnlySyspath(unittest.TestCase): | |
101 | # If the wrapped fs has a syspath, there is a chance that somewhere | |
102 | # in fs.copy or fs.mirror we try to use it to our advantage, but | |
103 | # we want to make sure these implementations don't circumvent the | |
104 | # wrapper. | |
105 | ||
106 | def setUp(self): | |
107 | self.fs = open_fs("temp://") | |
108 | self.ro = fs.wrap.read_only(self.fs) | |
109 | self.src = open_fs("temp://") | |
110 | self.src.touch("foo") | |
111 | self.src.makedir("bar") | |
112 | ||
113 | def tearDown(self): | |
114 | self.fs.close() | |
115 | self.src.close() | |
116 | ||
117 | def assertReadOnly(self, func, *args, **kwargs): | |
118 | self.assertRaises(fs.errors.ResourceReadOnly, func, *args, **kwargs) | |
119 | ||
120 | def test_copy_fs(self): | |
121 | self.assertReadOnly(fs.copy.copy_fs, self.src, self.ro) | |
122 | ||
123 | def test_copy_fs_if_newer(self): | |
124 | self.assertReadOnly(fs.copy.copy_fs_if_newer, self.src, self.ro) | |
125 | ||
126 | def test_copy_file(self): | |
127 | self.assertReadOnly(fs.copy.copy_file, self.src, "foo", self.ro, "foo") | |
128 | ||
129 | def test_copy_file_if_newer(self): | |
130 | self.assertReadOnly(fs.copy.copy_file_if_newer, self.src, "foo", self.ro, "foo") | |
131 | ||
132 | def test_copy_structure(self): | |
133 | self.assertReadOnly(fs.copy.copy_structure, self.src, self.ro) | |
134 | ||
135 | def test_mirror(self): | |
136 | self.assertReadOnly(fs.mirror.mirror, self.src, self.ro) | |
137 | fs.mirror.mirror(self.src, self.fs) | |
138 | self.fs.touch("baz") | |
139 | self.assertReadOnly(fs.mirror.mirror, self.src, self.ro) | |
140 | ||
141 | def test_move_fs(self): | |
142 | self.assertReadOnly(fs.move.move_fs, self.src, self.ro) | |
143 | self.src.removetree("/") | |
144 | self.fs.touch("foo") | |
145 | self.assertReadOnly(fs.move.move_fs, self.ro, self.src) | |
146 | ||
147 | def test_move_file(self): | |
148 | self.assertReadOnly(fs.move.move_file, self.src, "foo", self.ro, "foo") | |
149 | self.fs.touch("baz") | |
150 | self.assertReadOnly(fs.move.move_file, self.ro, "baz", self.src, "foo") | |
151 | ||
152 | def test_move_dir(self): | |
153 | self.assertReadOnly(fs.move.move_file, self.src, "bar", self.ro, "bar") | |
154 | self.fs.makedir("baz") | |
155 | self.assertReadOnly(fs.move.move_dir, self.ro, "baz", self.src, "baz") | |
156 | ||
157 | ||
158 | class TestWrapCachedDir(unittest.TestCase): | |
159 | def setUp(self): | |
160 | self.fs = open_fs("mem://") | |
161 | self.fs.makedirs("foo/bar/baz") | |
162 | self.fs.touch("egg") | |
163 | self.cached = fs.wrap.cache_directory(self.fs) | |
164 | ||
165 | def tearDown(self): | |
166 | self.fs.close() | |
167 | ||
168 | def assertNotFound(self, func, *args, **kwargs): | |
169 | self.assertRaises(fs.errors.ResourceNotFound, func, *args, **kwargs) | |
170 | ||
171 | def test_scandir(self): | |
172 | key = operator.attrgetter("name") | |
173 | expected = [ | |
174 | Info({"basic": {"name": "egg", "is_dir": False}}), | |
175 | Info({"basic": {"name": "foo", "is_dir": True}}), | |
176 | ] | |
177 | with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir: | |
178 | self.assertEqual(sorted(self.cached.scandir("/"), key=key), expected) | |
179 | scandir.assert_has_calls([mock.call("/", namespaces=None, page=None)]) | |
180 | with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir: | |
181 | self.assertEqual(sorted(self.cached.scandir("/"), key=key), expected) | |
182 | scandir.assert_not_called() | |
183 | ||
184 | def test_isdir(self): | |
185 | with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir: | |
186 | self.assertTrue(self.cached.isdir("foo")) | |
187 | self.assertFalse(self.cached.isdir("egg")) # is file | |
188 | self.assertFalse(self.cached.isdir("spam")) # doesn't exist | |
189 | scandir.assert_has_calls([mock.call("/", namespaces=None, page=None)]) | |
190 | with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir: | |
191 | self.assertTrue(self.cached.isdir("foo")) | |
192 | self.assertFalse(self.cached.isdir("egg")) | |
193 | self.assertFalse(self.cached.isdir("spam")) | |
194 | scandir.assert_not_called() | |
195 | ||
196 | def test_isfile(self): | |
197 | with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir: | |
198 | self.assertTrue(self.cached.isfile("egg")) | |
199 | self.assertFalse(self.cached.isfile("foo")) # is dir | |
200 | self.assertFalse(self.cached.isfile("spam")) # doesn't exist | |
201 | scandir.assert_has_calls([mock.call("/", namespaces=None, page=None)]) | |
202 | with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir: | |
203 | self.assertTrue(self.cached.isfile("egg")) | |
204 | self.assertFalse(self.cached.isfile("foo")) | |
205 | self.assertFalse(self.cached.isfile("spam")) | |
206 | scandir.assert_not_called() | |
207 | ||
208 | def test_getinfo(self): | |
209 | with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir: | |
210 | self.assertEqual(self.cached.getinfo("foo"), self.fs.getinfo("foo")) | |
211 | self.assertEqual(self.cached.getinfo("/"), self.fs.getinfo("/")) | |
212 | self.assertNotFound(self.cached.getinfo, "spam") | |
213 | scandir.assert_has_calls([mock.call("/", namespaces=None, page=None)]) | |
214 | with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir: | |
215 | self.assertEqual(self.cached.getinfo("foo"), self.fs.getinfo("foo")) | |
216 | self.assertEqual(self.cached.getinfo("/"), self.fs.getinfo("/")) | |
217 | self.assertNotFound(self.cached.getinfo, "spam") | |
218 | scandir.assert_not_called() |
0 | 0 | from __future__ import unicode_literals |
1 | 1 | |
2 | 2 | import unittest |
3 | ||
4 | 3 | from six import text_type |
5 | 4 | |
6 | 5 | from fs import wrapfs |
0 | 0 | # -*- encoding: UTF-8 |
1 | 1 | from __future__ import unicode_literals |
2 | 2 | |
3 | import sys | |
4 | ||
3 | 5 | import os |
4 | import sys | |
6 | import six | |
5 | 7 | import tempfile |
6 | 8 | import unittest |
7 | 9 | import zipfile |
8 | 10 | |
9 | import six | |
10 | ||
11 | 11 | from fs import zipfs |
12 | 12 | from fs.compress import write_zip |
13 | from fs.enums import Seek | |
14 | from fs.errors import NoURL | |
13 | 15 | from fs.opener import open_fs |
14 | 16 | from fs.opener.errors import NotWriteable |
15 | from fs.errors import NoURL | |
16 | 17 | from fs.test import FSTestCases |
17 | from fs.enums import Seek | |
18 | 18 | |
19 | 19 | from .test_archives import ArchiveTestCases |
20 | 20 |
0 | [tox] | |
1 | envlist = {py27,py34,py35,py36,py37}{,-scandir},pypy,typecheck,lint | |
2 | sitepackages = False | |
3 | skip_missing_interpreters=True | |
4 | ||
5 | [testenv] | |
6 | deps = -r {toxinidir}/testrequirements.txt | |
7 | commands = coverage run -m pytest --cov-append {posargs} {toxinidir}/tests | |
8 | ||
9 | [testenv:typecheck] | |
10 | python = python37 | |
11 | deps = | |
12 | mypy==0.740 | |
13 | -r {toxinidir}/testrequirements.txt | |
14 | commands = make typecheck | |
15 | whitelist_externals = make | |
16 | ||
17 | [testenv:lint] | |
18 | python = python37 | |
19 | deps = | |
20 | flake8 | |
21 | # flake8-builtins | |
22 | flake8-bugbear | |
23 | flake8-comprehensions | |
24 | # flake8-isort | |
25 | flake8-mutable | |
26 | commands = flake8 fs tests |