Codebase list python-fs / f8dd117
New upstream version 2.4.16 Yao Wei (魏銘廷) 1 year, 10 months ago
124 changed file(s) with 5023 addition(s) and 2329 deletion(s). Raw diff Collapse all Expand all
00 # These are supported funding model platforms
11
2 custom: willmcgugan
2 github: willmcgugan
3 ko_fi: willmcgugan
4 tidelift: "pypi/fs"
name: Package

on:
  push:
    tags:
      - 'v2.*'

jobs:

  build-wheel:
    runs-on: ubuntu-latest
    name: Build wheel distribution
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          submodules: true
      # FIX: this job defines no `strategy.matrix`, so the original
      # `${{ matrix.python-version }}` always expanded to an empty string.
      # Pin the build interpreter explicitly, matching build-sdist.
      - name: Setup Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: '3.9'
      - name: Update build dependencies
        run: python -m pip install -U pip wheel setuptools
      - name: Build wheel distribution
        run: python setup.py bdist_wheel
      - name: Store built wheel
        uses: actions/upload-artifact@v2
        with:
          name: dist
          path: dist/*

  build-sdist:
    runs-on: ubuntu-latest
    name: Build source distribution
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          submodules: true
      - name: Setup Python 3.9
        uses: actions/setup-python@v2
        with:
          # Quoted: an unquoted version can be parsed as a YAML float
          # and lose trailing zeros (e.g. 3.10 -> 3.1).
          python-version: '3.9'
      - name: Update build dependencies
        run: python -m pip install -U pip wheel setuptools
      - name: Build source distribution
        run: python setup.py sdist
      - name: Store source distribution
        uses: actions/upload-artifact@v2
        with:
          name: dist
          path: dist/*

  test-sdist:
    runs-on: ubuntu-latest
    name: Test source distribution
    needs:
      - build-sdist
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          submodules: true
      - name: Setup Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: '3.9'
      - name: Download source distribution
        uses: actions/download-artifact@v2
        with:
          name: dist
          path: dist
      - name: Install source distribution
        run: python -m pip install dist/fs-*.tar.gz
      # Remove the checked-out package so the installed distribution,
      # not the source tree, is what gets imported by the tests.
      - name: Remove source code
        run: rm -rvd fs
      - name: Install test requirements
        run: python -m pip install -r tests/requirements.txt
      - name: Test installed package
        run: python -m unittest discover -vv

  test-wheel:
    runs-on: ubuntu-latest
    name: Test wheel distribution
    needs:
      - build-wheel
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          submodules: true
      - name: Setup Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: '3.9'
      - name: Download wheel distribution
        uses: actions/download-artifact@v2
        with:
          name: dist
          path: dist
      - name: Install wheel distribution
        run: python -m pip install dist/fs-*.whl
      # Remove the checked-out package so the installed distribution,
      # not the source tree, is what gets imported by the tests.
      - name: Remove source code
        run: rm -rvd fs
      - name: Install test requirements
        run: python -m pip install -r tests/requirements.txt
      - name: Test installed package
        run: python -m unittest discover -vv

  upload:
    environment: PyPI
    runs-on: ubuntu-latest
    name: Upload
    needs:
      - build-sdist
      - build-wheel
      - test-sdist
      - test-wheel
    steps:
      - name: Download built distributions
        uses: actions/download-artifact@v2
        with:
          name: dist
          path: dist
      - name: Publish distributions to PyPI
        if: startsWith(github.ref, 'refs/tags/v')
        # NOTE(review): pinning a third-party action to a moving branch
        # (@master) is a supply-chain risk; prefer a tagged release or a
        # commit SHA -- confirm before changing, as it alters update cadence.
        uses: pypa/gh-action-pypi-publish@master
        with:
          user: __token__
          password: ${{ secrets.PYPI_API_TOKEN }}
          skip_existing: false

  release:
    environment: GitHub Releases
    runs-on: ubuntu-latest
    if: startsWith(github.ref, 'refs/tags/v')
    name: Release
    needs: upload
    steps:
      - name: Checkout code
        uses: actions/checkout@v1
      - name: Release a Changelog
        uses: rasmus-saks/release-a-changelog-action@v1.0.1
        with:
          github-token: '${{ secrets.GITHUB_TOKEN }}'
name: Test

on:
  - push
  - pull_request

jobs:
  test:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        # All CPython versions are quoted so YAML never parses them as
        # floats (an unquoted 3.10 would load as 3.1; quoting everything
        # keeps the list consistent).
        python-version:
          - '2.7'
          - '3.5'
          - '3.6'
          - '3.7'
          - '3.8'
          - '3.9'
          - '3.10'
          - pypy-2.7
          - pypy-3.6
          - pypy-3.7
    steps:
      - name: Checkout code
        uses: actions/checkout@v1
      - name: Setup Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Update pip
        run: python -m pip install -U pip wheel setuptools
      - name: Install tox
        run: python -m pip install tox tox-gh-actions
      - name: Test with tox
        run: python -m tox
      - name: Store partial coverage reports
        uses: actions/upload-artifact@v2
        with:
          name: coverage
          path: .coverage.*

  coveralls:
    needs: test
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v1
      - name: Setup Python 3.10
        uses: actions/setup-python@v2
        with:
          python-version: '3.10'
      - name: Install coverage package
        run: python -m pip install -U coverage
      - name: Download partial coverage reports
        uses: actions/download-artifact@v2
        with:
          name: coverage
      - name: Combine coverage
        run: python -m coverage combine
      - name: Report coverage
        run: python -m coverage report
      - name: Export coverage to XML
        run: python -m coverage xml
      - name: Upload coverage statistics to Coveralls
        # NOTE(review): @develop is a moving branch pin -- prefer a tagged
        # release of this action for reproducible CI.
        uses: AndreMiras/coveralls-python-action@develop

  lint:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        linter:
          - typecheck
          - codestyle
          - docstyle
          - codeformat
    steps:
      - name: Checkout code
        uses: actions/checkout@v1
      # FIX: step name previously read "Setup Python '3.10'" -- the quotes
      # belong in the YAML value, not the display name.
      - name: Setup Python 3.10
        uses: actions/setup-python@v2
        with:
          python-version: '3.10'
      - name: Update pip
        run: python -m pip install -U pip wheel setuptools
      - name: Install tox
        run: python -m pip install tox tox-gh-actions
      - name: Run ${{ matrix.linter }} linter
        run: python -m tox -e ${{ matrix.linter }}
+0
-57
.travis.yml less more
0 dist: xenial
1 sudo: false
2 language: python
3
4 python:
5 - "2.7"
6 - "3.4"
7 - "3.5"
8 - "3.6"
9 - "3.7"
10 - "3.8"
11 - "3.9"
12 - "pypy"
13 - "pypy3.5-7.0" # Need 7.0+ due to a bug in earlier versions that broke our tests.
14
15 matrix:
16 include:
17 - name: "Type checking"
18 python: "3.7"
19 env: TOXENV=typecheck
20 - name: "Lint"
21 python: "3.7"
22 env: TOXENV=lint
23
24 # Temporary bandaid for https://github.com/PyFilesystem/pyfilesystem2/issues/342
25 allow_failures:
26 - python: pypy
27 - python: pypy3.5-7.0
28
29 before_install:
30 - pip install -U tox tox-travis
31 - pip --version
32 - pip install -r testrequirements.txt
33 - pip freeze
34
35 install:
36 - pip install -e .
37
38 # command to run tests
39 script: tox
40
41 after_success:
42 - coveralls
43
44 before_deploy:
45 - pip install -U twine wheel
46 - python setup.py sdist bdist_wheel
47
48 deploy:
49 provider: script
50 script: twine upload dist/*
51 skip_cleanup: true
52 on:
53 python: 3.9
54 tags: true
55 repo: PyFilesystem/pyfilesystem2
56
33
44 The format is based on [Keep a Changelog](http://keepachangelog.com/)
55 and this project adheres to [Semantic Versioning](http://semver.org/).
6
7
8 ## Unreleased
9
10
11 ## [2.4.16] - 2022-05-02
12
13 ### Changed
14
15 - Make `fs.zipfs._ZipExtFile` use the seeking mechanism implemented
16 in the Python standard library in Python version 3.7 and later
17 ([#527](https://github.com/PyFilesystem/pyfilesystem2/pull/527)).
18 - Mark `fs.zipfs.ReadZipFS` as a case-sensitive filesystem
19 ([#527](https://github.com/PyFilesystem/pyfilesystem2/pull/527)).
20 - Optimized moving files between filesystems with syspaths.
21 ([#523](https://github.com/PyFilesystem/pyfilesystem2/pull/523)).
22 - Fixed `fs.move.move_file` to clean up the copy on the destination in case of errors.
23 - `fs.opener.manage_fs` with `writeable=True` will now raise a `ResourceReadOnly`
24 exception if the managed filesystem is not writeable.
25 - Marked filesystems wrapped with `fs.wrap.WrapReadOnly` as read-only.
26
27
28 ## [2.4.15] - 2022-02-07
29
30 ### Changed
31
32 - Support more lenient usernames and group names in FTP servers
33 ([#507](https://github.com/PyFilesystem/pyfilesystem2/pull/507)).
34 Closes [#506](https://github.com/PyFilesystem/pyfilesystem2/issues/506).
35 - Removed dependency on pytz ([#518](https://github.com/PyFilesystem/pyfilesystem2/pull/518)).
36 Closes [#516](https://github.com/PyFilesystem/pyfilesystem2/issues/516).
37
38 ### Fixed
39
40 - Fixed `MemoryFS.move` and `MemoryFS.movedir` not updating the name of moved
41 resources, causing `MemoryFS.scandir` to use the old name.
42 ([#510](https://github.com/PyFilesystem/pyfilesystem2/pull/510)).
43 Closes [#509](https://github.com/PyFilesystem/pyfilesystem2/issues/509).
44 - Make `WrapFS.move` and `WrapFS.movedir` use the delegate FS methods instead
45 of `fs.move` functions, which was causing optimized implementation of
46 `movedir` to be always skipped.
47 ([#511](https://github.com/PyFilesystem/pyfilesystem2/pull/511)).
48
49
50 ## [2.4.14] - 2021-11-16
51
52 ### Added
53
54 - Added `fs.copy.copy_file_if`, `fs.copy.copy_dir_if`, and `fs.copy.copy_fs_if`.
55 Closes [#458](https://github.com/PyFilesystem/pyfilesystem2/issues/458).
56 - Added `fs.base.FS.getmodified`.
57
58 ### Changed
59
60 - FTP servers that do not support the MLST command now try to use the MDTM command to
61 retrieve the last modification timestamp of a resource.
62 Closes [#456](https://github.com/PyFilesystem/pyfilesystem2/pull/456).
63
64 ### Fixed
65
66 - Fixed performance bugs in `fs.copy.copy_dir_if_newer`. Test cases were adapted to catch those bugs in the future.
67 - Fixed precision bug for timestamps in `fs.OSFS.setinfo`.
68
69
70 ## [2.4.13] - 2021-03-27
71
72 ### Added
73
74 - Added FTP over TLS (FTPS) support to FTPFS.
75 Closes [#437](https://github.com/PyFilesystem/pyfilesystem2/issues/437),
76 [#449](https://github.com/PyFilesystem/pyfilesystem2/pull/449).
77 - `PathError` now supports wrapping an exception using the `exc` argument.
78 Closes [#453](https://github.com/PyFilesystem/pyfilesystem2/issues/453).
79 - Better documentation of the `writable` parameter of `fs.open_fs`, and
80 hint about using `fs.wrap.read_only` when a read-only filesystem is
81 required. Closes [#441](https://github.com/PyFilesystem/pyfilesystem2/issues/441).
82 - Copy and move operations now provide a parameter `preserve_time` that, when
83 passed as `True`, makes sure the "mtime" of the destination file will be
84 the same as that of the source file.
85
86 ### Changed
87
88 - Make `FS.upload` explicit about the expected error when the parent directory of the destination does not exist.
89 Closes [#445](https://github.com/PyFilesystem/pyfilesystem2/pull/445).
90 - Migrate continuous integration from Travis-CI to GitHub Actions and introduce several linters
91 again in the build steps ([#448](https://github.com/PyFilesystem/pyfilesystem2/pull/448)).
92 Closes [#446](https://github.com/PyFilesystem/pyfilesystem2/issues/446).
93 - Stop requiring `pytest` to run tests, allowing any test runner supporting `unittest`-style
94 test suites.
95 - `FSTestCases` now builds the large data required for `upload` and `download` tests only
96 once in order to reduce the total testing time.
97 - `MemoryFS.move` and `MemoryFS.movedir` will now avoid copying data.
98 Closes [#452](https://github.com/PyFilesystem/pyfilesystem2/issues/452).
99 - `FS.removetree("/")` behaviour has been standardized in all filesystems, and
100 is expected to clear the contents of the root folder without deleting it.
101 Closes [#471](https://github.com/PyFilesystem/pyfilesystem2/issues/471).
102 - `FS.getbasic` is now deprecated, as it is redundant with `FS.getinfo`,
103 and `FS.getinfo` is now explicitly expected to return the *basic* info
104 namespace unconditionally. Closes [#469](https://github.com/PyFilesystem/pyfilesystem2/issues/469).
105
106 ### Fixed
107
108 - Make `FTPFile`, `MemoryFile` and `RawWrapper` accept [`array.array`](https://docs.python.org/3/library/array.html)
109 arguments for the `write` and `writelines` methods, as expected by their base class [`io.RawIOBase`](https://docs.python.org/3/library/io.html#io.RawIOBase).
110 - Various documentation issues, including `MemoryFS` docstring not rendering properly.
111 - Avoid creating a new connection on every call of `FTPFS.upload`. Closes [#455](https://github.com/PyFilesystem/pyfilesystem2/issues/455).
112 - `WrapReadOnly.removetree` not raising a `ResourceReadOnly` when called. Closes [#468](https://github.com/PyFilesystem/pyfilesystem2/issues/468).
113 - `WrapCachedDir.isdir` and `WrapCachedDir.isfile` raising a `ResourceNotFound` error on non-existing path ([#470](https://github.com/PyFilesystem/pyfilesystem2/pull/470)).
114 - `FTPFS` not listing certain entries with sticky/SUID/SGID permissions set by Linux server ([#473](https://github.com/PyFilesystem/pyfilesystem2/pull/473)).
115 Closes [#451](https://github.com/PyFilesystem/pyfilesystem2/issues/451).
116 - `scandir` iterator not being closed explicitly in `OSFS.scandir`, occasionally causing a `ResourceWarning`
117 to be thrown. Closes [#311](https://github.com/PyFilesystem/pyfilesystem2/issues/311).
118 - Incomplete type annotations for the `temp_fs` parameter of `WriteTarFS` and `WriteZipFS`.
119 Closes [#410](https://github.com/PyFilesystem/pyfilesystem2/issues/410).
120
6121
7122 ## [2.4.12] - 2021-01-14
8123
13128 [#380](https://github.com/PyFilesystem/pyfilesystem2/issues/380).
14129 - Added compatibility if a Windows FTP server returns file information to the
15130 `LIST` command with 24-hour times. Closes [#438](https://github.com/PyFilesystem/pyfilesystem2/issues/438).
131 - Added Python 3.9 support. Closes [#443](https://github.com/PyFilesystem/pyfilesystem2/issues/443).
16132
17133 ### Changed
18134
21137 be able to see if we break something aside from known issues with FTP tests.
22138 - Include docs in source distributions as well as the whole tests folder,
23139 ensuring `conftest.py` is present, fixes [#364](https://github.com/PyFilesystem/pyfilesystem2/issues/364).
24 - Stop patching copy with Python 3.8+ because it already uses `sendfile`.
140 - Stop patching copy with Python 3.8+ because it already uses `sendfile`
141 ([#424](https://github.com/PyFilesystem/pyfilesystem2/pull/424)).
142 Closes [#421](https://github.com/PyFilesystem/pyfilesystem2/issues/421).
25143
26144 ### Fixed
27145
28146 - Fixed crash when CPython's -OO flag is used
29 - Fixed error when parsing timestamps from a FTP directory served from a WindowsNT FTP Server, fixes [#395](https://github.com/PyFilesystem/pyfilesystem2/issues/395).
147 - Fixed error when parsing timestamps from a FTP directory served from a WindowsNT FTP Server.
148 Closes [#395](https://github.com/PyFilesystem/pyfilesystem2/issues/395).
30149 - Fixed documentation of `Mode.to_platform_bin`. Closes [#382](https://github.com/PyFilesystem/pyfilesystem2/issues/382).
31150 - Fixed the code example in the "Testing Filesystems" section of the
32151 "Implementing Filesystems" guide. Closes [#407](https://github.com/PyFilesystem/pyfilesystem2/issues/407).
33152 - Fixed `FTPFS.openbin` not implicitly opening files in binary mode like expected
34153 from `openbin`. Closes [#406](https://github.com/PyFilesystem/pyfilesystem2/issues/406).
35154
155
36156 ## [2.4.11] - 2019-09-07
37157
38158 ### Added
39159
40160 - Added geturl for TarFS and ZipFS for 'fs' purpose. NoURL for 'download' purpose.
41 - Added helpful root path in CreateFailed exception [#340](https://github.com/PyFilesystem/pyfilesystem2/issues/340)
42 - Added Python 3.8 support
161 - Added helpful root path in CreateFailed exception.
162 Closes [#340](https://github.com/PyFilesystem/pyfilesystem2/issues/340).
163 - Added Python 3.8 support.
43164
44165 ### Fixed
45166
67188
68189 ### Fixed
69190
70 - Fixed broken WrapFS.movedir [#322](https://github.com/PyFilesystem/pyfilesystem2/issues/322)
191 - Fixed broken WrapFS.movedir [#322](https://github.com/PyFilesystem/pyfilesystem2/issues/322).
71192
72193 ## [2.4.9] - 2019-07-28
73194
449570
450571 ### Added
451572
452 - New `copy_if_newer' functionality in`copy` module.
573 - New `copy_if_newer` functionality in `copy` module.
453574
454575 ### Fixed
455576
460581 ### Changed
461582
462583 - Improved FTP support for non-compliant servers
463 - Fix for ZipFS implied directories
584 - Fix for `ZipFS` implied directories
464585
465586 ## [2.0.1] - 2017-03-11
466587
467588 ### Added
468589
469 - TarFS contributed by Martin Larralde
470
471 ### Fixed
472
473 - FTPFS bugs.
590 - `TarFS` contributed by Martin Larralde.
591
592 ### Fixed
593
594 - `FTPFS` bugs.
474595
475596 ## [2.0.0] - 2016-12-07
476597
11
22 Pull Requests are very welcome for this project!
33
4 For bug fixes or new features, please file an issue before submitting a pull request. If the change isn't trivial, it may be best to wait for feedback. For a quicker response, contact [Will McGugan](mailto:willmcgugan+pyfs@gmail.com) directly.
4 For bug fixes or new features, please file an issue before submitting a pull
5 request. If the change isn't trivial, it may be best to wait for feedback.
6 For a quicker response, contact [Will McGugan](mailto:willmcgugan+pyfs@gmail.com)
7 directly.
8
9
10 ## `tox`
11
12 Most of the guidelines that follow can be checked with a particular
13 [`tox`](https://pypi.org/project/tox/) environment. Having it installed will
14 help you develop and verify your code locally without having to wait for
15 our Continuous Integration pipeline to finish.
16
17
18 ## Tests
19
20 New code should have unit tests. We strive to have near 100% coverage.
21 Get in touch if you need assistance with the tests. You shouldn't refrain
22 from opening a Pull Request even if all the tests were not added yet, or if
23 not all of them are passing yet.
24
25 ### Dependencies
26
27 The dependencies for running the tests can be found in the `tests/requirements.txt` file.
28 If you're using `tox`, you won't have to install them manually. Otherwise,
29 they can be installed with `pip`:
30 ```console
31 $ pip install -r tests/requirements.txt
32 ```
33
34 ### Running (with `tox`)
35
36 Simply run in the repository folder to execute the tests for all available
37 environments:
38 ```console
39 $ tox
40 ```
41
42 Since this can take some time, you can use a single environment to run
43 tests only once, for instance to run tests only with Python 3.9:
44 ```console
45 $ tox -e py39
46 ```
47
48 ### Running (without `tox`)
49
50 Tests are written using the standard [`unittest`](https://docs.python.org/3/library/unittest.html)
51 framework. You should be able to run them using the standard library runner:
52 ```console
53 $ python -m unittest discover -vv
54 ```
55
556
657 ## Coding Guidelines
758
8 This project runs on Python2.7 and Python3.X. Python2.7 will be dropped at some point, but for now, please maintain compatibility.
59 This project runs on Python2.7 and Python3.X. Python2.7 will be dropped at
60 some point, but for now, please maintain compatibility. PyFilesystem2 uses
61 the [`six`](https://pypi.org/project/six/) library to write version-agnostic
62 Python code.
963
10 Please format new code with [black](https://github.com/ambv/black), using the default settings.
64 ### Style
1165
12 ## Tests
66 The code (including the tests) should follow PEP8. You can check for the
67 code style with:
68 ```console
69 $ tox -e codestyle
70 ```
1371
14 New code should have unit tests. We strive to have near 100% coverage. Get in touch, if you need assistance with the tests.
72 This will invoke [`flake8`](https://pypi.org/project/flake8/) with some common
73 plugins such as [`flake8-comprehensions`](https://pypi.org/project/flake8-comprehensions/).
74
75 ### Format
76
77 Please format new code with [black](https://github.com/ambv/black), using the
78 default settings. You can check whether the code is well-formatted with:
79 ```console
80 $ tox -e codeformat
81 ```
82
83 ### Type annotations
84
85 The code is typechecked with [`mypy`](https://pypi.org/project/mypy/), and
86 type annotations are written as comments, to stay compatible with Python2. Run
87 the typechecking with:
88 ```console
89 $ tox -e typecheck
90 ```
91
92
93 ## Documentation
94
95 ### Dependencies
96
97 The documentation is built with [Sphinx](https://pypi.org/project/Sphinx/),
98 using the [ReadTheDocs](https://pypi.org/project/sphinx-rtd-theme/) theme.
99 The dependencies are listed in `docs/requirements.txt` and can be installed with
100 `pip`:
101 ```console
102 $ pip install -r docs/requirements.txt
103 ```
104
105 ### Building
106
107 Run the following command to build the HTML documentation:
108 ```console
109 $ python setup.py build_sphinx
110 ```
111
112 The documentation index will be written to the `build/sphinx/html/`
113 directory.
114
115 ### Style
116
117 The API reference is written in the Python source, using docstrings in
118 [Google format](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html).
119 The documentation style can be checked with:
120 ```console
121 $ tox -e docstyle
122 ```
11
22 Many thanks to the following developers for contributing to this project:
33
4 - [Adrian Garcia Badaracco](https://github.com/adriangb)
5 - [Alex Povel](https://github.com/alexpovel)
46 - [Andreas Tollkötter](https://github.com/atollk)
5 - [C. W.](https://github.com/chfw)
7 - [Andrew Scheller](https://github.com/lurch)
8 - [Andrey Serov](https://github.com/zmej-serow)
9 - [Ben Lindsay](https://github.com/benlindsay)
10 - [Bernhard M. Wiedemann](https://github.com/bmwiedemann)
11 - [@chfw](https://github.com/chfw)
12 - [Dafna Hirschfeld](https://github.com/kamomil)
613 - [Diego Argueta](https://github.com/dargueta)
14 - [Eelke van den Bos](https://github.com/eelkevdbos)
15 - [Egor Namakonov](https://github.com/fresheed)
16 - [Felix Yan](https://github.com/felixonmars)
17 - [@FooBarQuaxx](https://github.com/FooBarQuaxx)
718 - [Geoff Jukes](https://github.com/geoffjukes)
19 - [George Macon](https://github.com/gmacon)
820 - [Giampaolo Cimino](https://github.com/gpcimino)
21 - [@Hoboneer](https://github.com/Hoboneer)
22 - [Jon Hagg](https://github.com/jon-hagg)
23 - [Joseph Atkins-Turkish](https://github.com/Spacerat)
24 - [Joshua Tauberer](https://github.com/JoshData)
925 - [Justin Charlong](https://github.com/jcharlong)
1026 - [Louis Sautier](https://github.com/sbraz)
27 - [Martin Durant](https://github.com/martindurant)
1128 - [Martin Larralde](https://github.com/althonos)
29 - [Masaya Nakamura](https://github.com/mashabow)
30 - [Matthew Gamble](https://github.com/djmattyg007)
1231 - [Morten Engelhardt Olsen](https://github.com/xoriath)
32 - [@mrg0029](https://github.com/mrg0029)
33 - [Nathan Goldbaum](https://github.com/ngoldbaum)
1334 - [Nick Henderson](https://github.com/nwh)
35 - [Oliver Galvin](https://github.com/odgalvin)
36 - [Philipp Wiesner](https://github.com/birnbaum)
37 - [Philippe Ombredanne](https://github.com/pombredanne)
38 - [Rehan Khwaja](https://github.com/rkhwaja)
39 - [Silvan Spross](https://github.com/sspross)
40 - [@sqwishy](https://github.com/sqwishy)
41 - [Sven Schliesing](https://github.com/muffl0n)
42 - [Thomas Feldmann](https://github.com/tfeldmann)
43 - [Tim Gates](https://github.com/timgates42/)
44 - [@tkossak](https://github.com/tkossak)
45 - [Todd Levi](https://github.com/televi)
46 - [Vilius Grigaliūnas](https://github.com/vilius-g)
1447 - [Will McGugan](https://github.com/willmcgugan)
1548 - [Zmej Serow](https://github.com/zmej-serow)
0 include CHANGELOG.md
1 include CONTRIBUTING.md
2 include CONTRIBUTORS.md
03 include LICENSE
14 graft tests
25 graft docs
11
22 Python's Filesystem abstraction layer.
33
4 [![PyPI version](https://badge.fury.io/py/fs.svg)](https://badge.fury.io/py/fs)
4 [![PyPI version](https://img.shields.io/pypi/v/fs)](https://pypi.org/project/fs/)
55 [![PyPI](https://img.shields.io/pypi/pyversions/fs.svg)](https://pypi.org/project/fs/)
6 [![Downloads](https://pepy.tech/badge/fs/month)](https://pepy.tech/project/fs/month)
7
8
9 [![Build Status](https://travis-ci.org/PyFilesystem/pyfilesystem2.svg?branch=master)](https://travis-ci.org/PyFilesystem/pyfilesystem2)
10 [![Windows Build Status](https://ci.appveyor.com/api/projects/status/github/pyfilesystem/pyfilesystem2?branch=master&svg=true)](https://ci.appveyor.com/project/willmcgugan/pyfilesystem2)
11 [![Coverage Status](https://coveralls.io/repos/github/PyFilesystem/pyfilesystem2/badge.svg)](https://coveralls.io/github/PyFilesystem/pyfilesystem2)
12 [![Codacy Badge](https://api.codacy.com/project/badge/Grade/30ad6445427349218425d93886ade9ee)](https://www.codacy.com/app/will-mcgugan/pyfilesystem2?utm_source=github.com&utm_medium=referral&utm_content=PyFilesystem/pyfilesystem2&utm_campaign=Badge_Grade)
6 [![Downloads](https://pepy.tech/badge/fs/month)](https://pepy.tech/project/fs/)
7 [![Build Status](https://img.shields.io/github/workflow/status/PyFilesystem/pyfilesystem2/Test/master?logo=github&cacheSeconds=600)](https://github.com/PyFilesystem/pyfilesystem2/actions?query=branch%3Amaster)
8 [![Windows Build Status](https://img.shields.io/appveyor/build/willmcgugan/pyfilesystem2/master?logo=appveyor&cacheSeconds=600)](https://ci.appveyor.com/project/willmcgugan/pyfilesystem2)
9 [![Coverage Status](https://img.shields.io/coveralls/github/PyFilesystem/pyfilesystem2/master?cacheSeconds=600)](https://coveralls.io/github/PyFilesystem/pyfilesystem2)
10 [![Codacy Badge](https://img.shields.io/codacy/grade/30ad6445427349218425d93886ade9ee/master?logo=codacy)](https://www.codacy.com/app/will-mcgugan/pyfilesystem2?utm_source=github.com&utm_medium=referral&utm_content=PyFilesystem/pyfilesystem2&utm_campaign=Badge_Grade)
11 [![Docs](https://img.shields.io/readthedocs/pyfilesystem2?maxAge=3600)](http://pyfilesystem2.readthedocs.io/en/stable/?badge=stable)
1312
1413 ## Documentation
1514
1919
2020 install:
2121 # We need wheel installed to build wheels
22 - "%PYTHON%\\python.exe -m pip install pytest pytest-randomly pytest-cov psutil pyftpdlib mock"
22 - "%PYTHON%\\python.exe -m pip install -U pip wheel setuptools"
23 - "%PYTHON%\\python.exe -m pip install pytest"
24 - "%PYTHON%\\python.exe -m pip install -r tests/requirements.txt"
2325 - "%PYTHON%\\python.exe setup.py install"
2426
2527 build: off
2628
2729 test_script:
28 - "%PYTHON%\\python.exe -m pytest -v tests"
30 - "%PYTHON%\\python.exe -m pytest"
0 # the bare requirements for building docs
1 Sphinx ~=3.0
2 sphinx-rtd-theme ~=0.5.1
3 recommonmark ~=0.6
1212 # serve to show the default.
1313
1414 import sys
15
1516 import os
16
17
1817 import sphinx_rtd_theme
1918
2019 html_theme = "sphinx_rtd_theme"
3837 'sphinx.ext.autodoc',
3938 'sphinx.ext.viewcode',
4039 'sphinx.ext.napoleon',
41 'sphinx.ext.intersphinx'
40 'sphinx.ext.intersphinx',
41 "recommonmark",
4242 ]
4343
4444 # Add any paths that contain templates here, relative to this directory.
6262
6363 # General information about the project.
6464 project = u'PyFilesystem'
65 copyright = u'2016-2017, Will McGugan'
65 copyright = u'2016-2021, Will McGugan and the PyFilesystem2 contributors'
6666 author = u'Will McGugan'
6767
6868 # The version info for the project you're documenting, acts as replacement for
7070 # built documents.
7171 #
7272 from fs import __version__
73
7374 # The short X.Y version.
7475 version = '.'.join(__version__.split('.')[:2])
7576 # The full version, including alpha/beta/rc tags.
303304 #texinfo_no_detailmenu = False
304305
305306 napoleon_include_special_with_doc = True
307
308
309 # -- Options for autodoc -----------------------------------------------------
310
311 # Configure autodoc so that it doesn't skip building the documentation for
312 # __init__ methods, since the arguments to instantiate classes should be in
313 # the __init__ docstring and not at the class-level.
314
315 autodoc_default_options = {
316 'special-members': '__init__',
317 }
0 ../../CONTRIBUTING.md
2828
2929 __all__ = ['S3FSOpener']
3030
31 from fs.opener import Opener, OpenerError
31 from fs.opener import Opener
32 from fs.opener.errors import OpenerError
3233
3334 from ._s3fs import S3FS
3435
175175
176176 In the case of a ``OSFS``, a standard file-like object will be returned. Other filesystems may return a different object supporting the same methods. For instance, :class:`~fs.memoryfs.MemoryFS` will return a ``io.BytesIO`` object.
177177
178 PyFilesystem also offers a number of shortcuts for common file related operations. For instance, :meth:`~fs.base.FS.readbytes` will return the file contents as a bytes, and :meth:`~fs.base.FS.readtext` will read unicode text. These methods is generally preferable to explicitly opening files, as the FS object may have an optimized implementation.
178 PyFilesystem also offers a number of shortcuts for common file related operations. For instance, :meth:`~fs.base.FS.readbytes` will return the file contents as bytes, and :meth:`~fs.base.FS.readtext` will read unicode text. These methods are generally preferable to explicitly opening files, as the FS object may have an optimized implementation.
179179
180180 Other *shortcut* methods are :meth:`~fs.base.FS.download`, :meth:`~fs.base.FS.upload`, :meth:`~fs.base.FS.writebytes`, :meth:`~fs.base.FS.writetext`.
181181
2424 external.rst
2525 interface.rst
2626 reference.rst
27
27 contributing.md
2828
2929
3030 Indices and tables
1919 * :meth:`~fs.base.FS.getdetails` Get details info namespace for a resource.
2020 * :meth:`~fs.base.FS.getinfo` Get info regarding a file or directory.
2121 * :meth:`~fs.base.FS.getmeta` Get meta information for a resource.
22 * :meth:`~fs.base.FS.getmodified` Get the last modified time of a resource.
2223 * :meth:`~fs.base.FS.getospath` Get path with encoding expected by the OS.
2324 * :meth:`~fs.base.FS.getsize` Get the size of a file.
2425 * :meth:`~fs.base.FS.getsyspath` Get the system path of a resource, if one exists.
5555
5656 from fs import open_fs
5757 projects_fs = open_fs('osfs://~/projects')
58
59
60 Manually registering Openers
61 ----------------------------
62
63 The ``fs.opener`` registry uses an entry point to install external openers
64 (see :ref:`extension`), and it does so once, when you import `fs` for the first
65 time. In some rare cases where entry points are not available (for instance,
66 when running an embedded interpreter) or when extensions are installed *after*
67 the interpreter has started (for instance in a notebook, see
68 `PyFilesystem2#485 <https://github.com/PyFilesystem/pyfilesystem2/issues/485>`_), openers may not be registered automatically.
69
70 However, a new opener can be installed manually at any time with the
71 `fs.opener.registry.install` method. For instance, here's how the opener for
72 the `s3fs <https://github.com/PyFilesystem/s3fs>`_ extension can be added to
73 the registry::
74
75 import fs.opener
76 from fs_s3fs.opener import S3FSOpener
77
78 fs.opener.registry.install(S3FSOpener)
79 # fs.open_fs("s3fs://...") should now work
1010 from fs import open_fs
1111 from fs.filesize import traditional
1212
13
1413 fs_url = sys.argv[1]
1514 count = 0
1615
66
77 """
88
9 from collections import defaultdict
109 import sys
1110
11 from collections import defaultdict
12
1213 from fs import open_fs
13
1414
1515 hashes = defaultdict(list)
1616 with open_fs(sys.argv[1]) as fs:
1010
1111 from fs import open_fs
1212
13
1413 with open_fs(sys.argv[1]) as fs:
1514 count = fs.glob("**/*.pyc").remove()
1615 print(f"{count} .pyc files remove")
1111
1212 """
1313
14 import sys
15
1416 import os
15 import sys
1617
1718 from fs import open_fs
1819
22
33 __import__("pkg_resources").declare_namespace(__name__) # type: ignore
44
5 from . import path
6 from ._fscompat import fsdecode, fsencode
57 from ._version import __version__
68 from .enums import ResourceType, Seek
79 from .opener import open_fs
8 from ._fscompat import fsencode, fsdecode
9 from . import path
1010
1111 __all__ = ["__version__", "ResourceType", "Seek", "open_fs"]
55
66 from __future__ import unicode_literals
77
8 import threading
98 import typing
109
10 import threading
1111 from six.moves.queue import Queue
1212
13 from .copy import copy_file_internal
13 from .copy import copy_file_internal, copy_modified_time
1414 from .errors import BulkCopyFailed
1515 from .tools import copy_file_data
1616
1717 if typing.TYPE_CHECKING:
18 from typing import IO, List, Optional, Text, Tuple, Type
19
20 from types import TracebackType
21
1822 from .base import FS
19 from types import TracebackType
20 from typing import IO, List, Optional, Text, Type
2123
2224
2325 class _Worker(threading.Thread):
7476 class Copier(object):
7577 """Copy files in worker threads."""
7678
77 def __init__(self, num_workers=4):
78 # type: (int) -> None
79 def __init__(self, num_workers=4, preserve_time=False):
80 # type: (int, bool) -> None
7981 if num_workers < 0:
8082 raise ValueError("num_workers must be >= 0")
8183 self.num_workers = num_workers
84 self.preserve_time = preserve_time
85 self.all_tasks = [] # type: List[Tuple[FS, Text, FS, Text]]
8286 self.queue = None # type: Optional[Queue[_Task]]
8387 self.workers = [] # type: List[_Worker]
8488 self.errors = [] # type: List[Exception]
96100 def stop(self):
97101 """Stop the workers (will block until they are finished)."""
98102 if self.running and self.num_workers:
103 # Notify the workers that all tasks have arrived
104 # and wait for them to finish.
99105 for _worker in self.workers:
100106 self.queue.put(None)
101107 for worker in self.workers:
102108 worker.join()
109
110 # If the "last modified" time is to be preserved, do it now.
111 if self.preserve_time:
112 for args in self.all_tasks:
113 copy_modified_time(*args)
114
103115 # Free up references held by workers
104116 del self.workers[:]
105117 self.queue.join()
123135 if traceback is None and self.errors:
124136 raise BulkCopyFailed(self.errors)
125137
126 def copy(self, src_fs, src_path, dst_fs, dst_path):
127 # type: (FS, Text, FS, Text) -> None
138 def copy(self, src_fs, src_path, dst_fs, dst_path, preserve_time=False):
139 # type: (FS, Text, FS, Text, bool) -> None
128140 """Copy a file from one fs to another."""
129141 if self.queue is None:
130142 # This should be the most performant for a single-thread
131 copy_file_internal(src_fs, src_path, dst_fs, dst_path)
143 copy_file_internal(
144 src_fs, src_path, dst_fs, dst_path, preserve_time=self.preserve_time
145 )
132146 else:
147 self.all_tasks.append((src_fs, src_path, dst_fs, dst_path))
133148 src_file = src_fs.openbin(src_path, "r")
134149 try:
135150 dst_file = dst_fs.openbin(dst_path, "w")
00 import six
11
22 try:
3 from os import fsencode, fsdecode
3 from os import fsdecode, fsencode
44 except ImportError:
5 from backports.os import fsencode, fsdecode # type: ignore
5 from backports.os import fsdecode, fsencode # type: ignore
66
77 try:
88 from os import fspath
0 from __future__ import absolute_import
1 from __future__ import print_function
2 from __future__ import unicode_literals
3
4 import unicodedata
5 import datetime
0 from __future__ import absolute_import, print_function, unicode_literals
1
62 import re
73 import time
8
9 from pytz import UTC
4 import unicodedata
5 from datetime import datetime
6
7 try:
8 from datetime import timezone
9 except ImportError:
10 from ._tzcompat import timezone # type: ignore
1011
1112 from .enums import ResourceType
1213 from .permissions import Permissions
1314
14
15 EPOCH_DT = datetime.datetime.fromtimestamp(0, UTC)
15 EPOCH_DT = datetime.fromtimestamp(0, timezone.utc)
1616
1717
1818 RE_LINUX = re.compile(
1919 r"""
2020 ^
21 ([ldrwx-]{10})
21 ([-dlpscbD])
22 ([r-][w-][xsS-][r-][w-][xsS-][r-][w-][xtT-][\.\+]?)
2223 \s+?
2324 (\d+)
2425 \s+?
25 ([\w\-]+)
26 \s+?
27 ([\w\-]+)
26 ([A-Za-z0-9][A-Za-z0-9\-\.\_\@]*\$?)
27 \s+?
28 ([A-Za-z0-9][A-Za-z0-9\-\.\_\@]*\$?)
2829 \s+?
2930 (\d+)
3031 \s+?
5455
5556
5657 def get_decoders():
57 """
58 Returns all available FTP LIST line decoders with their matching regexes.
59 """
58 """Return all available FTP LIST line decoders with their matching regexes."""
6059 decoders = [
6160 (RE_LINUX, decode_linux),
6261 (RE_WINDOWSNT, decode_windowsnt),
9897 day = _t.tm_mday
9998 hour = _t.tm_hour
10099 minutes = _t.tm_min
101 dt = datetime.datetime(year, month, day, hour, minutes, tzinfo=UTC)
100 dt = datetime(year, month, day, hour, minutes, tzinfo=timezone.utc)
102101
103102 epoch_time = (dt - EPOCH_DT).total_seconds()
104103 return epoch_time
109108
110109
111110 def decode_linux(line, match):
112 perms, links, uid, gid, size, mtime, name = match.groups()
113 is_link = perms.startswith("l")
114 is_dir = perms.startswith("d") or is_link
111 ty, perms, links, uid, gid, size, mtime, name = match.groups()
112 is_link = ty == "l"
113 is_dir = ty == "d" or is_link
115114 if is_link:
116115 name, _, _link_name = name.partition("->")
117116 name = name.strip()
118117 _link_name = _link_name.strip()
119 permissions = Permissions.parse(perms[1:])
118 permissions = Permissions.parse(perms)
120119
121120 mtime_epoch = _decode_linux_time(mtime)
122121
147146
148147
149148 def decode_windowsnt(line, match):
150 """
151 Decodes a Windows NT FTP LIST line like one of these:
152
153 `11-02-18 02:12PM <DIR> images`
154 `11-02-18 03:33PM 9276 logo.gif`
155
156 Alternatively, the time (02:12PM) might also be present in 24-hour format (14:12).
149 """Decode a Windows NT FTP LIST line.
150
151 Examples:
152 Decode a directory line::
153
154 >>> line = "11-02-18 02:12PM <DIR> images"
155 >>> match = RE_WINDOWSNT.match(line)
156 >>> pprint(decode_windowsnt(line, match))
157 {'basic': {'is_dir': True, 'name': 'images'},
158 'details': {'modified': 1518358320.0, 'type': 1},
159 'ftp': {'ls': '11-02-18 02:12PM <DIR> images'}}
160
161 Decode a file line::
162
163 >>> line = "11-02-18 03:33PM 9276 logo.gif"
164 >>> match = RE_WINDOWSNT.match(line)
165 >>> pprint(decode_windowsnt(line, match))
166 {'basic': {'is_dir': False, 'name': 'logo.gif'},
167 'details': {'modified': 1518363180.0, 'size': 9276, 'type': 2},
168 'ftp': {'ls': '11-02-18 03:33PM 9276 logo.gif'}}
169
170 Alternatively, the time might also be present in 24-hour format::
171
172 >>> line = "11-02-18 15:33 9276 logo.gif"
173 >>> match = RE_WINDOWSNT.match(line)
174 >>> decode_windowsnt(line, match)["details"]["modified"]
175 1518363180.0
176
157177 """
158178 is_dir = match.group("size") == "<DIR>"
159179
0 # mypy: ignore-errors
1 try:
2 from os.path import commonpath
3 except ImportError:
4 # Return the longest common sub-path of the sequence of paths given as input.
5 # The paths are not normalized before comparing them (this is the
6 # responsibility of the caller). Any trailing separator is stripped from the
7 # returned path.
8
9 def commonpath(paths):
10 """Given a sequence of path names, returns the longest common sub-path."""
11
12 if not paths:
13 raise ValueError("commonpath() arg is an empty sequence")
14
15 paths = tuple(paths)
16 if isinstance(paths[0], bytes):
17 sep = b"/"
18 curdir = b"."
19 else:
20 sep = "/"
21 curdir = "."
22
23 split_paths = [path.split(sep) for path in paths]
24
25 try:
26 (isabs,) = set(p[:1] == sep for p in paths)
27 except ValueError:
28 raise ValueError("Can't mix absolute and relative paths")
29
30 split_paths = [[c for c in s if c and c != curdir] for s in split_paths]
31 s1 = min(split_paths)
32 s2 = max(split_paths)
33 common = s1
34 for i, c in enumerate(s1):
35 if c != s2[i]:
36 common = s1[:i]
37 break
38
39 prefix = sep if isabs else sep[:0]
40 return prefix + sep.join(common)
2626 >>> MyClass('Will')
2727 MyClass('foo', name='Will')
2828 >>> MyClass(None)
29 MyClass()
29 MyClass('foo')
3030
3131 """
3232 arguments = [repr(arg) for arg in args]
22
33 """
44 import sys
5
56 import six
6
77
88 _PY = sys.version_info
99
0 """Compatibility shim for python2's lack of datetime.timezone.
1
2 This is the example code from the Python 2 documentation:
3 https://docs.python.org/2.7/library/datetime.html#tzinfo-objects
4 """
5
6 from datetime import timedelta, tzinfo
7
8 ZERO = timedelta(0)
9
10
11 class UTC(tzinfo):
12 """UTC"""
13
14 def utcoffset(self, dt):
15 return ZERO
16
17 def tzname(self, dt):
18 return "UTC"
19
20 def dst(self, dt):
21 return ZERO
22
23
24 utc = UTC()
25
26
27 class timezone:
28 utc = utc
0 import typing
1
2 import platform
03 import re
14 import six
2 import platform
3 import typing
45
56 if typing.TYPE_CHECKING:
67 from typing import Text
1011
1112 def url_quote(path_snippet):
1213 # type: (Text) -> Text
13 """
14 On Windows, it will separate drive letter and quote windows
15 path alone. No magic on Unix-alie path, just pythonic
16 `pathname2url`
14 """Quote a URL without quoting the Windows drive letter, if any.
15
16 On Windows, it will separate drive letter and quote Windows
17 path alone. No magic on Unix-like path, just pythonic
18 `~urllib.request.pathname2url`.
1719
1820 Arguments:
19 path_snippet: a file path, relative or absolute.
21 path_snippet (str): a file path, relative or absolute.
22
2023 """
2124 if _WINDOWS_PLATFORM and _has_drive_letter(path_snippet):
2225 drive_letter, path = path_snippet.split(":", 1)
3336
3437 def _has_drive_letter(path_snippet):
3538 # type: (Text) -> bool
36 """
37 The following path will get True
38 D:/Data
39 C:\\My Dcouments\\ test
40
41 And will get False
42
43 /tmp/abc:test
39 """Check whether a path contains a drive letter.
4440
4541 Arguments:
46 path_snippet: a file path, relative or absolute.
42 path_snippet (str): a file path, relative or absolute.
43
44 Example:
45 >>> _has_drive_letter("D:/Data")
46 True
47 >>> _has_drive_letter(r"C:\\System32\\ test")
48 True
49 >>> _has_drive_letter("/tmp/abc:test")
50 False
51
4752 """
4853 windows_drive_pattern = ".:[/\\\\].*$"
4954 return re.match(windows_drive_pattern, path_snippet) is not None
00 """Version, used in module and setup.py.
11 """
2 __version__ = "2.4.12"
2 __version__ = "2.4.16"
1010
1111 import typing
1212
13 import abc
14 import six
15 from appdirs import AppDirs
16
17 from ._repr import make_repr
1318 from .osfs import OSFS
14 from ._repr import make_repr
15 from appdirs import AppDirs
1619
1720 if typing.TYPE_CHECKING:
1821 from typing import Optional, Text
2831 ]
2932
3033
34 class _CopyInitMeta(abc.ABCMeta):
35 """A metaclass that performs a hard copy of the `__init__`.
36
37 This is a fix for Sphinx, which is a pain to configure in a way that
38 it documents the ``__init__`` method of a class when it is inherited.
39 Copying ``__init__`` makes it think it is not inherited, and let us
40 share the documentation between all the `_AppFS` subclasses.
41
42 """
43
44 def __new__(mcls, classname, bases, cls_dict):
45 cls_dict.setdefault("__init__", bases[0].__init__)
46 return super(abc.ABCMeta, mcls).__new__(mcls, classname, bases, cls_dict)
47
48
49 @six.add_metaclass(_CopyInitMeta)
3150 class _AppFS(OSFS):
32 """Abstract base class for an app FS.
33 """
51 """Abstract base class for an app FS."""
3452
3553 # FIXME(@althonos): replace by ClassVar[Text] once
3654 # https://github.com/python/mypy/pull/4718 is accepted
4664 create=True, # type: bool
4765 ):
4866 # type: (...) -> None
67 """Create a new application-specific filesystem.
68
69 Arguments:
70 appname (str): The name of the application.
71 author (str): The name of the author (used on Windows).
72 version (str): Optional version string, if a unique location
73 per version of the application is required.
74 roaming (bool): If `True`, use a *roaming* profile on
75 Windows.
76 create (bool): If `True` (the default) the directory
77 will be created if it does not exist.
78
79 """
4980 self.app_dirs = AppDirs(appname, author, version, roaming)
5081 self._create = create
5182 super(_AppFS, self).__init__(
76107 May also be opened with
77108 ``open_fs('userdata://appname:author:version')``.
78109
79 Arguments:
80 appname (str): The name of the application.
81 author (str): The name of the author (used on Windows).
82 version (str): Optional version string, if a unique location
83 per version of the application is required.
84 roaming (bool): If `True`, use a *roaming* profile on
85 Windows.
86 create (bool): If `True` (the default) the directory
87 will be created if it does not exist.
88
89110 """
90111
91112 app_dir = "user_data_dir"
96117
97118 May also be opened with
98119 ``open_fs('userconf://appname:author:version')``.
99
100 Arguments:
101 appname (str): The name of the application.
102 author (str): The name of the author (used on Windows).
103 version (str): Optional version string, if a unique location
104 per version of the application is required.
105 roaming (bool): If `True`, use a *roaming* profile on
106 Windows.
107 create (bool): If `True` (the default) the directory
108 will be created if it does not exist.
109120
110121 """
111122
118129 May also be opened with
119130 ``open_fs('usercache://appname:author:version')``.
120131
121 Arguments:
122 appname (str): The name of the application.
123 author (str): The name of the author (used on Windows).
124 version (str): Optional version string, if a unique location
125 per version of the application is required.
126 roaming (bool): If `True`, use a *roaming* profile on
127 Windows.
128 create (bool): If `True` (the default) the directory
129 will be created if it does not exist.
130
131132 """
132133
133134 app_dir = "user_cache_dir"
138139
139140 May also be opened with
140141 ``open_fs('sitedata://appname:author:version')``.
141
142 Arguments:
143 appname (str): The name of the application.
144 author (str): The name of the author (used on Windows).
145 version (str): Optional version string, if a unique location
146 per version of the application is required.
147 roaming (bool): If `True`, use a *roaming* profile on
148 Windows.
149 create (bool): If `True` (the default) the directory
150 will be created if it does not exist.
151142
152143 """
153144
160151 May also be opened with
161152 ``open_fs('siteconf://appname:author:version')``.
162153
163 Arguments:
164 appname (str): The name of the application.
165 author (str): The name of the author (used on Windows).
166 version (str): Optional version string, if a unique location
167 per version of the application is required.
168 roaming (bool): If `True`, use a *roaming* profile on
169 Windows.
170 create (bool): If `True` (the default) the directory
171 will be created if it does not exist.
172
173154 """
174155
175156 app_dir = "site_config_dir"
181162 May also be opened with
182163 ``open_fs('userlog://appname:author:version')``.
183164
184 Arguments:
185 appname (str): The name of the application.
186 author (str): The name of the author (used on Windows).
187 version (str): Optional version string, if a unique location
188 per version of the application is required.
189 roaming (bool): If `True`, use a *roaming* profile on
190 Windows.
191 create (bool): If `True` (the default) the directory
192 will be created if it does not exist.
193
194165 """
195166
196167 app_dir = "user_log_dir"
66 """
77
88 from __future__ import absolute_import, print_function, unicode_literals
9
10 import typing
911
1012 import abc
1113 import hashlib
1214 import itertools
1315 import os
16 import six
1417 import threading
1518 import time
16 import typing
19 import warnings
1720 from contextlib import closing
1821 from functools import partial, wraps
19 import warnings
20
21 import six
22
23 from . import copy, errors, fsencode, iotools, move, tools, walk, wildcard
22
23 from . import copy, errors, fsencode, iotools, tools, walk, wildcard
24 from .copy import copy_modified_time
2425 from .glob import BoundGlobber
2526 from .mode import validate_open_mode
2627 from .path import abspath, join, normpath
2829 from .walk import Walker
2930
3031 if typing.TYPE_CHECKING:
31 from datetime import datetime
32 from threading import RLock
3332 from typing import (
33 IO,
3434 Any,
3535 BinaryIO,
3636 Callable,
3737 Collection,
3838 Dict,
39 IO,
4039 Iterable,
4140 Iterator,
4241 List,
4746 Type,
4847 Union,
4948 )
49
50 from datetime import datetime
51 from threading import RLock
5052 from types import TracebackType
53
5154 from .enums import ResourceType
5255 from .info import Info, RawInfo
56 from .permissions import Permissions
5357 from .subfs import SubFS
54 from .permissions import Permissions
5558 from .walk import BoundWalker
5659
5760 _F = typing.TypeVar("_F", bound="FS")
9194
9295 @six.add_metaclass(abc.ABCMeta)
9396 class FS(object):
94 """Base class for FS objects.
95 """
97 """Base class for FS objects."""
9698
9799 # This is the "standard" meta namespace.
98100 _meta = {} # type: Dict[Text, Union[Text, int, bool, None]]
105107
106108 def __init__(self):
107109 # type: (...) -> None
108 """Create a filesystem. See help(type(self)) for accurate signature.
109 """
110 """Create a filesystem. See help(type(self)) for accurate signature."""
110111 self._closed = False
111112 self._lock = threading.RLock()
112113 super(FS, self).__init__()
117118
118119 def __enter__(self):
119120 # type: (...) -> FS
120 """Allow use of filesystem as a context manager.
121 """
121 """Allow use of filesystem as a context manager."""
122122 return self
123123
124124 def __exit__(
128128 traceback, # type: Optional[TracebackType]
129129 ):
130130 # type: (...) -> None
131 """Close filesystem on exit.
132 """
131 """Close filesystem on exit."""
133132 self.close()
134133
135134 @property
136135 def glob(self):
137 """`~fs.glob.BoundGlobber`: a globber object..
138 """
136 """`~fs.glob.BoundGlobber`: a globber object.."""
139137 return BoundGlobber(self)
140138
141139 @property
142140 def walk(self):
143141 # type: (_F) -> BoundWalker[_F]
144 """`~fs.walk.BoundWalker`: a walker bound to this filesystem.
145 """
142 """`~fs.walk.BoundWalker`: a walker bound to this filesystem."""
146143 return self.walker_class.bind(self)
147144
148145 # ---------------------------------------------------------------- #
157154
158155 Arguments:
159156 path (str): A path to a resource on the filesystem.
160 namespaces (list, optional): Info namespaces to query
161 (defaults to *[basic]*).
157 namespaces (list, optional): Info namespaces to query. The
158 `"basic"` namespace is alway included in the returned
159 info, whatever the value of `namespaces` may be.
162160
163161 Returns:
164162 ~fs.info.Info: resource information object.
163
164 Raises:
165 fs.errors.ResourceNotFound: If ``path`` does not exist.
165166
166167 For more information regarding resource information, see :ref:`info`.
167168
240241 io.IOBase: a *file-like* object.
241242
242243 Raises:
243 fs.errors.FileExpected: If the path is not a file.
244 fs.errors.FileExists: If the file exists, and *exclusive mode*
245 is specified (``x`` in the mode).
246 fs.errors.ResourceNotFound: If the path does not exist.
244 fs.errors.FileExpected: If ``path`` exists and is not a file.
245 fs.errors.FileExists: If the ``path`` exists, and
246 *exclusive mode* is specified (``x`` in the mode).
247 fs.errors.ResourceNotFound: If ``path`` does not exist and
248 ``mode`` does not imply creating the file, or if any
249 ancestor of ``path`` does not exist.
247250
248251 """
249252
272275 Raises:
273276 fs.errors.DirectoryNotEmpty: If the directory is not empty (
274277 see `~fs.base.FS.removetree` for a way to remove the
275 directory contents.).
278 directory contents).
276279 fs.errors.DirectoryExpected: If the path does not refer to
277280 a directory.
278281 fs.errors.ResourceNotFound: If no resource exists at the
392395 """
393396 self._closed = True
394397
395 def copy(self, src_path, dst_path, overwrite=False):
396 # type: (Text, Text, bool) -> None
398 def copy(
399 self,
400 src_path, # type: Text
401 dst_path, # type: Text
402 overwrite=False, # type: bool
403 preserve_time=False, # type: bool
404 ):
405 # type: (...) -> None
397406 """Copy file contents from ``src_path`` to ``dst_path``.
398407
399408 Arguments:
401410 dst_path (str): Path to destination file.
402411 overwrite (bool): If `True`, overwrite the destination file
403412 if it exists (defaults to `False`).
413 preserve_time (bool): If `True`, try to preserve mtime of the
414 resource (defaults to `False`).
404415
405416 Raises:
406417 fs.errors.DestinationExists: If ``dst_path`` exists,
407418 and ``overwrite`` is `False`.
408419 fs.errors.ResourceNotFound: If a parent directory of
409420 ``dst_path`` does not exist.
421 fs.errors.FileExpected: If ``src_path`` is not a file.
410422
411423 """
412424 with self._lock:
415427 with closing(self.open(src_path, "rb")) as read_file:
416428 # FIXME(@althonos): typing complains because open return IO
417429 self.upload(dst_path, read_file) # type: ignore
418
419 def copydir(self, src_path, dst_path, create=False):
420 # type: (Text, Text, bool) -> None
430 if preserve_time:
431 copy_modified_time(self, src_path, self, dst_path)
432
433 def copydir(
434 self,
435 src_path, # type: Text
436 dst_path, # type: Text
437 create=False, # type: bool
438 preserve_time=False, # type: bool
439 ):
440 # type: (...) -> None
421441 """Copy the contents of ``src_path`` to ``dst_path``.
422442
423443 Arguments:
425445 dst_path (str): Path to destination directory.
426446 create (bool): If `True`, then ``dst_path`` will be created
427447 if it doesn't exist already (defaults to `False`).
448 preserve_time (bool): If `True`, try to preserve mtime of the
449 resource (defaults to `False`).
428450
429451 Raises:
430452 fs.errors.ResourceNotFound: If the ``dst_path``
431453 does not exist, and ``create`` is not `True`.
454 fs.errors.DirectoryExpected: If ``src_path`` is not a
455 directory.
432456
433457 """
434458 with self._lock:
436460 raise errors.ResourceNotFound(dst_path)
437461 if not self.getinfo(src_path).is_dir:
438462 raise errors.DirectoryExpected(src_path)
439 copy.copy_dir(self, src_path, self, dst_path)
463 copy.copy_dir(self, src_path, self, dst_path, preserve_time=preserve_time)
440464
441465 def create(self, path, wipe=False):
442466 # type: (Text, bool) -> bool
471495
472496 Returns:
473497 str: a short description of the path.
498
499 Raises:
500 fs.errors.ResourceNotFound: If ``path`` does not exist.
474501
475502 """
476503 if not self.exists(path):
543570
544571 def match_dir(patterns, info):
545572 # type: (Optional[Iterable[Text]], Info) -> bool
546 """Pattern match info.name.
547 """
573 """Pattern match info.name."""
548574 return info.is_file or self.match(patterns, info.name)
549575
550576 def match_file(patterns, info):
551577 # type: (Optional[Iterable[Text]], Info) -> bool
552 """Pattern match info.name.
553 """
578 """Pattern match info.name."""
554579 return info.is_dir or self.match(patterns, info.name)
555580
556581 def exclude_dir(patterns, info):
557582 # type: (Optional[Iterable[Text]], Info) -> bool
558 """Pattern match info.name.
559 """
583 """Pattern match info.name."""
560584 return info.is_file or not self.match(patterns, info.name)
561585
562586 def exclude_file(patterns, info):
563587 # type: (Optional[Iterable[Text]], Info) -> bool
564 """Pattern match info.name.
565 """
588 """Pattern match info.name."""
566589 return info.is_dir or not self.match(patterns, info.name)
567590
568591 if files:
596619 bytes: the file contents.
597620
598621 Raises:
622 fs.errors.FileExpected: if ``path`` exists but is not a file.
599623 fs.errors.ResourceNotFound: if ``path`` does not exist.
600624
601625 """
607631
608632 def download(self, path, file, chunk_size=None, **options):
609633 # type: (Text, BinaryIO, Optional[int], **Any) -> None
610 """Copies a file from the filesystem to a file-like object.
634 """Copy a file from the filesystem to a file-like object.
611635
612636 This may be more efficient that opening and copying files
613637 manually if the filesystem supplies an optimized method.
638
639 Note that the file object ``file`` will *not* be closed by this
640 method. Take care to close it after this method completes
641 (ideally with a context manager).
614642
615643 Arguments:
616644 path (str): Path to a resource.
622650 **options: Implementation specific options required to open
623651 the source file.
624652
625 Note that the file object ``file`` will *not* be closed by this
626 method. Take care to close it after this method completes
627 (ideally with a context manager).
628
629653 Example:
630654 >>> with open('starwars.mov', 'wb') as write_file:
631 ... my_fs.download('/movies/starwars.mov', write_file)
655 ... my_fs.download('/Videos/starwars.mov', write_file)
656
657 Raises:
658 fs.errors.ResourceNotFound: if ``path`` does not exist.
632659
633660 """
634661 with self._lock:
670697 return contents
671698
672699 gettext = _new_name(readtext, "gettext")
700
701 def getmodified(self, path):
702 # type: (Text) -> Optional[datetime]
703 """Get the timestamp of the last modifying access of a resource.
704
705 Arguments:
706 path (str): A path to a resource.
707
708 Returns:
709 datetime: The timestamp of the last modification.
710
711 The *modified timestamp* of a file is the point in time
712 that the file was last changed. Depending on the file system,
713 it might only have limited accuracy.
714
715 """
716 return self.getinfo(path, namespaces=["details"]).modified
673717
674718 def getmeta(self, namespace="standard"):
675719 # type: (Text) -> Mapping[Text, object]
735779 Returns:
736780 int: the *size* of the resource.
737781
782 Raises:
783 fs.errors.ResourceNotFound: if ``path`` does not exist.
784
738785 The *size* of a file is the total number of readable bytes,
739786 which may not reflect the exact number of bytes of reserved
740787 disk space (or other storage medium).
750797 # type: (Text) -> Text
751798 """Get the *system path* of a resource.
752799
753 Parameters:
800 Arguments:
754801 path (str): A path on the filesystem.
755802
756803 Returns:
786833
787834 def getospath(self, path):
788835 # type: (Text) -> bytes
789 """Get a *system path* to a resource, encoded in the operating
790 system's prefered encoding.
791
792 Parameters:
836 """Get the *system path* to a resource, in the OS' prefered encoding.
837
838 Arguments:
793839 path (str): A path on the filesystem.
794840
795841 Returns:
806852
807853 Note:
808854 If you want your code to work in Python2.7 and Python3 then
809 use this method if you want to work will the OS filesystem
855 use this method if you want to work with the OS filesystem
810856 outside of the OSFS interface.
811857
812858 """
818864 # type: (Text) -> ResourceType
819865 """Get the type of a resource.
820866
821 Parameters:
867 Arguments:
822868 path (str): A path on the filesystem.
823869
824870 Returns:
825871 ~fs.enums.ResourceType: the type of the resource.
872
873 Raises:
874 fs.errors.ResourceNotFound: if ``path`` does not exist.
826875
827876 A type of a resource is an integer that identifies the what
828877 the resource references. The standard type integers may be one
856905 # type: (Text, Text) -> Text
857906 """Get the URL to a given resource.
858907
859 Parameters:
908 Arguments:
860909 path (str): A path on the filesystem
861910 purpose (str): A short string that indicates which URL
862911 to retrieve for the given path (if there is more than
863912 one). The default is ``'download'``, which should return
864913 a URL that serves the file. Other filesystems may support
865 other values for ``purpose``.
914 other values for ``purpose``: for instance, `OSFS` supports
915 ``'fs'``, which returns a FS URL (see :ref:`fs-urls`).
866916
867917 Returns:
868918 str: a URL.
877927 # type: (Text) -> bool
878928 """Check if a path maps to a system path.
879929
880 Parameters:
930 Arguments:
881931 path (str): A path on the filesystem.
882932
883933 Returns:
895945 # type: (Text, Text) -> bool
896946 """Check if a path has a corresponding URL.
897947
898 Parameters:
948 Arguments:
899949 path (str): A path on the filesystem.
900950 purpose (str): A purpose parameter, as given in
901951 `~fs.base.FS.geturl`.
913963
914964 def isclosed(self):
915965 # type: () -> bool
916 """Check if the filesystem is closed.
917 """
966 """Check if the filesystem is closed."""
918967 return getattr(self, "_closed", False)
919968
920969 def isdir(self, path):
921970 # type: (Text) -> bool
922971 """Check if a path maps to an existing directory.
923972
924 Parameters:
973 Arguments:
925974 path (str): A path on the filesystem.
926975
927976 Returns:
940989 A directory is considered empty when it does not contain
941990 any file or any directory.
942991
943 Parameters:
992 Arguments:
944993 path (str): A path to a directory on the filesystem.
945994
946995 Returns:
9571006 # type: (Text) -> bool
9581007 """Check if a path maps to an existing file.
9591008
960 Parameters:
1009 Arguments:
9611010 path (str): A path on the filesystem.
9621011
9631012 Returns:
9731022 # type: (Text) -> bool
9741023 """Check if a path maps to a symlink.
9751024
976 Parameters:
1025 Arguments:
9771026 path (str): A path on the filesystem.
9781027
9791028 Returns:
9971046 Example:
9981047 >>> with my_fs.lock(): # May block
9991048 ... # code here has exclusive access to the filesystem
1049 ... pass
10001050
10011051 It is a good idea to put a lock around any operations that you
10021052 would like to be *atomic*. For instance if you are copying
10151065 """
10161066 return self._lock
10171067
1018 def movedir(self, src_path, dst_path, create=False):
1019 # type: (Text, Text, bool) -> None
1068 def movedir(self, src_path, dst_path, create=False, preserve_time=False):
1069 # type: (Text, Text, bool, bool) -> None
10201070 """Move directory ``src_path`` to ``dst_path``.
10211071
1022 Parameters:
1072 Arguments:
10231073 src_path (str): Path of source directory on the filesystem.
10241074 dst_path (str): Path to destination directory.
10251075 create (bool): If `True`, then ``dst_path`` will be created
10261076 if it doesn't exist already (defaults to `False`).
1077 preserve_time (bool): If `True`, try to preserve mtime of the
1078 resources (defaults to `False`).
10271079
10281080 Raises:
10291081 fs.errors.ResourceNotFound: if ``dst_path`` does not exist,
10301082 and ``create`` is `False`.
1031
1032 """
1083 fs.errors.DirectoryExpected: if ``src_path`` or one of its
1084 ancestors is not a directory.
1085
1086 """
1087 from .move import move_dir
1088
10331089 with self._lock:
10341090 if not create and not self.exists(dst_path):
10351091 raise errors.ResourceNotFound(dst_path)
1036 move.move_dir(self, src_path, self, dst_path)
1092 move_dir(self, src_path, self, dst_path, preserve_time=preserve_time)
10371093
10381094 def makedirs(
10391095 self,
10781134 raise
10791135 return self.opendir(path)
10801136
1081 def move(self, src_path, dst_path, overwrite=False):
1082 # type: (Text, Text, bool) -> None
1137 def move(self, src_path, dst_path, overwrite=False, preserve_time=False):
1138 # type: (Text, Text, bool, bool) -> None
10831139 """Move a file from ``src_path`` to ``dst_path``.
10841140
10851141 Arguments:
10881144 file will be written to.
10891145 overwrite (bool): If `True`, destination path will be
10901146 overwritten if it exists.
1147 preserve_time (bool): If `True`, try to preserve mtime of the
1148 resources (defaults to `False`).
10911149
10921150 Raises:
10931151 fs.errors.FileExpected: If ``src_path`` maps to a
11141172 except OSError:
11151173 pass
11161174 else:
1175 if preserve_time:
1176 copy_modified_time(self, src_path, self, dst_path)
11171177 return
11181178 with self._lock:
11191179 with self.open(src_path, "rb") as read_file:
11201180 # FIXME(@althonos): typing complains because open return IO
11211181 self.upload(dst_path, read_file) # type: ignore
1182 if preserve_time:
1183 copy_modified_time(self, src_path, self, dst_path)
11221184 self.remove(src_path)
11231185
11241186 def open(
11951257 ~fs.subfs.SubFS: A filesystem representing a sub-directory.
11961258
11971259 Raises:
1198 fs.errors.DirectoryExpected: If ``dst_path`` does not
1199 exist or is not a directory.
1260 fs.errors.ResourceNotFound: If ``path`` does not exist.
1261 fs.errors.DirectoryExpected: If ``path`` is not a directory.
12001262
12011263 """
12021264 from .subfs import SubFS
12031265
12041266 _factory = factory or self.subfs_class or SubFS
12051267
1206 if not self.getbasic(path).is_dir:
1268 if not self.getinfo(path).is_dir:
12071269 raise errors.DirectoryExpected(path=path)
12081270 return _factory(self, path)
12091271
12101272 def removetree(self, dir_path):
12111273 # type: (Text) -> None
1212 """Recursively remove the contents of a directory.
1213
1214 This method is similar to `~fs.base.removedir`, but will
1274 """Recursively remove a directory and all its contents.
1275
1276 This method is similar to `~fs.base.FS.removedir`, but will
12151277 remove the contents of the directory if it is not empty.
12161278
12171279 Arguments:
12181280 dir_path (str): Path to a directory on the filesystem.
1281
1282 Raises:
1283 fs.errors.ResourceNotFound: If ``dir_path`` does not exist.
1284 fs.errors.DirectoryExpected: If ``dir_path`` is not a directory.
1285
1286 Caution:
1287 A filesystem should never delete its root folder, so
1288 ``FS.removetree("/")`` has different semantics: the
1289 contents of the root folder will be deleted, but the
1290 root will be untouched::
1291
1292 >>> home_fs = fs.open_fs("~")
1293 >>> home_fs.removetree("/")
1294 >>> home_fs.exists("/")
1295 True
1296 >>> home_fs.isempty("/")
1297 True
1298
1299 Combined with `~fs.base.FS.opendir`, this can be used
1300 to clear a directory without removing the directory
1301 itself::
1302
1303 >>> home_fs = fs.open_fs("~")
1304 >>> home_fs.opendir("/Videos").removetree("/")
1305 >>> home_fs.exists("/Videos")
1306 True
1307 >>> home_fs.isempty("/Videos")
1308 True
12191309
12201310 """
12211311 _dir_path = abspath(normpath(dir_path))
13081398 **options: Implementation specific options required to open
13091399 the source file.
13101400
1401 Raises:
1402 fs.errors.ResourceNotFound: If a parent directory of
1403 ``path`` does not exist.
1404
13111405 Note that the file object ``file`` will *not* be closed by this
13121406 method. Take care to close it after this method completes
13131407 (ideally with a context manager).
14491543
14501544 def validatepath(self, path):
14511545 # type: (Text) -> Text
1452 """Check if a path is valid, returning a normalized absolute
1453 path.
1546 """Validate a path, returning a normalized absolute path on sucess.
14541547
14551548 Many filesystems have restrictions on the format of paths they
14561549 support. This method will check that ``path`` is valid on the
14641557 str: A normalized, absolute path.
14651558
14661559 Raises:
1560 fs.errors.InvalidPath: If the path is invalid.
1561 fs.errors.FilesystemClosed: if the filesystem is closed.
14671562 fs.errors.InvalidCharsInPath: If the path contains
14681563 invalid characters.
1469 fs.errors.InvalidPath: If the path is invalid.
1470 fs.errors.FilesystemClosed: if the filesystem
1471 is closed.
14721564
14731565 """
14741566 self.check()
15201612 Returns:
15211613 ~fs.info.Info: Resource information object for ``path``.
15221614
1523 """
1615 Note:
1616 .. deprecated:: 2.4.13
1617 Please use `~FS.getinfo` directly, which is
1618 required to always return the *basic* namespace.
1619
1620 """
1621 warnings.warn(
1622 "method 'getbasic' has been deprecated, please use 'getinfo'",
1623 DeprecationWarning,
1624 )
15241625 return self.getinfo(path, namespaces=["basic"])
15251626
15261627 def getdetails(self, path):
15541655 def match(self, patterns, name):
15551656 # type: (Optional[Iterable[Text]], Text) -> bool
15561657 """Check if a name matches any of a list of wildcards.
1557
1558 Arguments:
1559 patterns (list): A list of patterns, e.g. ``['*.py']``
1560 name (str): A file or directory name (not a path)
1561
1562 Returns:
1563 bool: `True` if ``name`` matches any of the patterns.
15641658
15651659 If a filesystem is case *insensitive* (such as Windows) then
15661660 this method will perform a case insensitive match (i.e. ``*.py``
15681662 be case sensitive (``*.py`` and ``*.PY`` will match different
15691663 names).
15701664
1665 Arguments:
1666 patterns (list, optional): A list of patterns, e.g.
1667 ``['*.py']``, or `None` to match everything.
1668 name (str): A file or directory name (not a path)
1669
1670 Returns:
1671 bool: `True` if ``name`` matches any of the patterns.
1672
1673 Raises:
1674 TypeError: If ``patterns`` is a single string instead of
1675 a list (or `None`).
1676
15711677 Example:
1572 >>> home_fs.match(['*.py'], '__init__.py')
1678 >>> my_fs.match(['*.py'], '__init__.py')
15731679 True
1574 >>> home_fs.match(['*.jpg', '*.png'], 'foo.gif')
1680 >>> my_fs.match(['*.jpg', '*.png'], 'foo.gif')
15751681 False
15761682
15771683 Note:
16241730 Arguments:
16251731 path(str): A path on the filesystem.
16261732 name(str):
1627 One of the algorithms supported by the hashlib module, e.g. `"md5"`
1733 One of the algorithms supported by the `hashlib` module,
1734 e.g. `"md5"` or `"sha256"`.
16281735
16291736 Returns:
16301737 str: The hex digest of the hash.
16311738
16321739 Raises:
16331740 fs.errors.UnsupportedHash: If the requested hash is not supported.
1741 fs.errors.ResourceNotFound: If ``path`` does not exist.
1742 fs.errors.FileExpected: If ``path`` exists but is not a file.
16341743
16351744 """
16361745 self.validatepath(path)
33 `tarfile` modules from the standard library.
44 """
55
6 from __future__ import absolute_import
7 from __future__ import print_function
8 from __future__ import unicode_literals
6 from __future__ import absolute_import, print_function, unicode_literals
97
8 import typing
9
10 import six
11 import tarfile
1012 import time
11 import tarfile
12 import typing
1313 import zipfile
1414 from datetime import datetime
1515
16 import six
17
1816 from .enums import ResourceType
17 from .errors import MissingInfoNamespace, NoSysPath
1918 from .path import relpath
2019 from .time import datetime_to_epoch
21 from .errors import NoSysPath, MissingInfoNamespace
2220 from .walk import Walker
2321
2422 if typing.TYPE_CHECKING:
2523 from typing import BinaryIO, Optional, Text, Tuple, Union
24
2625 from .base import FS
2726
2827 ZipTime = Tuple[int, int, int, int, int, int]
4544 compression (int): Compression to use (one of the constants
4645 defined in the `zipfile` module in the stdlib). Defaults
4746 to `zipfile.ZIP_DEFLATED`.
48 encoding (str):
49 The encoding to use for filenames. The default is ``"utf-8"``,
50 use ``"CP437"`` if compatibility with WinZip is desired.
47 encoding (str): The encoding to use for filenames. The default
48 is ``"utf-8"``, use ``"CP437"`` if compatibility with WinZip
49 is desired.
5150 walker (~fs.walk.Walker, optional): A `Walker` instance, or `None`
5251 to use default walker. You can use this to specify which files
5352 you want to compress.
115114 """Write the contents of a filesystem to a tar file.
116115
117116 Arguments:
117 src_fs (~fs.base.FS): The source filesystem to compress.
118118 file (str or io.IOBase): Destination file, may be a file
119119 name or an open file object.
120120 compression (str, optional): Compression to use, or `None`
22
33 import io
44
5
65 DEFAULT_CHUNK_SIZE = io.DEFAULT_BUFFER_SIZE * 16
76 """`int`: the size of a single chunk read from or written to a file.
87 """
44
55 import typing
66
7 from .errors import FSError
7 import warnings
8
9 from .errors import ResourceNotFound
810 from .opener import manage_fs
911 from .path import abspath, combine, frombase, normpath
1012 from .tools import is_thread_safe
1214
1315 if typing.TYPE_CHECKING:
1416 from typing import Callable, Optional, Text, Union
17
1518 from .base import FS
1619
1720 _OnCopy = Callable[[FS, Text, FS, Text], object]
2326 walker=None, # type: Optional[Walker]
2427 on_copy=None, # type: Optional[_OnCopy]
2528 workers=0, # type: int
29 preserve_time=False, # type: bool
2630 ):
2731 # type: (...) -> None
2832 """Copy the contents of one filesystem to another.
3842 dst_path)``.
3943 workers (int): Use `worker` threads to copy data, or ``0`` (default) for
4044 a single-threaded copy.
41
42 """
43 return copy_dir(
44 src_fs, "/", dst_fs, "/", walker=walker, on_copy=on_copy, workers=workers
45 preserve_time (bool): If `True`, try to preserve mtime of the
46 resources (defaults to `False`).
47
48 """
49 return copy_fs_if(
50 src_fs, dst_fs, "always", walker, on_copy, workers, preserve_time=preserve_time
4551 )
4652
4753
5157 walker=None, # type: Optional[Walker]
5258 on_copy=None, # type: Optional[_OnCopy]
5359 workers=0, # type: int
60 preserve_time=False, # type: bool
5461 ):
5562 # type: (...) -> None
5663 """Copy the contents of one filesystem to another, checking times.
5764
58 If both source and destination files exist, the copy is executed
59 only if the source file is newer than the destination file. In case
60 modification times of source or destination files are not available,
61 copy file is always executed.
65 .. deprecated:: 2.5.0
66 Use `~fs.copy.copy_fs_if` with ``condition="newer"`` instead.
67
68 """
69 warnings.warn(
70 "copy_fs_if_newer is deprecated. Use copy_fs_if instead.", DeprecationWarning
71 )
72 return copy_fs_if(
73 src_fs, dst_fs, "newer", walker, on_copy, workers, preserve_time=preserve_time
74 )
75
76
77 def copy_fs_if(
78 src_fs, # type: Union[FS, Text]
79 dst_fs, # type: Union[FS, Text]
80 condition="always", # type: Text
81 walker=None, # type: Optional[Walker]
82 on_copy=None, # type: Optional[_OnCopy]
83 workers=0, # type: int
84 preserve_time=False, # type: bool
85 ):
86 # type: (...) -> None
87 """Copy the contents of one filesystem to another, depending on a condition.
6288
6389 Arguments:
6490 src_fs (FS or str): Source filesystem (URL or instance).
6591 dst_fs (FS or str): Destination filesystem (URL or instance).
92 condition (str): Name of the condition to check for each file.
6693 walker (~fs.walk.Walker, optional): A walker object that will be
6794 used to scan for files in ``src_fs``. Set this if you only want
6895 to consider a sub-set of the resources in ``src_fs``.
6996 on_copy (callable):A function callback called after a single file copy
7097 is executed. Expected signature is ``(src_fs, src_path, dst_fs,
7198 dst_path)``.
72 workers (int): Use ``worker`` threads to copy data, or ``0`` (default) for
73 a single-threaded copy.
74
75 """
76 return copy_dir_if_newer(
77 src_fs, "/", dst_fs, "/", walker=walker, on_copy=on_copy, workers=workers
78 )
79
80
81 def _source_is_newer(src_fs, src_path, dst_fs, dst_path):
82 # type: (FS, Text, FS, Text) -> bool
83 """Determine if source file is newer than destination file.
84
85 Arguments:
86 src_fs (FS): Source filesystem (instance or URL).
87 src_path (str): Path to a file on the source filesystem.
88 dst_fs (FS): Destination filesystem (instance or URL).
89 dst_path (str): Path to a file on the destination filesystem.
90
91 Returns:
92 bool: `True` if the source file is newer than the destination
93 file or file modification time cannot be determined, `False`
94 otherwise.
95
96 """
97 try:
98 if dst_fs.exists(dst_path):
99 namespace = ("details", "modified")
100 src_modified = src_fs.getinfo(src_path, namespace).modified
101 if src_modified is not None:
102 dst_modified = dst_fs.getinfo(dst_path, namespace).modified
103 return dst_modified is None or src_modified > dst_modified
104 return True
105 except FSError: # pragma: no cover
106 # todo: should log something here
107 return True
99 workers (int): Use ``worker`` threads to copy data, or ``0`` (default)
100 for a single-threaded copy.
101 preserve_time (bool): If `True`, try to preserve mtime of the
102 resources (defaults to `False`).
103
104 See Also:
105 `~fs.copy.copy_file_if` for the full list of supported values for the
106 ``condition`` argument.
107
108 """
109 return copy_dir_if(
110 src_fs,
111 "/",
112 dst_fs,
113 "/",
114 condition,
115 walker=walker,
116 on_copy=on_copy,
117 workers=workers,
118 preserve_time=preserve_time,
119 )
108120
109121
110122 def copy_file(
112124 src_path, # type: Text
113125 dst_fs, # type: Union[FS, Text]
114126 dst_path, # type: Text
127 preserve_time=False, # type: bool
115128 ):
116129 # type: (...) -> None
117130 """Copy a file from one filesystem to another.
123136 src_path (str): Path to a file on the source filesystem.
124137 dst_fs (FS or str): Destination filesystem (instance or URL).
125138 dst_path (str): Path to a file on the destination filesystem.
126
127 """
128 with manage_fs(src_fs, writeable=False) as _src_fs:
129 with manage_fs(dst_fs, create=True) as _dst_fs:
130 if _src_fs is _dst_fs:
131 # Same filesystem, so we can do a potentially optimized
132 # copy
133 _src_fs.copy(src_path, dst_path, overwrite=True)
134 else:
135 # Standard copy
136 with _src_fs.lock(), _dst_fs.lock():
137 if _dst_fs.hassyspath(dst_path):
138 with _dst_fs.openbin(dst_path, "w") as write_file:
139 _src_fs.download(src_path, write_file)
140 else:
141 with _src_fs.openbin(src_path) as read_file:
142 _dst_fs.upload(dst_path, read_file)
143
144
145 def copy_file_internal(
146 src_fs, # type: FS
147 src_path, # type: Text
148 dst_fs, # type: FS
149 dst_path, # type: Text
150 ):
151 # type: (...) -> None
152 """Low level copy, that doesn't call manage_fs or lock.
153
154 If the destination exists, and is a file, it will be first truncated.
155
156 This method exists to optimize copying in loops. In general you
157 should prefer `copy_file`.
158
159 Arguments:
160 src_fs (FS): Source filesystem.
161 src_path (str): Path to a file on the source filesystem.
162 dst_fs (FS: Destination filesystem.
163 dst_path (str): Path to a file on the destination filesystem.
164
165 """
166 if src_fs is dst_fs:
167 # Same filesystem, so we can do a potentially optimized
168 # copy
169 src_fs.copy(src_path, dst_path, overwrite=True)
170 elif dst_fs.hassyspath(dst_path):
171 with dst_fs.openbin(dst_path, "w") as write_file:
172 src_fs.download(src_path, write_file)
173 else:
174 with src_fs.openbin(src_path) as read_file:
175 dst_fs.upload(dst_path, read_file)
139 preserve_time (bool): If `True`, try to preserve mtime of the
140 resource (defaults to `False`).
141
142 """
143 copy_file_if(
144 src_fs, src_path, dst_fs, dst_path, "always", preserve_time=preserve_time
145 )
176146
177147
178148 def copy_file_if_newer(
180150 src_path, # type: Text
181151 dst_fs, # type: Union[FS, Text]
182152 dst_path, # type: Text
153 preserve_time=False, # type: bool
183154 ):
184155 # type: (...) -> bool
185156 """Copy a file from one filesystem to another, checking times.
186157
187 If the destination exists, and is a file, it will be first truncated.
188 If both source and destination files exist, the copy is executed only
189 if the source file is newer than the destination file. In case
190 modification times of source or destination files are not available,
191 copy is always executed.
158 .. deprecated:: 2.5.0
159 Use `~fs.copy.copy_file_if` with ``condition="newer"`` instead.
160
161 """
162 warnings.warn(
163 "copy_file_if_newer is deprecated. Use copy_file_if instead.",
164 DeprecationWarning,
165 )
166 return copy_file_if(
167 src_fs, src_path, dst_fs, dst_path, "newer", preserve_time=preserve_time
168 )
169
170
171 def copy_file_if(
172 src_fs, # type: Union[FS, Text]
173 src_path, # type: Text
174 dst_fs, # type: Union[FS, Text]
175 dst_path, # type: Text
176 condition, # type: Text
177 preserve_time=False, # type: bool
178 ):
179 # type: (...) -> bool
180 """Copy a file from one filesystem to another, depending on a condition.
181
182 Depending on the value of ``condition``, certain requirements must
183 be fulfilled for a file to be copied to ``dst_fs``. The following
184 values are supported:
185
186 ``"always"``
187 The source file is always copied.
188 ``"newer"``
189 The last modification time of the source file must be newer than that
190 of the destination file. If either file has no modification time, the
191 copy is performed always.
192 ``"older"``
193 The last modification time of the source file must be older than that
194 of the destination file. If either file has no modification time, the
195 copy is performed always.
196 ``"exists"``
197 The source file is only copied if a file of the same path already
198 exists in ``dst_fs``.
199 ``"not_exists"``
200 The source file is only copied if no file of the same path already
201 exists in ``dst_fs``.
192202
193203 Arguments:
194204 src_fs (FS or str): Source filesystem (instance or URL).
195205 src_path (str): Path to a file on the source filesystem.
196206 dst_fs (FS or str): Destination filesystem (instance or URL).
197207 dst_path (str): Path to a file on the destination filesystem.
208 condition (str): Name of the condition to check for each file.
209 preserve_time (bool): If `True`, try to preserve mtime of the
210 resource (defaults to `False`).
198211
199212 Returns:
200213 bool: `True` if the file copy was executed, `False` otherwise.
202215 """
203216 with manage_fs(src_fs, writeable=False) as _src_fs:
204217 with manage_fs(dst_fs, create=True) as _dst_fs:
205 if _src_fs is _dst_fs:
206 # Same filesystem, so we can do a potentially optimized
207 # copy
208 if _source_is_newer(_src_fs, src_path, _dst_fs, dst_path):
209 _src_fs.copy(src_path, dst_path, overwrite=True)
210 return True
211 else:
212 return False
213 else:
214 # Standard copy
215 with _src_fs.lock(), _dst_fs.lock():
216 if _source_is_newer(_src_fs, src_path, _dst_fs, dst_path):
217 copy_file_internal(_src_fs, src_path, _dst_fs, dst_path)
218 return True
219 else:
220 return False
218 do_copy = _copy_is_necessary(
219 _src_fs, src_path, _dst_fs, dst_path, condition
220 )
221 if do_copy:
222 copy_file_internal(
223 _src_fs,
224 src_path,
225 _dst_fs,
226 dst_path,
227 preserve_time=preserve_time,
228 lock=True,
229 )
230 return do_copy
231
232
233 def copy_file_internal(
234 src_fs, # type: FS
235 src_path, # type: Text
236 dst_fs, # type: FS
237 dst_path, # type: Text
238 preserve_time=False, # type: bool
239 lock=False, # type: bool
240 ):
241 # type: (...) -> None
242 """Copy a file at low level, without calling `manage_fs` or locking.
243
244 If the destination exists, and is a file, it will be first truncated.
245
246 This method exists to optimize copying in loops. In general you
247 should prefer `copy_file`.
248
249 Arguments:
250 src_fs (FS): Source filesystem.
251 src_path (str): Path to a file on the source filesystem.
252 dst_fs (FS): Destination filesystem.
253 dst_path (str): Path to a file on the destination filesystem.
254 preserve_time (bool): If `True`, try to preserve mtime of the
255 resource (defaults to `False`).
256 lock (bool): Lock both filesystems before copying.
257
258 """
259 if src_fs is dst_fs:
260 # Same filesystem, so we can do a potentially optimized
261 # copy
262 src_fs.copy(src_path, dst_path, overwrite=True, preserve_time=preserve_time)
263 return
264
265 def _copy_locked():
266 if dst_fs.hassyspath(dst_path):
267 with dst_fs.openbin(dst_path, "w") as write_file:
268 src_fs.download(src_path, write_file)
269 else:
270 with src_fs.openbin(src_path) as read_file:
271 dst_fs.upload(dst_path, read_file)
272
273 if preserve_time:
274 copy_modified_time(src_fs, src_path, dst_fs, dst_path)
275
276 if lock:
277 with src_fs.lock(), dst_fs.lock():
278 _copy_locked()
279 else:
280 _copy_locked()
221281
222282
223283 def copy_structure(
224284 src_fs, # type: Union[FS, Text]
225285 dst_fs, # type: Union[FS, Text]
226286 walker=None, # type: Optional[Walker]
287 src_root="/", # type: Text
288 dst_root="/", # type: Text
227289 ):
228290 # type: (...) -> None
229291 """Copy directories (but not files) from ``src_fs`` to ``dst_fs``.
234296 walker (~fs.walk.Walker, optional): A walker object that will be
235297 used to scan for files in ``src_fs``. Set this if you only
236298 want to consider a sub-set of the resources in ``src_fs``.
299 src_root (str): Path of the base directory to consider as the root
300 of the tree structure to copy.
301 dst_root (str): Path to the target root of the tree structure.
237302
238303 """
239304 walker = walker or Walker()
240305 with manage_fs(src_fs) as _src_fs:
241306 with manage_fs(dst_fs, create=True) as _dst_fs:
242307 with _src_fs.lock(), _dst_fs.lock():
243 for dir_path in walker.dirs(_src_fs):
244 _dst_fs.makedir(dir_path, recreate=True)
308 _dst_fs.makedirs(dst_root, recreate=True)
309 for dir_path in walker.dirs(_src_fs, src_root):
310 _dst_fs.makedir(
311 combine(dst_root, frombase(src_root, dir_path)), recreate=True
312 )
245313
246314
247315 def copy_dir(
252320 walker=None, # type: Optional[Walker]
253321 on_copy=None, # type: Optional[_OnCopy]
254322 workers=0, # type: int
323 preserve_time=False, # type: bool
255324 ):
256325 # type: (...) -> None
257326 """Copy a directory from one filesystem to another.
269338 ``(src_fs, src_path, dst_fs, dst_path)``.
270339 workers (int): Use ``worker`` threads to copy data, or ``0`` (default) for
271340 a single-threaded copy.
341 preserve_time (bool): If `True`, try to preserve mtime of the
342 resources (defaults to `False`).
343
344 """
345 copy_dir_if(
346 src_fs,
347 src_path,
348 dst_fs,
349 dst_path,
350 "always",
351 walker,
352 on_copy,
353 workers,
354 preserve_time=preserve_time,
355 )
356
357
358 def copy_dir_if_newer(
359 src_fs, # type: Union[FS, Text]
360 src_path, # type: Text
361 dst_fs, # type: Union[FS, Text]
362 dst_path, # type: Text
363 walker=None, # type: Optional[Walker]
364 on_copy=None, # type: Optional[_OnCopy]
365 workers=0, # type: int
366 preserve_time=False, # type: bool
367 ):
368 # type: (...) -> None
369 """Copy a directory from one filesystem to another, checking times.
370
371 .. deprecated:: 2.5.0
372 Use `~fs.copy.copy_dir_if` with ``condition="newer"`` instead.
373
374 """
375 warnings.warn(
376 "copy_dir_if_newer is deprecated. Use copy_dir_if instead.", DeprecationWarning
377 )
378 copy_dir_if(
379 src_fs,
380 src_path,
381 dst_fs,
382 dst_path,
383 "newer",
384 walker,
385 on_copy,
386 workers,
387 preserve_time=preserve_time,
388 )
389
390
391 def copy_dir_if(
392 src_fs, # type: Union[FS, Text]
393 src_path, # type: Text
394 dst_fs, # type: Union[FS, Text]
395 dst_path, # type: Text
396 condition, # type: Text
397 walker=None, # type: Optional[Walker]
398 on_copy=None, # type: Optional[_OnCopy]
399 workers=0, # type: int
400 preserve_time=False, # type: bool
401 ):
402 # type: (...) -> None
403 """Copy a directory from one filesystem to another, depending on a condition.
404
405 Arguments:
406 src_fs (FS or str): Source filesystem (instance or URL).
407 src_path (str): Path to a directory on the source filesystem.
408 dst_fs (FS or str): Destination filesystem (instance or URL).
409 dst_path (str): Path to a directory on the destination filesystem.
410 condition (str): Name of the condition to check for each file.
411 walker (~fs.walk.Walker, optional): A walker object that will be
412 used to scan for files in ``src_fs``. Set this if you only want
413 to consider a sub-set of the resources in ``src_fs``.
414 on_copy (callable):A function callback called after a single file copy
415 is executed. Expected signature is ``(src_fs, src_path, dst_fs,
416 dst_path)``.
417 workers (int): Use ``worker`` threads to copy data, or ``0`` (default) for
418 a single-threaded copy.
419 preserve_time (bool): If `True`, try to preserve mtime of the
420 resources (defaults to `False`).
421
422 See Also:
423 `~fs.copy.copy_file_if` for the full list of supported values for the
424 ``condition`` argument.
272425
273426 """
274427 on_copy = on_copy or (lambda *args: None)
276429 _src_path = abspath(normpath(src_path))
277430 _dst_path = abspath(normpath(dst_path))
278431
279 def src():
280 return manage_fs(src_fs, writeable=False)
281
282 def dst():
283 return manage_fs(dst_fs, create=True)
284
285432 from ._bulk import Copier
286433
287 with src() as _src_fs, dst() as _dst_fs:
434 copy_structure(src_fs, dst_fs, walker, src_path, dst_path)
435
436 with manage_fs(src_fs, writeable=False) as _src_fs, manage_fs(
437 dst_fs, create=True
438 ) as _dst_fs:
288439 with _src_fs.lock(), _dst_fs.lock():
289440 _thread_safe = is_thread_safe(_src_fs, _dst_fs)
290 with Copier(num_workers=workers if _thread_safe else 0) as copier:
291 _dst_fs.makedir(_dst_path, recreate=True)
292 for dir_path, dirs, files in walker.walk(_src_fs, _src_path):
441 with Copier(
442 num_workers=workers if _thread_safe else 0, preserve_time=preserve_time
443 ) as copier:
444 for dir_path in walker.files(_src_fs, _src_path):
293445 copy_path = combine(_dst_path, frombase(_src_path, dir_path))
294 for info in dirs:
295 _dst_fs.makedir(info.make_path(copy_path), recreate=True)
296 for info in files:
297 src_path = info.make_path(dir_path)
298 dst_path = info.make_path(copy_path)
299 copier.copy(_src_fs, src_path, _dst_fs, dst_path)
300 on_copy(_src_fs, src_path, _dst_fs, dst_path)
301
302
303 def copy_dir_if_newer(
304 src_fs, # type: Union[FS, Text]
305 src_path, # type: Text
306 dst_fs, # type: Union[FS, Text]
307 dst_path, # type: Text
308 walker=None, # type: Optional[Walker]
309 on_copy=None, # type: Optional[_OnCopy]
310 workers=0, # type: int
311 ):
312 # type: (...) -> None
313 """Copy a directory from one filesystem to another, checking times.
314
315 If both source and destination files exist, the copy is executed only
316 if the source file is newer than the destination file. In case
317 modification times of source or destination files are not available,
318 copy is always executed.
446 if _copy_is_necessary(
447 _src_fs, dir_path, _dst_fs, copy_path, condition
448 ):
449 copier.copy(_src_fs, dir_path, _dst_fs, copy_path)
450 on_copy(_src_fs, dir_path, _dst_fs, copy_path)
451
452
453 def _copy_is_necessary(
454 src_fs, # type: FS
455 src_path, # type: Text
456 dst_fs, # type: FS
457 dst_path, # type: Text
458 condition, # type: Text
459 ):
460 # type: (...) -> bool
461
462 if condition == "always":
463 return True
464
465 elif condition == "newer":
466 try:
467 src_modified = src_fs.getmodified(src_path)
468 dst_modified = dst_fs.getmodified(dst_path)
469 except ResourceNotFound:
470 return True
471 else:
472 return (
473 src_modified is None
474 or dst_modified is None
475 or src_modified > dst_modified
476 )
477
478 elif condition == "older":
479 try:
480 src_modified = src_fs.getmodified(src_path)
481 dst_modified = dst_fs.getmodified(dst_path)
482 except ResourceNotFound:
483 return True
484 else:
485 return (
486 src_modified is None
487 or dst_modified is None
488 or src_modified < dst_modified
489 )
490
491 elif condition == "exists":
492 return dst_fs.exists(dst_path)
493
494 elif condition == "not_exists":
495 return not dst_fs.exists(dst_path)
496
497 else:
498 raise ValueError("{} is not a valid copy condition.".format(condition))
499
500
501 def copy_modified_time(
502 src_fs, # type: Union[FS, Text]
503 src_path, # type: Text
504 dst_fs, # type: Union[FS, Text]
505 dst_path, # type: Text
506 ):
507 # type: (...) -> None
508 """Copy modified time metadata from one file to another.
319509
320510 Arguments:
321511 src_fs (FS or str): Source filesystem (instance or URL).
322512 src_path (str): Path to a directory on the source filesystem.
323513 dst_fs (FS or str): Destination filesystem (instance or URL).
324514 dst_path (str): Path to a directory on the destination filesystem.
325 walker (~fs.walk.Walker, optional): A walker object that will be
326 used to scan for files in ``src_fs``. Set this if you only
327 want to consider a sub-set of the resources in ``src_fs``.
328 on_copy (callable, optional): A function callback called after
329 a single file copy is executed. Expected signature is
330 ``(src_fs, src_path, dst_fs, dst_path)``.
331 workers (int): Use ``worker`` threads to copy data, or ``0`` (default) for
332 a single-threaded copy.
333
334 """
335 on_copy = on_copy or (lambda *args: None)
336 walker = walker or Walker()
337 _src_path = abspath(normpath(src_path))
338 _dst_path = abspath(normpath(dst_path))
339
340 def src():
341 return manage_fs(src_fs, writeable=False)
342
343 def dst():
344 return manage_fs(dst_fs, create=True)
345
346 from ._bulk import Copier
347
348 with src() as _src_fs, dst() as _dst_fs:
349 with _src_fs.lock(), _dst_fs.lock():
350 _thread_safe = is_thread_safe(_src_fs, _dst_fs)
351 with Copier(num_workers=workers if _thread_safe else 0) as copier:
352 _dst_fs.makedir(_dst_path, recreate=True)
353 namespace = ("details", "modified")
354 dst_state = {
355 path: info
356 for path, info in walker.info(_dst_fs, _dst_path, namespace)
357 if info.is_file
358 }
359 src_state = [
360 (path, info)
361 for path, info in walker.info(_src_fs, _src_path, namespace)
362 ]
363 for dir_path, copy_info in src_state:
364 copy_path = combine(_dst_path, frombase(_src_path, dir_path))
365 if copy_info.is_dir:
366 _dst_fs.makedir(copy_path, recreate=True)
367 elif copy_info.is_file:
368 # dst file is present, try to figure out if copy
369 # is necessary
370 try:
371 src_modified = copy_info.modified
372 dst_modified = dst_state[dir_path].modified
373 except KeyError:
374 do_copy = True
375 else:
376 do_copy = (
377 src_modified is None
378 or dst_modified is None
379 or src_modified > dst_modified
380 )
381
382 if do_copy:
383 copier.copy(_src_fs, dir_path, _dst_fs, copy_path)
384 on_copy(_src_fs, dir_path, _dst_fs, copy_path)
515
516 """
517 namespaces = ("details",)
518 with manage_fs(src_fs, writeable=False) as _src_fs:
519 with manage_fs(dst_fs, create=True) as _dst_fs:
520 src_meta = _src_fs.getinfo(src_path, namespaces)
521 src_details = src_meta.raw.get("details", {})
522 dst_details = {}
523 for value in ("metadata_changed", "modified"):
524 if value in src_details:
525 dst_details[value] = src_details[value]
526 _dst_fs.setinfo(dst_path, {"details": dst_details})
00 """Enums used by PyFilesystem.
11 """
22
3 from __future__ import absolute_import
4 from __future__ import unicode_literals
3 from __future__ import absolute_import, unicode_literals
54
65 import os
76 from enum import IntEnum, unique
00 """Tools for managing OS errors.
11 """
22
3 from __future__ import print_function
4 from __future__ import unicode_literals
3 from __future__ import print_function, unicode_literals
4
5 import sys
6 import typing
57
68 import errno
79 import platform
8 import sys
9 import typing
1010 from contextlib import contextmanager
11
1211 from six import reraise
1312
1413 from . import errors
1514
1615 if typing.TYPE_CHECKING:
16 from typing import Iterator, Optional, Text, Type, Union
17
1718 from types import TracebackType
18 from typing import Iterator, Optional, Text, Type, Union
1919
2020 try:
2121 from collections.abc import Mapping
2727
2828
2929 class _ConvertOSErrors(object):
30 """Context manager to convert OSErrors in to FS Errors.
31 """
30 """Context manager to convert OSErrors in to FS Errors."""
3231
3332 FILE_ERRORS = {
3433 64: errors.RemoteConnectionError, # ENONET
77
88 """
99
10 from __future__ import unicode_literals
11 from __future__ import print_function
10 from __future__ import print_function, unicode_literals
11
12 import typing
1213
1314 import functools
14 import typing
15
1615 import six
1716 from six import text_type
1817
5049 "ResourceNotFound",
5150 "ResourceReadOnly",
5251 "Unsupported",
52 "UnsupportedHash",
5353 ]
5454
5555
5656 class MissingInfoNamespace(AttributeError):
57 """An expected namespace is missing.
58 """
59
60 def __init__(self, namespace):
57 """An expected namespace is missing."""
58
59 def __init__(self, namespace): # noqa: D107
6160 # type: (Text) -> None
6261 self.namespace = namespace
6362 msg = "namespace '{}' is required for this attribute"
6968
7069 @six.python_2_unicode_compatible
7170 class FSError(Exception):
72 """Base exception for the `fs` module.
73 """
71 """Base exception for the `fs` module."""
7472
7573 default_message = "Unspecified error"
7674
77 def __init__(self, msg=None):
75 def __init__(self, msg=None): # noqa: D107
7876 # type: (Optional[Text]) -> None
7977 self._msg = msg or self.default_message
8078 super(FSError, self).__init__()
8179
8280 def __str__(self):
8381 # type: () -> Text
84 """Return the error message.
85 """
82 """Return the error message."""
8683 msg = self._msg.format(**self.__dict__)
8784 return msg
8885
9390
9491
9592 class FilesystemClosed(FSError):
96 """Attempt to use a closed filesystem.
97 """
93 """Attempt to use a closed filesystem."""
9894
9995 default_message = "attempt to use closed filesystem"
10096
104100
105101 default_message = "One or more copy operations failed (see errors attribute)"
106102
107 def __init__(self, errors):
103 def __init__(self, errors): # noqa: D107
108104 self.errors = errors
109105 super(BulkCopyFailed, self).__init__()
110106
111107
112108 class CreateFailed(FSError):
113 """Filesystem could not be created.
114 """
109 """Filesystem could not be created."""
115110
116111 default_message = "unable to create filesystem, {details}"
117112
118 def __init__(self, msg=None, exc=None):
113 def __init__(self, msg=None, exc=None): # noqa: D107
119114 # type: (Optional[Text], Optional[Exception]) -> None
120115 self._msg = msg or self.default_message
121116 self.details = "" if exc is None else text_type(exc)
139134
140135
141136 class PathError(FSError):
142 """Base exception for errors to do with a path string.
143 """
137 """Base exception for errors to do with a path string."""
144138
145139 default_message = "path '{path}' is invalid"
146140
147 def __init__(self, path, msg=None):
148 # type: (Text, Optional[Text]) -> None
141 def __init__(self, path, msg=None, exc=None): # noqa: D107
142 # type: (Text, Optional[Text], Optional[Exception]) -> None
149143 self.path = path
144 self.exc = exc
150145 super(PathError, self).__init__(msg=msg)
151146
152147 def __reduce__(self):
153 return type(self), (self.path, self._msg)
148 return type(self), (self.path, self._msg, self.exc)
154149
155150
156151 class NoSysPath(PathError):
157 """The filesystem does not provide *sys paths* to the resource.
158 """
152 """The filesystem does not provide *sys paths* to the resource."""
159153
160154 default_message = "path '{path}' does not map to the local filesystem"
161155
162156
163157 class NoURL(PathError):
164 """The filesystem does not provide an URL for the resource.
165 """
158 """The filesystem does not provide an URL for the resource."""
166159
167160 default_message = "path '{path}' has no '{purpose}' URL"
168161
169 def __init__(self, path, purpose, msg=None):
162 def __init__(self, path, purpose, msg=None): # noqa: D107
170163 # type: (Text, Text, Optional[Text]) -> None
171164 self.purpose = purpose
172165 super(NoURL, self).__init__(path, msg=msg)
176169
177170
178171 class InvalidPath(PathError):
179 """Path can't be mapped on to the underlaying filesystem.
180 """
172 """Path can't be mapped on to the underlaying filesystem."""
181173
182174 default_message = "path '{path}' is invalid on this filesystem "
183175
184176
185177 class InvalidCharsInPath(InvalidPath):
186 """Path contains characters that are invalid on this filesystem.
187 """
178 """Path contains characters that are invalid on this filesystem."""
188179
189180 default_message = "path '{path}' contains invalid characters"
190181
191182
192183 class OperationFailed(FSError):
193 """A specific operation failed.
194 """
184 """A specific operation failed."""
195185
196186 default_message = "operation failed, {details}"
197187
200190 path=None, # type: Optional[Text]
201191 exc=None, # type: Optional[Exception]
202192 msg=None, # type: Optional[Text]
203 ):
193 ): # noqa: D107
204194 # type: (...) -> None
205195 self.path = path
206196 self.exc = exc
213203
214204
215205 class Unsupported(OperationFailed):
216 """Operation not supported by the filesystem.
217 """
206 """Operation not supported by the filesystem."""
218207
219208 default_message = "not supported"
220209
221210
222211 class RemoteConnectionError(OperationFailed):
223 """Operations encountered remote connection trouble.
224 """
212 """Operations encountered remote connection trouble."""
225213
226214 default_message = "remote connection error"
227215
228216
229217 class InsufficientStorage(OperationFailed):
230 """Storage is insufficient for requested operation.
231 """
218 """Storage is insufficient for requested operation."""
232219
233220 default_message = "insufficient storage space"
234221
235222
236223 class PermissionDenied(OperationFailed):
237 """Not enough permissions.
238 """
224 """Not enough permissions."""
239225
240226 default_message = "permission denied"
241227
242228
243229 class OperationTimeout(OperationFailed):
244 """Filesystem took too long.
245 """
230 """Filesystem took too long."""
246231
247232 default_message = "operation timed out"
248233
249234
250235 class RemoveRootError(OperationFailed):
251 """Attempt to remove the root directory.
252 """
236 """Attempt to remove the root directory."""
253237
254238 default_message = "root directory may not be removed"
255239
256240
257241 class ResourceError(FSError):
258 """Base exception class for error associated with a specific resource.
259 """
242 """Base exception class for error associated with a specific resource."""
260243
261244 default_message = "failed on path {path}"
262245
263 def __init__(self, path, exc=None, msg=None):
246 def __init__(self, path, exc=None, msg=None): # noqa: D107
264247 # type: (Text, Optional[Exception], Optional[Text]) -> None
265248 self.path = path
266249 self.exc = exc
271254
272255
273256 class ResourceNotFound(ResourceError):
274 """Required resource not found.
275 """
257 """Required resource not found."""
276258
277259 default_message = "resource '{path}' not found"
278260
279261
280262 class ResourceInvalid(ResourceError):
281 """Resource has the wrong type.
282 """
263 """Resource has the wrong type."""
283264
284265 default_message = "resource '{path}' is invalid for this operation"
285266
286267
287268 class FileExists(ResourceError):
288 """File already exists.
289 """
269 """File already exists."""
290270
291271 default_message = "resource '{path}' exists"
292272
293273
294274 class FileExpected(ResourceInvalid):
295 """Operation only works on files.
296 """
275 """Operation only works on files."""
297276
298277 default_message = "path '{path}' should be a file"
299278
300279
301280 class DirectoryExpected(ResourceInvalid):
302 """Operation only works on directories.
303 """
281 """Operation only works on directories."""
304282
305283 default_message = "path '{path}' should be a directory"
306284
307285
308286 class DestinationExists(ResourceError):
309 """Target destination already exists.
310 """
287 """Target destination already exists."""
311288
312289 default_message = "destination '{path}' exists"
313290
314291
315292 class DirectoryExists(ResourceError):
316 """Directory already exists.
317 """
293 """Directory already exists."""
318294
319295 default_message = "directory '{path}' exists"
320296
321297
322298 class DirectoryNotEmpty(ResourceError):
323 """Attempt to remove a non-empty directory.
324 """
299 """Attempt to remove a non-empty directory."""
325300
326301 default_message = "directory '{path}' is not empty"
327302
328303
329304 class ResourceLocked(ResourceError):
330 """Attempt to use a locked resource.
331 """
305 """Attempt to use a locked resource."""
332306
333307 default_message = "resource '{path}' is locked"
334308
335309
336310 class ResourceReadOnly(ResourceError):
337 """Attempting to modify a read-only resource.
338 """
311 """Attempting to modify a read-only resource."""
339312
340313 default_message = "resource '{path}' is read only"
341314
353326
354327 """
355328
356 def __init__(self, path):
329 def __init__(self, path): # noqa: D107
357330 # type: (Text) -> None
358331 self.path = path
359332 msg = ("path '{path}' contains back-references outside of filesystem").format(
1010
1111 """
1212
13 from __future__ import division
14 from __future__ import unicode_literals
13 from __future__ import division, unicode_literals
1514
1615 import typing
1716
3534
3635 # TODO (dargueta): Don't rely on unit or suffix being defined in the loop.
3736 for i, suffix in enumerate(suffixes, 2): # noqa: B007
38 unit = base ** i
37 unit = base**i
3938 if size < unit:
4039 break
4140 return "{:,.1f} {}".format((base * size / unit), suffix)
6059 `str`: A string containing an abbreviated file size and units.
6160
6261 Example:
63 >>> filesize.traditional(30000)
62 >>> fs.filesize.traditional(30000)
6463 '29.3 KB'
6564
6665 """
8685 `str`: A string containing a abbreviated file size and units.
8786
8887 Example:
89 >>> filesize.binary(30000)
88 >>> fs.filesize.binary(30000)
9089 '29.3 KiB'
9190
9291 """
111110 `str`: A string containing a abbreviated file size and units.
112111
113112 Example:
114 >>> filesize.decimal(30000)
113 >>> fs.filesize.decimal(30000)
115114 '30.0 kB'
116115
117116 """
00 """Manage filesystems on remote FTP servers.
11 """
22
3 from __future__ import print_function
4 from __future__ import unicode_literals
5
3 from __future__ import print_function, unicode_literals
4
5 import typing
6
7 import array
68 import calendar
9 import datetime
710 import io
811 import itertools
912 import socket
1013 import threading
11 import typing
1214 from collections import OrderedDict
1315 from contextlib import contextmanager
1416 from ftplib import FTP
15 from ftplib import error_perm
16 from ftplib import error_temp
17
18 try:
19 from ftplib import FTP_TLS
20 except ImportError as err:
21 FTP_TLS = err # type: ignore
1722 from typing import cast
1823
19 from six import PY2
20 from six import text_type
21
24 from ftplib import error_perm, error_temp
25 from six import PY2, raise_from, text_type
26
27 from . import _ftp_parse as ftp_parse
2228 from . import errors
2329 from .base import FS
2430 from .constants import DEFAULT_CHUNK_SIZE
25 from .enums import ResourceType
26 from .enums import Seek
31 from .enums import ResourceType, Seek
2732 from .info import Info
2833 from .iotools import line_iterator
2934 from .mode import Mode
30 from .path import abspath
31 from .path import dirname
32 from .path import basename
33 from .path import normpath
34 from .path import split
35 from . import _ftp_parse as ftp_parse
35 from .path import abspath, basename, dirname, normpath, split
36 from .time import epoch_to_datetime
3637
3738 if typing.TYPE_CHECKING:
38 import ftplib
3939 from typing import (
4040 Any,
4141 BinaryIO,
4242 ByteString,
43 Container,
4344 ContextManager,
45 Dict,
4446 Iterable,
4547 Iterator,
46 Container,
47 Dict,
4848 List,
4949 Optional,
5050 SupportsInt,
5252 Tuple,
5353 Union,
5454 )
55
56 import ftplib
57 import mmap
58
5559 from .base import _OpendirFactory
5660 from .info import RawInfo
5761 from .permissions import Permissions
121125 # type: (Union[Text, bytes], Text) -> Text
122126 return st.decode(encoding, "replace") if isinstance(st, bytes) else st
123127
124
125128 else:
126129
127130 def _encode(st, _):
235238 return b"".join(chunks)
236239
237240 def readinto(self, buffer):
238 # type: (bytearray) -> int
241 # type: (Union[bytearray, memoryview, array.array[Any], mmap.mmap]) -> int
239242 data = self.read(len(buffer))
240243 bytes_read = len(data)
241 buffer[:bytes_read] = data
244 if isinstance(buffer, array.array):
245 buffer[:bytes_read] = array.array(buffer.typecode, data)
246 else:
247 buffer[:bytes_read] = data # type: ignore
242248 return bytes_read
243249
244 def readline(self, size=-1):
245 # type: (int) -> bytes
250 def readline(self, size=None):
251 # type: (Optional[int]) -> bytes
246252 return next(line_iterator(self, size)) # type: ignore
247253
248254 def readlines(self, hint=-1):
261267 return self.mode.writing
262268
263269 def write(self, data):
264 # type: (bytes) -> int
270 # type: (Union[bytes, memoryview, array.array[Any], mmap.mmap]) -> int
265271 if not self.mode.writing:
266272 raise IOError("File not open for writing")
273
274 if isinstance(data, array.array):
275 data = data.tobytes()
267276
268277 with self._lock:
269278 conn = self.write_conn
280289 return data_pos
281290
282291 def writelines(self, lines):
283 # type: (Iterable[bytes]) -> None
284 self.write(b"".join(lines))
292 # type: (Iterable[Union[bytes, memoryview, array.array[Any], mmap.mmap]]) -> None # noqa: E501
293 if not self.mode.writing:
294 raise IOError("File not open for writing")
295 data = bytearray()
296 for line in lines:
297 if isinstance(line, array.array):
298 data.extend(line.tobytes())
299 else:
300 data.extend(line) # type: ignore
301 self.write(data)
285302
286303 def truncate(self, size=None):
287304 # type: (Optional[int]) -> int
331348 class FTPFS(FS):
332349 """A FTP (File Transport Protocol) Filesystem.
333350
334 Arguments:
335 host (str): A FTP host, e.g. ``'ftp.mirror.nl'``.
336 user (str): A username (default is ``'anonymous'``).
337 passwd (str): Password for the server, or `None` for anon.
338 acct (str): FTP account.
339 timeout (int): Timeout for contacting server (in seconds,
340 defaults to 10).
341 port (int): FTP port number (default 21).
342 proxy (str, optional): An FTP proxy, or ``None`` (default)
343 for no proxy.
351 Optionally, the connection can be made securely via TLS. This is known as
352 FTPS, or FTP Secure. TLS will be enabled when using the ftps:// protocol,
353 or when setting the `tls` argument to True in the constructor.
354
355 Examples:
356 Create with the constructor::
357
358 >>> from fs.ftpfs import FTPFS
359 >>> ftp_fs = FTPFS("demo.wftpserver.com")
360
361 Or via an FS URL::
362
363 >>> ftp_fs = fs.open_fs('ftp://test.rebex.net')
364
365 Or via an FS URL, using TLS::
366
367 >>> ftp_fs = fs.open_fs('ftps://demo.wftpserver.com')
368
369 You can also use a non-anonymous username, and optionally a
370 password, even within a FS URL::
371
372 >>> ftp_fs = FTPFS("test.rebex.net", user="demo", passwd="password")
373 >>> ftp_fs = fs.open_fs('ftp://demo:password@test.rebex.net')
374
375 Connecting via a proxy is supported. If using a FS URL, the proxy
376 URL will need to be added as a URL parameter::
377
378 >>> ftp_fs = FTPFS("ftp.ebi.ac.uk", proxy="test.rebex.net")
379 >>> ftp_fs = fs.open_fs('ftp://ftp.ebi.ac.uk/?proxy=test.rebex.net')
344380
345381 """
346382
362398 timeout=10, # type: int
363399 port=21, # type: int
364400 proxy=None, # type: Optional[Text]
401 tls=False, # type: bool
365402 ):
366403 # type: (...) -> None
404 """Create a new `FTPFS` instance.
405
406 Arguments:
407 host (str): A FTP host, e.g. ``'ftp.mirror.nl'``.
408 user (str): A username (default is ``'anonymous'``).
409 passwd (str): Password for the server, or `None` for anon.
410 acct (str): FTP account.
411 timeout (int): Timeout for contacting server (in seconds,
412 defaults to 10).
413 port (int): FTP port number (default 21).
414 proxy (str, optional): An FTP proxy, or ``None`` (default)
415 for no proxy.
416 tls (bool): Attempt to use FTP over TLS (FTPS) (default: False)
417
418 """
367419 super(FTPFS, self).__init__()
368420 self._host = host
369421 self._user = user
372424 self.timeout = timeout
373425 self.port = port
374426 self.proxy = proxy
427 self.tls = tls
428
429 if self.tls and isinstance(FTP_TLS, Exception):
430 raise_from(errors.CreateFailed("FTP over TLS not supported"), FTP_TLS)
375431
376432 self.encoding = "latin-1"
377433 self._ftp = None # type: Optional[FTP]
402458 @classmethod
403459 def _parse_features(cls, feat_response):
404460 # type: (Text) -> Dict[Text, Text]
405 """Parse a dict of features from FTP feat response.
406 """
461 """Parse a dict of features from FTP feat response."""
407462 features = {}
408463 if feat_response.split("-")[0] == "211":
409464 for line in feat_response.splitlines():
414469
415470 def _open_ftp(self):
416471 # type: () -> FTP
417 """Open a new ftp object.
418 """
419 _ftp = FTP()
472 """Open a new ftp object."""
473 _ftp = FTP_TLS() if self.tls else FTP()
420474 _ftp.set_debuglevel(0)
421475 with ftp_errors(self):
422476 _ftp.connect(self.host, self.port, self.timeout)
423477 _ftp.login(self.user, self.passwd, self.acct)
478 try:
479 _ftp.prot_p() # type: ignore
480 except AttributeError:
481 pass
424482 self._features = {}
425483 try:
426484 feat_response = _decode(_ftp.sendcmd("FEAT"), "latin-1")
455513 _user_part = ""
456514 else:
457515 _user_part = "{}:{}@".format(self.user, self.passwd)
458 url = "ftp://{}{}".format(_user_part, _host_part)
516
517 scheme = "ftps" if self.tls else "ftp"
518 url = "{}://{}{}".format(scheme, _user_part, _host_part)
459519 return url
460520
461521 @property
462522 def ftp(self):
463523 # type: () -> FTP
464 """~ftplib.FTP: the underlying FTP client.
465 """
524 """~ftplib.FTP: the underlying FTP client."""
466525 return self._get_ftp()
467526
468527 def geturl(self, path, purpose="download"):
480539 return self._ftp
481540
482541 @property
483 def features(self):
542 def features(self): # noqa: D401
484543 # type: () -> Dict[Text, Text]
485 """dict: features of the remote FTP server.
486 """
544 """`dict`: Features of the remote FTP server."""
487545 self._get_ftp()
488546 return self._features
489547
505563 @property
506564 def supports_mlst(self):
507565 # type: () -> bool
508 """bool: whether the server supports MLST feature.
509 """
566 """bool: whether the server supports MLST feature."""
510567 return "MLST" in self.features
568
569 @property
570 def supports_mdtm(self):
571 # type: () -> bool
572 """bool: whether the server supports the MDTM feature."""
573 return "MDTM" in self.features
511574
512575 def create(self, path, wipe=False):
513576 # type: (Text, bool) -> bool
524587 @classmethod
525588 def _parse_ftp_time(cls, time_text):
526589 # type: (Text) -> Optional[int]
527 """Parse a time from an ftp directory listing.
528 """
590 """Parse a time from an ftp directory listing."""
529591 try:
530592 tm_year = int(time_text[0:4])
531593 tm_month = int(time_text[4:6])
630692 if namespace == "standard":
631693 _meta = self._meta.copy()
632694 _meta["unicode_paths"] = "UTF8" in self.features
695 _meta["supports_mtime"] = "MDTM" in self.features
633696 return _meta
697
698 def getmodified(self, path):
699 # type: (Text) -> Optional[datetime.datetime]
700 if self.supports_mdtm:
701 _path = self.validatepath(path)
702 with self._lock:
703 with ftp_errors(self, path=path):
704 cmd = "MDTM " + _encode(_path, self.ftp.encoding)
705 response = self.ftp.sendcmd(cmd)
706 mtime = self._parse_ftp_time(response.split()[1])
707 return epoch_to_datetime(mtime)
708 return super(FTPFS, self).getmodified(path)
634709
635710 def listdir(self, path):
636711 # type: (Text) -> List[Text]
761836 # type: (Text, BinaryIO, Optional[int], **Any) -> None
762837 _path = self.validatepath(path)
763838 with self._lock:
764 with self._manage_ftp() as ftp:
765 with ftp_errors(self, path):
766 ftp.storbinary(
767 str("STOR ") + _encode(_path, self.ftp.encoding), file
768 )
839 with ftp_errors(self, path):
840 self.ftp.storbinary(
841 str("STOR ") + _encode(_path, self.ftp.encoding), file
842 )
769843
770844 def writebytes(self, path, contents):
771845 # type: (Text, ByteString) -> None
775849
776850 def setinfo(self, path, info):
777851 # type: (Text, RawInfo) -> None
778 if not self.exists(path):
779 raise errors.ResourceNotFound(path)
852 use_mfmt = False
853 if "MFMT" in self.features:
854 info_details = None
855 if "modified" in info:
856 info_details = info["modified"]
857 elif "details" in info:
858 info_details = info["details"]
859 if info_details and "modified" in info_details:
860 use_mfmt = True
861 mtime = cast(float, info_details["modified"])
862
863 if use_mfmt:
864 with ftp_errors(self, path):
865 cmd = (
866 "MFMT "
867 + datetime.datetime.utcfromtimestamp(mtime).strftime("%Y%m%d%H%M%S")
868 + " "
869 + _encode(path, self.ftp.encoding)
870 )
871 try:
872 self.ftp.sendcmd(cmd)
873 except error_perm:
874 pass
875 else:
876 if not self.exists(path):
877 raise errors.ResourceNotFound(path)
780878
781879 def readbytes(self, path):
782880 # type: (Text) -> bytes
0 """Useful functions for working with glob patterns.
1 """
2
03 from __future__ import unicode_literals
14
5 import typing
6
7 import re
28 from collections import namedtuple
3 import re
4 import typing
5
9
10 from . import wildcard
11 from ._repr import make_repr
612 from .lrucache import LRUCache
7 from ._repr import make_repr
813 from .path import iteratepath
9 from . import wildcard
10
1114
1215 GlobMatch = namedtuple("GlobMatch", ["path", "info"])
1316 Counts = namedtuple("Counts", ["files", "directories", "data"])
1518
1619 if typing.TYPE_CHECKING:
1720 from typing import Iterator, List, Optional, Pattern, Text, Tuple
21
1822 from .base import FS
1923
2024
9195
9296
9397 class Globber(object):
94 """A generator of glob results.
95
96 Arguments:
97 fs (~fs.base.FS): A filesystem object
98 pattern (str): A glob pattern, e.g. ``"**/*.py"``
99 path (str): A path to a directory in the filesystem.
100 namespaces (list): A list of additional info namespaces.
101 case_sensitive (bool): If ``True``, the path matching will be
102 case *sensitive* i.e. ``"FOO.py"`` and ``"foo.py"`` will
103 be different, otherwise path matching will be case *insensitive*.
104 exclude_dirs (list): A list of patterns to exclude when searching,
105 e.g. ``["*.git"]``.
106
107 """
98 """A generator of glob results."""
10899
109100 def __init__(
110101 self,
116107 exclude_dirs=None,
117108 ):
118109 # type: (FS, str, str, Optional[List[str]], bool, Optional[List[str]]) -> None
110 """Create a new Globber instance.
111
112 Arguments:
113 fs (~fs.base.FS): A filesystem object
114 pattern (str): A glob pattern, e.g. ``"**/*.py"``
115 path (str): A path to a directory in the filesystem.
116 namespaces (list): A list of additional info namespaces.
117 case_sensitive (bool): If ``True``, the path matching will be
118 case *sensitive* i.e. ``"FOO.py"`` and ``"foo.py"`` will be
119 different, otherwise path matching will be case *insensitive*.
120 exclude_dirs (list): A list of patterns to exclude when searching,
121 e.g. ``["*.git"]``.
122
123 """
119124 self.fs = fs
120125 self.pattern = pattern
121126 self.path = path
159164
160165 def __iter__(self):
161166 # type: () -> Iterator[GlobMatch]
162 """An iterator of :class:`fs.glob.GlobMatch` objects."""
167 """Get an iterator of :class:`fs.glob.GlobMatch` objects."""
163168 return self._make_iter()
164169
165170 def count(self):
167172 """Count files / directories / data in matched paths.
168173
169174 Example:
170 >>> import fs
171 >>> fs.open_fs('~/projects').glob('**/*.py').count()
172 Counts(files=18519, directories=0, data=206690458)
175 >>> my_fs.glob('**/*.py').count()
176 Counts(files=2, directories=0, data=55)
173177
174178 Returns:
175179 `~Counts`: A named tuple containing results.
194198 `~LineCounts`: A named tuple containing line counts.
195199
196200 Example:
197 >>> import fs
198 >>> fs.open_fs('~/projects').glob('**/*.py').count_lines()
199 LineCounts(lines=5767102, non_blank=4915110)
200
201 """
202
201 >>> my_fs.glob('**/*.py').count_lines()
202 LineCounts(lines=4, non_blank=3)
203
204 """
203205 lines = 0
204206 non_blank = 0
205207 for path, info in self._make_iter():
212214
213215 def remove(self):
214216 # type: () -> int
215 """Removed all matched paths.
217 """Remove all matched paths.
216218
217219 Returns:
218220 int: Number of file and directories removed.
219221
220222 Example:
221 >>> import fs
222 >>> fs.open_fs('~/projects/my_project').glob('**/*.pyc').remove()
223 29
223 >>> my_fs.glob('**/*.pyc').remove()
224 2
224225
225226 """
226227 removes = 0
234235
235236
236237 class BoundGlobber(object):
237 """A :class:`~Globber` object bound to a filesystem.
238 """A `~fs.glob.Globber` object bound to a filesystem.
238239
239240 An instance of this object is available on every Filesystem object
240 as ``.glob``.
241
242 Arguments:
243 fs (FS): A filesystem object.
241 as the `~fs.base.FS.glob` property.
244242
245243 """
246244
248246
249247 def __init__(self, fs):
250248 # type: (FS) -> None
249 """Create a new bound Globber.
250
251 Arguments:
252 fs (FS): A filesystem object to bind to.
253
254 """
251255 self.fs = fs
252256
253257 def __repr__(self):
269273 e.g. ``["*.git"]``.
270274
271275 Returns:
272 `~Globber`:
273 An object that may be queried for the glob matches.
274
276 `Globber`: An object that may be queried for the glob matches.
275277
276278 """
277279 return Globber(
00 """Container for filesystem resource informations.
11 """
22
3 from __future__ import absolute_import
4 from __future__ import print_function
5 from __future__ import unicode_literals
3 from __future__ import absolute_import, print_function, unicode_literals
64
75 import typing
86 from typing import cast
7
8 import six
99 from copy import deepcopy
1010
11 import six
12
13 from .path import join
11 from ._typing import Text, overload
1412 from .enums import ResourceType
1513 from .errors import MissingInfoNamespace
14 from .path import join
1615 from .permissions import Permissions
1716 from .time import epoch_to_datetime
18 from ._typing import overload, Text
1917
2018 if typing.TYPE_CHECKING:
19 from typing import Any, Callable, List, Mapping, Optional, Union
20
2121 from datetime import datetime
22 from typing import Any, Callable, List, Mapping, Optional, Union
2322
2423 RawInfo = Mapping[Text, Mapping[Text, object]]
2524 ToDatetime = Callable[[int], datetime]
4039 raw_info (dict): A dict containing resource info.
4140 to_datetime (callable): A callable that converts an
4241 epoch time to a datetime object. The default uses
43 :func:`~fs.time.epoch_to_datetime`.
42 `~fs.time.epoch_to_datetime`.
4443
4544 """
4645
4847
4948 def __init__(self, raw_info, to_datetime=epoch_to_datetime):
5049 # type: (RawInfo, ToDatetime) -> None
51 """Create a resource info object from a raw info dict.
52 """
50 """Create a resource info object from a raw info dict."""
5351 self.raw = raw_info
5452 self._to_datetime = to_datetime
5553 self.namespaces = frozenset(self.raw.keys())
7270 # type: (None) -> None
7371 pass
7472
75 @overload # noqa: F811
76 def _make_datetime(self, t):
73 @overload
74 def _make_datetime(self, t): # noqa: F811
7775 # type: (int) -> datetime
7876 pass
7977
9088 pass
9189
9290 @overload # noqa: F811
93 def get(self, namespace, key, default):
91 def get(self, namespace, key, default): # noqa: F811
9492 # type: (Text, Text, T) -> Union[Any, T]
9593 pass
9694
106104 is not found.
107105
108106 Example:
109 >>> info.get('access', 'permissions')
110 ['u_r', 'u_w', '_wx']
107 >>> info = my_fs.getinfo("foo.py", namespaces=["details"])
108 >>> info.get('details', 'type')
109 2
111110
112111 """
113112 try:
131130 # type: (Text, Text) -> bool
132131 """Check if a given key in a namespace is writable.
133132
134 Uses `~fs.base.FS.setinfo`.
133 When creating an `Info` object, you can add a ``_write`` key to
134 each raw namespace that lists which keys are writable or not.
135
136 In general, this means they are compatible with the `setinfo`
137 function of filesystem objects.
135138
136139 Arguments:
137140 namespace (str): A namespace identifier.
140143 Returns:
141144 bool: `True` if the key can be modified, `False` otherwise.
142145
146 Example:
147 Create an `Info` object that marks only the ``modified`` key
148 as writable in the ``details`` namespace::
149
150 >>> now = time.time()
151 >>> info = Info({
152 ... "basic": {"name": "foo", "is_dir": False},
153 ... "details": {
154 ... "modified": now,
155 ... "created": now,
156 ... "_write": ["modified"],
157 ... }
158 ... })
159 >>> info.is_writeable("details", "created")
160 False
161 >>> info.is_writeable("details", "modified")
162 True
163
143164 """
144165 _writeable = self.get(namespace, "_write", ())
145166 return key in _writeable
159180
160181 def copy(self, to_datetime=None):
161182 # type: (Optional[ToDatetime]) -> Info
162 """Create a copy of this resource info object.
163 """
183 """Create a copy of this resource info object."""
164184 return Info(deepcopy(self.raw), to_datetime=to_datetime or self._to_datetime)
165185
166186 def make_path(self, dir_path):
179199 @property
180200 def name(self):
181201 # type: () -> Text
182 """`str`: the resource name.
183 """
202 """`str`: the resource name."""
184203 return cast(Text, self.get("basic", "name"))
185204
186205 @property
187206 def suffix(self):
188207 # type: () -> Text
189 """`str`: the last component of the name (including dot), or an
190 empty string if there is no suffix.
208 """`str`: the last component of the name (with dot).
209
210 In case there is no suffix, an empty string is returned.
191211
192212 Example:
193 >>> info
194 <info 'foo.py'>
213 >>> info = my_fs.getinfo("foo.py")
195214 >>> info.suffix
196215 '.py'
216 >>> info2 = my_fs.getinfo("bar")
217 >>> info2.suffix
218 ''
219
197220 """
198221 name = self.get("basic", "name")
199222 if name.startswith(".") and name.count(".") == 1:
207230 """`List`: a list of any suffixes in the name.
208231
209232 Example:
210 >>> info
211 <info 'foo.tar.gz'>
233 >>> info = my_fs.getinfo("foo.tar.gz")
212234 >>> info.suffixes
213235 ['.tar', '.gz']
236
214237 """
215238 name = self.get("basic", "name")
216239 if name.startswith(".") and name.count(".") == 1:
223246 """`str`: the name minus any suffixes.
224247
225248 Example:
226 >>> info
227 <info 'foo.tar.gz'>
249 >>> info = my_fs.getinfo("foo.tar.gz")
228250 >>> info.stem
229251 'foo'
230252
237259 @property
238260 def is_dir(self):
239261 # type: () -> bool
240 """`bool`: `True` if the resource references a directory.
241 """
262 """`bool`: `True` if the resource references a directory."""
242263 return cast(bool, self.get("basic", "is_dir"))
243264
244265 @property
245266 def is_file(self):
246267 # type: () -> bool
247 """`bool`: `True` if the resource references a file.
248 """
268 """`bool`: `True` if the resource references a file."""
249269 return not cast(bool, self.get("basic", "is_dir"))
250270
251271 @property
252272 def is_link(self):
253273 # type: () -> bool
254 """`bool`: `True` if the resource is a symlink.
255 """
274 """`bool`: `True` if the resource is a symlink."""
256275 self._require_namespace("link")
257276 return self.get("link", "target", None) is not None
258277
00 """Compatibility tools between Python 2 and Python 3 I/O interfaces.
11 """
22
3 from __future__ import print_function
4 from __future__ import unicode_literals
5
3 from __future__ import print_function, unicode_literals
4
5 import typing
6
7 import array
68 import io
7 import typing
8 from io import SEEK_SET, SEEK_CUR
9 from io import SEEK_CUR, SEEK_SET
910
1011 from .mode import Mode
1112
1213 if typing.TYPE_CHECKING:
14 from typing import IO, Any, Iterable, Iterator, List, Optional, Text, Union
15
16 import mmap
1317 from io import RawIOBase
14 from typing import (
15 Any,
16 Iterable,
17 Iterator,
18 IO,
19 List,
20 Optional,
21 Text,
22 Union,
23 )
2418
2519
2620 class RawWrapper(io.RawIOBase):
27 """Convert a Python 2 style file-like object in to a IO object.
28 """
29
30 def __init__(self, f, mode=None, name=None):
21 """Convert a Python 2 style file-like object in to a IO object."""
22
23 def __init__(self, f, mode=None, name=None): # noqa: D107
3124 # type: (IO[bytes], Optional[Text], Optional[Text]) -> None
3225 self._f = f
3326 self.mode = mode or getattr(f, "mode", None)
8881 return self._f.truncate(size)
8982
9083 def write(self, data):
91 # type: (bytes) -> int
92 count = self._f.write(data)
84 # type: (Union[bytes, memoryview, array.array[Any], mmap.mmap]) -> int
85 if isinstance(data, array.array):
86 count = self._f.write(data.tobytes())
87 else:
88 count = self._f.write(data) # type: ignore
9389 return len(data) if count is None else count
9490
9591 @typing.no_type_check
130126 b[:bytes_read] = data
131127 return bytes_read
132128
133 def readline(self, limit=-1):
134 # type: (int) -> bytes
135 return self._f.readline(limit)
136
137 def readlines(self, hint=-1):
138 # type: (int) -> List[bytes]
139 return self._f.readlines(hint)
140
141 def writelines(self, sequence):
142 # type: (Iterable[Union[bytes, bytearray]]) -> None
143 return self._f.writelines(sequence)
129 def readline(self, limit=None):
130 # type: (Optional[int]) -> bytes
131 return self._f.readline(-1 if limit is None else limit)
132
133 def readlines(self, hint=None):
134 # type: (Optional[int]) -> List[bytes]
135 return self._f.readlines(-1 if hint is None else hint)
136
137 def writelines(self, lines):
138 # type: (Iterable[Union[bytes, memoryview, array.array[Any], mmap.mmap]]) -> None # noqa: E501
139 _lines = (
140 line.tobytes() if isinstance(line, array.array) else line for line in lines
141 )
142 return self._f.writelines(typing.cast("Iterable[bytes]", _lines))
144143
145144 def __iter__(self):
146145 # type: () -> Iterator[bytes]
160159 **kwargs # type: Any
161160 ):
162161 # type: (...) -> IO
163 """Take a Python 2.x binary file and return an IO Stream.
164 """
162 """Take a Python 2.x binary file and return an IO Stream."""
165163 reading = "r" in mode
166164 writing = "w" in mode
167165 appending = "a" in mode
00 """Least Recently Used cache mapping.
11 """
22
3 from __future__ import absolute_import
4 from __future__ import unicode_literals
3 from __future__ import absolute_import, unicode_literals
54
65 import typing
6
77 from collections import OrderedDict
8
98
109 _K = typing.TypeVar("_K")
1110 _V = typing.TypeVar("_V")
2120
2221 def __init__(self, cache_size):
2322 # type: (int) -> None
23 """Create a new LRUCache with the given size."""
2424 self.cache_size = cache_size
2525 super(LRUCache, self).__init__()
2626
2727 def __setitem__(self, key, value):
2828 # type: (_K, _V) -> None
29 """Store a new views, potentially discarding an old value.
30 """
29 """Store a new views, potentially discarding an old value."""
3130 if key not in self:
3231 if len(self) >= self.cache_size:
3332 self.popitem(last=False)
3534
3635 def __getitem__(self, key):
3736 # type: (_K) -> _V
38 """Get the item, but also makes it most recent.
39 """
37 """Get the item, but also makes it most recent."""
4038 _super = typing.cast(OrderedDict, super(LRUCache, self))
4139 value = _super.__getitem__(key)
4240 _super.__delitem__(key)
00 """Manage a volatile in-memory filesystem.
11 """
2 from __future__ import absolute_import
3 from __future__ import unicode_literals
2 from __future__ import absolute_import, unicode_literals
3
4 import typing
45
56 import contextlib
67 import io
78 import os
9 import six
810 import time
9 import typing
1011 from collections import OrderedDict
1112 from threading import RLock
1213
13 import six
14
1514 from . import errors
15 from ._typing import overload
1616 from .base import FS
17 from .copy import copy_modified_time
1718 from .enums import ResourceType, Seek
1819 from .info import Info
1920 from .mode import Mode
20 from .path import iteratepath
21 from .path import normpath
22 from .path import split
23 from ._typing import overload
21 from .path import iteratepath, normpath, split
2422
2523 if typing.TYPE_CHECKING:
2624 from typing import (
2826 BinaryIO,
2927 Collection,
3028 Dict,
29 Iterable,
3130 Iterator,
3231 List,
3332 Optional,
3433 SupportsInt,
34 Text,
35 Tuple,
3536 Union,
36 Text,
3737 )
38
39 import array
40 import mmap
41
3842 from .base import _OpendirFactory
3943 from .info import RawInfo
4044 from .permissions import Permissions
8993
9094 def on_modify(self): # noqa: D401
9195 # type: () -> None
92 """Called when file data is modified.
93 """
96 """Called when file data is modified."""
9497 self._dir_entry.modified_time = self.modified_time = time.time()
9598
9699 def on_access(self): # noqa: D401
97100 # type: () -> None
98 """Called when file is accessed.
99 """
101 """Called when file is accessed."""
100102 self._dir_entry.accessed_time = self.accessed_time = time.time()
101103
102104 def flush(self):
117119
118120 __next__ = next
119121
120 def readline(self, size=-1):
121 # type: (int) -> bytes
122 def readline(self, size=None):
123 # type: (Optional[int]) -> bytes
122124 if not self._mode.reading:
123125 raise IOError("File not open for reading")
124126 with self._seek_lock():
132134 self._dir_entry.remove_open_file(self)
133135 super(_MemoryFile, self).close()
134136
135 def read(self, size=-1):
137 def read(self, size=None):
136138 # type: (Optional[int]) -> bytes
137139 if not self._mode.reading:
138140 raise IOError("File not open for reading")
191193 return self._mode.writing
192194
193195 def write(self, data):
194 # type: (bytes) -> int
196 # type: (Union[bytes, memoryview, array.array[Any], mmap.mmap]) -> int
195197 if not self._mode.writing:
196198 raise IOError("File not open for writing")
197199 with self._seek_lock():
198200 self.on_modify()
199201 return self._bytes_io.write(data)
200202
201 def writelines(self, sequence): # type: ignore
202 # type: (List[bytes]) -> None
203 # FIXME(@althonos): For some reason the stub for IOBase.writelines
204 # is List[Any] ?! It should probably be Iterable[ByteString]
203 def writelines(self, sequence):
204 # type: (Iterable[Union[bytes, memoryview, array.array[Any], mmap.mmap]]) -> None # noqa: E501
205205 with self._seek_lock():
206206 self.on_modify()
207207 self._bytes_io.writelines(sequence)
246246 _bytes_file.seek(0, os.SEEK_END)
247247 return _bytes_file.tell()
248248
249 @overload # noqa: F811
250 def get_entry(self, name, default):
249 @overload
250 def get_entry(self, name, default): # noqa: F811
251251 # type: (Text, _DirEntry) -> _DirEntry
252252 pass
253253
254 @overload # noqa: F811
255 def get_entry(self, name):
254 @overload
255 def get_entry(self, name): # noqa: F811
256256 # type: (Text) -> Optional[_DirEntry]
257257 pass
258258
259 @overload # noqa: F811
260 def get_entry(self, name, default):
259 @overload
260 def get_entry(self, name, default): # noqa: F811
261261 # type: (Text, None) -> Optional[_DirEntry]
262262 pass
263263
274274 # type: (Text) -> None
275275 del self._dir[name]
276276
277 def clear(self):
278 # type: () -> None
279 self._dir.clear()
280
277281 def __contains__(self, name):
278282 # type: (object) -> bool
279283 return name in self._dir
293297 def remove_open_file(self, memory_file):
294298 # type: (_MemoryFile) -> None
295299 self._open_files.remove(memory_file)
300
301 def to_info(self, namespaces=None):
302 # type: (Optional[Collection[Text]]) -> Info
303 namespaces = namespaces or ()
304 info = {"basic": {"name": self.name, "is_dir": self.is_dir}}
305 if "details" in namespaces:
306 info["details"] = {
307 "_write": ["accessed", "modified"],
308 "type": int(self.resource_type),
309 "size": self.size,
310 "accessed": self.accessed_time,
311 "modified": self.modified_time,
312 "created": self.created_time,
313 }
314 return Info(info)
296315
297316
298317 @six.python_2_unicode_compatible
304323 fast, but non-permanent. The `MemoryFS` constructor takes no
305324 arguments.
306325
307 Example:
308 >>> mem_fs = MemoryFS()
309
310 Or via an FS URL:
311 >>> import fs
312 >>> mem_fs = fs.open_fs('mem://')
326 Examples:
327 Create with the constructor::
328
329 >>> from fs.memoryfs import MemoryFS
330 >>> mem_fs = MemoryFS()
331
332 Or via an FS URL::
333
334 >>> import fs
335 >>> mem_fs = fs.open_fs('mem://')
313336
314337 """
315338
325348
326349 def __init__(self):
327350 # type: () -> None
328 """Create an in-memory filesystem.
329 """
351 """Create an in-memory filesystem."""
330352 self._meta = self._meta.copy()
331353 self.root = self._make_dir_entry(ResourceType.directory, "")
332354 super(MemoryFS, self).__init__()
345367
346368 def _get_dir_entry(self, dir_path):
347369 # type: (Text) -> Optional[_DirEntry]
348 """Get a directory entry, or `None` if one doesn't exist.
349 """
370 """Get a directory entry, or `None` if one doesn't exist."""
350371 with self._lock:
351372 dir_path = normpath(dir_path)
352373 current_entry = self.root # type: Optional[_DirEntry]
366387
367388 def getinfo(self, path, namespaces=None):
368389 # type: (Text, Optional[Collection[Text]]) -> Info
369 namespaces = namespaces or ()
370390 _path = self.validatepath(path)
371391 dir_entry = self._get_dir_entry(_path)
372392 if dir_entry is None:
373393 raise errors.ResourceNotFound(path)
374 info = {"basic": {"name": dir_entry.name, "is_dir": dir_entry.is_dir}}
375 if "details" in namespaces:
376 info["details"] = {
377 "_write": ["accessed", "modified"],
378 "type": int(dir_entry.resource_type),
379 "size": dir_entry.size,
380 "accessed": dir_entry.accessed_time,
381 "modified": dir_entry.modified_time,
382 "created": dir_entry.created_time,
383 }
384 return Info(info)
394 return dir_entry.to_info(namespaces=namespaces)
385395
386396 def listdir(self, path):
387397 # type: (Text) -> List[Text]
388398 self.check()
389399 _path = self.validatepath(path)
390400 with self._lock:
401 # locate and validate the entry corresponding to the given path
391402 dir_entry = self._get_dir_entry(_path)
392403 if dir_entry is None:
393404 raise errors.ResourceNotFound(path)
394405 if not dir_entry.is_dir:
395406 raise errors.DirectoryExpected(path)
407 # return the filenames in the order they were created
396408 return dir_entry.list()
397409
398410 if typing.TYPE_CHECKING:
430442 new_dir = self._make_dir_entry(ResourceType.directory, dir_name)
431443 parent_dir.set_entry(dir_name, new_dir)
432444 return self.opendir(path)
445
446 def move(self, src_path, dst_path, overwrite=False, preserve_time=False):
447 src_dir, src_name = split(self.validatepath(src_path))
448 dst_dir, dst_name = split(self.validatepath(dst_path))
449
450 with self._lock:
451 src_dir_entry = self._get_dir_entry(src_dir)
452 if src_dir_entry is None or src_name not in src_dir_entry:
453 raise errors.ResourceNotFound(src_path)
454 src_entry = src_dir_entry.get_entry(src_name)
455 if src_entry.is_dir:
456 raise errors.FileExpected(src_path)
457
458 dst_dir_entry = self._get_dir_entry(dst_dir)
459 if dst_dir_entry is None:
460 raise errors.ResourceNotFound(dst_path)
461 elif not overwrite and dst_name in dst_dir_entry:
462 raise errors.DestinationExists(dst_path)
463
464 # move the entry from the src folder to the dst folder
465 dst_dir_entry.set_entry(dst_name, src_entry)
466 src_dir_entry.remove_entry(src_name)
467 # make sure to update the entry name itself (see #509)
468 src_entry.name = dst_name
469
470 if preserve_time:
471 copy_modified_time(self, src_path, self, dst_path)
472
473 def movedir(self, src_path, dst_path, create=False, preserve_time=False):
474 src_dir, src_name = split(self.validatepath(src_path))
475 dst_dir, dst_name = split(self.validatepath(dst_path))
476
477 with self._lock:
478 src_dir_entry = self._get_dir_entry(src_dir)
479 if src_dir_entry is None or src_name not in src_dir_entry:
480 raise errors.ResourceNotFound(src_path)
481 src_entry = src_dir_entry.get_entry(src_name)
482 if not src_entry.is_dir:
483 raise errors.DirectoryExpected(src_path)
484
485 # move the entry from the src folder to the dst folder
486 dst_dir_entry = self._get_dir_entry(dst_dir)
487 if dst_dir_entry is None or (not create and dst_name not in dst_dir_entry):
488 raise errors.ResourceNotFound(dst_path)
489
490 # move the entry from the src folder to the dst folder
491 dst_dir_entry.set_entry(dst_name, src_entry)
492 src_dir_entry.remove_entry(src_name)
493 # make sure to update the entry name itself (see #509)
494 src_entry.name = dst_name
495
496 if preserve_time:
497 copy_modified_time(self, src_path, self, dst_path)
433498
434499 def openbin(self, path, mode="r", buffering=-1, **options):
435500 # type: (Text, Text, int, **Any) -> BinaryIO
497562
498563 def removedir(self, path):
499564 # type: (Text) -> None
500 _path = self.validatepath(path)
501
565 # make sure we are not removing root
566 _path = self.validatepath(path)
502567 if _path == "/":
503568 raise errors.RemoveRootError()
504
505 with self._lock:
569 # make sure the directory is empty
570 if not self.isempty(path):
571 raise errors.DirectoryNotEmpty(path)
572 # we can now delegate to removetree since we confirmed that
573 # * path exists (isempty)
574 # * path is a folder (isempty)
575 # * path is not root
576 self.removetree(_path)
577
578 def removetree(self, path):
579 # type: (Text) -> None
580 _path = self.validatepath(path)
581
582 with self._lock:
583
584 if _path == "/":
585 self.root.clear()
586 return
587
506588 dir_path, file_name = split(_path)
507589 parent_dir_entry = self._get_dir_entry(dir_path)
508590
513595 if not dir_dir_entry.is_dir:
514596 raise errors.DirectoryExpected(path)
515597
516 if len(dir_dir_entry):
517 raise errors.DirectoryNotEmpty(path)
518
519598 parent_dir_entry.remove_entry(file_name)
599
600 def scandir(
601 self,
602 path, # type: Text
603 namespaces=None, # type: Optional[Collection[Text]]
604 page=None, # type: Optional[Tuple[int, int]]
605 ):
606 # type: (...) -> Iterator[Info]
607 self.check()
608 _path = self.validatepath(path)
609 with self._lock:
610 # locate and validate the entry corresponding to the given path
611 dir_entry = self._get_dir_entry(_path)
612 if dir_entry is None:
613 raise errors.ResourceNotFound(path)
614 if not dir_entry.is_dir:
615 raise errors.DirectoryExpected(path)
616 # if paging was requested, slice the filenames
617 filenames = dir_entry.list()
618 if page is not None:
619 start, end = page
620 filenames = filenames[start:end]
621 # yield info with the right namespaces
622 for name in filenames:
623 entry = typing.cast(_DirEntry, dir_entry.get_entry(name))
624 yield entry.to_info(namespaces=namespaces)
520625
521626 def setinfo(self, path, info):
522627 # type: (Text, RawInfo) -> None
1515
1616 """
1717
18 from __future__ import print_function
19 from __future__ import unicode_literals
18 from __future__ import print_function, unicode_literals
2019
2120 import typing
2221
2928
3029 if typing.TYPE_CHECKING:
3130 from typing import Callable, Optional, Text, Union
31
3232 from .base import FS
3333 from .info import Info
3434
5656 walker=None, # type: Optional[Walker]
5757 copy_if_newer=True, # type: bool
5858 workers=0, # type: int
59 preserve_time=False, # type: bool
5960 ):
6061 # type: (...) -> None
6162 """Mirror files / directories from one filesystem to another.
7273 workers (int): Number of worker threads used
7374 (0 for single threaded). Set to a relatively low number
7475 for network filesystems, 4 would be a good start.
76 preserve_time (bool): If `True`, try to preserve mtime of the
77 resources (defaults to `False`).
78
7579 """
7680
7781 def src():
8185 return manage_fs(dst_fs, create=True)
8286
8387 with src() as _src_fs, dst() as _dst_fs:
84 with _src_fs.lock(), _dst_fs.lock():
85 _thread_safe = is_thread_safe(_src_fs, _dst_fs)
86 with Copier(num_workers=workers if _thread_safe else 0) as copier:
88 _thread_safe = is_thread_safe(_src_fs, _dst_fs)
89 with Copier(
90 num_workers=workers if _thread_safe else 0, preserve_time=preserve_time
91 ) as copier:
92 with _src_fs.lock(), _dst_fs.lock():
8793 _mirror(
8894 _src_fs,
8995 _dst_fs,
9096 walker=walker,
9197 copy_if_newer=copy_if_newer,
9298 copy_file=copier.copy,
99 preserve_time=preserve_time,
93100 )
94101
95102
96103 def _mirror(
97 src_fs, dst_fs, walker=None, copy_if_newer=True, copy_file=copy_file_internal
104 src_fs, # type: FS
105 dst_fs, # type: FS
106 walker=None, # type: Optional[Walker]
107 copy_if_newer=True, # type: bool
108 copy_file=copy_file_internal, # type: Callable[[FS, str, FS, str, bool], None]
109 preserve_time=False, # type: bool
98110 ):
99 # type: (FS, FS, Optional[Walker], bool, Callable[[FS, str, FS, str], None]) -> None
111 # type: (...) -> None
100112 walker = walker or Walker()
101113 walk = walker.walk(src_fs, namespaces=["details"])
102114 for path, dirs, files in walk:
120132 # Compare file info
121133 if copy_if_newer and not _compare(_file, dst_file):
122134 continue
123 copy_file(src_fs, _path, dst_fs, _path)
135 copy_file(src_fs, _path, dst_fs, _path, preserve_time)
124136
125137 # Make directories
126138 for _dir in dirs:
44
55 """
66
7 from __future__ import print_function
8 from __future__ import unicode_literals
7 from __future__ import print_function, unicode_literals
98
109 import typing
1110
1211 import six
1312
1413 from ._typing import Text
15
1614
1715 if typing.TYPE_CHECKING:
1816 from typing import FrozenSet, Set, Union
2927 A mode object provides properties that can be used to interrogate the
3028 `mode strings <https://docs.python.org/3/library/functions.html#open>`_
3129 used when opening files.
32
33 Arguments:
34 mode (str): A *mode* string, as used by `io.open`.
35
36 Raises:
37 ValueError: If the mode string is invalid.
3830
3931 Example:
4032 >>> mode = Mode('rb')
5143
5244 def __init__(self, mode):
5345 # type: (Text) -> None
46 """Create a new `Mode` instance.
47
48 Arguments:
49 mode (str): A *mode* string, as used by `io.open`.
50
51 Raises:
52 ValueError: If the mode string is invalid.
53
54 """
5455 self._mode = mode
5556 self.validate()
5657
6465
6566 def __contains__(self, character):
6667 # type: (object) -> bool
67 """Check if a mode contains a given character.
68 """
68 """Check if a mode contains a given character."""
6969 assert isinstance(character, Text)
7070 return character in self._mode
7171
122122 @property
123123 def create(self):
124124 # type: () -> bool
125 """`bool`: `True` if the mode would create a file.
126 """
125 """`bool`: `True` if the mode would create a file."""
127126 return "a" in self or "w" in self or "x" in self
128127
129128 @property
130129 def reading(self):
131130 # type: () -> bool
132 """`bool`: `True` if the mode permits reading.
133 """
131 """`bool`: `True` if the mode permits reading."""
134132 return "r" in self or "+" in self
135133
136134 @property
137135 def writing(self):
138136 # type: () -> bool
139 """`bool`: `True` if the mode permits writing.
140 """
137 """`bool`: `True` if the mode permits writing."""
141138 return "w" in self or "a" in self or "+" in self or "x" in self
142139
143140 @property
144141 def appending(self):
145142 # type: () -> bool
146 """`bool`: `True` if the mode permits appending.
147 """
143 """`bool`: `True` if the mode permits appending."""
148144 return "a" in self
149145
150146 @property
151147 def updating(self):
152148 # type: () -> bool
153 """`bool`: `True` if the mode permits both reading and writing.
154 """
149 """`bool`: `True` if the mode permits both reading and writing."""
155150 return "+" in self
156151
157152 @property
158153 def truncate(self):
159154 # type: () -> bool
160 """`bool`: `True` if the mode would truncate an existing file.
161 """
155 """`bool`: `True` if the mode would truncate an existing file."""
162156 return "w" in self or "x" in self
163157
164158 @property
165159 def exclusive(self):
166160 # type: () -> bool
167 """`bool`: `True` if the mode require exclusive creation.
168 """
161 """`bool`: `True` if the mode require exclusive creation."""
169162 return "x" in self
170163
171164 @property
172165 def binary(self):
173166 # type: () -> bool
174 """`bool`: `True` if a mode specifies binary.
175 """
167 """`bool`: `True` if a mode specifies binary."""
176168 return "b" in self
177169
178170 @property
179171 def text(self):
180172 # type: () -> bool
181 """`bool`: `True` if a mode specifies text.
182 """
173 """`bool`: `True` if a mode specifies text."""
183174 return "t" in self or "b" not in self
184175
185176
00 """Manage other filesystems as a folder hierarchy.
11 """
22
3 from __future__ import absolute_import
4 from __future__ import print_function
5 from __future__ import unicode_literals
3 from __future__ import absolute_import, print_function, unicode_literals
64
75 import typing
86
119 from . import errors
1210 from .base import FS
1311 from .memoryfs import MemoryFS
14 from .path import abspath
15 from .path import forcedir
16 from .path import normpath
17 from .mode import validate_open_mode
18 from .mode import validate_openbin_mode
12 from .mode import validate_open_mode, validate_openbin_mode
13 from .path import abspath, forcedir, normpath
1914
2015 if typing.TYPE_CHECKING:
2116 from typing import (
17 IO,
2218 Any,
2319 BinaryIO,
2420 Collection,
2521 Iterator,
26 IO,
2722 List,
2823 MutableSequence,
2924 Optional,
3126 Tuple,
3227 Union,
3328 )
29
3430 from .enums import ResourceType
3531 from .info import Info, RawInfo
3632 from .permissions import Permissions
4036
4137
4238 class MountError(Exception):
43 """Thrown when mounts conflict.
44 """
39 """Thrown when mounts conflict."""
4540
4641
4742 class MountFS(FS):
48 """A virtual filesystem that maps directories on to other file-systems.
49
50 Arguments:
51 auto_close (bool): If `True` (the default), the child
52 filesystems will be closed when `MountFS` is closed.
53
54 """
43 """A virtual filesystem that maps directories on to other file-systems."""
5544
5645 _meta = {
5746 "virtual": True,
6352
6453 def __init__(self, auto_close=True):
6554 # type: (bool) -> None
55 """Create a new `MountFS` instance.
56
57 Arguments:
58 auto_close (bool): If `True` (the default), the child
59 filesystems will be closed when `MountFS` is closed.
60
61 """
6662 super(MountFS, self).__init__()
6763 self.auto_close = auto_close
6864 self.default_fs = MemoryFS() # type: FS
00 """Functions for moving files between filesystems.
11 """
22
3 from __future__ import print_function
4 from __future__ import unicode_literals
3 from __future__ import print_function, unicode_literals
54
65 import typing
76
8 from .copy import copy_dir
9 from .copy import copy_file
7 from ._pathcompat import commonpath
8 from .copy import copy_dir, copy_file
9 from .errors import FSError
1010 from .opener import manage_fs
11 from .osfs import OSFS
12 from .path import frombase
1113
1214 if typing.TYPE_CHECKING:
13 from .base import FS
1415 from typing import Text, Union
1516
17 from .base import FS
1618
17 def move_fs(src_fs, dst_fs, workers=0):
18 # type: (Union[Text, FS], Union[Text, FS], int) -> None
19
20 def move_fs(
21 src_fs, # type: Union[Text, FS]
22 dst_fs, # type:Union[Text, FS]
23 workers=0, # type: int
24 preserve_time=False, # type: bool
25 ):
26 # type: (...) -> None
1927 """Move the contents of a filesystem to another filesystem.
2028
2129 Arguments:
2331 dst_fs (FS or str): Destination filesystem (instance or URL).
2432 workers (int): Use `worker` threads to copy data, or ``0`` (default) for
2533 a single-threaded copy.
34 preserve_time (bool): If `True`, try to preserve mtime of the
35 resources (defaults to `False`).
2636
2737 """
28 move_dir(src_fs, "/", dst_fs, "/", workers=workers)
38 move_dir(src_fs, "/", dst_fs, "/", workers=workers, preserve_time=preserve_time)
2939
3040
3141 def move_file(
3343 src_path, # type: Text
3444 dst_fs, # type: Union[Text, FS]
3545 dst_path, # type: Text
46 preserve_time=False, # type: bool
47 cleanup_dst_on_error=True, # type: bool
3648 ):
3749 # type: (...) -> None
3850 """Move a file from one filesystem to another.
4052 Arguments:
4153 src_fs (FS or str): Source filesystem (instance or URL).
4254 src_path (str): Path to a file on ``src_fs``.
43 dst_fs (FS or str); Destination filesystem (instance or URL).
55 dst_fs (FS or str): Destination filesystem (instance or URL).
4456 dst_path (str): Path to a file on ``dst_fs``.
57 preserve_time (bool): If `True`, try to preserve mtime of the
58 resources (defaults to `False`).
59 cleanup_dst_on_error (bool): If `True`, tries to delete the file copied to
60 ``dst_fs`` if deleting the file from ``src_fs`` fails (defaults to `True`).
4561
4662 """
47 with manage_fs(src_fs) as _src_fs:
48 with manage_fs(dst_fs, create=True) as _dst_fs:
63 with manage_fs(src_fs, writeable=True) as _src_fs:
64 with manage_fs(dst_fs, writeable=True, create=True) as _dst_fs:
4965 if _src_fs is _dst_fs:
5066 # Same filesystem, may be optimized
51 _src_fs.move(src_path, dst_path, overwrite=True)
52 else:
53 # Standard copy and delete
54 with _src_fs.lock(), _dst_fs.lock():
55 copy_file(_src_fs, src_path, _dst_fs, dst_path)
67 _src_fs.move(
68 src_path, dst_path, overwrite=True, preserve_time=preserve_time
69 )
70 return
71
72 if _src_fs.hassyspath(src_path) and _dst_fs.hassyspath(dst_path):
73 # if both filesystems have a syspath we create a new OSFS from a
74 # common parent folder and use it to move the file.
75 try:
76 src_syspath = _src_fs.getsyspath(src_path)
77 dst_syspath = _dst_fs.getsyspath(dst_path)
78 common = commonpath([src_syspath, dst_syspath])
79 if common:
80 rel_src = frombase(common, src_syspath)
81 rel_dst = frombase(common, dst_syspath)
82 with _src_fs.lock(), _dst_fs.lock():
83 with OSFS(common) as base:
84 base.move(rel_src, rel_dst, preserve_time=preserve_time)
85 return # optimization worked, exit early
86 except ValueError:
87 # This is raised if we cannot find a common base folder.
88 # In this case just fall through to the standard method.
89 pass
90
91 # Standard copy and delete
92 with _src_fs.lock(), _dst_fs.lock():
93 copy_file(
94 _src_fs,
95 src_path,
96 _dst_fs,
97 dst_path,
98 preserve_time=preserve_time,
99 )
100 try:
56101 _src_fs.remove(src_path)
102 except FSError as e:
103 # if the source cannot be removed we delete the copy on the
104 # destination
105 if cleanup_dst_on_error:
106 _dst_fs.remove(dst_path)
107 raise e
57108
58109
59110 def move_dir(
62113 dst_fs, # type: Union[Text, FS]
63114 dst_path, # type: Text
64115 workers=0, # type: int
116 preserve_time=False, # type: bool
65117 ):
66118 # type: (...) -> None
67119 """Move a directory from one filesystem to another.
71123 src_path (str): Path to a directory on ``src_fs``
72124 dst_fs (FS or str): Destination filesystem (instance or URL).
73125 dst_path (str): Path to a directory on ``dst_fs``.
74 workers (int): Use `worker` threads to copy data, or ``0`` (default) for
75 a single-threaded copy.
126 workers (int): Use ``worker`` threads to copy data, or ``0``
127 (default) for a single-threaded copy.
128 preserve_time (bool): If `True`, try to preserve mtime of the
129 resources (defaults to `False`).
76130
77131 """
78
79 def src():
80 return manage_fs(src_fs, writeable=False)
81
82 def dst():
83 return manage_fs(dst_fs, create=True)
84
85 with src() as _src_fs, dst() as _dst_fs:
86 with _src_fs.lock(), _dst_fs.lock():
87 _dst_fs.makedir(dst_path, recreate=True)
88 copy_dir(src_fs, src_path, dst_fs, dst_path, workers=workers)
89 _src_fs.removetree(src_path)
132 with manage_fs(src_fs, writeable=True) as _src_fs:
133 with manage_fs(dst_fs, writeable=True, create=True) as _dst_fs:
134 with _src_fs.lock(), _dst_fs.lock():
135 _dst_fs.makedir(dst_path, recreate=True)
136 copy_dir(
137 src_fs,
138 src_path,
139 dst_fs,
140 dst_path,
141 workers=workers,
142 preserve_time=preserve_time,
143 )
144 _src_fs.removetree(src_path)
00 """Manage several filesystems through a single view.
11 """
22
3 from __future__ import absolute_import
4 from __future__ import unicode_literals
5 from __future__ import print_function
3 from __future__ import absolute_import, print_function, unicode_literals
64
75 import typing
8 from collections import namedtuple, OrderedDict
6
7 from collections import OrderedDict, namedtuple
98 from operator import itemgetter
10
119 from six import text_type
1210
1311 from . import errors
1816
1917 if typing.TYPE_CHECKING:
2018 from typing import (
19 IO,
2120 Any,
2221 BinaryIO,
2322 Collection,
2423 Iterator,
25 IO,
24 List,
2625 MutableMapping,
27 List,
2826 MutableSet,
2927 Optional,
3028 Text,
3129 Tuple,
3230 )
31
3332 from .enums import ResourceType
3433 from .info import Info, RawInfo
3534 from .permissions import Permissions
5453
5554 def __init__(self, auto_close=True):
5655 # type: (bool) -> None
56 """Create a new MultiFS.
57
58 Arguments:
59 auto_close (bool): If `True` (the default), the child
60 filesystems will be closed when `MultiFS` is closed.
61
62 """
5763 super(MultiFS, self).__init__()
5864
5965 self._auto_close = auto_close
126132
127133 def _resort(self):
128134 # type: () -> None
129 """Force `iterate_fs` to re-sort on next reference.
130 """
135 """Force `iterate_fs` to re-sort on next reference."""
131136 self._fs_sequence = None
132137
133138 def iterate_fs(self):
134139 # type: () -> Iterator[Tuple[Text, FS]]
135 """Get iterator that returns (name, fs) in priority order.
136 """
140 """Get iterator that returns (name, fs) in priority order."""
137141 if self._fs_sequence is None:
138142 self._fs_sequence = [
139143 (name, fs)
145149
146150 def _delegate(self, path):
147151 # type: (Text) -> Optional[FS]
148 """Get a filesystem which has a given path.
149 """
152 """Get a filesystem which has a given path."""
150153 for _name, fs in self.iterate_fs():
151154 if fs.exists(path):
152155 return fs
154157
155158 def _delegate_required(self, path):
156159 # type: (Text) -> FS
157 """Check that there is a filesystem with the given ``path``.
158 """
160 """Check that there is a filesystem with the given ``path``."""
159161 fs = self._delegate(path)
160162 if fs is None:
161163 raise errors.ResourceNotFound(path)
163165
164166 def _writable_required(self, path):
165167 # type: (Text) -> FS
166 """Check that ``path`` is writeable.
167 """
168 """Check that ``path`` is writeable."""
168169 if self.write_fs is None:
169170 raise errors.ResourceReadOnly(path)
170171 return self.write_fs
44 # Declare fs.opener as a namespace package
55 __import__("pkg_resources").declare_namespace(__name__) # type: ignore
66
7 # Import opener modules so that `registry.install` if called on each opener
8 from . import appfs, ftpfs, memoryfs, osfs, tarfs, tempfs, zipfs
9
710 # Import objects into fs.opener namespace
811 from .base import Opener
912 from .parse import parse_fs_url as parse
1013 from .registry import registry
11
12 # Import opener modules so that `registry.install` if called on each opener
13 from . import appfs, ftpfs, memoryfs, osfs, tarfs, tempfs, zipfs
1414
1515 # Alias functions defined as Registry methods
1616 open_fs = registry.open_fs
11 """``AppFS`` opener definition.
22 """
33
4 from __future__ import absolute_import
5 from __future__ import print_function
6 from __future__ import unicode_literals
4 from __future__ import absolute_import, print_function, unicode_literals
75
86 import typing
97
108 from .base import Opener
9 from .errors import OpenerError
1110 from .registry import registry
12 from .errors import OpenerError
1311
1412 if typing.TYPE_CHECKING:
1513 from typing import Text, Union
16 from .parse import ParseResult
14
1715 from ..appfs import _AppFS
1816 from ..subfs import SubFS
17 from .parse import ParseResult
1918
2019
2120 @registry.install
2221 class AppFSOpener(Opener):
23 """``AppFS`` opener.
24 """
22 """``AppFS`` opener."""
2523
2624 protocols = ["userdata", "userconf", "sitedata", "siteconf", "usercache", "userlog"]
2725 _protocol_mapping = None
3634 ):
3735 # type: (...) -> Union[_AppFS, SubFS[_AppFS]]
3836
37 from .. import appfs
3938 from ..subfs import ClosingSubFS
40 from .. import appfs
4139
4240 if self._protocol_mapping is None:
4341 self._protocol_mapping = {
11 """`Opener` abstract base class.
22 """
33
4 import abc
54 import typing
65
6 import abc
77 import six
88
99 if typing.TYPE_CHECKING:
1010 from typing import List, Text
11
1112 from ..base import FS
1213 from .parse import ParseResult
1314
33
44
55 class ParseError(ValueError):
6 """Attempt to parse an invalid FS URL.
7 """
6 """Attempt to parse an invalid FS URL."""
87
98
109 class OpenerError(Exception):
11 """Base exception for opener related errors.
12 """
10 """Base exception for opener related errors."""
1311
1412
1513 class UnsupportedProtocol(OpenerError):
16 """No opener found for the given protocol.
17 """
14 """No opener found for the given protocol."""
1815
1916
2017 class EntryPointError(OpenerError):
21 """An entry point could not be loaded.
22 """
18 """An entry point could not be loaded."""
2319
2420
2521 class NotWriteable(OpenerError):
26 """A writable FS could not be created.
27 """
22 """A writable FS could not be created."""
11 """`FTPFS` opener definition.
22 """
33
4 from __future__ import absolute_import
5 from __future__ import print_function
6 from __future__ import unicode_literals
4 from __future__ import absolute_import, print_function, unicode_literals
75
86 import typing
97
8 from ..errors import CreateFailed
109 from .base import Opener
1110 from .registry import registry
12 from ..errors import CreateFailed
1311
1412 if typing.TYPE_CHECKING:
1513 from typing import Text, Union
14
1615 from ..ftpfs import FTPFS # noqa: F401
1716 from ..subfs import SubFS
1817 from .parse import ParseResult
2019
2120 @registry.install
2221 class FTPOpener(Opener):
23 """`FTPFS` opener.
24 """
22 """`FTPFS` opener."""
2523
26 protocols = ["ftp"]
24 protocols = ["ftp", "ftps"]
2725
2826 @CreateFailed.catch_all
2927 def open_fs(
4846 passwd=parse_result.password,
4947 proxy=parse_result.params.get("proxy"),
5048 timeout=int(parse_result.params.get("timeout", "10")),
49 tls=bool(parse_result.protocol == "ftps"),
5150 )
5251 if dir_path:
5352 if create:
11 """`MemoryFS` opener definition.
22 """
33
4 from __future__ import absolute_import
5 from __future__ import print_function
6 from __future__ import unicode_literals
4 from __future__ import absolute_import, print_function, unicode_literals
75
86 import typing
97
1210
1311 if typing.TYPE_CHECKING:
1412 from typing import Text
13
14 from ..memoryfs import MemoryFS # noqa: F401
1515 from .parse import ParseResult
16 from ..memoryfs import MemoryFS # noqa: F401
1716
1817
1918 @registry.install
2019 class MemOpener(Opener):
21 """`MemoryFS` opener.
22 """
20 """`MemoryFS` opener."""
2321
2422 protocols = ["mem"]
2523
11 """`OSFS` opener definition.
22 """
33
4 from __future__ import absolute_import
5 from __future__ import print_function
6 from __future__ import unicode_literals
4 from __future__ import absolute_import, print_function, unicode_literals
75
86 import typing
97
1210
1311 if typing.TYPE_CHECKING:
1412 from typing import Text
13
14 from ..osfs import OSFS # noqa: F401
1515 from .parse import ParseResult
16 from ..osfs import OSFS # noqa: F401
1716
1817
1918 @registry.install
2019 class OSFSOpener(Opener):
21 """`OSFS` opener.
22 """
20 """`OSFS` opener."""
2321
2422 protocols = ["file", "osfs"]
2523
3230 cwd, # type: Text
3331 ):
3432 # type: (...) -> OSFS
33 from os.path import abspath, expanduser, join, normpath
34
3535 from ..osfs import OSFS
36 from os.path import abspath, expanduser, normpath, join
3736
3837 _path = abspath(join(cwd, expanduser(parse_result.resource)))
3938 path = normpath(_path)
00 """Function to parse FS URLs in to their constituent parts.
11 """
22
3 from __future__ import absolute_import
4 from __future__ import print_function
5 from __future__ import unicode_literals
3 from __future__ import absolute_import, print_function, unicode_literals
4
5 import typing
66
77 import collections
88 import re
9 import typing
10
119 import six
1210 from six.moves.urllib.parse import parse_qs, unquote
1311
1715 from typing import Optional, Text
1816
1917
20 _ParseResult = collections.namedtuple(
21 "ParseResult", ["protocol", "username", "password", "resource", "params", "path"]
22 )
23
24
25 class ParseResult(_ParseResult):
18 class ParseResult(
19 collections.namedtuple(
20 "ParseResult",
21 ["protocol", "username", "password", "resource", "params", "path"],
22 )
23 ):
2624 """A named tuple containing fields of a parsed FS URL.
2725
2826 Attributes:
11 """`Registry` class mapping protocols and FS URLs to their `Opener`.
22 """
33
4 from __future__ import absolute_import
5 from __future__ import print_function
6 from __future__ import unicode_literals
4 from __future__ import absolute_import, print_function, unicode_literals
5
6 import typing
77
88 import collections
99 import contextlib
10 import typing
11
1210 import pkg_resources
1311
12 from ..errors import ResourceReadOnly
1413 from .base import Opener
15 from .errors import UnsupportedProtocol, EntryPointError
14 from .errors import EntryPointError, UnsupportedProtocol
1615 from .parse import parse_fs_url
1716
1817 if typing.TYPE_CHECKING:
19 from typing import (
20 Callable,
21 Dict,
22 Iterator,
23 List,
24 Text,
25 Type,
26 Tuple,
27 Union,
28 )
18 from typing import Callable, Dict, Iterator, List, Text, Tuple, Type, Union
19
2920 from ..base import FS
3021
3122
3223 class Registry(object):
33 """A registry for `Opener` instances.
34 """
24 """A registry for `Opener` instances."""
3525
3626 def __init__(self, default_opener="osfs", load_extern=False):
3727 # type: (Text, bool) -> None
6353
6454 Note:
6555 May be used as a class decorator. For example::
56
6657 registry = Registry()
6758 @registry.install
6859 class ArchiveOpener(Opener):
6960 protocols = ['zip', 'tar']
61
7062 """
7163 _opener = opener if isinstance(opener, Opener) else opener()
7264 assert isinstance(_opener, Opener), "Opener instance required"
7870 @property
7971 def protocols(self):
8072 # type: () -> List[Text]
81 """`list`: the list of supported protocols.
82 """
83
73 """`list`: the list of supported protocols."""
8474 _protocols = list(self._protocols)
8575 if self.load_extern:
8676 _protocols.extend(
198188 """Open a filesystem from a FS URL (ignoring the path component).
199189
200190 Arguments:
201 fs_url (str): A filesystem URL.
191 fs_url (str): A filesystem URL. If a filesystem instance is
192 given instead, it will be returned transparently.
202193 writeable (bool, optional): `True` if the filesystem must
203194 be writeable.
204195 create (bool, optional): `True` if the filesystem should be
210201
211202 Returns:
212203 ~fs.base.FS: A filesystem instance.
204
205 Caution:
206 The ``writeable`` parameter only controls whether the
207 filesystem *needs* to be writable, which is relevant for
208 some archive filesystems. Passing ``writeable=False`` will
209 **not** make the return filesystem read-only. For this,
210 consider using `fs.wrap.read_only` to wrap the returned
211 instance.
213212
214213 """
215214 from ..base import FS
251250 required logic for that.
252251
253252 Example:
254 >>> def print_ls(list_fs):
255 ... '''List a directory.'''
256 ... with manage_fs(list_fs) as fs:
257 ... print(' '.join(fs.listdir()))
253 The `~Registry.manage_fs` method can be used to define a small
254 utility function::
255
256 >>> def print_ls(list_fs):
257 ... '''List a directory.'''
258 ... with manage_fs(list_fs) as fs:
259 ... print(' '.join(fs.listdir()))
258260
259261 This function may be used in two ways. You may either pass
260262 a ``str``, as follows::
270272 """
271273 from ..base import FS
272274
275 def assert_writeable(fs):
276 if fs.getmeta().get("read_only", True):
277 raise ResourceReadOnly(path="/")
278
273279 if isinstance(fs_url, FS):
280 if writeable:
281 assert_writeable(fs_url)
274282 yield fs_url
275283 else:
276284 _fs = self.open_fs(fs_url, create=create, writeable=writeable, cwd=cwd)
285 if writeable:
286 assert_writeable(_fs)
277287 try:
278288 yield _fs
279289 finally:
11 """`TarFS` opener definition.
22 """
33
4 from __future__ import absolute_import
5 from __future__ import print_function
6 from __future__ import unicode_literals
4 from __future__ import absolute_import, print_function, unicode_literals
75
86 import typing
97
108 from .base import Opener
9 from .errors import NotWriteable
1110 from .registry import registry
12 from .errors import NotWriteable
1311
1412 if typing.TYPE_CHECKING:
1513 from typing import Text
14
15 from ..tarfs import TarFS # noqa: F401
1616 from .parse import ParseResult
17 from ..tarfs import TarFS # noqa: F401
1817
1918
2019 @registry.install
2120 class TarOpener(Opener):
22 """`TarFS` opener.
23 """
21 """`TarFS` opener."""
2422
2523 protocols = ["tar"]
2624
11 """`TempFS` opener definition.
22 """
33
4 from __future__ import absolute_import
5 from __future__ import print_function
6 from __future__ import unicode_literals
4 from __future__ import absolute_import, print_function, unicode_literals
75
86 import typing
97
1210
1311 if typing.TYPE_CHECKING:
1412 from typing import Text
13
14 from ..tempfs import TempFS # noqa: F401
1515 from .parse import ParseResult
16 from ..tempfs import TempFS # noqa: F401
1716
1817
1918 @registry.install
2019 class TempOpener(Opener):
21 """`TempFS` opener.
22 """
20 """`TempFS` opener."""
2321
2422 protocols = ["temp"]
2523
11 """`ZipFS` opener definition.
22 """
33
4 from __future__ import absolute_import
5 from __future__ import print_function
6 from __future__ import unicode_literals
4 from __future__ import absolute_import, print_function, unicode_literals
75
86 import typing
97
108 from .base import Opener
9 from .errors import NotWriteable
1110 from .registry import registry
12 from .errors import NotWriteable
1311
1412 if typing.TYPE_CHECKING:
1513 from typing import Text
14
15 from ..zipfs import ZipFS # noqa: F401
1616 from .parse import ParseResult
17 from ..zipfs import ZipFS # noqa: F401
1817
1918
2019 @registry.install
2120 class ZipOpener(Opener):
22 """`ZipFS` opener.
23 """
21 """`ZipFS` opener."""
2422
2523 protocols = ["zip"]
2624
33 of the Python standard library.
44 """
55
6 from __future__ import absolute_import
7 from __future__ import print_function
8 from __future__ import unicode_literals
6 from __future__ import absolute_import, print_function, unicode_literals
7
8 import sys
9 import typing
910
1011 import errno
1112 import io
1415 import os
1516 import platform
1617 import shutil
18 import six
1719 import stat
18 import sys
1920 import tempfile
20 import typing
21
22 import six
2321
2422 try:
2523 from os import scandir
3836 sendfile = None # type: ignore # pragma: no cover
3937
4038 from . import errors
39 from ._fscompat import fsdecode, fsencode, fspath
40 from ._url_tools import url_quote
4141 from .base import FS
42 from .copy import copy_modified_time
4243 from .enums import ResourceType
43 from ._fscompat import fsencode, fsdecode, fspath
44 from .error_tools import convert_os_errors
45 from .errors import FileExpected, NoURL
4446 from .info import Info
47 from .mode import Mode, validate_open_mode
4548 from .path import basename, dirname
4649 from .permissions import Permissions
47 from .error_tools import convert_os_errors
48 from .mode import Mode, validate_open_mode
49 from .errors import FileExpected, NoURL
50 from ._url_tools import url_quote
5150
5251 if typing.TYPE_CHECKING:
5352 from typing import (
53 IO,
5454 Any,
5555 BinaryIO,
5656 Collection,
5757 Dict,
5858 Iterator,
59 IO,
6059 List,
6160 Optional,
6261 SupportsInt,
6362 Text,
6463 Tuple,
6564 )
65
6666 from .base import _OpendirFactory
6767 from .info import RawInfo
6868 from .subfs import SubFS
7979 @six.python_2_unicode_compatible
8080 class OSFS(FS):
8181 """Create an OSFS.
82
83 Arguments:
84 root_path (str or ~os.PathLike): An OS path or path-like object to
85 the location on your HD you wish to manage.
86 create (bool): Set to `True` to create the root directory if it
87 does not already exist, otherwise the directory should exist
88 prior to creating the ``OSFS`` instance (defaults to `False`).
89 create_mode (int): The permissions that will be used to create
90 the directory if ``create`` is `True` and the path doesn't
91 exist, defaults to ``0o777``.
92 expand_vars(bool): If `True` (the default) environment variables of
93 the form $name or ${name} will be expanded.
94
95 Raises:
96 `fs.errors.CreateFailed`: If ``root_path`` does not
97 exist, or could not be created.
9882
9983 Examples:
10084 >>> current_directory_fs = OSFS('.')
11296 ):
11397 # type: (...) -> None
11498 """Create an OSFS instance.
99
100 Arguments:
101 root_path (str or ~os.PathLike): An OS path or path-like object
102 to the location on your HD you wish to manage.
103 create (bool): Set to `True` to create the root directory if it
104 does not already exist, otherwise the directory should exist
105 prior to creating the ``OSFS`` instance (defaults to `False`).
106 create_mode (int): The permissions that will be used to create
107 the directory if ``create`` is `True` and the path doesn't
108 exist, defaults to ``0o777``.
109 expand_vars(bool): If `True` (the default) environment variables
110 of the form ``~``, ``$name`` or ``${name}`` will be expanded.
111
112 Raises:
113 `fs.errors.CreateFailed`: If ``root_path`` does not
114 exist, or could not be created.
115
115116 """
116117 super(OSFS, self).__init__()
117118 if isinstance(root_path, bytes):
187188
188189 def _to_sys_path(self, path):
189190 # type: (Text) -> bytes
190 """Convert a FS path to a path on the OS.
191 """
191 """Convert a FS path to a path on the OS."""
192192 sys_path = fsencode(
193193 os.path.join(self._root_path, path.lstrip("/").replace("/", os.sep))
194194 )
197197 @classmethod
198198 def _make_details_from_stat(cls, stat_result):
199199 # type: (os.stat_result) -> Dict[Text, object]
200 """Make a *details* info dict from an `os.stat_result` object.
201 """
200 """Make a *details* info dict from an `os.stat_result` object."""
202201 details = {
203202 "_write": ["accessed", "modified"],
204203 "accessed": stat_result.st_atime,
217216 @classmethod
218217 def _make_access_from_stat(cls, stat_result):
219218 # type: (os.stat_result) -> Dict[Text, object]
220 """Make an *access* info dict from an `os.stat_result` object.
221 """
219 """Make an *access* info dict from an `os.stat_result` object."""
222220 access = {} # type: Dict[Text, object]
223221 access["permissions"] = Permissions(mode=stat_result.st_mode).dump()
224222 access["gid"] = gid = stat_result.st_gid
251249 @classmethod
252250 def _get_type_from_stat(cls, _stat):
253251 # type: (os.stat_result) -> ResourceType
254 """Get the resource type from an `os.stat_result` object.
255 """
252 """Get the resource type from an `os.stat_result` object."""
256253 st_mode = _stat.st_mode
257254 st_type = stat.S_IFMT(st_mode)
258255 return cls.STAT_TO_RESOURCE_TYPE.get(st_type, ResourceType.unknown)
433430 if hasattr(errno, "ENOTSUP"):
434431 _sendfile_error_codes.add(errno.ENOTSUP)
435432
436 def copy(self, src_path, dst_path, overwrite=False):
437 # type: (Text, Text, bool) -> None
433 def copy(self, src_path, dst_path, overwrite=False, preserve_time=False):
434 # type: (Text, Text, bool, bool) -> None
438435 with self._lock:
439436 # validate and canonicalise paths
440437 _src_path, _dst_path = self._check_copy(src_path, dst_path, overwrite)
454451 while sent > 0:
455452 sent = sendfile(fd_dst, fd_src, offset, maxsize)
456453 offset += sent
454 if preserve_time:
455 copy_modified_time(self, src_path, self, dst_path)
457456 except OSError as e:
458457 # the error is not a simple "sendfile not supported" error
459458 if e.errno not in self._sendfile_error_codes:
463462
464463 else:
465464
466 def copy(self, src_path, dst_path, overwrite=False):
467 # type: (Text, Text, bool) -> None
465 def copy(self, src_path, dst_path, overwrite=False, preserve_time=False):
466 # type: (Text, Text, bool, bool) -> None
468467 with self._lock:
469468 _src_path, _dst_path = self._check_copy(src_path, dst_path, overwrite)
470469 shutil.copy2(self.getsyspath(_src_path), self.getsyspath(_dst_path))
477476 # type: (Text, Optional[Collection[Text]]) -> Iterator[Info]
478477 self.check()
479478 namespaces = namespaces or ()
479 requires_stat = not {"details", "stat", "access"}.isdisjoint(namespaces)
480480 _path = self.validatepath(path)
481481 if _WINDOWS_PLATFORM:
482482 sys_path = os.path.join(
485485 else:
486486 sys_path = self._to_sys_path(_path) # type: ignore
487487 with convert_os_errors("scandir", path, directory=True):
488 for dir_entry in scandir(sys_path):
489 info = {
490 "basic": {
491 "name": fsdecode(dir_entry.name),
492 "is_dir": dir_entry.is_dir(),
488 scandir_iter = scandir(sys_path)
489 try:
490 for dir_entry in scandir_iter:
491 info = {
492 "basic": {
493 "name": fsdecode(dir_entry.name),
494 "is_dir": dir_entry.is_dir(),
495 }
493496 }
494 }
495 if "details" in namespaces:
496 stat_result = dir_entry.stat()
497 info["details"] = self._make_details_from_stat(stat_result)
498 if "stat" in namespaces:
499 stat_result = dir_entry.stat()
500 info["stat"] = {
501 k: getattr(stat_result, k)
502 for k in dir(stat_result)
503 if k.startswith("st_")
504 }
505 if "lstat" in namespaces:
506 lstat_result = dir_entry.stat(follow_symlinks=False)
507 info["lstat"] = {
508 k: getattr(lstat_result, k)
509 for k in dir(lstat_result)
510 if k.startswith("st_")
511 }
512 if "link" in namespaces:
513 info["link"] = self._make_link_info(
514 os.path.join(sys_path, dir_entry.name)
515 )
516 if "access" in namespaces:
517 stat_result = dir_entry.stat()
518 info["access"] = self._make_access_from_stat(stat_result)
519
520 yield Info(info)
497 if requires_stat:
498 stat_result = dir_entry.stat()
499 if "details" in namespaces:
500 info["details"] = self._make_details_from_stat(
501 stat_result
502 )
503 if "stat" in namespaces:
504 info["stat"] = {
505 k: getattr(stat_result, k)
506 for k in dir(stat_result)
507 if k.startswith("st_")
508 }
509 if "access" in namespaces:
510 info["access"] = self._make_access_from_stat(
511 stat_result
512 )
513 if "lstat" in namespaces:
514 lstat_result = dir_entry.stat(follow_symlinks=False)
515 info["lstat"] = {
516 k: getattr(lstat_result, k)
517 for k in dir(lstat_result)
518 if k.startswith("st_")
519 }
520 if "link" in namespaces:
521 info["link"] = self._make_link_info(
522 os.path.join(sys_path, dir_entry.name)
523 )
524
525 yield Info(info)
526 finally:
527 if sys.version_info >= (3, 6):
528 scandir_iter.close()
521529
522530 else:
523531
655663 if "details" in info:
656664 details = info["details"]
657665 if "accessed" in details or "modified" in details:
658 _accessed = typing.cast(int, details.get("accessed"))
659 _modified = typing.cast(int, details.get("modified", _accessed))
660 accessed = int(_modified if _accessed is None else _accessed)
661 modified = int(_modified)
666 _accessed = typing.cast(float, details.get("accessed"))
667 _modified = typing.cast(float, details.get("modified", _accessed))
668 accessed = float(_modified if _accessed is None else _accessed)
669 modified = float(_modified)
662670 if accessed is not None or modified is not None:
663671 with convert_os_errors("setinfo", path):
664672 os.utime(sys_path, (accessed, modified))
672680 raise errors.InvalidCharsInPath(
673681 path,
674682 msg="path '{path}' could not be encoded for the filesystem (check LANG"
675 " env var); {error}".format(path=path, error=error),
683 " env var); {error}".format(path=path, error=error),
676684 )
677685 return super(OSFS, self).validatepath(path)
77
88 """
99
10 from __future__ import print_function
11 from __future__ import unicode_literals
10 from __future__ import print_function, unicode_literals
11
12 import typing
1213
1314 import re
14 import typing
1515
1616 from .errors import IllegalBackReference
1717
6363 >>> normpath("/foo//bar/frob/../baz")
6464 '/foo/bar/baz'
6565 >>> normpath("foo/../../bar")
66 Traceback (most recent call last)
66 Traceback (most recent call last):
6767 ...
68 IllegalBackReference: path 'foo/../../bar' contains back-references outside of filesystem"
68 fs.errors.IllegalBackReference: path 'foo/../../bar' contains back-references outside of filesystem
6969
7070 """ # noqa: E501
7171 if path in "/":
8585 else:
8686 components.append(component)
8787 except IndexError:
88 # FIXME (@althonos): should be raised from the IndexError
8889 raise IllegalBackReference(path)
8990 return prefix + "/".join(components)
9091
00 """Abstract permissions container.
11 """
22
3 from __future__ import print_function
4 from __future__ import unicode_literals
3 from __future__ import print_function, unicode_literals
54
65 import typing
76 from typing import Iterable
109
1110 from ._typing import Text
1211
13
1412 if typing.TYPE_CHECKING:
1513 from typing import Iterator, List, Optional, Tuple, Type, Union
1614
1715
1816 def make_mode(init):
1917 # type: (Union[int, Iterable[Text], None]) -> int
20 """Make a mode integer from an initial value.
21 """
18 """Make a mode integer from an initial value."""
2219 return Permissions.get_mode(init)
2320
2421
2522 class _PermProperty(object):
26 """Creates simple properties to get/set permissions.
27 """
23 """Creates simple properties to get/set permissions."""
2824
2925 def __init__(self, name):
3026 # type: (Text) -> None
5046 Permissions objects store information regarding the permissions
5147 on a resource. It supports Linux permissions, but is generic enough
5248 to manage permission information from almost any filesystem.
53
54 Arguments:
55 names (list, optional): A list of permissions.
56 mode (int, optional): A mode integer.
57 user (str, optional): A triplet of *user* permissions, e.g.
58 ``"rwx"`` or ``"r--"``
59 group (str, optional): A triplet of *group* permissions, e.g.
60 ``"rwx"`` or ``"r--"``
61 other (str, optional): A triplet of *other* permissions, e.g.
62 ``"rwx"`` or ``"r--"``
63 sticky (bool, optional): A boolean for the *sticky* bit.
64 setuid (bool, optional): A boolean for the *setuid* bit.
65 setguid (bool, optional): A boolean for the *setguid* bit.
6649
6750 Example:
6851 >>> from fs.permissions import Permissions
7255 >>> p.mode
7356 500
7457 >>> oct(p.mode)
75 '0764'
58 '0o764'
7659
7760 """
7861
10487 setguid=None, # type: Optional[bool]
10588 ):
10689 # type: (...) -> None
90 """Create a new `Permissions` instance.
91
92 Arguments:
93 names (list, optional): A list of permissions.
94 mode (int, optional): A mode integer.
95 user (str, optional): A triplet of *user* permissions, e.g.
96 ``"rwx"`` or ``"r--"``
97 group (str, optional): A triplet of *group* permissions, e.g.
98 ``"rwx"`` or ``"r--"``
99 other (str, optional): A triplet of *other* permissions, e.g.
100 ``"rwx"`` or ``"r--"``
101 sticky (bool, optional): A boolean for the *sticky* bit.
102 setuid (bool, optional): A boolean for the *setuid* bit.
103 setguid (bool, optional): A boolean for the *setguid* bit.
104
105 """
107106 if names is not None:
108107 self._perms = set(names)
109108 elif mode is not None:
173172 @classmethod
174173 def parse(cls, ls):
175174 # type: (Text) -> Permissions
176 """Parse permissions in Linux notation.
177 """
175 """Parse permissions in Linux notation."""
178176 user = ls[:3]
179177 group = ls[3:6]
180178 other = ls[6:9]
183181 @classmethod
184182 def load(cls, permissions):
185183 # type: (List[Text]) -> Permissions
186 """Load a serialized permissions object.
187 """
184 """Load a serialized permissions object."""
188185 return cls(names=permissions)
189186
190187 @classmethod
221218 @classmethod
222219 def get_mode(cls, init):
223220 # type: (Union[int, Iterable[Text], None]) -> int
224 """Convert an initial value to a mode integer.
225 """
221 """Convert an initial value to a mode integer."""
226222 return cls.create(init).mode
227223
228224 def copy(self):
229225 # type: () -> Permissions
230 """Make a copy of this permissions object.
231 """
226 """Make a copy of this permissions object."""
232227 return Permissions(names=list(self._perms))
233228
234229 def dump(self):
235230 # type: () -> List[Text]
236 """Get a list suitable for serialization.
237 """
231 """Get a list suitable for serialization."""
238232 return sorted(self._perms)
239233
240234 def as_str(self):
241235 # type: () -> Text
242 """Get a Linux-style string representation of permissions.
243 """
236 """Get a Linux-style string representation of permissions."""
244237 perms = [
245238 c if name in self._perms else "-"
246239 for name, c in zip(self._LINUX_PERMS_NAMES[-9:], "rwxrwxrwx")
258251 @property
259252 def mode(self):
260253 # type: () -> int
261 """`int`: mode integer.
262 """
254 """`int`: mode integer."""
263255 mode = 0
264256 for name, mask in self._LINUX_PERMS:
265257 if name in self._perms:
00 """Manage a directory in a *parent* filesystem.
11 """
22
3 from __future__ import print_function
4 from __future__ import unicode_literals
3 from __future__ import print_function, unicode_literals
54
65 import typing
76
87 import six
98
9 from .path import abspath, join, normpath, relpath
1010 from .wrapfs import WrapFS
11 from .path import abspath, join, normpath, relpath
1211
1312 if typing.TYPE_CHECKING:
13 from typing import Text, Tuple
14
1415 from .base import FS # noqa: F401
15 from typing import Text, Tuple
1616
1717
1818 _F = typing.TypeVar("_F", bound="FS", covariant=True)
2020
2121 @six.python_2_unicode_compatible
2222 class SubFS(WrapFS[_F], typing.Generic[_F]):
23 """A sub-directory on another filesystem.
23 """A sub-directory on a parent filesystem.
2424
2525 A SubFS is a filesystem object that maps to a sub-directory of
2626 another filesystem. This is the object that is returned by
2828
2929 """
3030
31 def __init__(self, parent_fs, path):
31 def __init__(self, parent_fs, path): # noqa: D107
3232 # type: (_F, Text) -> None
3333 super(SubFS, self).__init__(parent_fs)
3434 self._sub_dir = abspath(normpath(path))
5454
5555
5656 class ClosingSubFS(SubFS[_F], typing.Generic[_F]):
57 """A version of `SubFS` which closes its parent when closed.
58 """
57 """A version of `SubFS` which closes its parent when closed."""
5958
6059 def close(self):
6160 # type: () -> None
00 """Manage the filesystem in a Tar archive.
11 """
22
3 from __future__ import print_function
4 from __future__ import unicode_literals
3 from __future__ import print_function, unicode_literals
4
5 import typing
6 from typing import IO, cast
57
68 import os
9 import six
710 import tarfile
8 import typing
911 from collections import OrderedDict
10 from typing import cast, IO
11
12 import six
1312
1413 from . import errors
14 from ._url_tools import url_quote
1515 from .base import FS
1616 from .compress import write_tar
1717 from .enums import ResourceType
1919 from .info import Info
2020 from .iotools import RawWrapper
2121 from .opener import open_fs
22 from .path import basename, frombase, isbase, normpath, parts, relpath
2223 from .permissions import Permissions
23 from ._url_tools import url_quote
24 from .path import relpath, basename, isbase, normpath, parts, frombase
2524 from .wrapfs import WrapFS
2625
2726 if typing.TYPE_CHECKING:
28 from tarfile import TarInfo
2927 from typing import (
3028 Any,
3129 BinaryIO,
3735 Tuple,
3836 Union,
3937 )
38
39 from tarfile import TarInfo
40
4041 from .info import RawInfo
4142 from .subfs import SubFS
4243
5152 def _get_member_info(member, encoding):
5253 # type: (TarInfo, Text) -> Dict[Text, object]
5354 return member.get_info(encoding, None)
54
5555
5656 else:
5757
6565 class TarFS(WrapFS):
6666 """Read and write tar files.
6767
68 There are two ways to open a TarFS for the use cases of reading
68 There are two ways to open a `TarFS` for the use cases of reading
6969 a tar file, and creating a new one.
7070
71 If you open the TarFS with ``write`` set to `False` (the
71 If you open the `TarFS` with ``write`` set to `False` (the
7272 default), then the filesystem will be a read only filesystem which
7373 maps to the files and directories within the tar file. Files are
7474 decompressed on the fly when you open them.
7878 with TarFS('foo.tar.gz') as tar_fs:
7979 readme = tar_fs.readtext('readme.txt')
8080
81 If you open the TarFS with ``write`` set to `True`, then the TarFS
81 If you open the TarFS with ``write`` set to `True`, then the `TarFS`
8282 will be a empty temporary filesystem. Any files / directories you
83 create in the TarFS will be written in to a tar file when the TarFS
83 create in the `TarFS` will be written in to a tar file when the `TarFS`
8484 is closed. The compression is set from the new file name but may be
8585 set manually with the ``compression`` argument.
8686
9999 use default (`False`) to read an existing tar file.
100100 compression (str, optional): Compression to use (one of the formats
101101 supported by `tarfile`: ``xz``, ``gz``, ``bz2``, or `None`).
102 temp_fs (str): An FS URL for the temporary filesystem
103 used to store data prior to tarring.
102 temp_fs (str): An FS URL or an FS instance to use to store
103 data prior to tarring. Defaults to creating a new
104 `~fs.tempfs.TempFS`.
104105
105106 """
106107
117118 write=False, # type: bool
118119 compression=None, # type: Optional[Text]
119120 encoding="utf-8", # type: Text
120 temp_fs="temp://__tartemp__", # type: Text
121 temp_fs="temp://__tartemp__", # type: Union[Text, FS]
121122 ):
122123 # type: (...) -> FS
123124 if isinstance(file, (six.text_type, six.binary_type)):
149150 compression=None, # type: Optional[Text]
150151 encoding="utf-8", # type: Text
151152 temp_fs="temp://__tartemp__", # type: Text
152 ):
153 ): # noqa: D107
153154 # type: (...) -> None
154155 pass
155156
156157
157158 @six.python_2_unicode_compatible
158159 class WriteTarFS(WrapFS):
159 """A writable tar file.
160 """
160 """A writable tar file."""
161161
162162 def __init__(
163163 self,
164164 file, # type: Union[Text, BinaryIO]
165165 compression=None, # type: Optional[Text]
166166 encoding="utf-8", # type: Text
167 temp_fs="temp://__tartemp__", # type: Text
168 ):
167 temp_fs="temp://__tartemp__", # type: Union[Text, FS]
168 ): # noqa: D107
169169 # type: (...) -> None
170170 self._file = file # type: Union[Text, BinaryIO]
171171 self.compression = compression
221221
222222 Note:
223223 This is called automatically when the TarFS is closed.
224
224225 """
225226 if not self.isclosed():
226227 write_tar(
233234
234235 @six.python_2_unicode_compatible
235236 class ReadTarFS(FS):
236 """A readable tar file.
237 """
237 """A readable tar file."""
238238
239239 _meta = {
240240 "case_insensitive": True,
259259 }
260260
261261 @errors.CreateFailed.catch_all
262 def __init__(self, file, encoding="utf-8"):
262 def __init__(self, file, encoding="utf-8"): # noqa: D107
263263 # type: (Union[Text, BinaryIO], Text) -> None
264264 super(ReadTarFS, self).__init__()
265265 self._file = file
88
99 """
1010
11 from __future__ import print_function
12 from __future__ import unicode_literals
11 from __future__ import print_function, unicode_literals
12
13 import typing
1314
1415 import shutil
16 import six
1517 import tempfile
16 import typing
17
18 import six
1918
2019 from . import errors
2120 from .osfs import OSFS
2827 class TempFS(OSFS):
2928 """A temporary filesystem on the OS.
3029
31 Arguments:
32 identifier (str): A string to distinguish the directory within
33 the OS temp location, used as part of the directory name.
34 temp_dir (str, optional): An OS path to your temp directory
35 (leave as `None` to auto-detect)
36 auto_clean (bool): If `True` (the default), the directory
37 contents will be wiped on close.
38 ignore_clean_errors (bool): If `True` (the default), any errors
39 in the clean process will be suppressed. If `False`, they
40 will be raised.
30 Temporary filesystems are created using the `tempfile.mkdtemp`
31 function to obtain a temporary folder in an OS-specific location.
32 You can provide an alternative location with the ``temp_dir``
33 argument of the constructor.
34
35 Examples:
36 Create with the constructor::
37
38 >>> from fs.tempfs import TempFS
39 >>> tmp_fs = TempFS()
40
41 Or via an FS URL::
42
43 >>> import fs
44 >>> tmp_fs = fs.open_fs("temp://")
45
46 Use a specific identifier for the temporary folder to better
47 illustrate its purpose::
48
49 >>> named_tmp_fs = fs.open_fs("temp://local_copy")
50 >>> named_tmp_fs = TempFS(identifier="local_copy")
4151
4252 """
4353
4959 ignore_clean_errors=True, # type: bool
5060 ):
5161 # type: (...) -> None
62 """Create a new `TempFS` instance.
63
64 Arguments:
65 identifier (str): A string to distinguish the directory within
66 the OS temp location, used as part of the directory name.
67 temp_dir (str, optional): An OS path to your temp directory
68 (leave as `None` to auto-detect).
69 auto_clean (bool): If `True` (the default), the directory
70 contents will be wiped on close.
71 ignore_clean_errors (bool): If `True` (the default), any errors
72 in the clean process will be suppressed. If `False`, they
73 will be raised.
74
75 """
5276 self.identifier = identifier
5377 self._auto_clean = auto_clean
5478 self._ignore_clean_errors = ignore_clean_errors
6993
7094 def close(self):
7195 # type: () -> None
96 """Close the filesystem and release any resources.
97
98 It is important to call this method when you have finished
99 working with the filesystem. Some filesystems may not finalize
100 changes until they are closed (archives for example). You may
101 call this method explicitly (it is safe to call close multiple
102 times), or you can use the filesystem as a context manager to
103 automatically close.
104
105 Hint:
106 Depending on the value of ``auto_clean`` passed when creating
107 the `TempFS`, the underlying temporary folder may be removed
108 or not.
109
110 Example:
111 >>> tmp_fs = TempFS(auto_clean=False)
112 >>> syspath = tmp_fs.getsyspath("/")
113 >>> tmp_fs.close()
114 >>> os.path.exists(syspath)
115 True
116
117 """
72118 if self._auto_clean:
73119 self.clean()
74120 super(TempFS, self).close()
75121
76122 def clean(self):
77123 # type: () -> None
78 """Clean (delete) temporary files created by this filesystem.
79 """
124 """Clean (delete) temporary files created by this filesystem."""
80125 if self._cleaned:
81126 return
82127
44
55 """
66
7 from __future__ import absolute_import
8 from __future__ import unicode_literals
9
10 from datetime import datetime
7 from __future__ import absolute_import, unicode_literals
8
119 import io
1210 import itertools
1311 import json
14 import math
1512 import os
13 import six
1614 import time
1715 import unittest
16 import warnings
17 from datetime import datetime
18 from six import text_type
1819
1920 import fs.copy
2021 import fs.move
21 from fs import ResourceType, Seek
22 from fs import errors
23 from fs import walk
24 from fs import glob
22 from fs import ResourceType, Seek, errors, glob, walk
2523 from fs.opener import open_fs
2624 from fs.subfs import ClosingSubFS, SubFS
27
28 import pytz
29 import six
30 from six import text_type
3125
3226 if six.PY2:
3327 import collections as collections_abc
3428 else:
3529 import collections.abc as collections_abc
30
31 try:
32 from datetime import timezone
33 except ImportError:
34 from ._tzcompat import timezone # type: ignore
3635
3736
3837 UNICODE_TEXT = """
244243
245244
246245 class FSTestCases(object):
247 """Basic FS tests.
248 """
246 """Basic FS tests."""
247
248 data1 = b"foo" * 256 * 1024
249 data2 = b"bar" * 2 * 256 * 1024
250 data3 = b"baz" * 3 * 256 * 1024
251 data4 = b"egg" * 7 * 256 * 1024
249252
250253 def make_fs(self):
251 """Return an FS instance.
252
253 """
254 """Return an FS instance."""
254255 raise NotImplementedError("implement me")
255256
256257 def destroy_fs(self, fs):
287288
288289 """
289290 self.assertFalse(self.fs.exists(path))
291
292 def assert_isempty(self, path):
293 """Assert a path is an empty directory.
294
295 Arguments:
296 path (str): A path on the filesystem.
297
298 """
299 self.assertTrue(self.fs.isempty(path))
290300
291301 def assert_isfile(self, path):
292302 """Assert a path is a file.
429439 self.fs.hasurl("a/b/c/foo/bar")
430440
431441 def test_geturl_purpose(self):
432 """Check an unknown purpose raises a NoURL error.
433 """
442 """Check an unknown purpose raises a NoURL error."""
434443 self.fs.create("foo")
435444 with self.assertRaises(errors.NoURL):
436445 self.fs.geturl("foo", purpose="__nosuchpurpose__")
437446
438447 def test_validatepath(self):
439 """Check validatepath returns an absolute path.
440 """
448 """Check validatepath returns an absolute path."""
441449 path = self.fs.validatepath("foo")
442450 self.assertEqual(path, "/foo")
443451
455463 root_info = self.fs.getinfo("/")
456464 self.assertEqual(root_info.name, "")
457465 self.assertTrue(root_info.is_dir)
466 self.assertIn("basic", root_info.namespaces)
458467
459468 # Make a file of known size
460469 self.fs.writebytes("foo", b"bar")
462471
463472 # Check basic namespace
464473 info = self.fs.getinfo("foo").raw
474 self.assertIn("basic", info)
465475 self.assertIsInstance(info["basic"]["name"], text_type)
466476 self.assertEqual(info["basic"]["name"], "foo")
467477 self.assertFalse(info["basic"]["is_dir"])
468478
469479 # Check basic namespace dir
470480 info = self.fs.getinfo("dir").raw
481 self.assertIn("basic", info)
471482 self.assertEqual(info["basic"]["name"], "dir")
472483 self.assertTrue(info["basic"]["is_dir"])
473484
474485 # Get the info
475486 info = self.fs.getinfo("foo", namespaces=["details"]).raw
487 self.assertIn("basic", info)
476488 self.assertIsInstance(info, dict)
477489 self.assertEqual(info["details"]["size"], 3)
478490 self.assertEqual(info["details"]["type"], int(ResourceType.file))
883895 self.assertFalse(f.closed)
884896 self.assertTrue(f.closed)
885897
886 iter_lines = iter(self.fs.open("text"))
887 self.assertEqual(next(iter_lines), "Hello\n")
898 with self.fs.open("text") as f:
899 iter_lines = iter(f)
900 self.assertEqual(next(iter_lines), "Hello\n")
888901
889902 with self.fs.open("unicode", "w") as f:
890903 self.assertEqual(12, f.write("Héllo\nWörld\n"))
10981111 self.fs.removedir("foo/bar")
10991112
11001113 def test_removetree(self):
1114 self.fs.makedirs("spam")
11011115 self.fs.makedirs("foo/bar/baz")
11021116 self.fs.makedirs("foo/egg")
11031117 self.fs.makedirs("foo/a/b/c/d/e")
11131127
11141128 self.fs.removetree("foo")
11151129 self.assert_not_exists("foo")
1130 self.assert_exists("spam")
1131
1132 # Errors on files
1133 self.fs.create("bar")
1134 with self.assertRaises(errors.DirectoryExpected):
1135 self.fs.removetree("bar")
1136
1137 # Errors on non-existing path
1138 with self.assertRaises(errors.ResourceNotFound):
1139 self.fs.removetree("foofoo")
1140
1141 def test_removetree_root(self):
1142 self.fs.makedirs("foo/bar/baz")
1143 self.fs.makedirs("foo/egg")
1144 self.fs.makedirs("foo/a/b/c/d/e")
1145 self.fs.create("foo/egg.txt")
1146 self.fs.create("foo/bar/egg.bin")
1147 self.fs.create("foo/a/b/c/1.txt")
1148 self.fs.create("foo/a/b/c/2.txt")
1149 self.fs.create("foo/a/b/c/3.txt")
1150
1151 self.assert_exists("foo/egg.txt")
1152 self.assert_exists("foo/bar/egg.bin")
1153
1154 # removetree("/") removes the contents,
1155 # but not the root folder itself
1156 self.fs.removetree("/")
1157 self.assert_exists("/")
1158 self.assert_isempty("/")
1159
1160 # we check we can create a file after
1161 # to catch potential issues with the
1162 # root folder being deleted on faulty
1163 # implementations
1164 self.fs.create("egg")
1165 self.fs.makedir("yolk")
1166 self.assert_exists("egg")
1167 self.assert_exists("yolk")
11161168
11171169 def test_setinfo(self):
11181170 self.fs.create("birthday.txt")
1119 now = math.floor(time.time())
1171 now = time.time()
11201172
11211173 change_info = {"details": {"accessed": now + 60, "modified": now + 60 * 60}}
11221174 self.fs.setinfo("birthday.txt", change_info)
1123 new_info = self.fs.getinfo("birthday.txt", namespaces=["details"]).raw
1124 if "accessed" in new_info.get("_write", []):
1125 self.assertEqual(new_info["details"]["accessed"], now + 60)
1126 if "modified" in new_info.get("_write", []):
1127 self.assertEqual(new_info["details"]["modified"], now + 60 * 60)
1175 new_info = self.fs.getinfo("birthday.txt", namespaces=["details"])
1176 can_write_acccess = new_info.is_writeable("details", "accessed")
1177 can_write_modified = new_info.is_writeable("details", "modified")
1178 if can_write_acccess:
1179 self.assertAlmostEqual(
1180 new_info.get("details", "accessed"), now + 60, places=4
1181 )
1182 if can_write_modified:
1183 self.assertAlmostEqual(
1184 new_info.get("details", "modified"), now + 60 * 60, places=4
1185 )
11281186
11291187 with self.assertRaises(errors.ResourceNotFound):
11301188 self.fs.setinfo("nothing", {})
11331191 self.fs.create("birthday.txt")
11341192 self.fs.settimes("birthday.txt", accessed=datetime(2016, 7, 5))
11351193 info = self.fs.getinfo("birthday.txt", namespaces=["details"])
1136 writeable = info.get("details", "_write", [])
1137 if "accessed" in writeable:
1138 self.assertEqual(info.accessed, datetime(2016, 7, 5, tzinfo=pytz.UTC))
1139 if "modified" in writeable:
1140 self.assertEqual(info.modified, datetime(2016, 7, 5, tzinfo=pytz.UTC))
1194 can_write_acccess = info.is_writeable("details", "accessed")
1195 can_write_modified = info.is_writeable("details", "modified")
1196 if can_write_acccess:
1197 self.assertEqual(info.accessed, datetime(2016, 7, 5, tzinfo=timezone.utc))
1198 if can_write_modified:
1199 self.assertEqual(info.modified, datetime(2016, 7, 5, tzinfo=timezone.utc))
11411200
11421201 def test_touch(self):
11431202 self.fs.touch("new.txt")
11451204 self.fs.settimes("new.txt", datetime(2016, 7, 5))
11461205 info = self.fs.getinfo("new.txt", namespaces=["details"])
11471206 if info.is_writeable("details", "accessed"):
1148 self.assertEqual(info.accessed, datetime(2016, 7, 5, tzinfo=pytz.UTC))
1207 self.assertEqual(info.accessed, datetime(2016, 7, 5, tzinfo=timezone.utc))
11491208 now = time.time()
11501209 self.fs.touch("new.txt")
11511210 accessed = self.fs.getinfo("new.txt", namespaces=["details"]).raw[
11951254
11961255 def _test_upload(self, workers):
11971256 """Test fs.copy with varying number of worker threads."""
1198 data1 = b"foo" * 256 * 1024
1199 data2 = b"bar" * 2 * 256 * 1024
1200 data3 = b"baz" * 3 * 256 * 1024
1201 data4 = b"egg" * 7 * 256 * 1024
1202
12031257 with open_fs("temp://") as src_fs:
1204 src_fs.writebytes("foo", data1)
1205 src_fs.writebytes("bar", data2)
1206 src_fs.makedir("dir1").writebytes("baz", data3)
1207 src_fs.makedirs("dir2/dir3").writebytes("egg", data4)
1258 src_fs.writebytes("foo", self.data1)
1259 src_fs.writebytes("bar", self.data2)
1260 src_fs.makedir("dir1").writebytes("baz", self.data3)
1261 src_fs.makedirs("dir2/dir3").writebytes("egg", self.data4)
12081262 dst_fs = self.fs
12091263 fs.copy.copy_fs(src_fs, dst_fs, workers=workers)
1210 self.assertEqual(dst_fs.readbytes("foo"), data1)
1211 self.assertEqual(dst_fs.readbytes("bar"), data2)
1212 self.assertEqual(dst_fs.readbytes("dir1/baz"), data3)
1213 self.assertEqual(dst_fs.readbytes("dir2/dir3/egg"), data4)
1264 self.assertEqual(dst_fs.readbytes("foo"), self.data1)
1265 self.assertEqual(dst_fs.readbytes("bar"), self.data2)
1266 self.assertEqual(dst_fs.readbytes("dir1/baz"), self.data3)
1267 self.assertEqual(dst_fs.readbytes("dir2/dir3/egg"), self.data4)
12141268
12151269 def test_upload_0(self):
12161270 self._test_upload(0)
12261280
12271281 def _test_download(self, workers):
12281282 """Test fs.copy with varying number of worker threads."""
1229 data1 = b"foo" * 256 * 1024
1230 data2 = b"bar" * 2 * 256 * 1024
1231 data3 = b"baz" * 3 * 256 * 1024
1232 data4 = b"egg" * 7 * 256 * 1024
12331283 src_fs = self.fs
12341284 with open_fs("temp://") as dst_fs:
1235 src_fs.writebytes("foo", data1)
1236 src_fs.writebytes("bar", data2)
1237 src_fs.makedir("dir1").writebytes("baz", data3)
1238 src_fs.makedirs("dir2/dir3").writebytes("egg", data4)
1285 src_fs.writebytes("foo", self.data1)
1286 src_fs.writebytes("bar", self.data2)
1287 src_fs.makedir("dir1").writebytes("baz", self.data3)
1288 src_fs.makedirs("dir2/dir3").writebytes("egg", self.data4)
12391289 fs.copy.copy_fs(src_fs, dst_fs, workers=workers)
1240 self.assertEqual(dst_fs.readbytes("foo"), data1)
1241 self.assertEqual(dst_fs.readbytes("bar"), data2)
1242 self.assertEqual(dst_fs.readbytes("dir1/baz"), data3)
1243 self.assertEqual(dst_fs.readbytes("dir2/dir3/egg"), data4)
1290 self.assertEqual(dst_fs.readbytes("foo"), self.data1)
1291 self.assertEqual(dst_fs.readbytes("bar"), self.data2)
1292 self.assertEqual(dst_fs.readbytes("dir1/baz"), self.data3)
1293 self.assertEqual(dst_fs.readbytes("dir2/dir3/egg"), self.data4)
12441294
12451295 def test_download_0(self):
12461296 self._test_download(0)
14931543 with self.fs.open("foo", "rb") as f:
14941544 data = f.read()
14951545 self.assertEqual(data, b"bar")
1546
1547 # upload to non-existing path (/spam/eggs)
1548 with self.assertRaises(errors.ResourceNotFound):
1549 self.fs.upload("/spam/eggs", bytes_file)
14961550
14971551 def test_upload_chunk_size(self):
14981552 test_data = b"bar" * 128
15891643 self.assert_bytes("foo2", b"help")
15901644
15911645 # Test __del__ doesn't throw traceback
1592 f = self.fs.open("foo2", "r")
1593 del f
1646 with warnings.catch_warnings():
1647 warnings.simplefilter("ignore")
1648 f = self.fs.open("foo2", "r")
1649 del f
15941650
15951651 with self.assertRaises(IOError):
15961652 with self.fs.open("foo2", "r") as f:
16801736 self._test_copy_dir("temp://")
16811737 self._test_copy_dir_write("temp://")
16821738
1739 def test_move_dir_same_fs(self):
1740 self.fs.makedirs("foo/bar/baz")
1741 self.fs.makedir("egg")
1742 self.fs.writetext("top.txt", "Hello, World")
1743 self.fs.writetext("/foo/bar/baz/test.txt", "Goodbye, World")
1744
1745 fs.move.move_dir(self.fs, "foo", self.fs, "foo2")
1746
1747 expected = {"/egg", "/foo2", "/foo2/bar", "/foo2/bar/baz"}
1748 self.assertEqual(set(walk.walk_dirs(self.fs)), expected)
1749 self.assert_text("top.txt", "Hello, World")
1750 self.assert_text("/foo2/bar/baz/test.txt", "Goodbye, World")
1751
1752 self.assertEqual(sorted(self.fs.listdir("/")), ["egg", "foo2", "top.txt"])
1753 self.assertEqual(
1754 sorted(x.name for x in self.fs.scandir("/")), ["egg", "foo2", "top.txt"]
1755 )
1756
16831757 def _test_move_dir_write(self, protocol):
16841758 # Test moving to this filesystem from another.
16851759 other_fs = open_fs(protocol)
17021776 def test_move_dir_temp(self):
17031777 self._test_move_dir_write("temp://")
17041778
1705 def test_move_same_fs(self):
1706 self.fs.makedirs("foo/bar/baz")
1707 self.fs.makedir("egg")
1708 self.fs.writetext("top.txt", "Hello, World")
1709 self.fs.writetext("/foo/bar/baz/test.txt", "Goodbye, World")
1710
1711 fs.move.move_dir(self.fs, "foo", self.fs, "foo2")
1712
1713 expected = {"/egg", "/foo2", "/foo2/bar", "/foo2/bar/baz"}
1714 self.assertEqual(set(walk.walk_dirs(self.fs)), expected)
1715 self.assert_text("top.txt", "Hello, World")
1716 self.assert_text("/foo2/bar/baz/test.txt", "Goodbye, World")
1717
17181779 def test_move_file_same_fs(self):
17191780 text = "Hello, World"
17201781 self.fs.makedir("foo").writetext("test.txt", text)
17231784 fs.move.move_file(self.fs, "foo/test.txt", self.fs, "foo/test2.txt")
17241785 self.assert_not_exists("foo/test.txt")
17251786 self.assert_text("foo/test2.txt", text)
1787
1788 self.assertEqual(self.fs.listdir("foo"), ["test2.txt"])
1789 self.assertEqual(next(self.fs.scandir("foo")).name, "test2.txt")
17261790
17271791 def _test_move_file(self, protocol):
17281792 other_fs = open_fs(protocol)
00 """Time related tools.
11 """
22
3 from __future__ import print_function
4 from __future__ import unicode_literals
3 from __future__ import print_function, unicode_literals
4
5 import typing
56
67 from calendar import timegm
78 from datetime import datetime
8 from pytz import UTC, timezone
99
10 try:
11 from datetime import timezone
12 except ImportError:
13 from ._tzcompat import timezone # type: ignore
1014
11 utcfromtimestamp = datetime.utcfromtimestamp
12 utclocalize = UTC.localize
13 GMT = timezone("GMT")
15 if typing.TYPE_CHECKING:
16 from typing import Optional
1417
1518
1619 def datetime_to_epoch(d):
1720 # type: (datetime) -> int
18 """Convert datetime to epoch.
19 """
21 """Convert datetime to epoch."""
2022 return timegm(d.utctimetuple())
2123
2224
25 @typing.overload
26 def epoch_to_datetime(t): # noqa: D103
27 # type: (None) -> None
28 pass
29
30
31 @typing.overload
32 def epoch_to_datetime(t): # noqa: D103
33 # type: (int) -> datetime
34 pass
35
36
2337 def epoch_to_datetime(t):
24 # type: (int) -> datetime
25 """Convert epoch time to a UTC datetime.
26 """
27 return utclocalize(utcfromtimestamp(t)) if t is not None else None
38 # type: (Optional[int]) -> Optional[datetime]
39 """Convert epoch time to a UTC datetime."""
40 if t is None:
41 return None
42 return datetime.fromtimestamp(t, tz=timezone.utc)
00 """Miscellaneous tools for operating on filesystems.
11 """
22
3 from __future__ import print_function
4 from __future__ import unicode_literals
3 from __future__ import print_function, unicode_literals
54
65 import typing
76
87 from . import errors
9 from .errors import DirectoryNotEmpty
10 from .errors import ResourceNotFound
11 from .path import abspath
12 from .path import dirname
13 from .path import normpath
14 from .path import recursepath
8 from .errors import DirectoryNotEmpty, ResourceNotFound
9 from .path import abspath, dirname, normpath, recursepath
1510
1611 if typing.TYPE_CHECKING:
1712 from typing import IO, List, Optional, Text, Union
13
1814 from .base import FS
1915
2016
33 Color is supported on UNIX terminals.
44 """
55
6 from __future__ import print_function
7 from __future__ import unicode_literals
6 from __future__ import print_function, unicode_literals
87
98 import sys
109 import typing
1312
1413 if typing.TYPE_CHECKING:
1514 from typing import List, Optional, Text, TextIO, Tuple
15
1616 from .base import FS
1717 from .info import Info
1818
7878
7979 def write(line):
8080 # type: (Text) -> None
81 """Write a line to the output.
82 """
81 """Write a line to the output."""
8382 print(line, file=file)
8483
8584 # FIXME(@althonos): define functions using `with_color` and
8786
8887 def format_prefix(prefix):
8988 # type: (Text) -> Text
90 """Format the prefix lines.
91 """
89 """Format the prefix lines."""
9290 if not with_color:
9391 return prefix
9492 return "\x1b[32m%s\x1b[0m" % prefix
9593
9694 def format_dirname(dirname):
9795 # type: (Text) -> Text
98 """Format a directory name.
99 """
96 """Format a directory name."""
10097 if not with_color:
10198 return dirname
10299 return "\x1b[1;34m%s\x1b[0m" % dirname
103100
104101 def format_error(msg):
105102 # type: (Text) -> Text
106 """Format an error.
107 """
103 """Format an error."""
108104 if not with_color:
109105 return msg
110106 return "\x1b[31m%s\x1b[0m" % msg
111107
112108 def format_filename(fname):
113109 # type: (Text) -> Text
114 """Format a filename.
115 """
110 """Format a filename."""
116111 if not with_color:
117112 return fname
118113 if fname.startswith("."):
121116
122117 def sort_key_dirs_first(info):
123118 # type: (Info) -> Tuple[bool, Text]
124 """Get the info sort function with directories first.
125 """
119 """Get the info sort function with directories first."""
126120 return (not info.is_dir, info.name.lower())
127121
128122 def sort_key(info):
129123 # type: (Info) -> Text
130 """Get the default info sort function using resource name.
131 """
124 """Get the default info sort function using resource name."""
132125 return info.name.lower()
133126
134127 counts = {"dirs": 0, "files": 0}
135128
136129 def format_directory(path, levels):
137130 # type: (Text, List[bool]) -> None
138 """Recursive directory function.
139 """
131 """Recursive directory function."""
140132 try:
141133 directory = sorted(
142134 fs.filterdir(path, exclude_dirs=exclude, files=filter),
143 key=sort_key_dirs_first if dirs_first else sort_key,
135 key=sort_key_dirs_first if dirs_first else sort_key, # type: ignore
144136 )
145137 except Exception as error:
146138 prefix = (
77 from __future__ import unicode_literals
88
99 import typing
10 from collections import defaultdict
11 from collections import deque
12 from collections import namedtuple
10
11 from collections import defaultdict, deque, namedtuple
1312
1413 from ._repr import make_repr
1514 from .errors import FSError
16 from .path import abspath
17 from .path import combine
18 from .path import normpath
15 from .path import abspath, combine, normpath
1916
2017 if typing.TYPE_CHECKING:
2118 from typing import (
2421 Collection,
2522 Iterator,
2623 List,
24 MutableMapping,
2725 Optional,
28 MutableMapping,
2926 Text,
3027 Tuple,
3128 Type,
3229 )
30
3331 from .base import FS
3432 from .info import Info
3533
4947
5048
5149 class Walker(object):
52 """A walker object recursively lists directories in a filesystem.
53
54 Arguments:
55 ignore_errors (bool): If `True`, any errors reading a
56 directory will be ignored, otherwise exceptions will
57 be raised.
58 on_error (callable, optional): If ``ignore_errors`` is `False`,
59 then this callable will be invoked for a path and the exception
60 object. It should return `True` to ignore the error, or `False`
61 to re-raise it.
62 search (str): If ``'breadth'`` then the directory will be
63 walked *top down*. Set to ``'depth'`` to walk *bottom up*.
64 filter (list, optional): If supplied, this parameter should be
65 a list of filename patterns, e.g. ``['*.py']``. Files will
66 only be returned if the final component matches one of the
67 patterns.
68 exclude (list, optional): If supplied, this parameter should be
69 a list of filename patterns, e.g. ``['~*']``. Files matching
70 any of these patterns will be removed from the walk.
71 filter_dirs (list, optional): A list of patterns that will be used
72 to match directories paths. The walk will only open directories
73 that match at least one of these patterns.
74 exclude_dirs (list, optional): A list of patterns that will be
75 used to filter out directories from the walk. e.g.
76 ``['*.svn', '*.git']``.
77 max_depth (int, optional): Maximum directory depth to walk.
78
79 """
50 """A walker object recursively lists directories in a filesystem."""
8051
8152 def __init__(
8253 self,
9061 max_depth=None, # type: Optional[int]
9162 ):
9263 # type: (...) -> None
64 """Create a new `Walker` instance.
65
66 Arguments:
67 ignore_errors (bool): If `True`, any errors reading a
68 directory will be ignored, otherwise exceptions will
69 be raised.
70 on_error (callable, optional): If ``ignore_errors`` is `False`,
71 then this callable will be invoked for a path and the
72 exception object. It should return `True` to ignore the error,
73 or `False` to re-raise it.
74 search (str): If ``"breadth"`` then the directory will be
75 walked *top down*. Set to ``"depth"`` to walk *bottom up*.
76 filter (list, optional): If supplied, this parameter should be
77 a list of filename patterns, e.g. ``["*.py"]``. Files will
78 only be returned if the final component matches one of the
79 patterns.
80 exclude (list, optional): If supplied, this parameter should be
81 a list of filename patterns, e.g. ``["~*"]``. Files matching
82 any of these patterns will be removed from the walk.
83 filter_dirs (list, optional): A list of patterns that will be used
84 to match directories paths. The walk will only open directories
85 that match at least one of these patterns.
86 exclude_dirs (list, optional): A list of patterns that will be
87 used to filter out directories from the walk. e.g.
88 ``['*.svn', '*.git']``.
89 max_depth (int, optional): Maximum directory depth to walk.
90
91 """
9392 if search not in ("breadth", "depth"):
9493 raise ValueError("search must be 'breadth' or 'depth'")
9594 self.ignore_errors = ignore_errors
113112 @classmethod
114113 def _ignore_errors(cls, path, error):
115114 # type: (Text, Exception) -> bool
116 """Default on_error callback."""
115 """Ignore dir scan errors when called."""
117116 return True
118117
119118 @classmethod
120119 def _raise_errors(cls, path, error):
121120 # type: (Text, Exception) -> bool
122 """Callback to re-raise dir scan errors."""
121 """Re-raise dir scan errors when called."""
123122 return False
124123
125124 @classmethod
126125 def _calculate_depth(cls, path):
127126 # type: (Text) -> int
128 """Calculate the 'depth' of a directory path (number of
129 components).
130 """
127 """Calculate the 'depth' of a directory path (i.e. count components)."""
131128 _path = path.strip("/")
132129 return _path.count("/") + 1 if _path else 0
133130
146143 Returns:
147144 ~fs.walk.BoundWalker: a bound walker.
148145
149 Example:
150 >>> from fs import open_fs
151 >>> from fs.walk import Walker
152 >>> home_fs = open_fs('~/')
153 >>> walker = Walker.bind(home_fs)
154 >>> for path in walker.files(filter=['*.py']):
155 ... print(path)
156
157 Unless you have written a customized walker class, you will be
158 unlikely to need to call this explicitly, as filesystem objects
159 already have a ``walk`` attribute which is a bound walker
160 object.
161
162 Example:
163 >>> from fs import open_fs
164 >>> home_fs = open_fs('~/')
165 >>> for path in home_fs.walk.files(filter=['*.py']):
166 ... print(path)
146 Examples:
147 Use this method to explicitly bind a filesystem instance::
148
149 >>> walker = Walker.bind(my_fs)
150 >>> for path in walker.files(filter=['*.py']):
151 ... print(path)
152 /foo.py
153 /bar.py
154
155 Unless you have written a customized walker class, you will
156 be unlikely to need to call this explicitly, as filesystem
157 objects already have a ``walk`` attribute which is a bound
158 walker object::
159
160 >>> for path in my_fs.walk.files(filter=['*.py']):
161 ... print(path)
162 /foo.py
163 /bar.py
167164
168165 """
169166 return BoundWalker(fs)
197194
198195 def _check_open_dir(self, fs, path, info):
199196 # type: (FS, Text, Info) -> bool
200 """Check if a directory should be considered in the walk.
201 """
197 """Check if a directory should be considered in the walk."""
202198 if self.exclude_dirs is not None and fs.match(self.exclude_dirs, info.name):
203199 return False
204200 if self.filter_dirs is not None and not fs.match(self.filter_dirs, info.name):
262258 bool: `True` if the file should be included.
263259
264260 """
265
266261 if self.exclude is not None and fs.match(self.exclude, info.name):
267262 return False
268263 return fs.match(self.filter, info.name)
318313 `~fs.info.Info` objects for directories and files in ``<path>``.
319314
320315 Example:
321 >>> home_fs = open_fs('~/')
322316 >>> walker = Walker(filter=['*.py'])
323 >>> namespaces = ['details']
324 >>> for path, dirs, files in walker.walk(home_fs, namespaces)
317 >>> for path, dirs, files in walker.walk(my_fs, namespaces=["details"]):
325318 ... print("[{}]".format(path))
326319 ... print("{} directories".format(len(dirs)))
327320 ... total = sum(info.size for info in files)
328 ... print("{} bytes {}".format(total))
321 ... print("{} bytes".format(total))
322 [/]
323 2 directories
324 55 bytes
325 ...
329326
330327 """
331328 _path = abspath(normpath(path))
410407 namespaces=None, # type: Optional[Collection[Text]]
411408 ):
412409 # type: (...) -> Iterator[Tuple[Text, Optional[Info]]]
413 """Walk files using a *breadth first* search.
414 """
410 """Walk files using a *breadth first* search."""
415411 queue = deque([path])
416412 push = queue.appendleft
417413 pop = queue.pop
446442 namespaces=None, # type: Optional[Collection[Text]]
447443 ):
448444 # type: (...) -> Iterator[Tuple[Text, Optional[Info]]]
449 """Walk files using a *depth first* search.
450 """
445 """Walk files using a *depth first* search."""
451446 # No recursion!
452447
453448 _combine = combine
494489 class BoundWalker(typing.Generic[_F]):
495490 """A class that binds a `Walker` instance to a `FS` instance.
496491
497 Arguments:
498 fs (FS): A filesystem instance.
499 walker_class (type): A `~fs.walk.WalkerBase`
500 sub-class. The default uses `~fs.walk.Walker`.
501
502492 You will typically not need to create instances of this class
503493 explicitly. Filesystems have a `~FS.walk` property which returns a
504494 `BoundWalker` object.
505495
506496 Example:
507 >>> import fs
508 >>> home_fs = fs.open_fs('~/')
509 >>> home_fs.walk
510 BoundWalker(OSFS('/Users/will', encoding='utf-8'))
511
512 A `BoundWalker` is callable. Calling it is an alias for
513 `~fs.walk.BoundWalker.walk`.
497 >>> tmp_fs = fs.tempfs.TempFS()
498 >>> tmp_fs.walk
499 BoundWalker(TempFS())
500
501 A `BoundWalker` is callable. Calling it is an alias for the
502 `~fs.walk.BoundWalker.walk` method.
514503
515504 """
516505
517506 def __init__(self, fs, walker_class=Walker):
518507 # type: (_F, Type[Walker]) -> None
508 """Create a new walker bound to the given filesystem.
509
510 Arguments:
511 fs (FS): A filesystem instance.
512 walker_class (type): A `~fs.walk.WalkerBase`
513 sub-class. The default uses `~fs.walk.Walker`.
514
515 """
519516 self.fs = fs
520517 self.walker_class = walker_class
521518
525522
526523 def _make_walker(self, *args, **kwargs):
527524 # type: (*Any, **Any) -> Walker
528 """Create a walker instance.
529 """
525 """Create a walker instance."""
530526 walker = self.walker_class(*args, **kwargs)
531527 return walker
532528
577573 `~fs.info.Info` objects for directories and files in ``<path>``.
578574
579575 Example:
580 >>> home_fs = open_fs('~/')
581576 >>> walker = Walker(filter=['*.py'])
582 >>> for path, dirs, files in walker.walk(home_fs, namespaces=['details']):
577 >>> for path, dirs, files in walker.walk(my_fs, namespaces=['details']):
583578 ... print("[{}]".format(path))
584579 ... print("{} directories".format(len(dirs)))
585580 ... total = sum(info.size for info in files)
586 ... print("{} bytes {}".format(total))
581 ... print("{} bytes".format(total))
582 [/]
583 2 directories
584 55 bytes
585 ...
587586
588587 This method invokes `Walker.walk` with bound `FS` object.
589588
11 """
22 # Adapted from https://hg.python.org/cpython/file/2.7/Lib/fnmatch.py
33
4 from __future__ import unicode_literals, print_function
4 from __future__ import print_function, unicode_literals
5
6 import typing
57
68 import re
7 import typing
89 from functools import partial
910
1011 from .lrucache import LRUCache
1112
1213 if typing.TYPE_CHECKING:
13 from typing import Callable, Iterable, Text, Tuple, Pattern
14 from typing import Callable, Iterable, Pattern, Text, Tuple
1415
1516
1617 _PATTERN_CACHE = LRUCache(1000) # type: LRUCache[Tuple[Text, bool], Pattern]
3132 try:
3233 re_pat = _PATTERN_CACHE[(pattern, True)]
3334 except KeyError:
34 res = "(?ms)" + _translate(pattern) + r'\Z'
35 res = "(?ms)" + _translate(pattern) + r"\Z"
3536 _PATTERN_CACHE[(pattern, True)] = re_pat = re.compile(res)
3637 return re_pat.match(name) is not None
3738
5152 try:
5253 re_pat = _PATTERN_CACHE[(pattern, False)]
5354 except KeyError:
54 res = "(?ms)" + _translate(pattern, case_sensitive=False) + r'\Z'
55 res = "(?ms)" + _translate(pattern, case_sensitive=False) + r"\Z"
5556 _PATTERN_CACHE[(pattern, False)] = re_pat = re.compile(res, re.IGNORECASE)
5657 return re_pat.match(name) is not None
5758
11
22 Here's an example that opens a filesystem then makes it *read only*::
33
4 >>> from fs import open_fs
5 >>> from fs.wrap import read_only
6 >>> projects_fs = open_fs('~/projects')
7 >>> read_only_projects_fs = read_only(projects_fs)
8 >>> read_only_projects_fs.remove('__init__.py')
4 >>> home_fs = fs.open_fs('~')
5 >>> read_only_home_fs = fs.wrap.read_only(home_fs)
6 >>> read_only_home_fs.removedir('Desktop')
97 Traceback (most recent call last):
108 ...
11 fs.errors.ResourceReadOnly: resource '__init__.py' is read only
9 fs.errors.ResourceReadOnly: resource 'Desktop' is read only
1210
1311 """
1412
15 from __future__ import print_function
16 from __future__ import unicode_literals
13 from __future__ import print_function, unicode_literals
1714
1815 import typing
1916
20 from .wrapfs import WrapFS
21 from .path import abspath, normpath, split
22 from .errors import ResourceReadOnly, ResourceNotFound
17 from .errors import ResourceNotFound, ResourceReadOnly
2318 from .info import Info
2419 from .mode import check_writable
20 from .path import abspath, normpath, split
21 from .wrapfs import WrapFS
2522
2623 if typing.TYPE_CHECKING:
27 from datetime import datetime
2824 from typing import (
25 IO,
2926 Any,
3027 BinaryIO,
3128 Collection,
3229 Dict,
3330 Iterator,
34 IO,
31 Mapping,
3532 Optional,
3633 Text,
3734 Tuple,
3835 )
36
37 from datetime import datetime
38
3939 from .base import FS # noqa: F401
4040 from .info import RawInfo
41 from .permissions import Permissions
4142 from .subfs import SubFS
42 from .permissions import Permissions
4343
4444
4545 _W = typing.TypeVar("_W", bound="WrapFS")
9191
9292 """
9393
94 # FIXME (@althonos): The caching data structure can very likely be
95 # improved. With the current implementation, if `scandir` result was
96 # cached for `namespaces=["details", "access"]`, calling `scandir`
97 # again only with `names=["details"]` will miss the cache, even though
98 # we are already storing the totality of the required metadata.
99 #
100 # A possible solution would be to replaced the cached with a
101 # Dict[Text, Dict[Text, Dict[Text, Info]]]
102 # ^ ^ ^ ^-- the actual info object
103 # | | \-- the path of the directory entry
104 # | \-- the namespace of the info
105 # \-- the cached directory entry
106 #
107 # Furthermore, `listdir` and `filterdir` calls should be cached as well,
108 # since they can be written as wrappers of `scandir`.
109
94110 wrap_name = "cached-dir"
95111
96 def __init__(self, wrap_fs):
112 def __init__(self, wrap_fs): # noqa: D107
97113 # type: (_F) -> None
98114 super(WrapCachedDir, self).__init__(wrap_fs)
99115 self._cache = {} # type: Dict[Tuple[Text, frozenset], Dict[Text, Info]]
134150
135151 def isdir(self, path):
136152 # type: (Text) -> bool
137 # FIXME(@althonos): this raises an error on non-existing file !
138 return self.getinfo(path).is_dir
153 try:
154 return self.getinfo(path).is_dir
155 except ResourceNotFound:
156 return False
139157
140158 def isfile(self, path):
141159 # type: (Text) -> bool
142 # FIXME(@althonos): this raises an error on non-existing file !
143 return not self.getinfo(path).is_dir
160 try:
161 return not self.getinfo(path).is_dir
162 except ResourceNotFound:
163 return False
144164
145165
146166 class WrapReadOnly(WrapFS[_F], typing.Generic[_F]):
180200 self.check()
181201 raise ResourceReadOnly(path)
182202
183 def move(self, src_path, dst_path, overwrite=False):
184 # type: (Text, Text, bool) -> None
203 def move(self, src_path, dst_path, overwrite=False, preserve_time=False):
204 # type: (Text, Text, bool, bool) -> None
185205 self.check()
186206 raise ResourceReadOnly(dst_path)
187207
198218 raise ResourceReadOnly(path)
199219
200220 def removedir(self, path):
221 # type: (Text) -> None
222 self.check()
223 raise ResourceReadOnly(path)
224
225 def removetree(self, path):
201226 # type: (Text) -> None
202227 self.check()
203228 raise ResourceReadOnly(path)
224249 self.check()
225250 raise ResourceReadOnly(path)
226251
227 def copy(self, src_path, dst_path, overwrite=False):
228 # type: (Text, Text, bool) -> None
252 def copy(self, src_path, dst_path, overwrite=False, preserve_time=False):
253 # type: (Text, Text, bool, bool) -> None
229254 self.check()
230255 raise ResourceReadOnly(dst_path)
231256
296321 # type: (Text) -> None
297322 self.check()
298323 raise ResourceReadOnly(path)
324
325 def getmeta(self, namespace="standard"):
326 # type: (Text) -> Mapping[Text, object]
327 self.check()
328 meta = dict(self.delegate_fs().getmeta(namespace=namespace))
329 meta.update(read_only=True, supports_rename=False)
330 return meta
88
99 from . import errors
1010 from .base import FS
11 from .copy import copy_file, copy_dir
11 from .copy import copy_dir, copy_file
12 from .error_tools import unwrap_errors
1213 from .info import Info
13 from .move import move_file, move_dir
14 from .path import abspath, normpath
15 from .error_tools import unwrap_errors
14 from .path import abspath, join, normpath
1615
1716 if typing.TYPE_CHECKING:
18 from datetime import datetime
19 from threading import RLock
2017 from typing import (
18 IO,
2119 Any,
2220 AnyStr,
2321 BinaryIO,
2422 Callable,
2523 Collection,
24 Iterable,
2625 Iterator,
27 Iterable,
28 IO,
2926 List,
3027 Mapping,
3128 Optional,
3330 Tuple,
3431 Union,
3532 )
33
34 from datetime import datetime
35 from threading import RLock
36
3637 from .enums import ResourceType
3738 from .info import RawInfo
3839 from .permissions import Permissions
5960
6061 wrap_name = None # type: Optional[Text]
6162
62 def __init__(self, wrap_fs):
63 def __init__(self, wrap_fs): # noqa: D107
6364 # type: (_F) -> None
6465 self._wrap_fs = wrap_fs
6566 super(WrapFS, self).__init__()
166167 with unwrap_errors(path):
167168 return _fs.makedir(_path, permissions=permissions, recreate=recreate)
168169
169 def move(self, src_path, dst_path, overwrite=False):
170 # type: (Text, Text, bool) -> None
171 # A custom move permits a potentially optimized code path
170 def move(self, src_path, dst_path, overwrite=False, preserve_time=False):
171 # type: (Text, Text, bool, bool) -> None
172 _fs, _src_path = self.delegate_path(src_path)
173 _, _dst_path = self.delegate_path(dst_path)
174 with unwrap_errors({_src_path: src_path, _dst_path: dst_path}):
175 _fs.move(
176 _src_path, _dst_path, overwrite=overwrite, preserve_time=preserve_time
177 )
178
179 def movedir(self, src_path, dst_path, create=False, preserve_time=False):
180 # type: (Text, Text, bool, bool) -> None
181 _fs, _src_path = self.delegate_path(src_path)
182 _, _dst_path = self.delegate_path(dst_path)
183 with unwrap_errors({_src_path: src_path, _dst_path: dst_path}):
184 _fs.movedir(
185 _src_path, _dst_path, create=create, preserve_time=preserve_time
186 )
187
188 def openbin(self, path, mode="r", buffering=-1, **options):
189 # type: (Text, Text, int, **Any) -> BinaryIO
190 self.check()
191 _fs, _path = self.delegate_path(path)
192 with unwrap_errors(path):
193 bin_file = _fs.openbin(_path, mode=mode, buffering=-1, **options)
194 return bin_file
195
196 def remove(self, path):
197 # type: (Text) -> None
198 self.check()
199 _fs, _path = self.delegate_path(path)
200 with unwrap_errors(path):
201 _fs.remove(_path)
202
203 def removedir(self, path):
204 # type: (Text) -> None
205 self.check()
206 _path = abspath(normpath(path))
207 if _path == "/":
208 raise errors.RemoveRootError()
209 _fs, _path = self.delegate_path(path)
210 with unwrap_errors(path):
211 _fs.removedir(_path)
212
213 def removetree(self, dir_path):
214 # type: (Text) -> None
215 self.check()
216 _path = abspath(normpath(dir_path))
217 _delegate_fs, _delegate_path = self.delegate_path(dir_path)
218 with unwrap_errors(dir_path):
219 if _path == "/":
220 # with root path, we must remove the contents but
221 # not the directory itself, so we can't just directly
222 # delegate
223 for info in _delegate_fs.scandir(_delegate_path):
224 info_path = join(_delegate_path, info.name)
225 if info.is_dir:
226 _delegate_fs.removetree(info_path)
227 else:
228 _delegate_fs.remove(info_path)
229 else:
230 _delegate_fs.removetree(_delegate_path)
231
232 def scandir(
233 self,
234 path, # type: Text
235 namespaces=None, # type: Optional[Collection[Text]]
236 page=None, # type: Optional[Tuple[int, int]]
237 ):
238 # type: (...) -> Iterator[Info]
239 self.check()
240 _fs, _path = self.delegate_path(path)
241 with unwrap_errors(path):
242 for info in _fs.scandir(_path, namespaces=namespaces, page=page):
243 yield info
244
245 def setinfo(self, path, info):
246 # type: (Text, RawInfo) -> None
247 self.check()
248 _fs, _path = self.delegate_path(path)
249 return _fs.setinfo(_path, info)
250
251 def settimes(self, path, accessed=None, modified=None):
252 # type: (Text, Optional[datetime], Optional[datetime]) -> None
253 self.check()
254 _fs, _path = self.delegate_path(path)
255 with unwrap_errors(path):
256 _fs.settimes(_path, accessed=accessed, modified=modified)
257
258 def touch(self, path):
259 # type: (Text) -> None
260 self.check()
261 _fs, _path = self.delegate_path(path)
262 with unwrap_errors(path):
263 _fs.touch(_path)
264
265 def copy(self, src_path, dst_path, overwrite=False, preserve_time=False):
266 # type: (Text, Text, bool, bool) -> None
172267 src_fs, _src_path = self.delegate_path(src_path)
173268 dst_fs, _dst_path = self.delegate_path(dst_path)
174269 with unwrap_errors({_src_path: src_path, _dst_path: dst_path}):
175270 if not overwrite and dst_fs.exists(_dst_path):
176271 raise errors.DestinationExists(_dst_path)
177 move_file(src_fs, _src_path, dst_fs, _dst_path)
178
179 def movedir(self, src_path, dst_path, create=False):
180 # type: (Text, Text, bool) -> None
272 copy_file(src_fs, _src_path, dst_fs, _dst_path, preserve_time=preserve_time)
273
274 def copydir(self, src_path, dst_path, create=False, preserve_time=False):
275 # type: (Text, Text, bool, bool) -> None
181276 src_fs, _src_path = self.delegate_path(src_path)
182277 dst_fs, _dst_path = self.delegate_path(dst_path)
183278 with unwrap_errors({_src_path: src_path, _dst_path: dst_path}):
185280 raise errors.ResourceNotFound(dst_path)
186281 if not src_fs.getinfo(_src_path).is_dir:
187282 raise errors.DirectoryExpected(src_path)
188 move_dir(src_fs, _src_path, dst_fs, _dst_path)
189
190 def openbin(self, path, mode="r", buffering=-1, **options):
191 # type: (Text, Text, int, **Any) -> BinaryIO
192 self.check()
193 _fs, _path = self.delegate_path(path)
194 with unwrap_errors(path):
195 bin_file = _fs.openbin(_path, mode=mode, buffering=-1, **options)
196 return bin_file
197
198 def remove(self, path):
199 # type: (Text) -> None
200 self.check()
201 _fs, _path = self.delegate_path(path)
202 with unwrap_errors(path):
203 _fs.remove(_path)
204
205 def removedir(self, path):
206 # type: (Text) -> None
207 self.check()
208 _path = abspath(normpath(path))
209 if _path == "/":
210 raise errors.RemoveRootError()
211 _fs, _path = self.delegate_path(path)
212 with unwrap_errors(path):
213 _fs.removedir(_path)
214
215 def removetree(self, dir_path):
216 # type: (Text) -> None
217 self.check()
218 _path = abspath(normpath(dir_path))
219 if _path == "/":
220 raise errors.RemoveRootError()
221 _fs, _path = self.delegate_path(dir_path)
222 with unwrap_errors(dir_path):
223 _fs.removetree(_path)
224
225 def scandir(
226 self,
227 path, # type: Text
228 namespaces=None, # type: Optional[Collection[Text]]
229 page=None, # type: Optional[Tuple[int, int]]
230 ):
231 # type: (...) -> Iterator[Info]
232 self.check()
233 _fs, _path = self.delegate_path(path)
234 with unwrap_errors(path):
235 for info in _fs.scandir(_path, namespaces=namespaces, page=page):
236 yield info
237
238 def setinfo(self, path, info):
239 # type: (Text, RawInfo) -> None
240 self.check()
241 _fs, _path = self.delegate_path(path)
242 return _fs.setinfo(_path, info)
243
244 def settimes(self, path, accessed=None, modified=None):
245 # type: (Text, Optional[datetime], Optional[datetime]) -> None
246 self.check()
247 _fs, _path = self.delegate_path(path)
248 with unwrap_errors(path):
249 _fs.settimes(_path, accessed=accessed, modified=modified)
250
251 def touch(self, path):
252 # type: (Text) -> None
253 self.check()
254 _fs, _path = self.delegate_path(path)
255 with unwrap_errors(path):
256 _fs.touch(_path)
257
258 def copy(self, src_path, dst_path, overwrite=False):
259 # type: (Text, Text, bool) -> None
260 src_fs, _src_path = self.delegate_path(src_path)
261 dst_fs, _dst_path = self.delegate_path(dst_path)
262 with unwrap_errors({_src_path: src_path, _dst_path: dst_path}):
263 if not overwrite and dst_fs.exists(_dst_path):
264 raise errors.DestinationExists(_dst_path)
265 copy_file(src_fs, _src_path, dst_fs, _dst_path)
266
267 def copydir(self, src_path, dst_path, create=False):
268 # type: (Text, Text, bool) -> None
269 src_fs, _src_path = self.delegate_path(src_path)
270 dst_fs, _dst_path = self.delegate_path(dst_path)
271 with unwrap_errors({_src_path: src_path, _dst_path: dst_path}):
272 if not create and not dst_fs.exists(_dst_path):
273 raise errors.ResourceNotFound(dst_path)
274 if not src_fs.getinfo(_src_path).is_dir:
275 raise errors.DirectoryExpected(src_path)
276 copy_dir(src_fs, _src_path, dst_fs, _dst_path)
283 copy_dir(src_fs, _src_path, dst_fs, _dst_path, preserve_time=preserve_time)
277284
278285 def create(self, path, wipe=False):
279286 # type: (Text, bool) -> bool
00 """Manage the filesystem in a Zip archive.
11 """
22
3 from __future__ import print_function
4 from __future__ import unicode_literals
5
3 from __future__ import print_function, unicode_literals
4
5 import sys
66 import typing
7
8 import six
79 import zipfile
810 from datetime import datetime
911
10 import six
11
1212 from . import errors
13 from ._url_tools import url_quote
1314 from .base import FS
1415 from .compress import write_zip
1516 from .enums import ResourceType, Seek
1617 from .info import Info
1718 from .iotools import RawWrapper
18 from .permissions import Permissions
1919 from .memoryfs import MemoryFS
2020 from .opener import open_fs
2121 from .path import dirname, forcedir, normpath, relpath
22 from .permissions import Permissions
2223 from .time import datetime_to_epoch
2324 from .wrapfs import WrapFS
24 from ._url_tools import url_quote
2525
2626 if typing.TYPE_CHECKING:
2727 from typing import (
3636 Tuple,
3737 Union,
3838 )
39
3940 from .info import RawInfo
4041 from .subfs import SubFS
4142
4344
4445
4546 class _ZipExtFile(RawWrapper):
46 def __init__(self, fs, name):
47 def __init__(self, fs, name): # noqa: D107
4748 # type: (ReadZipFS, Text) -> None
4849 self._zip = _zip = fs._zip
4950 self._end = _zip.getinfo(name).file_size
5051 self._pos = 0
5152 super(_ZipExtFile, self).__init__(_zip.open(name), "r", name)
5253
53 def read(self, size=-1):
54 # type: (int) -> bytes
55 buf = self._f.read(-1 if size is None else size)
56 self._pos += len(buf)
57 return buf
58
59 def read1(self, size=-1):
60 # type: (int) -> bytes
61 buf = self._f.read1(-1 if size is None else size) # type: ignore
62 self._pos += len(buf)
63 return buf
64
65 def seek(self, offset, whence=Seek.set):
66 # type: (int, SupportsInt) -> int
67 """Change stream position.
68
69 Change the stream position to the given byte offset. The
70 offset is interpreted relative to the position indicated by
71 ``whence``.
72
73 Arguments:
74 offset (int): the offset to the new position, in bytes.
75 whence (int): the position reference. Possible values are:
76 * `Seek.set`: start of stream (the default).
77 * `Seek.current`: current position; offset may be negative.
78 * `Seek.end`: end of stream; offset must be negative.
79
80 Returns:
81 int: the new absolute position.
82
83 Raises:
84 ValueError: when ``whence`` is not known, or ``offset``
85 is invalid.
86
87 Note:
88 Zip compression does not support seeking, so the seeking
89 is emulated. Seeking somewhere else than the current position
90 will need to either:
91 * reopen the file and restart decompression
92 * read and discard data to advance in the file
93
94 """
95 _whence = int(whence)
96 if _whence == Seek.current:
97 offset += self._pos
98 if _whence == Seek.current or _whence == Seek.set:
99 if offset < 0:
100 raise ValueError("Negative seek position {}".format(offset))
101 elif _whence == Seek.end:
102 if offset > 0:
103 raise ValueError("Positive seek position {}".format(offset))
104 offset += self._end
105 else:
106 raise ValueError(
107 "Invalid whence ({}, should be {}, {} or {})".format(
108 _whence, Seek.set, Seek.current, Seek.end
54 # NOTE(@althonos): Starting from Python 3.7, files inside a Zip archive are
55 # seekable provided they were opened from a seekable file
56 # handle. Before that, we can emulate a seek using the
57 # read method, although it adds a ton of overhead and is
58 # way less efficient than extracting once to a BytesIO.
59 if sys.version_info < (3, 7):
60
61 def read(self, size=-1):
62 # type: (int) -> bytes
63 buf = self._f.read(-1 if size is None else size)
64 self._pos += len(buf)
65 return buf
66
67 def read1(self, size=-1):
68 # type: (int) -> bytes
69 buf = self._f.read1(-1 if size is None else size) # type: ignore
70 self._pos += len(buf)
71 return buf
72
73 def tell(self):
74 # type: () -> int
75 return self._pos
76
77 def seekable(self):
78 return True
79
80 def seek(self, offset, whence=Seek.set):
81 # type: (int, SupportsInt) -> int
82 """Change stream position.
83
84 Change the stream position to the given byte offset. The
85 offset is interpreted relative to the position indicated by
86 ``whence``.
87
88 Arguments:
89 offset (int): the offset to the new position, in bytes.
90 whence (int): the position reference. Possible values are:
91 * `Seek.set`: start of stream (the default).
92 * `Seek.current`: current position; offset may be negative.
93 * `Seek.end`: end of stream; offset must be negative.
94
95 Returns:
96 int: the new absolute position.
97
98 Raises:
99 ValueError: when ``whence`` is not known, or ``offset``
100 is invalid.
101
102 Note:
103 Zip compression does not support seeking, so the seeking
104 is emulated. Seeking somewhere else than the current position
105 will need to either:
106 * reopen the file and restart decompression
107 * read and discard data to advance in the file
108
109 """
110 _whence = int(whence)
111 if _whence == Seek.current:
112 offset += self._pos
113 if _whence == Seek.current or _whence == Seek.set:
114 if offset < 0:
115 raise ValueError("Negative seek position {}".format(offset))
116 elif _whence == Seek.end:
117 if offset > 0:
118 raise ValueError("Positive seek position {}".format(offset))
119 offset += self._end
120 else:
121 raise ValueError(
122 "Invalid whence ({}, should be {}, {} or {})".format(
123 _whence, Seek.set, Seek.current, Seek.end
124 )
109125 )
110 )
111
112 if offset < self._pos:
113 self._f = self._zip.open(self.name) # type: ignore
114 self._pos = 0
115 self.read(offset - self._pos)
116 return self._pos
117
118 def tell(self):
119 # type: () -> int
120 return self._pos
126
127 if offset < self._pos:
128 self._f = self._zip.open(self.name) # type: ignore
129 self._pos = 0
130 self.read(offset - self._pos)
131 return self._pos
132
133 else:
134
135 def seek(self, offset, whence=Seek.set):
136 # type: (int, SupportsInt) -> int
137 """Change stream position.
138
139 Change the stream position to the given byte offset. The
140 offset is interpreted relative to the position indicated by
141 ``whence``.
142
143 Arguments:
144 offset (int): the offset to the new position, in bytes.
145 whence (int): the position reference. Possible values are:
146 * `Seek.set`: start of stream (the default).
147 * `Seek.current`: current position; offset may be negative.
148 * `Seek.end`: end of stream; offset must be negative.
149
150 Returns:
151 int: the new absolute position.
152
153 Raises:
154 ValueError: when ``whence`` is not known, or ``offset``
155 is invalid.
156
157 """
158 _whence = int(whence)
159 _pos = self.tell()
160 if _whence == Seek.set:
161 if offset < 0:
162 raise ValueError("Negative seek position {}".format(offset))
163 elif _whence == Seek.current:
164 if _pos + offset < 0:
165 raise ValueError("Negative seek position {}".format(offset))
166 elif _whence == Seek.end:
167 if offset > 0:
168 raise ValueError("Positive seek position {}".format(offset))
169 else:
170 raise ValueError(
171 "Invalid whence ({}, should be {}, {} or {})".format(
172 _whence, Seek.set, Seek.current, Seek.end
173 )
174 )
175
176 return self._f.seek(offset, _whence)
121177
122178
123179 class ZipFS(WrapFS):
124180 """Read and write zip files.
125181
126 There are two ways to open a ZipFS for the use cases of reading
182 There are two ways to open a `ZipFS` for the use cases of reading
127183 a zip file, and creating a new one.
128184
129 If you open the ZipFS with ``write`` set to `False` (the default)
130 then the filesystem will be a read only filesystem which maps to
185 If you open the `ZipFS` with ``write`` set to `False` (the default)
186 then the filesystem will be a read-only filesystem which maps to
131187 the files and directories within the zip file. Files are
132188 decompressed on the fly when you open them.
133189
136192 with ZipFS('foo.zip') as zip_fs:
137193 readme = zip_fs.readtext('readme.txt')
138194
139 If you open the ZipFS with ``write`` set to `True`, then the ZipFS
140 will be a empty temporary filesystem. Any files / directories you
141 create in the ZipFS will be written in to a zip file when the ZipFS
195 If you open the `ZipFS` with ``write`` set to `True`, then the `ZipFS`
196 will be an empty temporary filesystem. Any files / directories you
197 create in the `ZipFS` will be written in to a zip file when the `ZipFS`
142198 is closed.
143199
144 Here's how you might write a new zip file containing a readme.txt
200 Here's how you might write a new zip file containing a ``readme.txt``
145201 file::
146202
147203 with ZipFS('foo.zip', write=True) as new_zip:
157213 (default) to read an existing zip file.
158214 compression (int): Compression to use (one of the constants
159215 defined in the `zipfile` module in the stdlib).
160 temp_fs (str): An FS URL for the temporary filesystem used to
161 store data prior to zipping.
216 temp_fs (str or FS): An FS URL or an FS instance to use to
217 store data prior to zipping. Defaults to creating a new
218 `~fs.tempfs.TempFS`.
162219
163220 """
164221
169226 write=False, # type: bool
170227 compression=zipfile.ZIP_DEFLATED, # type: int
171228 encoding="utf-8", # type: Text
172 temp_fs="temp://__ziptemp__", # type: Text
229 temp_fs="temp://__ziptemp__", # type: Union[Text, FS]
173230 ):
174231 # type: (...) -> FS
175232 # This magic returns a different class instance based on the
190247 compression=zipfile.ZIP_DEFLATED, # type: int
191248 encoding="utf-8", # type: Text
192249 temp_fs="temp://__ziptemp__", # type: Text
193 ):
250 ): # noqa: D107
194251 # type: (...) -> None
195252 pass
196253
197254
198255 @six.python_2_unicode_compatible
199256 class WriteZipFS(WrapFS):
200 """A writable zip file.
201 """
257 """A writable zip file."""
202258
203259 def __init__(
204260 self,
205261 file, # type: Union[Text, BinaryIO]
206262 compression=zipfile.ZIP_DEFLATED, # type: int
207263 encoding="utf-8", # type: Text
208 temp_fs="temp://__ziptemp__", # type: Text
209 ):
264 temp_fs="temp://__ziptemp__", # type: Union[Text, FS]
265 ): # noqa: D107
210266 # type: (...) -> None
211267 self._file = file
212268 self.compression = compression
275331
276332 @six.python_2_unicode_compatible
277333 class ReadZipFS(FS):
278 """A readable zip file.
279 """
334 """A readable zip file."""
280335
281336 _meta = {
282 "case_insensitive": True,
337 "case_insensitive": False,
283338 "network": False,
284339 "read_only": True,
285340 "supports_rename": False,
289344 }
290345
291346 @errors.CreateFailed.catch_all
292 def __init__(self, file, encoding="utf-8"):
347 def __init__(self, file, encoding="utf-8"): # noqa: D107
293348 # type: (Union[BinaryIO, Text], Text) -> None
294349 super(ReadZipFS, self).__init__()
295350 self._file = file
307362
308363 def _path_to_zip_name(self, path):
309364 # type: (Text) -> str
310 """Convert a path to a zip file name.
311 """
365 """Convert a path to a zip file name."""
312366 path = relpath(normpath(path))
313367 if self._directory.isdir(path):
314368 path = forcedir(path)
319373 @property
320374 def _directory(self):
321375 # type: () -> MemoryFS
322 """`MemoryFS`: a filesystem with the same folder hierarchy as the zip.
323 """
376 """`MemoryFS`: a filesystem with the same folder hierarchy as the zip."""
324377 self.check()
325378 with self._lock:
326379 if self._directory_fs is None:
0 [bdist_wheel]
1 universal = 1
0 # --- Project configuration -------------------------------------------------
21
32 [metadata]
43 version = attr: fs._version.__version__
2120 Operating System :: OS Independent
2221 Programming Language :: Python
2322 Programming Language :: Python :: 2.7
24 Programming Language :: Python :: 3.4
2523 Programming Language :: Python :: 3.5
2624 Programming Language :: Python :: 3.6
2725 Programming Language :: Python :: 3.7
2826 Programming Language :: Python :: 3.8
2927 Programming Language :: Python :: 3.9
28 Programming Language :: Python :: 3.10
3029 Programming Language :: Python :: Implementation :: CPython
3130 Programming Language :: Python :: Implementation :: PyPy
3231 Topic :: System :: Filesystems
32 Typing :: Typed
3333 project_urls =
3434 Bug Reports = https://github.com/PyFilesystem/pyfilesystem2/issues
3535 Documentation = https://pyfilesystem2.readthedocs.io/en/latest/
4242 setuptools >=38.3.0
4343 install_requires =
4444 appdirs~=1.4.3
45 pytz
4645 setuptools
4746 six ~=1.10
4847 enum34 ~=1.1.6 ; python_version < '3.4'
5857
5958 [options.package_data]
6059 fs = py.typed
60
61 [bdist_wheel]
62 universal = 1
63
64 # --- Individual linter configuration ---------------------------------------
6165
6266 [pydocstyle]
6367 inherit = false
8286 [mypy-fs.test]
8387 disallow_untyped_defs = false
8488
85 [coverage:run]
86 branch = true
87 omit = fs/test.py
88 source = fs
89
90 [coverage:report]
91 show_missing = true
92 skip_covered = true
93 exclude_lines =
94 pragma: no cover
95 if False:
96 @typing.overload
97 @overload
98
99 [tool:pytest]
100 markers =
101 slow: marks tests as slow (deselect with '-m "not slow"')
102
10389 [flake8]
10490 extend-ignore = E203,E402,W503
10591 max-line-length = 88
10995 tests/*:E501
11096 fs/opener/*:F811
11197 fs/_fscompat.py:F401
98 fs/_pathcompat.py:C401
11299
113100 [isort]
114 default_section = THIRD_PARTY
101 default_section = THIRDPARTY
115102 known_first_party = fs
116 known_standard_library = typing
103 known_standard_library = sys, typing
117104 line_length = 88
105 profile = black
106 skip_gitignore = true
107
108 # --- Test and coverage configuration ------------------------------------------
109
110 [coverage:run]
111 branch = true
112 omit = fs/test.py
113 source = fs
114 relative_files = true
115 parallel = true
116
117 [coverage:report]
118 show_missing = true
119 skip_covered = true
120 exclude_lines =
121 pragma: no cover
122 if False:
123 it typing.TYPE_CHECKING:
124 @typing.overload
125 @overload
126
127 [tool:pytest]
128 markers =
129 slow: marks tests as slow (deselect with '-m "not slow"')
130
131 # --- Tox automation configuration ---------------------------------------------
132
133 [tox:tox]
134 envlist = py{27,34}{,-scandir}, py{35,36,37,38,39,310}, pypy{27,36,37}, typecheck, codestyle, docstyle, codeformat
135 sitepackages = false
136 skip_missing_interpreters = true
137 requires =
138 setuptools >=38.3.0
139
140 [testenv]
141 commands = python -m coverage run --rcfile {toxinidir}/setup.cfg -m pytest {posargs} {toxinidir}/tests
142 deps =
143 -rtests/requirements.txt
144 coverage~=5.0
145 py{35,36,37,38,39,310,py36,py37}: pytest~=6.0
146 py{27,34,py27}: pytest~=4.6
147 py{35,36,37,38,39,310,py36,py37}: pytest-randomly~=3.5
148 py{27,34,py27}: pytest-randomly~=1.2
149 scandir: .[scandir]
150 !scandir: .
151
152 [testenv:typecheck]
153 commands = mypy --config-file {toxinidir}/setup.cfg {toxinidir}/fs
154 deps =
155 .
156 mypy==0.800
157
158 [testenv:codestyle]
159 commands = flake8 --config={toxinidir}/setup.cfg {toxinidir}/fs {toxinidir}/tests
160 deps =
161 flake8==3.7.9
162 #flake8-builtins==1.5.3
163 flake8-bugbear==19.8.0
164 flake8-comprehensions==3.1.4
165 flake8-mutable==1.2.0
166 flake8-tuple==0.4.0
167
168 [testenv:codeformat]
169 commands = black --check {toxinidir}/fs
170 deps =
171 black==22.3.0
172
173 [testenv:docstyle]
174 commands = pydocstyle --config={toxinidir}/setup.cfg {toxinidir}/fs
175 deps =
176 pydocstyle==5.1.1
177
178 [gh-actions]
179 python =
180 2.7: py27, py27-scandir
181 3.4: py34, py34-scandir
182 3.5: py35
183 3.6: py36
184 3.7: py37
185 3.8: py38
186 3.9: py39
187 3.10: py310
188 pypy-2.7: pypy27
189 pypy-3.6: pypy36
190 pypy-3.7: pypy37
55 exec(f.read())
66
77 from setuptools import setup
8
89 setup(version=__version__)
+0
-11
testrequirements.txt less more
0 pytest==4.6.5
1 pytest-cov==2.7.1
2 pytest-randomly==1.2.3 ; python_version<"3.5"
3 pytest-randomly==3.0.0 ; python_version>="3.5"
4 mock==3.0.5 ; python_version<"3.3"
5 pyftpdlib==1.5.5
6
7 # Not directly required. `pyftpdlib` appears to need these but doesn't list them
8 # as requirements.
9 psutil
10 pysendfile
+0
-34
tests/conftest.py less more
0 import pytest
1
2 try:
3 from unittest import mock
4 except ImportError:
5 import mock
6
7
8 @pytest.fixture
9 @mock.patch("appdirs.user_data_dir", autospec=True, spec_set=True)
10 @mock.patch("appdirs.site_data_dir", autospec=True, spec_set=True)
11 @mock.patch("appdirs.user_config_dir", autospec=True, spec_set=True)
12 @mock.patch("appdirs.site_config_dir", autospec=True, spec_set=True)
13 @mock.patch("appdirs.user_cache_dir", autospec=True, spec_set=True)
14 @mock.patch("appdirs.user_state_dir", autospec=True, spec_set=True)
15 @mock.patch("appdirs.user_log_dir", autospec=True, spec_set=True)
16 def mock_appdir_directories(
17 user_log_dir_mock,
18 user_state_dir_mock,
19 user_cache_dir_mock,
20 site_config_dir_mock,
21 user_config_dir_mock,
22 site_data_dir_mock,
23 user_data_dir_mock,
24 tmpdir
25 ):
26 """Mock out every single AppDir directory so tests can't access real ones."""
27 user_log_dir_mock.return_value = str(tmpdir.join("user_log").mkdir())
28 user_state_dir_mock.return_value = str(tmpdir.join("user_state").mkdir())
29 user_cache_dir_mock.return_value = str(tmpdir.join("user_cache").mkdir())
30 site_config_dir_mock.return_value = str(tmpdir.join("site_config").mkdir())
31 user_config_dir_mock.return_value = str(tmpdir.join("user_config").mkdir())
32 site_data_dir_mock.return_value = str(tmpdir.join("site_data").mkdir())
33 user_data_dir_mock.return_value = str(tmpdir.join("user_data").mkdir())
0 def slow(cls):
1 return cls
0 # the bare requirements for running tests
1
2 # pyftpdlib is needed to spawn a FTP server for the
3 # FTPFS test suite
4 pyftpdlib ~=1.5
5
6 # these are optional dependencies for pyftpdlib that
7 # are not explicitly listed, we need to install these
8 # ourselves
9 psutil ~=5.0
10 pysendfile ~=2.0 ; python_version <= "3.3"
11
12 # mock is only available from Python 3.3 onward, and
13 # mock v4+ doesn't support Python 2.7 anymore
14 mock ~=3.0 ; python_version < "3.3"
15
16 # parametrized to prevent code duplication in tests.
17 parameterized ~=0.8
00 from __future__ import unicode_literals
11
2 import pytest
2 import shutil
33 import six
4 import tempfile
5 import unittest
46
7 try:
8 from unittest import mock
9 except ImportError:
10 import mock
11
12 import fs.test
513 from fs import appfs
614
715
8 @pytest.fixture
9 def fs(mock_appdir_directories):
10 """Create a UserDataFS but strictly using a temporary directory."""
11 return appfs.UserDataFS("fstest", "willmcgugan", "1.0")
16 class _TestAppFS(fs.test.FSTestCases):
17
18 AppFS = None
19
20 @classmethod
21 def setUpClass(cls):
22 super(_TestAppFS, cls).setUpClass()
23 cls.tmpdir = tempfile.mkdtemp()
24
25 @classmethod
26 def tearDownClass(cls):
27 shutil.rmtree(cls.tmpdir)
28
29 def make_fs(self):
30 with mock.patch(
31 "appdirs.{}".format(self.AppFS.app_dir),
32 autospec=True,
33 spec_set=True,
34 return_value=tempfile.mkdtemp(dir=self.tmpdir),
35 ):
36 return self.AppFS("fstest", "willmcgugan", "1.0")
37
38 if six.PY2:
39
40 def test_repr(self):
41 self.assertEqual(
42 repr(self.fs),
43 "{}(u'fstest', author=u'willmcgugan', version=u'1.0')".format(
44 self.AppFS.__name__
45 ),
46 )
47
48 else:
49
50 def test_repr(self):
51 self.assertEqual(
52 repr(self.fs),
53 "{}('fstest', author='willmcgugan', version='1.0')".format(
54 self.AppFS.__name__
55 ),
56 )
57
58 def test_str(self):
59 self.assertEqual(
60 str(self.fs), "<{} 'fstest'>".format(self.AppFS.__name__.lower())
61 )
1262
1363
14 @pytest.mark.skipif(six.PY2, reason="Test requires Python 3 repr")
15 def test_user_data_repr_py3(fs):
16 assert repr(fs) == "UserDataFS('fstest', author='willmcgugan', version='1.0')"
17 assert str(fs) == "<userdatafs 'fstest'>"
64 class TestUserDataFS(_TestAppFS, unittest.TestCase):
65 AppFS = appfs.UserDataFS
1866
1967
20 @pytest.mark.skipif(not six.PY2, reason="Test requires Python 2 repr")
21 def test_user_data_repr_py2(fs):
22 assert repr(fs) == "UserDataFS(u'fstest', author=u'willmcgugan', version=u'1.0')"
23 assert str(fs) == "<userdatafs 'fstest'>"
68 class TestUserConfigFS(_TestAppFS, unittest.TestCase):
69 AppFS = appfs.UserConfigFS
70
71
72 class TestUserCacheFS(_TestAppFS, unittest.TestCase):
73 AppFS = appfs.UserCacheFS
74
75
76 class TestSiteDataFS(_TestAppFS, unittest.TestCase):
77 AppFS = appfs.SiteDataFS
78
79
80 class TestSiteConfigFS(_TestAppFS, unittest.TestCase):
81 AppFS = appfs.SiteConfigFS
82
83
84 class TestUserLogFS(_TestAppFS, unittest.TestCase):
85 AppFS = appfs.UserLogFS
22
33 import os
44 import stat
5
65 from six import text_type
76
7 from fs import errors, walk
8 from fs.enums import ResourceType
89 from fs.opener import open_fs
9 from fs.enums import ResourceType
10 from fs import walk
11 from fs import errors
1210 from fs.test import UNICODE_TEXT
1311
1412
33
44 import unittest
55
6 from fs import errors
67 from fs.base import FS
7 from fs import errors
88
99
1010 class DummyFS(FS):
00 from __future__ import unicode_literals
11
2 import calendar
3 import datetime
24 import errno
3 import datetime
45 import os
6 import shutil
7 import tempfile
58 import unittest
6 import tempfile
7 import shutil
8 import calendar
9 from parameterized import parameterized
910
1011 import fs.copy
1112 from fs import open_fs
1213
1314
14 class TestCopy(unittest.TestCase):
15 def test_copy_fs(self):
16 for workers in (0, 1, 2, 4):
17 src_fs = open_fs("mem://")
18 src_fs.makedirs("foo/bar")
19 src_fs.makedirs("foo/empty")
20 src_fs.touch("test.txt")
21 src_fs.touch("foo/bar/baz.txt")
22
23 dst_fs = open_fs("mem://")
24 fs.copy.copy_fs(src_fs, dst_fs, workers=workers)
25
26 self.assertTrue(dst_fs.isdir("foo/empty"))
27 self.assertTrue(dst_fs.isdir("foo/bar"))
28 self.assertTrue(dst_fs.isfile("test.txt"))
15 def _create_sandbox_dir(prefix="pyfilesystem2_sandbox_", home=None):
16 if home is None:
17 return tempfile.mkdtemp(prefix=prefix)
18 else:
19 sandbox_path = os.path.join(home, prefix)
20 mkdirp(sandbox_path)
21 return sandbox_path
22
23
24 def _touch(root, filepath):
25 # create abs filename
26 abs_filepath = os.path.join(root, filepath)
27 # create dir
28 dirname = os.path.dirname(abs_filepath)
29 mkdirp(dirname)
30 # touch file
31 with open(abs_filepath, "a"):
32 os.utime(
33 abs_filepath, None
34 ) # update the mtime in case the file exists, same as touch
35
36 return abs_filepath
37
38
39 def _write_file(filepath, write_chars=1024):
40 with open(filepath, "w") as f:
41 f.write("1" * write_chars)
42 return filepath
43
44
45 def _delay_file_utime(filepath, delta_sec):
46 utcnow = datetime.datetime.utcnow()
47 unix_timestamp = calendar.timegm(utcnow.timetuple())
48 times = unix_timestamp + delta_sec, unix_timestamp + delta_sec
49 os.utime(filepath, times)
50
51
52 def mkdirp(path):
53 # os.makedirs(path, exist_ok=True) only for python3.?
54 try:
55 os.makedirs(path)
56 except OSError as exc:
57 if exc.errno == errno.EEXIST and os.path.isdir(path):
58 pass
59 else:
60 raise
61
62
63 class TestCopySimple(unittest.TestCase):
64 @parameterized.expand([(0,), (1,), (2,), (4,)])
65 def test_copy_fs(self, workers):
66 namespaces = ("details", "modified")
67
68 src_fs = open_fs("mem://")
69 src_fs.makedirs("foo/bar")
70 src_fs.makedirs("foo/empty")
71 src_fs.touch("test.txt")
72 src_fs.touch("foo/bar/baz.txt")
73 src_file1_info = src_fs.getinfo("test.txt", namespaces)
74 src_file2_info = src_fs.getinfo("foo/bar/baz.txt", namespaces)
75
76 dst_fs = open_fs("mem://")
77 fs.copy.copy_fs(src_fs, dst_fs, workers=workers, preserve_time=True)
78
79 self.assertTrue(dst_fs.isdir("foo/empty"))
80 self.assertTrue(dst_fs.isdir("foo/bar"))
81 self.assertTrue(dst_fs.isfile("test.txt"))
82
83 dst_file1_info = dst_fs.getinfo("test.txt", namespaces)
84 dst_file2_info = dst_fs.getinfo("foo/bar/baz.txt", namespaces)
85 self.assertEqual(dst_file1_info.modified, src_file1_info.modified)
86 self.assertEqual(dst_file2_info.modified, src_file2_info.modified)
2987
3088 def test_copy_value_error(self):
3189 src_fs = open_fs("mem://")
3391 with self.assertRaises(ValueError):
3492 fs.copy.copy_fs(src_fs, dst_fs, workers=-1)
3593
36 def test_copy_dir(self):
94 def test_copy_dir0(self):
95 namespaces = ("details", "modified")
96
3797 src_fs = open_fs("mem://")
3898 src_fs.makedirs("foo/bar")
3999 src_fs.makedirs("foo/empty")
40100 src_fs.touch("test.txt")
41101 src_fs.touch("foo/bar/baz.txt")
42 for workers in (0, 1, 2, 4):
43 with open_fs("mem://") as dst_fs:
44 fs.copy.copy_dir(src_fs, "/foo", dst_fs, "/", workers=workers)
45 self.assertTrue(dst_fs.isdir("bar"))
46 self.assertTrue(dst_fs.isdir("empty"))
47 self.assertTrue(dst_fs.isfile("bar/baz.txt"))
102 src_file2_info = src_fs.getinfo("foo/bar/baz.txt", namespaces)
103
104 with open_fs("mem://") as dst_fs:
105 fs.copy.copy_dir(src_fs, "/foo", dst_fs, "/", workers=0, preserve_time=True)
106 self.assertTrue(dst_fs.isdir("bar"))
107 self.assertTrue(dst_fs.isdir("empty"))
108 self.assertTrue(dst_fs.isfile("bar/baz.txt"))
109
110 dst_file2_info = dst_fs.getinfo("bar/baz.txt", namespaces)
111 self.assertEqual(dst_file2_info.modified, src_file2_info.modified)
112
113 @parameterized.expand([(0,), (1,), (2,), (4,)])
114 def test_copy_dir(self, workers):
115 namespaces = ("details", "modified")
116
117 src_fs = open_fs("mem://")
118 src_fs.makedirs("foo/bar")
119 src_fs.makedirs("foo/empty")
120 src_fs.touch("test.txt")
121 src_fs.touch("foo/bar/baz.txt")
122 src_file2_info = src_fs.getinfo("foo/bar/baz.txt", namespaces)
123
124 with open_fs("mem://") as dst_fs:
125 fs.copy.copy_dir(
126 src_fs, "/foo", dst_fs, "/", workers=workers, preserve_time=True
127 )
128 self.assertTrue(dst_fs.isdir("bar"))
129 self.assertTrue(dst_fs.isdir("empty"))
130 self.assertTrue(dst_fs.isfile("bar/baz.txt"))
131
132 dst_file2_info = dst_fs.getinfo("bar/baz.txt", namespaces)
133 self.assertEqual(dst_file2_info.modified, src_file2_info.modified)
48134
49135 def test_copy_large(self):
50136 data1 = b"foo" * 512 * 1024
77163 fs.copy.copy_dir(src_fs, "/", dst_fs, "/", on_copy=on_copy)
78164 self.assertEqual(on_copy_calls, [(src_fs, "/baz.txt", dst_fs, "/baz.txt")])
79165
80 def mkdirp(self, path):
81 # os.makedirs(path, exist_ok=True) only for python3.?
82 try:
83 os.makedirs(path)
84 except OSError as exc:
85 if exc.errno == errno.EEXIST and os.path.isdir(path):
86 pass
87 else:
88 raise
89
90 def _create_sandbox_dir(self, prefix="pyfilesystem2_sandbox_", home=None):
91 if home is None:
92 return tempfile.mkdtemp(prefix=prefix)
93 else:
94 sandbox_path = os.path.join(home, prefix)
95 self.mkdirp(sandbox_path)
96 return sandbox_path
97
98 def _touch(self, root, filepath):
99 # create abs filename
100 abs_filepath = os.path.join(root, filepath)
101 # create dir
102 dirname = os.path.dirname(abs_filepath)
103 self.mkdirp(dirname)
104 # touch file
105 with open(abs_filepath, "a"):
106 os.utime(
107 abs_filepath, None
108 ) # update the mtime in case the file exists, same as touch
109
110 return abs_filepath
111
112 def _write_file(self, filepath, write_chars=1024):
113 with open(filepath, "w") as f:
114 f.write("1" * write_chars)
115 return filepath
116
117 def _delay_file_utime(self, filepath, delta_sec):
118 utcnow = datetime.datetime.utcnow()
119 unix_timestamp = calendar.timegm(utcnow.timetuple())
120 times = unix_timestamp + delta_sec, unix_timestamp + delta_sec
121 os.utime(filepath, times)
122
123 def test_copy_file_if_newer_same_fs(self):
166
167 class TestCopyIfNewer(unittest.TestCase):
168 copy_if_condition = "newer"
169
170 def test_copy_file_if_same_fs(self):
124171 src_fs = open_fs("mem://")
125172 src_fs.makedir("foo2").touch("exists")
126173 src_fs.makedir("foo1").touch("test1.txt")
128175 "foo2/exists", datetime.datetime.utcnow() + datetime.timedelta(hours=1)
129176 )
130177 self.assertTrue(
131 fs.copy.copy_file_if_newer(
132 src_fs, "foo1/test1.txt", src_fs, "foo2/test1.txt.copy"
178 fs.copy.copy_file_if(
179 src_fs,
180 "foo1/test1.txt",
181 src_fs,
182 "foo2/test1.txt.copy",
183 self.copy_if_condition,
133184 )
134185 )
135186 self.assertFalse(
136 fs.copy.copy_file_if_newer(src_fs, "foo1/test1.txt", src_fs, "foo2/exists")
187 fs.copy.copy_file_if(
188 src_fs, "foo1/test1.txt", src_fs, "foo2/exists", self.copy_if_condition
189 )
137190 )
138191 self.assertTrue(src_fs.exists("foo2/test1.txt.copy"))
139192
140 def test_copy_file_if_newer_dst_older(self):
193 def test_copy_file_if_dst_is_older(self):
141194 try:
142195 # create first dst ==> dst is older the src ==> file should be copied
143 dst_dir = self._create_sandbox_dir()
144 dst_file1 = self._touch(dst_dir, "file1.txt")
145 self._write_file(dst_file1)
146
147 src_dir = self._create_sandbox_dir()
148 src_file1 = self._touch(src_dir, "file1.txt")
149 self._write_file(src_file1)
196 dst_dir = _create_sandbox_dir()
197 dst_file1 = _touch(dst_dir, "file1.txt")
198 _write_file(dst_file1)
199
200 src_dir = _create_sandbox_dir()
201 src_file1 = _touch(src_dir, "file1.txt")
202 _write_file(src_file1)
203
150204 # ensure src file is newer than dst, changing its modification time
151 self._delay_file_utime(src_file1, delta_sec=60)
152
153 src_fs = open_fs("osfs://" + src_dir)
154 dst_fs = open_fs("osfs://" + dst_dir)
155
156 self.assertTrue(dst_fs.exists("/file1.txt"))
157
158 copied = fs.copy.copy_file_if_newer(
159 src_fs, "/file1.txt", dst_fs, "/file1.txt"
205 _delay_file_utime(src_file1, delta_sec=60)
206
207 src_fs = open_fs("osfs://" + src_dir)
208 dst_fs = open_fs("osfs://" + dst_dir)
209
210 self.assertTrue(dst_fs.exists("/file1.txt"))
211
212 copied = fs.copy.copy_file_if(
213 src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
160214 )
161215
162216 self.assertTrue(copied)
165219 shutil.rmtree(src_dir)
166220 shutil.rmtree(dst_dir)
167221
168 def test_copy_file_if_newer_dst_doesnt_exists(self):
169 try:
170 src_dir = self._create_sandbox_dir()
171 src_file1 = self._touch(src_dir, "file1.txt")
172 self._write_file(src_file1)
173
174 dst_dir = self._create_sandbox_dir()
175
176 src_fs = open_fs("osfs://" + src_dir)
177 dst_fs = open_fs("osfs://" + dst_dir)
178
179 copied = fs.copy.copy_file_if_newer(
180 src_fs, "/file1.txt", dst_fs, "/file1.txt"
222 def test_copy_file_if_dst_doesnt_exists(self):
223 try:
224 src_dir = _create_sandbox_dir()
225 src_file1 = _touch(src_dir, "file1.txt")
226 _write_file(src_file1)
227
228 dst_dir = _create_sandbox_dir()
229
230 src_fs = open_fs("osfs://" + src_dir)
231 dst_fs = open_fs("osfs://" + dst_dir)
232
233 copied = fs.copy.copy_file_if(
234 src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
181235 )
182236
183237 self.assertTrue(copied)
186240 shutil.rmtree(src_dir)
187241 shutil.rmtree(dst_dir)
188242
189 def test_copy_file_if_newer_dst_is_newer(self):
190 try:
191 src_dir = self._create_sandbox_dir()
192 src_file1 = self._touch(src_dir, "file1.txt")
193 self._write_file(src_file1)
194
195 dst_dir = self._create_sandbox_dir()
196 dst_file1 = self._touch(dst_dir, "file1.txt")
197 self._write_file(dst_file1)
198
199 src_fs = open_fs("osfs://" + src_dir)
200 dst_fs = open_fs("osfs://" + dst_dir)
201
202 self.assertTrue(dst_fs.exists("/file1.txt"))
203
204 copied = fs.copy.copy_file_if_newer(
205 src_fs, "/file1.txt", dst_fs, "/file1.txt"
206 )
207
208 self.assertEqual(copied, False)
209 finally:
210 shutil.rmtree(src_dir)
211 shutil.rmtree(dst_dir)
212
213 def test_copy_fs_if_newer_dst_older(self):
214 try:
215 # create first dst ==> dst is older the src ==> file should be copied
216 dst_dir = self._create_sandbox_dir()
217 dst_file1 = self._touch(dst_dir, "file1.txt")
218 self._write_file(dst_file1)
219
220 src_dir = self._create_sandbox_dir()
221 src_file1 = self._touch(src_dir, "file1.txt")
222 self._write_file(src_file1)
223 # ensure src file is newer than dst, changing its modification time
224 self._delay_file_utime(src_file1, delta_sec=60)
225
226 src_fs = open_fs("osfs://" + src_dir)
227 dst_fs = open_fs("osfs://" + dst_dir)
228
229 self.assertTrue(dst_fs.exists("/file1.txt"))
230
231 copied = []
232
233 def on_copy(src_fs, src_path, dst_fs, dst_path):
234 copied.append(dst_path)
235
236 fs.copy.copy_fs_if_newer(src_fs, dst_fs, on_copy=on_copy)
237
238 self.assertEqual(copied, ["/file1.txt"])
239 self.assertTrue(dst_fs.exists("/file1.txt"))
243 def test_copy_file_if_dst_is_newer(self):
244 try:
245 src_dir = _create_sandbox_dir()
246 src_file1 = _touch(src_dir, "file1.txt")
247 _write_file(src_file1)
248
249 dst_dir = _create_sandbox_dir()
250 dst_file1 = _touch(dst_dir, "file1.txt")
251 _write_file(dst_file1)
252
253 # ensure dst file is newer than src, changing its modification time
254 _delay_file_utime(dst_file1, delta_sec=60)
255
256 src_fs = open_fs("osfs://" + src_dir)
257 dst_fs = open_fs("osfs://" + dst_dir)
258
259 self.assertTrue(dst_fs.exists("/file1.txt"))
260
261 copied = fs.copy.copy_file_if(
262 src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
263 )
264
265 self.assertFalse(copied)
266 finally:
267 shutil.rmtree(src_dir)
268 shutil.rmtree(dst_dir)
269
270 def test_copy_fs_if(self):
271 try:
272 dst_dir = _create_sandbox_dir()
273 dst_file1 = _touch(dst_dir, "file1.txt")
274 dst_file2 = _touch(dst_dir, "file2.txt")
275 _write_file(dst_file1)
276 _write_file(dst_file2)
277
278 src_dir = _create_sandbox_dir()
279 src_file1 = _touch(src_dir, "file1.txt")
280 src_file2 = _touch(src_dir, "file2.txt")
281 src_file3 = _touch(src_dir, "file3.txt")
282 _write_file(src_file1)
283 _write_file(src_file2)
284 _write_file(src_file3)
285
286 # ensure src_file1 is newer than dst_file1, changing its modification time
287 # ensure dst_file2 is newer than src_file2, changing its modification time
288 _delay_file_utime(src_file1, delta_sec=60)
289 _delay_file_utime(dst_file2, delta_sec=60)
290
291 src_fs = open_fs("osfs://" + src_dir)
292 dst_fs = open_fs("osfs://" + dst_dir)
293
294 self.assertTrue(dst_fs.exists("/file1.txt"))
295 self.assertTrue(dst_fs.exists("/file2.txt"))
296
297 copied = []
298
299 def on_copy(src_fs, src_path, dst_fs, dst_path):
300 copied.append(dst_path)
301
302 fs.copy.copy_fs_if(
303 src_fs, dst_fs, on_copy=on_copy, condition=self.copy_if_condition
304 )
305
306 self.assertTrue("/file1.txt" in copied)
307 self.assertTrue("/file2.txt" not in copied)
308 self.assertTrue("/file3.txt" in copied)
309 self.assertTrue(dst_fs.exists("/file1.txt"))
310 self.assertTrue(dst_fs.exists("/file2.txt"))
311 self.assertTrue(dst_fs.exists("/file3.txt"))
240312
241313 src_fs.close()
242314 dst_fs.close()
245317 shutil.rmtree(src_dir)
246318 shutil.rmtree(dst_dir)
247319
248 def test_copy_fs_if_newer_when_dst_doesnt_exists(self):
249 try:
250 src_dir = self._create_sandbox_dir()
251 src_file1 = self._touch(src_dir, "file1.txt")
252 self._write_file(src_file1)
253
254 src_file2 = self._touch(src_dir, "one_level_down" + os.sep + "file2.txt")
255 self._write_file(src_file2)
256
257 dst_dir = self._create_sandbox_dir()
258
259 src_fs = open_fs("osfs://" + src_dir)
260 dst_fs = open_fs("osfs://" + dst_dir)
261
262 copied = []
263
264 def on_copy(src_fs, src_path, dst_fs, dst_path):
265 copied.append(dst_path)
266
267 fs.copy.copy_fs_if_newer(src_fs, dst_fs, on_copy=on_copy)
268
269 self.assertEqual(copied, ["/file1.txt", "/one_level_down/file2.txt"])
270 self.assertTrue(dst_fs.exists("/file1.txt"))
271 self.assertTrue(dst_fs.exists("/one_level_down/file2.txt"))
320 def test_copy_dir_if(self):
321 try:
322 src_dir = _create_sandbox_dir()
323 src_file1 = _touch(src_dir, "file1.txt")
324 _write_file(src_file1)
325
326 src_file2 = _touch(src_dir, os.path.join("one_level_down", "file2.txt"))
327 _write_file(src_file2)
328
329 dst_dir = _create_sandbox_dir()
330 mkdirp(os.path.join(dst_dir, "target_dir"))
331 dst_file1 = _touch(dst_dir, os.path.join("target_dir", "file1.txt"))
332 _write_file(dst_file1)
333
334 # ensure dst file is newer than src, changing its modification time
335 _delay_file_utime(dst_file1, delta_sec=60)
336
337 src_fs = open_fs("osfs://" + src_dir)
338 dst_fs = open_fs("osfs://" + dst_dir)
339
340 copied = []
341
342 def on_copy(src_fs, src_path, dst_fs, dst_path):
343 copied.append(dst_path)
344
345 fs.copy.copy_dir_if(
346 src_fs,
347 "/",
348 dst_fs,
349 "/target_dir/",
350 on_copy=on_copy,
351 condition=self.copy_if_condition,
352 )
353
354 self.assertEqual(copied, ["/target_dir/one_level_down/file2.txt"])
355 self.assertTrue(dst_fs.exists("/target_dir/one_level_down/file2.txt"))
272356
273357 src_fs.close()
274358 dst_fs.close()
275
276 finally:
277 shutil.rmtree(src_dir)
278 shutil.rmtree(dst_dir)
279
280 def test_copy_fs_if_newer_dont_copy_when_dst_exists(self):
281 try:
282 # src is older than dst => no copy should be necessary
283 src_dir = self._create_sandbox_dir()
284 src_file1 = self._touch(src_dir, "file1.txt")
285 self._write_file(src_file1)
286
287 dst_dir = self._create_sandbox_dir()
288 dst_file1 = self._touch(dst_dir, "file1.txt")
289 self._write_file(dst_file1)
290 # ensure dst file is newer than src, changing its modification time
291 self._delay_file_utime(dst_file1, delta_sec=60)
292
293 src_fs = open_fs("osfs://" + src_dir)
294 dst_fs = open_fs("osfs://" + dst_dir)
295
296 self.assertTrue(dst_fs.exists("/file1.txt"))
297
298 copied = []
299
300 def on_copy(src_fs, src_path, dst_fs, dst_path):
301 copied.append(dst_path)
302
303 fs.copy.copy_fs_if_newer(src_fs, dst_fs, on_copy=on_copy)
304
305 self.assertEqual(copied, [])
306 self.assertTrue(dst_fs.exists("/file1.txt"))
307
308 src_fs.close()
309 dst_fs.close()
310
311 finally:
312 shutil.rmtree(src_dir)
313 shutil.rmtree(dst_dir)
314
315 def test_copy_dir_if_newer_one_dst_doesnt_exist(self):
316 try:
317
318 src_dir = self._create_sandbox_dir()
319 src_file1 = self._touch(src_dir, "file1.txt")
320 self._write_file(src_file1)
321
322 src_file2 = self._touch(src_dir, "one_level_down" + os.sep + "file2.txt")
323 self._write_file(src_file2)
324
325 dst_dir = self._create_sandbox_dir()
326 dst_file1 = self._touch(dst_dir, "file1.txt")
327 self._write_file(dst_file1)
328 # ensure dst file is newer than src, changing its modification time
329 self._delay_file_utime(dst_file1, delta_sec=60)
330
331 src_fs = open_fs("osfs://" + src_dir)
332 dst_fs = open_fs("osfs://" + dst_dir)
333
334 copied = []
335
336 def on_copy(src_fs, src_path, dst_fs, dst_path):
337 copied.append(dst_path)
338
339 fs.copy.copy_dir_if_newer(src_fs, "/", dst_fs, "/", on_copy=on_copy)
340
341 self.assertEqual(copied, ["/one_level_down/file2.txt"])
342 self.assertTrue(dst_fs.exists("/one_level_down/file2.txt"))
343
344 src_fs.close()
345 dst_fs.close()
346 finally:
347 shutil.rmtree(src_dir)
348 shutil.rmtree(dst_dir)
349
350 def test_copy_dir_if_newer_same_fs(self):
351 try:
352 src_dir = self._create_sandbox_dir()
353 src_file1 = self._touch(src_dir, "src" + os.sep + "file1.txt")
354 self._write_file(src_file1)
355
356 self._create_sandbox_dir(home=src_dir)
357
358 src_fs = open_fs("osfs://" + src_dir)
359
360 copied = []
361
362 def on_copy(src_fs, src_path, dst_fs, dst_path):
363 copied.append(dst_path)
364
365 fs.copy.copy_dir_if_newer(src_fs, "/src", src_fs, "/dst", on_copy=on_copy)
359 finally:
360 shutil.rmtree(src_dir)
361 shutil.rmtree(dst_dir)
362
363 def test_copy_dir_if_same_fs(self):
364 try:
365 src_dir = _create_sandbox_dir()
366 src_file1 = _touch(src_dir, "src" + os.sep + "file1.txt")
367 _write_file(src_file1)
368
369 _create_sandbox_dir(home=src_dir)
370
371 src_fs = open_fs("osfs://" + src_dir)
372
373 copied = []
374
375 def on_copy(src_fs, src_path, dst_fs, dst_path):
376 copied.append(dst_path)
377
378 fs.copy.copy_dir_if(
379 src_fs, "/src", src_fs, "/dst", on_copy=on_copy, condition="newer"
380 )
366381
367382 self.assertEqual(copied, ["/dst/file1.txt"])
368383 self.assertTrue(src_fs.exists("/dst/file1.txt"))
372387 finally:
373388 shutil.rmtree(src_dir)
374389
375 def test_copy_dir_if_newer_multiple_files(self):
376 try:
377 src_dir = self._create_sandbox_dir()
390 def test_copy_dir_if_multiple_files(self):
391 try:
392 src_dir = _create_sandbox_dir()
378393 src_fs = open_fs("osfs://" + src_dir)
379394 src_fs.makedirs("foo/bar")
380395 src_fs.makedirs("foo/empty")
381396 src_fs.touch("test.txt")
382397 src_fs.touch("foo/bar/baz.txt")
383398
384 dst_dir = self._create_sandbox_dir()
385 dst_fs = open_fs("osfs://" + dst_dir)
386
387 fs.copy.copy_dir_if_newer(src_fs, "/foo", dst_fs, "/")
399 dst_dir = _create_sandbox_dir()
400 dst_fs = open_fs("osfs://" + dst_dir)
401
402 fs.copy.copy_dir_if(src_fs, "/foo", dst_fs, "/", condition="newer")
388403
389404 self.assertTrue(dst_fs.isdir("bar"))
390405 self.assertTrue(dst_fs.isdir("empty"))
394409 shutil.rmtree(dst_dir)
395410
396411
412 class TestCopyIfOlder(unittest.TestCase):
413 copy_if_condition = "older"
414
415 def test_copy_file_if_same_fs(self):
416 src_fs = open_fs("mem://")
417 src_fs.makedir("foo2").touch("exists")
418 src_fs.makedir("foo1").touch("test1.txt")
419 src_fs.settimes(
420 "foo2/exists", datetime.datetime.utcnow() - datetime.timedelta(hours=1)
421 )
422 self.assertTrue(
423 fs.copy.copy_file_if(
424 src_fs,
425 "foo1/test1.txt",
426 src_fs,
427 "foo2/test1.txt.copy",
428 self.copy_if_condition,
429 )
430 )
431 self.assertFalse(
432 fs.copy.copy_file_if(
433 src_fs, "foo1/test1.txt", src_fs, "foo2/exists", self.copy_if_condition
434 )
435 )
436 self.assertTrue(src_fs.exists("foo2/test1.txt.copy"))
437
438 def test_copy_file_if_dst_is_older(self):
439 try:
440 # create first dst ==> dst is older the src ==> file should be copied
441 dst_dir = _create_sandbox_dir()
442 dst_file1 = _touch(dst_dir, "file1.txt")
443 _write_file(dst_file1)
444
445 src_dir = _create_sandbox_dir()
446 src_file1 = _touch(src_dir, "file1.txt")
447 _write_file(src_file1)
448
449 # ensure src file is newer than dst, changing its modification time
450 _delay_file_utime(src_file1, delta_sec=60)
451
452 src_fs = open_fs("osfs://" + src_dir)
453 dst_fs = open_fs("osfs://" + dst_dir)
454
455 self.assertTrue(dst_fs.exists("/file1.txt"))
456
457 copied = fs.copy.copy_file_if(
458 src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
459 )
460
461 self.assertFalse(copied)
462 self.assertTrue(dst_fs.exists("/file1.txt"))
463 finally:
464 shutil.rmtree(src_dir)
465 shutil.rmtree(dst_dir)
466
467 def test_copy_file_if_dst_doesnt_exists(self):
468 try:
469 src_dir = _create_sandbox_dir()
470 src_file1 = _touch(src_dir, "file1.txt")
471 _write_file(src_file1)
472
473 dst_dir = _create_sandbox_dir()
474
475 src_fs = open_fs("osfs://" + src_dir)
476 dst_fs = open_fs("osfs://" + dst_dir)
477
478 copied = fs.copy.copy_file_if(
479 src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
480 )
481
482 self.assertTrue(copied)
483 self.assertTrue(dst_fs.exists("/file1.txt"))
484 finally:
485 shutil.rmtree(src_dir)
486 shutil.rmtree(dst_dir)
487
488 def test_copy_file_if_dst_is_newer(self):
489 try:
490 src_dir = _create_sandbox_dir()
491 src_file1 = _touch(src_dir, "file1.txt")
492 _write_file(src_file1)
493
494 dst_dir = _create_sandbox_dir()
495 dst_file1 = _touch(dst_dir, "file1.txt")
496 _write_file(dst_file1)
497
498 # ensure dst file is newer than src, changing its modification time
499 _delay_file_utime(dst_file1, delta_sec=60)
500
501 src_fs = open_fs("osfs://" + src_dir)
502 dst_fs = open_fs("osfs://" + dst_dir)
503
504 self.assertTrue(dst_fs.exists("/file1.txt"))
505
506 copied = fs.copy.copy_file_if(
507 src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
508 )
509
510 self.assertTrue(copied)
511 finally:
512 shutil.rmtree(src_dir)
513 shutil.rmtree(dst_dir)
514
515 def test_copy_fs_if(self):
516 try:
517 dst_dir = _create_sandbox_dir()
518 dst_file1 = _touch(dst_dir, "file1.txt")
519 dst_file2 = _touch(dst_dir, "file2.txt")
520 _write_file(dst_file1)
521 _write_file(dst_file2)
522
523 src_dir = _create_sandbox_dir()
524 src_file1 = _touch(src_dir, "file1.txt")
525 src_file2 = _touch(src_dir, "file2.txt")
526 src_file3 = _touch(src_dir, "file3.txt")
527 _write_file(src_file1)
528 _write_file(src_file2)
529 _write_file(src_file3)
530
531 # ensure src_file1 is newer than dst_file1, changing its modification time
532 # ensure dst_file2 is newer than src_file2, changing its modification time
533 _delay_file_utime(src_file1, delta_sec=60)
534 _delay_file_utime(dst_file2, delta_sec=60)
535
536 src_fs = open_fs("osfs://" + src_dir)
537 dst_fs = open_fs("osfs://" + dst_dir)
538
539 self.assertTrue(dst_fs.exists("/file1.txt"))
540 self.assertTrue(dst_fs.exists("/file2.txt"))
541
542 copied = []
543
544 def on_copy(src_fs, src_path, dst_fs, dst_path):
545 copied.append(dst_path)
546
547 fs.copy.copy_fs_if(
548 src_fs, dst_fs, on_copy=on_copy, condition=self.copy_if_condition
549 )
550
551 self.assertTrue("/file1.txt" not in copied)
552 self.assertTrue("/file2.txt" in copied)
553 self.assertTrue("/file3.txt" in copied)
554 self.assertTrue(dst_fs.exists("/file1.txt"))
555 self.assertTrue(dst_fs.exists("/file2.txt"))
556 self.assertTrue(dst_fs.exists("/file3.txt"))
557
558 src_fs.close()
559 dst_fs.close()
560
561 finally:
562 shutil.rmtree(src_dir)
563 shutil.rmtree(dst_dir)
564
565 def test_copy_dir_if(self):
566 try:
567 src_dir = _create_sandbox_dir()
568 src_file1 = _touch(src_dir, "file1.txt")
569 _write_file(src_file1)
570
571 src_file2 = _touch(src_dir, os.path.join("one_level_down", "file2.txt"))
572 _write_file(src_file2)
573
574 dst_dir = _create_sandbox_dir()
575 mkdirp(os.path.join(dst_dir, "target_dir"))
576 dst_file1 = _touch(dst_dir, os.path.join("target_dir", "file1.txt"))
577 _write_file(dst_file1)
578
579 # ensure src file is newer than dst, changing its modification time
580 _delay_file_utime(src_file1, delta_sec=60)
581
582 src_fs = open_fs("osfs://" + src_dir)
583 dst_fs = open_fs("osfs://" + dst_dir)
584
585 copied = []
586
587 def on_copy(src_fs, src_path, dst_fs, dst_path):
588 copied.append(dst_path)
589
590 fs.copy.copy_dir_if(
591 src_fs,
592 "/",
593 dst_fs,
594 "/target_dir/",
595 on_copy=on_copy,
596 condition=self.copy_if_condition,
597 )
598
599 self.assertEqual(copied, ["/target_dir/one_level_down/file2.txt"])
600 self.assertTrue(dst_fs.exists("/target_dir/one_level_down/file2.txt"))
601
602 src_fs.close()
603 dst_fs.close()
604 finally:
605 shutil.rmtree(src_dir)
606 shutil.rmtree(dst_dir)
607
608 def test_copy_dir_if_same_fs(self):
609 try:
610 src_dir = _create_sandbox_dir()
611 src_file1 = _touch(src_dir, "src" + os.sep + "file1.txt")
612 _write_file(src_file1)
613
614 _create_sandbox_dir(home=src_dir)
615
616 src_fs = open_fs("osfs://" + src_dir)
617
618 copied = []
619
620 def on_copy(src_fs, src_path, dst_fs, dst_path):
621 copied.append(dst_path)
622
623 fs.copy.copy_dir_if(
624 src_fs, "/src", src_fs, "/dst", on_copy=on_copy, condition="newer"
625 )
626
627 self.assertEqual(copied, ["/dst/file1.txt"])
628 self.assertTrue(src_fs.exists("/dst/file1.txt"))
629
630 src_fs.close()
631
632 finally:
633 shutil.rmtree(src_dir)
634
635 def test_copy_dir_if_multiple_files(self):
636 try:
637 src_dir = _create_sandbox_dir()
638 src_fs = open_fs("osfs://" + src_dir)
639 src_fs.makedirs("foo/bar")
640 src_fs.makedirs("foo/empty")
641 src_fs.touch("test.txt")
642 src_fs.touch("foo/bar/baz.txt")
643
644 dst_dir = _create_sandbox_dir()
645 dst_fs = open_fs("osfs://" + dst_dir)
646
647 fs.copy.copy_dir_if(src_fs, "/foo", dst_fs, "/", condition="newer")
648
649 self.assertTrue(dst_fs.isdir("bar"))
650 self.assertTrue(dst_fs.isdir("empty"))
651 self.assertTrue(dst_fs.isfile("bar/baz.txt"))
652 finally:
653 shutil.rmtree(src_dir)
654 shutil.rmtree(dst_dir)
655
656
657 class TestCopyIfExists(unittest.TestCase):
658 copy_if_condition = "exists"
659
660 def test_copy_file_if_same_fs(self):
661 src_fs = open_fs("mem://")
662 src_fs.makedir("foo2").touch("exists")
663 src_fs.makedir("foo1").touch("test1.txt")
664 self.assertFalse(
665 fs.copy.copy_file_if(
666 src_fs,
667 "foo1/test1.txt",
668 src_fs,
669 "foo2/test1.txt.copy",
670 self.copy_if_condition,
671 )
672 )
673 self.assertTrue(
674 fs.copy.copy_file_if(
675 src_fs, "foo1/test1.txt", src_fs, "foo2/exists", self.copy_if_condition
676 )
677 )
678 self.assertFalse(src_fs.exists("foo2/test1.txt.copy"))
679
680 def test_copy_file_if_dst_doesnt_exists(self):
681 try:
682 src_dir = _create_sandbox_dir()
683 src_file1 = _touch(src_dir, "file1.txt")
684 _write_file(src_file1)
685
686 dst_dir = _create_sandbox_dir()
687
688 src_fs = open_fs("osfs://" + src_dir)
689 dst_fs = open_fs("osfs://" + dst_dir)
690
691 copied = fs.copy.copy_file_if(
692 src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
693 )
694
695 self.assertFalse(copied)
696 self.assertFalse(dst_fs.exists("/file1.txt"))
697 finally:
698 shutil.rmtree(src_dir)
699 shutil.rmtree(dst_dir)
700
701 def test_copy_file_if_dst_exists(self):
702 try:
703 src_dir = _create_sandbox_dir()
704 src_file1 = _touch(src_dir, "file1.txt")
705 _write_file(src_file1)
706
707 dst_dir = _create_sandbox_dir()
708 dst_file1 = _touch(dst_dir, "file1.txt")
709 _write_file(dst_file1)
710
711 src_fs = open_fs("osfs://" + src_dir)
712 dst_fs = open_fs("osfs://" + dst_dir)
713
714 self.assertTrue(dst_fs.exists("/file1.txt"))
715
716 copied = fs.copy.copy_file_if(
717 src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
718 )
719
720 self.assertTrue(copied)
721 finally:
722 shutil.rmtree(src_dir)
723 shutil.rmtree(dst_dir)
724
725 def test_copy_fs_if(self):
726 try:
727 dst_dir = _create_sandbox_dir()
728 dst_file1 = _touch(dst_dir, "file1.txt")
729 _write_file(dst_file1)
730
731 src_dir = _create_sandbox_dir()
732 src_file1 = _touch(src_dir, "file1.txt")
733 src_file2 = _touch(src_dir, "file2.txt")
734 _write_file(src_file1)
735 _write_file(src_file2)
736
737 src_fs = open_fs("osfs://" + src_dir)
738 dst_fs = open_fs("osfs://" + dst_dir)
739
740 self.assertTrue(dst_fs.exists("/file1.txt"))
741
742 copied = []
743
744 def on_copy(src_fs, src_path, dst_fs, dst_path):
745 copied.append(dst_path)
746
747 fs.copy.copy_fs_if(
748 src_fs, dst_fs, on_copy=on_copy, condition=self.copy_if_condition
749 )
750
751 self.assertEqual(copied, ["/file1.txt"])
752 self.assertTrue(dst_fs.exists("/file1.txt"))
753 self.assertFalse(dst_fs.exists("/file2.txt"))
754
755 src_fs.close()
756 dst_fs.close()
757
758 finally:
759 shutil.rmtree(src_dir)
760 shutil.rmtree(dst_dir)
761
762 def test_copy_dir_if(self):
763 try:
764 src_dir = _create_sandbox_dir()
765 src_file1 = _touch(src_dir, "file1.txt")
766 _write_file(src_file1)
767
768 src_file2 = _touch(src_dir, os.path.join("one_level_down", "file2.txt"))
769 _write_file(src_file2)
770
771 dst_dir = _create_sandbox_dir()
772 mkdirp(os.path.join(dst_dir, "target_dir"))
773 dst_file1 = _touch(dst_dir, os.path.join("target_dir", "file1.txt"))
774 _write_file(dst_file1)
775
776 src_fs = open_fs("osfs://" + src_dir)
777 dst_fs = open_fs("osfs://" + dst_dir)
778
779 copied = []
780
781 def on_copy(src_fs, src_path, dst_fs, dst_path):
782 copied.append(dst_path)
783
784 fs.copy.copy_dir_if(
785 src_fs,
786 "/",
787 dst_fs,
788 "/target_dir/",
789 on_copy=on_copy,
790 condition=self.copy_if_condition,
791 )
792
793 self.assertEqual(copied, ["/target_dir/file1.txt"])
794 self.assertFalse(dst_fs.exists("/target_dir/one_level_down/file2.txt"))
795
796 src_fs.close()
797 dst_fs.close()
798 finally:
799 shutil.rmtree(src_dir)
800 shutil.rmtree(dst_dir)
801
802 def test_copy_dir_if_same_fs(self):
803 try:
804 src_dir = _create_sandbox_dir()
805 src_file1 = _touch(src_dir, "src" + os.sep + "file1.txt")
806 _write_file(src_file1)
807
808 _create_sandbox_dir(home=src_dir)
809
810 src_fs = open_fs("osfs://" + src_dir)
811
812 copied = []
813
814 def on_copy(src_fs, src_path, dst_fs, dst_path):
815 copied.append(dst_path)
816
817 fs.copy.copy_dir_if(
818 src_fs, "/src", src_fs, "/dst", on_copy=on_copy, condition="newer"
819 )
820
821 self.assertEqual(copied, ["/dst/file1.txt"])
822 self.assertTrue(src_fs.exists("/dst/file1.txt"))
823
824 src_fs.close()
825
826 finally:
827 shutil.rmtree(src_dir)
828
829 def test_copy_dir_if_multiple_files(self):
830 try:
831 src_dir = _create_sandbox_dir()
832 src_fs = open_fs("osfs://" + src_dir)
833 src_fs.makedirs("foo/bar")
834 src_fs.makedirs("foo/empty")
835 src_fs.touch("test.txt")
836 src_fs.touch("foo/bar/baz.txt")
837
838 dst_dir = _create_sandbox_dir()
839 dst_fs = open_fs("osfs://" + dst_dir)
840
841 fs.copy.copy_dir_if(src_fs, "/foo", dst_fs, "/", condition="newer")
842
843 self.assertTrue(dst_fs.isdir("bar"))
844 self.assertTrue(dst_fs.isdir("empty"))
845 self.assertTrue(dst_fs.isfile("bar/baz.txt"))
846 finally:
847 shutil.rmtree(src_dir)
848 shutil.rmtree(dst_dir)
849
850
851 class TestCopyIfNotExists(unittest.TestCase):
852 copy_if_condition = "not_exists"
853
854 def test_copy_file_if_same_fs(self):
855 src_fs = open_fs("mem://")
856 src_fs.makedir("foo2").touch("exists")
857 src_fs.makedir("foo1").touch("test1.txt")
858 self.assertTrue(
859 fs.copy.copy_file_if(
860 src_fs,
861 "foo1/test1.txt",
862 src_fs,
863 "foo2/test1.txt.copy",
864 self.copy_if_condition,
865 )
866 )
867 self.assertFalse(
868 fs.copy.copy_file_if(
869 src_fs, "foo1/test1.txt", src_fs, "foo2/exists", self.copy_if_condition
870 )
871 )
872 self.assertTrue(src_fs.exists("foo2/test1.txt.copy"))
873
874 def test_copy_file_if_dst_doesnt_exists(self):
875 try:
876 src_dir = _create_sandbox_dir()
877 src_file1 = _touch(src_dir, "file1.txt")
878 _write_file(src_file1)
879
880 dst_dir = _create_sandbox_dir()
881
882 src_fs = open_fs("osfs://" + src_dir)
883 dst_fs = open_fs("osfs://" + dst_dir)
884
885 copied = fs.copy.copy_file_if(
886 src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
887 )
888
889 self.assertTrue(copied)
890 self.assertTrue(dst_fs.exists("/file1.txt"))
891 finally:
892 shutil.rmtree(src_dir)
893 shutil.rmtree(dst_dir)
894
895 def test_copy_file_if_dst_exists(self):
896 try:
897 src_dir = _create_sandbox_dir()
898 src_file1 = _touch(src_dir, "file1.txt")
899 _write_file(src_file1)
900
901 dst_dir = _create_sandbox_dir()
902 dst_file1 = _touch(dst_dir, "file1.txt")
903 _write_file(dst_file1)
904
905 src_fs = open_fs("osfs://" + src_dir)
906 dst_fs = open_fs("osfs://" + dst_dir)
907
908 self.assertTrue(dst_fs.exists("/file1.txt"))
909
910 copied = fs.copy.copy_file_if(
911 src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
912 )
913
914 self.assertFalse(copied)
915 self.assertTrue(dst_fs.exists("/file1.txt"))
916 finally:
917 shutil.rmtree(src_dir)
918 shutil.rmtree(dst_dir)
919
920 def test_copy_fs_if(self):
921 try:
922 dst_dir = _create_sandbox_dir()
923 dst_file1 = _touch(dst_dir, "file1.txt")
924 _write_file(dst_file1)
925
926 src_dir = _create_sandbox_dir()
927 src_file1 = _touch(src_dir, "file1.txt")
928 src_file2 = _touch(src_dir, "file2.txt")
929 _write_file(src_file1)
930 _write_file(src_file2)
931
932 src_fs = open_fs("osfs://" + src_dir)
933 dst_fs = open_fs("osfs://" + dst_dir)
934
935 self.assertTrue(dst_fs.exists("/file1.txt"))
936
937 copied = []
938
939 def on_copy(src_fs, src_path, dst_fs, dst_path):
940 copied.append(dst_path)
941
942 fs.copy.copy_fs_if(
943 src_fs, dst_fs, on_copy=on_copy, condition=self.copy_if_condition
944 )
945
946 self.assertEqual(copied, ["/file2.txt"])
947 self.assertTrue(dst_fs.exists("/file1.txt"))
948 self.assertTrue(dst_fs.exists("/file2.txt"))
949
950 src_fs.close()
951 dst_fs.close()
952
953 finally:
954 shutil.rmtree(src_dir)
955 shutil.rmtree(dst_dir)
956
957 def test_copy_dir_if(self):
958 try:
959 src_dir = _create_sandbox_dir()
960 src_file1 = _touch(src_dir, "file1.txt")
961 _write_file(src_file1)
962
963 src_file2 = _touch(src_dir, os.path.join("one_level_down", "file2.txt"))
964 _write_file(src_file2)
965
966 dst_dir = _create_sandbox_dir()
967 mkdirp(os.path.join(dst_dir, "target_dir"))
968 dst_file1 = _touch(dst_dir, os.path.join("target_dir", "file1.txt"))
969 _write_file(dst_file1)
970
971 src_fs = open_fs("osfs://" + src_dir)
972 dst_fs = open_fs("osfs://" + dst_dir)
973
974 copied = []
975
976 def on_copy(src_fs, src_path, dst_fs, dst_path):
977 copied.append(dst_path)
978
979 fs.copy.copy_dir_if(
980 src_fs,
981 "/",
982 dst_fs,
983 "/target_dir/",
984 on_copy=on_copy,
985 condition=self.copy_if_condition,
986 )
987
988 self.assertEqual(copied, ["/target_dir/one_level_down/file2.txt"])
989 self.assertTrue(dst_fs.exists("/target_dir/file1.txt"))
990 self.assertTrue(dst_fs.exists("/target_dir/one_level_down/file2.txt"))
991
992 src_fs.close()
993 dst_fs.close()
994 finally:
995 shutil.rmtree(src_dir)
996 shutil.rmtree(dst_dir)
997
998 def test_copy_dir_if_same_fs(self):
999 try:
1000 src_dir = _create_sandbox_dir()
1001 src_file1 = _touch(src_dir, "src" + os.sep + "file1.txt")
1002 _write_file(src_file1)
1003
1004 _create_sandbox_dir(home=src_dir)
1005
1006 src_fs = open_fs("osfs://" + src_dir)
1007
1008 copied = []
1009
1010 def on_copy(src_fs, src_path, dst_fs, dst_path):
1011 copied.append(dst_path)
1012
1013 fs.copy.copy_dir_if(
1014 src_fs, "/src", src_fs, "/dst", on_copy=on_copy, condition="newer"
1015 )
1016
1017 self.assertEqual(copied, ["/dst/file1.txt"])
1018 self.assertTrue(src_fs.exists("/dst/file1.txt"))
1019
1020 src_fs.close()
1021
1022 finally:
1023 shutil.rmtree(src_dir)
1024
1025 def test_copy_dir_if_multiple_files(self):
1026 try:
1027 src_dir = _create_sandbox_dir()
1028 src_fs = open_fs("osfs://" + src_dir)
1029 src_fs.makedirs("foo/bar")
1030 src_fs.makedirs("foo/empty")
1031 src_fs.touch("test.txt")
1032 src_fs.touch("foo/bar/baz.txt")
1033
1034 dst_dir = _create_sandbox_dir()
1035 dst_fs = open_fs("osfs://" + dst_dir)
1036
1037 fs.copy.copy_dir_if(src_fs, "/foo", dst_fs, "/", condition="newer")
1038
1039 self.assertTrue(dst_fs.isdir("bar"))
1040 self.assertTrue(dst_fs.isdir("empty"))
1041 self.assertTrue(dst_fs.isfile("bar/baz.txt"))
1042 finally:
1043 shutil.rmtree(src_dir)
1044 shutil.rmtree(dst_dir)
1045
1046
3971047 if __name__ == "__main__":
3981048 unittest.main()
0 # coding: utf-8
1 """Test doctest contained tests in every file of the module.
2 """
3 import doctest
4 import importlib
5 import os
6 import pkgutil
7 import tempfile
8 import time
9 import types
10 import unittest
11 import warnings
12 from pprint import pprint
13
14 try:
15 from unittest import mock
16 except ImportError:
17 import mock
18
19 import six
20
21 import fs
22 import fs.opener.parse
23 from fs.memoryfs import MemoryFS
24 from fs.subfs import ClosingSubFS
25
26 # --- Mocks ------------------------------------------------------------------
27
28
29 def _home_fs():
30 """Create a mock filesystem that matches the XDG user-dirs spec."""
31 home_fs = MemoryFS()
32 home_fs.makedir("Desktop")
33 home_fs.makedir("Documents")
34 home_fs.makedir("Downloads")
35 home_fs.makedir("Music")
36 home_fs.makedir("Pictures")
37 home_fs.makedir("Public")
38 home_fs.makedir("Templates")
39 home_fs.makedir("Videos")
40 return home_fs
41
42
43 def _open_fs(path):
44 """A mock `open_fs` that avoids side effects when running doctests."""
45 if "://" not in path:
46 path = "osfs://{}".format(path)
47 parse_result = fs.opener.parse(path)
48 if parse_result.protocol == "osfs" and parse_result.resource == "~":
49 home_fs = _home_fs()
50 if parse_result.path is not None:
51 home_fs = home_fs.opendir(parse_result.path, factory=ClosingSubFS)
52 return home_fs
53 elif parse_result.protocol in {"ftp", "ftps", "mem", "temp"}:
54 return MemoryFS()
55 else:
56 raise RuntimeError("not allowed in doctests: {}".format(path))
57
58
59 def _my_fs(module):
60 """Create a mock filesystem to be used in examples."""
61 my_fs = MemoryFS()
62 if module == "fs.base":
63 my_fs.makedir("Desktop")
64 my_fs.makedir("Videos")
65 my_fs.touch("Videos/starwars.mov")
66 my_fs.touch("file.txt")
67 elif module == "fs.info":
68 my_fs.touch("foo.tar.gz")
69 my_fs.settext("foo.py", "print('Hello, world!')")
70 my_fs.makedir("bar")
71 elif module in {"fs.walk", "fs.glob"}:
72 my_fs.makedir("dir1")
73 my_fs.makedir("dir2")
74 my_fs.settext("foo.py", "print('Hello, world!')")
75 my_fs.touch("foo.pyc")
76 my_fs.settext("bar.py", "print('ok')\n\n# this is a comment\n")
77 my_fs.touch("bar.pyc")
78 return my_fs
79
80
81 def _open(filename, mode="r"):
82 """A mock `open` that actually opens a temporary file."""
83 return tempfile.NamedTemporaryFile(mode="r+" if mode == "r" else mode)
84
85
86 # --- Loader protocol --------------------------------------------------------
87
88
89 def _load_tests_from_module(tests, module, globs, setUp=None, tearDown=None):
90 """Load tests from module, iterating through submodules."""
91 for attr in (getattr(module, x) for x in dir(module) if not x.startswith("_")):
92 if isinstance(attr, types.ModuleType):
93 suite = doctest.DocTestSuite(
94 attr,
95 globs,
96 setUp=setUp,
97 tearDown=tearDown,
98 optionflags=+doctest.ELLIPSIS,
99 )
100 tests.addTests(suite)
101 return tests
102
103
104 def _load_tests(loader, tests, ignore):
105 """`load_test` function used by unittest to find the doctests."""
106
107 # NB (@althonos): we only test docstrings on Python 3 because it's
108 # extremely hard to maintain compatibility for both versions without
109 # extensively hacking `doctest` and `unittest`.
110 if six.PY2:
111 return tests
112
113 def setUp(self):
114 warnings.simplefilter("ignore")
115 self._open_fs_mock = mock.patch.object(fs, "open_fs", new=_open_fs)
116 self._open_fs_mock.__enter__()
117 self._ftpfs_mock = mock.patch.object(fs.ftpfs, "FTPFS")
118 self._ftpfs_mock.__enter__()
119
120 def tearDown(self):
121 self._open_fs_mock.__exit__(None, None, None)
122 self._ftpfs_mock.__exit__(None, None, None)
123 warnings.simplefilter(warnings.defaultaction)
124
125 # recursively traverse all library submodules and load tests from them
126 packages = [None, fs]
127 for pkg in iter(packages.pop, None):
128 for (_, subpkgname, subispkg) in pkgutil.walk_packages(pkg.__path__):
129 # import the submodule and add it to the tests
130 module = importlib.import_module(".".join([pkg.__name__, subpkgname]))
131
132 # load some useful modules / classes / mocks to the
133 # globals so that we don't need to explicitly import
134 # them in the doctests
135 globs = dict(**module.__dict__)
136 globs.update(
137 os=os,
138 fs=fs,
139 my_fs=_my_fs(module.__name__),
140 open=_open,
141 # NB (@althonos): This allows using OSFS in some examples,
142 # while not actually opening the real filesystem
143 OSFS=lambda path: MemoryFS(),
144 # NB (@althonos): This is for compatibility in `fs.registry`
145 print_list=lambda path: None,
146 pprint=pprint,
147 time=time,
148 )
149
150 # load the doctests into the unittest test suite
151 tests.addTests(
152 doctest.DocTestSuite(
153 module,
154 globs=globs,
155 setUp=setUp,
156 tearDown=tearDown,
157 optionflags=+doctest.ELLIPSIS,
158 )
159 )
160
161 # if the submodule is a package, we need to process its submodules
162 # as well, so we add it to the package queue
163 if subispkg:
164 packages.append(module)
165
166 return tests
167
168
169 # --- Unit test wrapper ------------------------------------------------------
170 #
171 # NB (@althonos): Since pytest doesn't support the `load_tests` protocol
172 # above, we manually build a `unittest.TestCase` using a dedicated test
173 # method for each doctest. This should be safe to remove when pytest
174 # supports it, or if we move away from pytest to run tests.
175
176
177 class TestDoctest(unittest.TestCase):
178 pass
179
180
181 def make_wrapper(x):
182 def _test_wrapper(self):
183 x.setUp()
184 try:
185 x.runTest()
186 finally:
187 x.tearDown()
188
189 return _test_wrapper
190
191
192 for x in _load_tests(None, unittest.TestSuite(), False):
193 setattr(TestDoctest, "test_{}".format(x.id().replace(".", "_")), make_wrapper(x))
22 import os
33 import platform
44 import shutil
5 import six
56 import tempfile
67 import unittest
7
8 import pytest
9
10 import six
118
129 import fs
1310 from fs.osfs import OSFS
1411
15
1612 if platform.system() != "Windows":
1713
18 @pytest.mark.skipif(
19 platform.system() == "Darwin", reason="Bad unicode not possible on OSX"
20 )
14 @unittest.skipIf(platform.system() == "Darwin", "Bad unicode not possible on OSX")
2115 class TestEncoding(unittest.TestCase):
2216
2317 TEST_FILENAME = b"foo\xb1bar"
00 import os
1 import unittest
12
23 from fs import enums
3
4 import unittest
54
65
76 class TestEnums(unittest.TestCase):
22 import errno
33 import unittest
44
5 import fs.errors
56 from fs.error_tools import convert_os_errors
6 from fs import errors as fserrors
77
88
99 class TestErrorTools(unittest.TestCase):
10 def assert_convert_os_errors(self):
10 def test_convert_enoent(self):
11 exception = OSError(errno.ENOENT, "resource not found")
12 with self.assertRaises(fs.errors.ResourceNotFound) as ctx:
13 with convert_os_errors("stat", "/tmp/test"):
14 raise exception
15 self.assertEqual(ctx.exception.exc, exception)
16 self.assertEqual(ctx.exception.path, "/tmp/test")
1117
12 with self.assertRaises(fserrors.ResourceNotFound):
13 with convert_os_errors("foo", "test"):
14 raise OSError(errno.ENOENT)
18 def test_convert_enametoolong(self):
19 exception = OSError(errno.ENAMETOOLONG, "File name too long: test")
20 with self.assertRaises(fs.errors.PathError) as ctx:
21 with convert_os_errors("stat", "/tmp/test"):
22 raise exception
23 self.assertEqual(ctx.exception.exc, exception)
24 self.assertEqual(ctx.exception.path, "/tmp/test")
11
22 import multiprocessing
33 import unittest
4
54 from six import text_type
65
76 from fs import errors
2928 [errors.NoURL, "some_path", "some_purpose"],
3029 [errors.Unsupported],
3130 [errors.IllegalBackReference, "path"],
32 [errors.MissingInfoNamespace, "path"]
31 [errors.MissingInfoNamespace, "path"],
3332 ]
3433 try:
3534 pool = multiprocessing.Pool(1)
00 from __future__ import unicode_literals
11
2 import unittest
3
24 from fs import filesize
3
4 import unittest
55
66
77 class TestFilesize(unittest.TestCase):
00 from __future__ import unicode_literals
11
2 import six
23 import unittest
34
4 import six
5
6 from fs._fscompat import fsencode, fsdecode, fspath
5 from fs._fscompat import fsdecode, fsencode, fspath
76
87
98 class PathMock(object):
00 from __future__ import unicode_literals
11
2 import textwrap
23 import time
34 import unittest
45
3233 self.assertEqual(ftp_parse._parse_time("notadate", formats=["%b %d %Y"]), None)
3334
3435 def test_parse(self):
35 self.assertEqual(ftp_parse.parse([""]), [])
36 self.assertListEqual(ftp_parse.parse([""]), [])
3637
3738 def test_parse_line(self):
3839 self.assertIs(ftp_parse.parse_line("not a dir"), None)
4041 @mock.patch("time.localtime")
4142 def test_decode_linux(self, mock_localtime):
4243 mock_localtime.return_value = time2017
43 directory = """\
44 lrwxrwxrwx 1 0 0 19 Jan 18 2006 debian -> ./pub/mirror/debian
45 drwxr-xr-x 10 0 0 4096 Aug 03 09:21 debian-archive
46 lrwxrwxrwx 1 0 0 27 Nov 30 2015 debian-backports -> pub/mirror/debian-backports
47 drwxr-xr-x 12 0 0 4096 Sep 29 13:13 pub
48 -rw-r--r-- 1 0 0 26 Mar 04 2010 robots.txt
49 drwxr-xr-x 8 foo bar 4096 Oct 4 09:05 test
50 drwxr-xr-x 2 foo-user foo-group 0 Jan 5 11:59 240485
51 """
44 directory = textwrap.dedent(
45 """
46 lrwxrwxrwx 1 0 0 19 Jan 18 2006 debian -> ./pub/mirror/debian
47 drwxr-xr-x 10 0 0 4096 Aug 03 09:21 debian-archive
48 lrwxrwxrwx 1 0 0 27 Nov 30 2015 debian-backports -> pub/mirror/debian-backports
49 drwxr-xr-x 12 0 0 4096 Sep 29 13:13 pub
50 -rw-r--r-- 1 0 0 26 Mar 04 2010 robots.txt
51 drwxr-xr-x 8 foo bar 4096 Oct 4 09:05 test
52 drwxr-xr-x 8 f b 4096 Oct 4 09:05 test
53 drwxr-xr-x 2 foo-user foo-group 0 Jan 5 11:59 240485
54 drwxr-xr-x 2 foo.user$ foo@group_ 0 Jan 5 11:59 240485
55 """
56 )
5257
5358 expected = [
5459 {
145150 },
146151 {
147152 "access": {
153 "group": "b",
154 "permissions": ["g_r", "g_x", "o_r", "o_x", "u_r", "u_w", "u_x"],
155 "user": "f",
156 },
157 "basic": {"is_dir": True, "name": "test"},
158 "details": {"modified": 1507107900.0, "size": 4096, "type": 1},
159 "ftp": {
160 "ls": "drwxr-xr-x 8 f b 4096 Oct 4 09:05 test"
161 },
162 },
163 {
164 "access": {
148165 "group": "foo-group",
149166 "permissions": ["g_r", "g_x", "o_r", "o_x", "u_r", "u_w", "u_x"],
150167 "user": "foo-user",
155172 "ls": "drwxr-xr-x 2 foo-user foo-group 0 Jan 5 11:59 240485"
156173 },
157174 },
175 {
176 "access": {
177 "group": "foo@group_",
178 "permissions": ["g_r", "g_x", "o_r", "o_x", "u_r", "u_w", "u_x"],
179 "user": "foo.user$",
180 },
181 "basic": {"is_dir": True, "name": "240485"},
182 "details": {"modified": 1483617540.0, "size": 0, "type": 1},
183 "ftp": {
184 "ls": "drwxr-xr-x 2 foo.user$ foo@group_ 0 Jan 5 11:59 240485"
185 },
186 },
158187 ]
159188
160 parsed = ftp_parse.parse(directory.splitlines())
161 self.assertEqual(parsed, expected)
189 parsed = ftp_parse.parse(directory.strip().splitlines())
190 self.assertListEqual(parsed, expected)
162191
163192 @mock.patch("time.localtime")
164193 def test_decode_windowsnt(self, mock_localtime):
165194 mock_localtime.return_value = time2017
166 directory = """\
167 unparsable line
168 11-02-17 02:00AM <DIR> docs
169 11-02-17 02:12PM <DIR> images
170 11-02-17 02:12PM <DIR> AM to PM
171 11-02-17 03:33PM 9276 logo.gif
172 05-11-20 22:11 <DIR> src
173 11-02-17 01:23 1 12
174 11-02-17 4:54 0 icon.bmp
175 11-02-17 4:54AM 0 icon.gif
176 11-02-17 4:54PM 0 icon.png
177 11-02-17 16:54 0 icon.jpg
178 """
195 directory = textwrap.dedent(
196 """
197 unparsable line
198 11-02-17 02:00AM <DIR> docs
199 11-02-17 02:12PM <DIR> images
200 11-02-17 02:12PM <DIR> AM to PM
201 11-02-17 03:33PM 9276 logo.gif
202 05-11-20 22:11 <DIR> src
203 11-02-17 01:23 1 12
204 11-02-17 4:54 0 icon.bmp
205 11-02-17 4:54AM 0 icon.gif
206 11-02-17 4:54PM 0 icon.png
207 11-02-17 16:54 0 icon.jpg
208 """
209 )
179210 expected = [
180211 {
181212 "basic": {"is_dir": True, "name": "docs"},
229260 },
230261 ]
231262
232 parsed = ftp_parse.parse(directory.splitlines())
263 parsed = ftp_parse.parse(directory.strip().splitlines())
233264 self.assertEqual(parsed, expected)
265
266 @mock.patch("time.localtime")
267 def test_decode_linux_suid(self, mock_localtime):
268 # reported in #451
269 mock_localtime.return_value = time2017
270 directory = textwrap.dedent(
271 """
272 drwxr-sr-x 66 ftp ftp 8192 Mar 16 17:54 pub
273 -rw-r--r-- 1 ftp ftp 25 Mar 18 19:34 robots.txt
274 """
275 )
276 expected = [
277 {
278 "access": {
279 "group": "ftp",
280 "permissions": [
281 "g_r",
282 "g_s",
283 "o_r",
284 "o_x",
285 "u_r",
286 "u_w",
287 "u_x",
288 ],
289 "user": "ftp",
290 },
291 "basic": {"is_dir": True, "name": "pub"},
292 "details": {"modified": 1489686840.0, "size": 8192, "type": 1},
293 "ftp": {
294 "ls": "drwxr-sr-x 66 ftp ftp 8192 Mar 16 17:54 pub"
295 },
296 },
297 {
298 "access": {
299 "group": "ftp",
300 "permissions": [
301 "g_r",
302 "o_r",
303 "u_r",
304 "u_w",
305 ],
306 "user": "ftp",
307 },
308 "basic": {"is_dir": False, "name": "robots.txt"},
309 "details": {"modified": 1489865640.0, "size": 25, "type": 2},
310 "ftp": {
311 "ls": "-rw-r--r-- 1 ftp ftp 25 Mar 18 19:34 robots.txt"
312 },
313 },
314 ]
315
316 parsed = ftp_parse.parse(directory.strip().splitlines())
317 self.assertListEqual(parsed, expected)
318
319 @mock.patch("time.localtime")
320 def test_decode_linux_sticky(self, mock_localtime):
321 # reported in #451
322 mock_localtime.return_value = time2017
323 directory = textwrap.dedent(
324 """
325 drwxr-xr-t 66 ftp ftp 8192 Mar 16 17:54 pub
326 """
327 )
328 expected = [
329 {
330 "access": {
331 "group": "ftp",
332 "permissions": [
333 "g_r",
334 "g_x",
335 "o_r",
336 "o_t",
337 "u_r",
338 "u_w",
339 "u_x",
340 ],
341 "user": "ftp",
342 },
343 "basic": {"is_dir": True, "name": "pub"},
344 "details": {"modified": 1489686840.0, "size": 8192, "type": 1},
345 "ftp": {
346 "ls": "drwxr-xr-t 66 ftp ftp 8192 Mar 16 17:54 pub"
347 },
348 },
349 ]
350
351 self.maxDiff = None
352 parsed = ftp_parse.parse(directory.strip().splitlines())
353 self.assertListEqual(parsed, expected)
00 # coding: utf-8
1 from __future__ import absolute_import
2 from __future__ import print_function
3 from __future__ import unicode_literals
4
5 import socket
1 from __future__ import absolute_import, print_function, unicode_literals
2
3 import calendar
4 import datetime
65 import os
76 import platform
87 import shutil
8 import socket
99 import tempfile
1010 import time
1111 import unittest
1212 import uuid
1313
14 import pytest
15 from six import text_type
16
17 from ftplib import error_perm
18 from ftplib import error_temp
19
14 try:
15 from unittest import mock
16 except ImportError:
17 import mock
18
19 from ftplib import error_perm, error_temp
2020 from pyftpdlib.authorizers import DummyAuthorizer
21 from six import BytesIO, text_type
2122
2223 from fs import errors
24 from fs.ftpfs import FTPFS, ftp_errors
2325 from fs.opener import open_fs
24 from fs.ftpfs import FTPFS, ftp_errors
2526 from fs.path import join
2627 from fs.subfs import SubFS
2728 from fs.test import FSTestCases
2829
30 try:
31 from pytest import mark
32 except ImportError:
33 from . import mark
2934
3035 # Prevent socket timeouts from slowing tests too much
3136 socket.setdefaulttimeout(1)
8489 self.assertIsInstance(ftp_fs, FTPFS)
8590 self.assertEqual(ftp_fs.host, "ftp.example.org")
8691
92 ftps_fs = open_fs("ftps://will:wfc@ftp.example.org")
93 self.assertIsInstance(ftps_fs, FTPFS)
94 self.assertTrue(ftps_fs.tls)
95
8796
8897 class TestFTPErrors(unittest.TestCase):
8998 """Test the ftp_errors context manager."""
128137 )
129138
130139
131 @pytest.mark.slow
140 @mark.slow
141 @unittest.skipIf(platform.python_implementation() == "PyPy", "ftp unreliable with PyPy")
132142 class TestFTPFS(FSTestCases, unittest.TestCase):
133
134143 user = "user"
135144 pasw = "1234"
136145
148157 cls.server.shutdown_after = -1
149158 cls.server.handler.authorizer = DummyAuthorizer()
150159 cls.server.handler.authorizer.add_user(
151 cls.user, cls.pasw, cls._temp_path, perm="elradfmw"
160 cls.user, cls.pasw, cls._temp_path, perm="elradfmwT"
152161 )
153162 cls.server.handler.authorizer.add_anonymous(cls._temp_path)
154163 cls.server.start()
209218 ),
210219 )
211220
221 def test_setinfo(self):
222 # TODO: temporary test, since FSTestCases.test_setinfo is broken.
223 self.fs.create("bar")
224 original_modified = self.fs.getinfo("bar", ("details",)).modified
225 new_modified = original_modified - datetime.timedelta(hours=1)
226 new_modified_stamp = calendar.timegm(new_modified.timetuple())
227 self.fs.setinfo("bar", {"details": {"modified": new_modified_stamp}})
228 new_modified_get = self.fs.getinfo("bar", ("details",)).modified
229 if original_modified.microsecond == 0 or new_modified_get.microsecond == 0:
230 original_modified = original_modified.replace(microsecond=0)
231 new_modified_get = new_modified_get.replace(microsecond=0)
232 if original_modified.second == 0 or new_modified_get.second == 0:
233 original_modified = original_modified.replace(second=0)
234 new_modified_get = new_modified_get.replace(second=0)
235 new_modified_get = new_modified_get + datetime.timedelta(hours=1)
236 self.assertEqual(original_modified, new_modified_get)
237
212238 def test_host(self):
213239 self.assertEqual(self.fs.host, self.server.host)
214240
229255 del self.fs.features["UTF8"]
230256 self.assertFalse(self.fs.getmeta().get("unicode_paths"))
231257
258 def test_getinfo_modified(self):
259 self.assertIn("MDTM", self.fs.features)
260 self.fs.create("bar")
261 mtime_detail = self.fs.getinfo("bar", ("basic", "details")).modified
262 mtime_modified = self.fs.getmodified("bar")
263 # Microsecond and seconds might not actually be supported by all
264 # FTP commands, so we strip them before comparing if it looks
265 # like at least one of the two values does not contain them.
266 replacement = {}
267 if mtime_detail.microsecond == 0 or mtime_modified.microsecond == 0:
268 replacement["microsecond"] = 0
269 if mtime_detail.second == 0 or mtime_modified.second == 0:
270 replacement["second"] = 0
271 self.assertEqual(
272 mtime_detail.replace(**replacement), mtime_modified.replace(**replacement)
273 )
274
232275 def test_opener_path(self):
233276 self.fs.makedir("foo")
234277 self.fs.writetext("foo/bar", "baz")
265308 # Open with create and check this does fail
266309 with open_fs(url, create=True) as ftp_fs:
267310 self.assertTrue(ftp_fs.isfile("foo"))
311
312 def test_upload_connection(self):
313 with mock.patch.object(self.fs, "_manage_ftp") as _manage_ftp:
314 self.fs.upload("foo", BytesIO(b"hello"))
315 self.assertEqual(self.fs.gettext("foo"), "hello")
316 _manage_ftp.assert_not_called()
268317
269318
270319 class TestFTPFSNoMLSD(TestFTPFS):
278327 pass
279328
280329
281 @pytest.mark.slow
330 @mark.slow
331 @unittest.skipIf(platform.python_implementation() == "PyPy", "ftp unreliable with PyPy")
282332 class TestAnonFTPFS(FSTestCases, unittest.TestCase):
283
284333 user = "anonymous"
285334 pasw = ""
286335
11
22 import unittest
33
4 from fs import glob
5 from fs import open_fs
4 from fs import glob, open_fs
65
76
87 class TestGlob(unittest.TestCase):
00 import sys
1
12 import unittest
23
34
0
10 from __future__ import unicode_literals
21
3 import datetime
42 import unittest
5
6 import pytz
3 from datetime import datetime
74
85 from fs.enums import ResourceType
96 from fs.info import Info
107 from fs.permissions import Permissions
118 from fs.time import datetime_to_epoch
9
10 try:
11 from datetime import timezone
12 except ImportError:
13 from fs._tzcompat import timezone # type: ignore
1214
1315
1416 class TestInfo(unittest.TestCase):
7173
7274 def test_details(self):
7375 dates = [
74 datetime.datetime(2016, 7, 5, tzinfo=pytz.UTC),
75 datetime.datetime(2016, 7, 6, tzinfo=pytz.UTC),
76 datetime.datetime(2016, 7, 7, tzinfo=pytz.UTC),
77 datetime.datetime(2016, 7, 8, tzinfo=pytz.UTC),
76 datetime(2016, 7, 5, tzinfo=timezone.utc),
77 datetime(2016, 7, 6, tzinfo=timezone.utc),
78 datetime(2016, 7, 7, tzinfo=timezone.utc),
79 datetime(2016, 7, 8, tzinfo=timezone.utc),
7880 ]
7981 epochs = [datetime_to_epoch(d) for d in dates]
8082
00 from __future__ import unicode_literals
11
22 import io
3 import six
34 import unittest
45
5 import six
6
7 from fs import iotools
8 from fs import tempfs
9
6 from fs import iotools, tempfs
107 from fs.test import UNICODE_TEXT
118
129
22 import posixpath
33 import unittest
44
5 import pytest
6
75 from fs import memoryfs
8 from fs.test import FSTestCases
9 from fs.test import UNICODE_TEXT
6 from fs.test import UNICODE_TEXT, FSTestCases
107
118 try:
129 # Only supported on Python 3.4+
2926 posixpath.join(parent_dir, str(file_id)), UNICODE_TEXT
3027 )
3128
32 @pytest.mark.skipif(
33 not tracemalloc, reason="`tracemalloc` isn't supported on this Python version."
29 @unittest.skipUnless(
30 tracemalloc, reason="`tracemalloc` isn't supported on this Python version."
3431 )
3532 def test_close_mem_free(self):
3633 """Ensure all file memory is freed when calling close().
6764 "Memory usage increased after closing the file system; diff is %0.2f KiB."
6865 % (diff_close.size_diff / 1024.0),
6966 )
67
68 def test_copy_preserve_time(self):
69 self.fs.makedir("foo")
70 self.fs.makedir("bar")
71 self.fs.touch("foo/file.txt")
72
73 src_datetime = self.fs.getmodified("foo/file.txt")
74
75 self.fs.copy("foo/file.txt", "bar/file.txt", preserve_time=True)
76 self.assertTrue(self.fs.exists("bar/file.txt"))
77
78 dst_datetime = self.fs.getmodified("bar/file.txt")
79 self.assertEqual(dst_datetime, src_datetime)
80
81
82 class TestMemoryFile(unittest.TestCase):
83 def setUp(self):
84 self.fs = memoryfs.MemoryFS()
85
86 def tearDown(self):
87 self.fs.close()
88
89 def test_readline_writing(self):
90 with self.fs.openbin("test.txt", "w") as f:
91 self.assertRaises(IOError, f.readline)
92
93 def test_readinto_writing(self):
94 with self.fs.openbin("test.txt", "w") as f:
95 self.assertRaises(IOError, f.readinto, bytearray(10))
00 from __future__ import unicode_literals
11
22 import unittest
3 from parameterized import parameterized_class
34
5 from fs import open_fs
46 from fs.mirror import mirror
5 from fs import open_fs
67
78
9 @parameterized_class(("WORKERS",), [(0,), (1,), (2,), (4,)])
810 class TestMirror(unittest.TestCase):
9 WORKERS = 0 # Single threaded
10
1111 def _contents(self, fs):
1212 """Extract an FS in to a simple data structure."""
13 namespaces = ("details", "metadata_changed", "modified")
1314 contents = []
1415 for path, dirs, files in fs.walk():
1516 for info in dirs:
1718 contents.append((_path, "dir", b""))
1819 for info in files:
1920 _path = info.make_path(path)
20 contents.append((_path, "file", fs.readbytes(_path)))
21 _bytes = fs.readbytes(_path)
22 _info = fs.getinfo(_path, namespaces)
23 contents.append(
24 (
25 _path,
26 "file",
27 _bytes,
28 _info.modified,
29 _info.metadata_changed,
30 )
31 )
2132 return sorted(contents)
2233
2334 def assert_compare_fs(self, fs1, fs2):
2738 def test_empty_mirror(self):
2839 m1 = open_fs("mem://")
2940 m2 = open_fs("mem://")
30 mirror(m1, m2, workers=self.WORKERS)
41 mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
3142 self.assert_compare_fs(m1, m2)
3243
3344 def test_mirror_one_file(self):
3445 m1 = open_fs("mem://")
3546 m1.writetext("foo", "hello")
3647 m2 = open_fs("mem://")
37 mirror(m1, m2, workers=self.WORKERS)
48 mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
3849 self.assert_compare_fs(m1, m2)
3950
4051 def test_mirror_one_file_one_dir(self):
4253 m1.writetext("foo", "hello")
4354 m1.makedir("bar")
4455 m2 = open_fs("mem://")
45 mirror(m1, m2, workers=self.WORKERS)
56 mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
4657 self.assert_compare_fs(m1, m2)
4758
4859 def test_mirror_delete_replace(self):
5061 m1.writetext("foo", "hello")
5162 m1.makedir("bar")
5263 m2 = open_fs("mem://")
53 mirror(m1, m2, workers=self.WORKERS)
64 mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
5465 self.assert_compare_fs(m1, m2)
5566 m2.remove("foo")
56 mirror(m1, m2, workers=self.WORKERS)
67 mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
5768 self.assert_compare_fs(m1, m2)
5869 m2.removedir("bar")
59 mirror(m1, m2, workers=self.WORKERS)
70 mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
6071 self.assert_compare_fs(m1, m2)
6172
6273 def test_mirror_extra_dir(self):
6576 m1.makedir("bar")
6677 m2 = open_fs("mem://")
6778 m2.makedir("baz")
68 mirror(m1, m2, workers=self.WORKERS)
79 mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
6980 self.assert_compare_fs(m1, m2)
7081
7182 def test_mirror_extra_file(self):
7586 m2 = open_fs("mem://")
7687 m2.makedir("baz")
7788 m2.touch("egg")
78 mirror(m1, m2, workers=self.WORKERS)
89 mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
7990 self.assert_compare_fs(m1, m2)
8091
8192 def test_mirror_wrong_type(self):
8596 m2 = open_fs("mem://")
8697 m2.makedir("foo")
8798 m2.touch("bar")
88 mirror(m1, m2, workers=self.WORKERS)
99 mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
89100 self.assert_compare_fs(m1, m2)
90101
91102 def test_mirror_update(self):
93104 m1.writetext("foo", "hello")
94105 m1.makedir("bar")
95106 m2 = open_fs("mem://")
96 mirror(m1, m2, workers=self.WORKERS)
107 mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
97108 self.assert_compare_fs(m1, m2)
98109 m2.appendtext("foo", " world!")
99 mirror(m1, m2, workers=self.WORKERS)
110 mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
100111 self.assert_compare_fs(m1, m2)
101
102
103 class TestMirrorWorkers1(TestMirror):
104 WORKERS = 1
105
106
107 class TestMirrorWorkers2(TestMirror):
108 WORKERS = 2
109
110
111 class TestMirrorWorkers4(TestMirror):
112 WORKERS = 4
00 from __future__ import unicode_literals
11
22 import unittest
3
43 from six import text_type
54
6 from fs.mode import check_readable, check_writable, Mode
5 from fs.mode import Mode, check_readable, check_writable
76
87
98 class TestMode(unittest.TestCase):
11
22 import unittest
33
4 from fs.memoryfs import MemoryFS
45 from fs.mountfs import MountError, MountFS
5 from fs.memoryfs import MemoryFS
66 from fs.tempfs import TempFS
77 from fs.test import FSTestCases
88
0
10 from __future__ import unicode_literals
21
32 import unittest
43
4 try:
5 from unittest import mock
6 except ImportError:
7 import mock
8
9 from parameterized import parameterized, parameterized_class
10
511 import fs.move
612 from fs import open_fs
13 from fs.errors import FSError, ResourceReadOnly
14 from fs.path import join
15 from fs.wrap import read_only
716
817
9 class TestMove(unittest.TestCase):
18 @parameterized_class(("preserve_time",), [(True,), (False,)])
19 class TestMoveCheckTime(unittest.TestCase):
1020 def test_move_fs(self):
21 namespaces = ("details", "modified")
22
1123 src_fs = open_fs("mem://")
1224 src_fs.makedirs("foo/bar")
1325 src_fs.touch("test.txt")
1426 src_fs.touch("foo/bar/baz.txt")
27 src_file1_info = src_fs.getinfo("test.txt", namespaces)
28 src_file2_info = src_fs.getinfo("foo/bar/baz.txt", namespaces)
1529
1630 dst_fs = open_fs("mem://")
17 fs.move.move_fs(src_fs, dst_fs)
31 dst_fs.create("test.txt")
32 dst_fs.setinfo("test.txt", {"details": {"modified": 1000000}})
1833
34 fs.move.move_fs(src_fs, dst_fs, preserve_time=self.preserve_time)
35
36 self.assertTrue(src_fs.isempty("/"))
1937 self.assertTrue(dst_fs.isdir("foo/bar"))
2038 self.assertTrue(dst_fs.isfile("test.txt"))
21 self.assertTrue(src_fs.isempty("/"))
39 self.assertTrue(dst_fs.isfile("foo/bar/baz.txt"))
2240
23 def test_copy_dir(self):
41 if self.preserve_time:
42 dst_file1_info = dst_fs.getinfo("test.txt", namespaces)
43 dst_file2_info = dst_fs.getinfo("foo/bar/baz.txt", namespaces)
44 self.assertEqual(dst_file1_info.modified, src_file1_info.modified)
45 self.assertEqual(dst_file2_info.modified, src_file2_info.modified)
46
47 def test_move_file(self):
48 namespaces = ("details", "modified")
49 with open_fs("mem://") as src_fs, open_fs("mem://") as dst_fs:
50 src_fs.writetext("source.txt", "Source")
51 src_fs_file_info = src_fs.getinfo("source.txt", namespaces)
52 fs.move.move_file(
53 src_fs,
54 "source.txt",
55 dst_fs,
56 "dest.txt",
57 preserve_time=self.preserve_time,
58 )
59 self.assertFalse(src_fs.exists("source.txt"))
60 self.assertEqual(dst_fs.readtext("dest.txt"), "Source")
61
62 if self.preserve_time:
63 dst_fs_file_info = dst_fs.getinfo("dest.txt", namespaces)
64 self.assertEqual(src_fs_file_info.modified, dst_fs_file_info.modified)
65
66 def test_move_dir(self):
67 namespaces = ("details", "modified")
68
2469 src_fs = open_fs("mem://")
2570 src_fs.makedirs("foo/bar")
2671 src_fs.touch("test.txt")
2772 src_fs.touch("foo/bar/baz.txt")
73 src_file2_info = src_fs.getinfo("foo/bar/baz.txt", namespaces)
2874
2975 dst_fs = open_fs("mem://")
30 fs.move.move_dir(src_fs, "/foo", dst_fs, "/")
76 dst_fs.create("test.txt")
77 dst_fs.setinfo("test.txt", {"details": {"modified": 1000000}})
3178
79 fs.move.move_dir(src_fs, "/foo", dst_fs, "/", preserve_time=self.preserve_time)
80
81 self.assertFalse(src_fs.exists("foo"))
82 self.assertTrue(src_fs.isfile("test.txt"))
3283 self.assertTrue(dst_fs.isdir("bar"))
3384 self.assertTrue(dst_fs.isfile("bar/baz.txt"))
34 self.assertFalse(src_fs.exists("foo"))
85
86 if self.preserve_time:
87 dst_file2_info = dst_fs.getinfo("bar/baz.txt", namespaces)
88 self.assertEqual(dst_file2_info.modified, src_file2_info.modified)
89
90
91 class TestMove(unittest.TestCase):
92 def test_move_file_tempfs(self):
93 with open_fs("temp://") as src, open_fs("temp://") as dst:
94 src_dir = src.makedir("Some subfolder")
95 src_dir.writetext("file.txt", "Content")
96 dst_dir = dst.makedir("dest dir")
97 fs.move.move_file(src_dir, "file.txt", dst_dir, "target.txt")
98 self.assertFalse(src.exists("Some subfolder/file.txt"))
99 self.assertEqual(dst.readtext("dest dir/target.txt"), "Content")
100
101 def test_move_file_fs_urls(self):
102 # create a temp dir to work on
103 with open_fs("temp://") as tmp:
104 path = tmp.getsyspath("/")
105 tmp.makedir("subdir_src")
106 tmp.writetext("subdir_src/file.txt", "Content")
107 tmp.makedir("subdir_dst")
108 fs.move.move_file(
109 "osfs://" + join(path, "subdir_src"),
110 "file.txt",
111 "osfs://" + join(path, "subdir_dst"),
112 "target.txt",
113 )
114 self.assertFalse(tmp.exists("subdir_src/file.txt"))
115 self.assertEqual(tmp.readtext("subdir_dst/target.txt"), "Content")
116
117 def test_move_file_same_fs_read_only_source(self):
118 with open_fs("temp://") as tmp:
119 path = tmp.getsyspath("/")
120 tmp.writetext("file.txt", "Content")
121 src = read_only(open_fs(path))
122 dst = tmp.makedir("sub")
123 with self.assertRaises(ResourceReadOnly):
124 fs.move.move_file(src, "file.txt", dst, "target_file.txt")
125 self.assertTrue(src.exists("file.txt"))
126 self.assertFalse(
127 dst.exists("target_file.txt"), "file should not have been copied over"
128 )
129
130 def test_move_file_read_only_mem_source(self):
131 with open_fs("mem://") as src, open_fs("mem://") as dst:
132 src.writetext("file.txt", "Content")
133 dst_sub = dst.makedir("sub")
134 src_ro = read_only(src)
135 with self.assertRaises(ResourceReadOnly):
136 fs.move.move_file(src_ro, "file.txt", dst_sub, "target.txt")
137 self.assertTrue(src.exists("file.txt"))
138 self.assertFalse(
139 dst_sub.exists("target.txt"), "file should not have been copied over"
140 )
141
142 def test_move_file_read_only_mem_dest(self):
143 with open_fs("mem://") as src, open_fs("mem://") as dst:
144 src.writetext("file.txt", "Content")
145 dst_ro = read_only(dst)
146 with self.assertRaises(ResourceReadOnly):
147 fs.move.move_file(src, "file.txt", dst_ro, "target.txt")
148 self.assertTrue(src.exists("file.txt"))
149 self.assertFalse(
150 dst_ro.exists("target.txt"), "file should not have been copied over"
151 )
152
153 @parameterized.expand([(True,), (False,)])
154 def test_move_file_cleanup_on_error(self, cleanup):
155 with open_fs("mem://") as src, open_fs("mem://") as dst:
156 src.writetext("file.txt", "Content")
157 with mock.patch.object(src, "remove") as mck:
158 mck.side_effect = FSError
159 with self.assertRaises(FSError):
160 fs.move.move_file(
161 src,
162 "file.txt",
163 dst,
164 "target.txt",
165 cleanup_dst_on_error=cleanup,
166 )
167 self.assertTrue(src.exists("file.txt"))
168 self.assertEqual(not dst.exists("target.txt"), cleanup)
11
22 import unittest
33
4 from fs import errors
5 from fs.memoryfs import MemoryFS
46 from fs.multifs import MultiFS
5 from fs.memoryfs import MemoryFS
6 from fs import errors
7
87 from fs.test import FSTestCases
98
109
11
22 import unittest
33 import warnings
4
54
65 from fs.base import _new_name
76
00 from __future__ import unicode_literals
11
2 import sys
3
24 import os
3 import sys
5 import pkg_resources
6 import shutil
47 import tempfile
58 import unittest
6 import pkg_resources
7
8 import pytest
99
1010 from fs import open_fs, opener
11 from fs.osfs import OSFS
12 from fs.opener import registry, errors
11 from fs.appfs import UserDataFS
1312 from fs.memoryfs import MemoryFS
14 from fs.appfs import UserDataFS
13 from fs.opener import errors, registry
1514 from fs.opener.parse import ParseResult
1615 from fs.opener.registry import Registry
16 from fs.osfs import OSFS
1717
1818 try:
1919 from unittest import mock
207207 self.assertTrue(mem_fs.isclosed())
208208
209209
210 @pytest.mark.usefixtures("mock_appdir_directories")
211210 class TestOpeners(unittest.TestCase):
211 def setUp(self):
212 self.tmpdir = tempfile.mkdtemp()
213
214 def tearDown(self):
215 shutil.rmtree(self.tmpdir)
216
212217 def test_repr(self):
213218 # Check __repr__ works
214219 for entry_point in pkg_resources.iter_entry_points("fs.opener"):
259264 mem_fs_2 = opener.open_fs(mem_fs)
260265 self.assertEqual(mem_fs, mem_fs_2)
261266
262 def test_open_userdata(self):
267 @mock.patch("appdirs.{}".format(UserDataFS.app_dir), autospec=True, spec_set=True)
268 def test_open_userdata(self, app_dir):
269 app_dir.return_value = self.tmpdir
270
263271 with self.assertRaises(errors.OpenerError):
264272 opener.open_fs("userdata://foo:bar:baz:egg")
265273
268276 self.assertEqual(app_fs.app_dirs.appauthor, "willmcgugan")
269277 self.assertEqual(app_fs.app_dirs.version, "1.0")
270278
271 def test_open_userdata_no_version(self):
279 @mock.patch("appdirs.{}".format(UserDataFS.app_dir), autospec=True, spec_set=True)
280 def test_open_userdata_no_version(self, app_dir):
281 app_dir.return_value = self.tmpdir
282
272283 app_fs = opener.open_fs("userdata://fstest:willmcgugan", create=True)
273284 self.assertEqual(app_fs.app_dirs.appname, "fstest")
274285 self.assertEqual(app_fs.app_dirs.appauthor, "willmcgugan")
275286 self.assertEqual(app_fs.app_dirs.version, None)
276287
277 def test_user_data_opener(self):
288 @mock.patch("appdirs.{}".format(UserDataFS.app_dir), autospec=True, spec_set=True)
289 def test_user_data_opener(self, app_dir):
290 app_dir.return_value = self.tmpdir
291
278292 user_data_fs = open_fs("userdata://fstest:willmcgugan:1.0", create=True)
279293 self.assertIsInstance(user_data_fs, UserDataFS)
280294 user_data_fs.makedir("foo", recreate=True)
286300 def test_open_ftp(self, mock_FTPFS):
287301 open_fs("ftp://foo:bar@ftp.example.org")
288302 mock_FTPFS.assert_called_once_with(
289 "ftp.example.org", passwd="bar", port=21, user="foo", proxy=None, timeout=10
303 "ftp.example.org",
304 passwd="bar",
305 port=21,
306 user="foo",
307 proxy=None,
308 timeout=10,
309 tls=False,
310 )
311
312 @mock.patch("fs.ftpfs.FTPFS")
313 def test_open_ftps(self, mock_FTPFS):
314 open_fs("ftps://foo:bar@ftp.example.org")
315 mock_FTPFS.assert_called_once_with(
316 "ftp.example.org",
317 passwd="bar",
318 port=21,
319 user="foo",
320 proxy=None,
321 timeout=10,
322 tls=True,
290323 )
291324
292325 @mock.patch("fs.ftpfs.FTPFS")
299332 user="foo",
300333 proxy="ftp.proxy.org",
301334 timeout=10,
302 )
335 tls=False,
336 )
00 # coding: utf-8
11 from __future__ import unicode_literals
2
3 import sys
24
35 import errno
46 import io
57 import os
68 import shutil
79 import tempfile
8 import sys
10 import time
911 import unittest
10 import pytest
11
12 from fs import osfs, open_fs
13 from fs.path import relpath, dirname
14 from fs import errors
12 import warnings
13 from six import text_type
14
15 from fs import errors, open_fs, osfs
16 from fs.path import dirname, relpath
1517 from fs.test import FSTestCases
16
17 from six import text_type
1818
1919 try:
2020 from unittest import mock
2424
2525 class TestOSFS(FSTestCases, unittest.TestCase):
2626 """Test OSFS implementation."""
27
28 @classmethod
29 def setUpClass(cls):
30 warnings.simplefilter("error")
31
32 @classmethod
33 def tearDownClass(cls):
34 warnings.simplefilter(warnings.defaultaction)
2735
2836 def make_fs(self):
2937 temp_dir = tempfile.mkdtemp("fstestosfs")
8795 self.assertIn("TYRIONLANISTER", fs1.getsyspath("/"))
8896 self.assertNotIn("TYRIONLANISTER", fs2.getsyspath("/"))
8997
90 @pytest.mark.skipif(osfs.sendfile is None, reason="sendfile not supported")
91 @pytest.mark.skipif(
98 def test_copy_preserve_time(self):
99 self.fs.makedir("foo")
100 self.fs.makedir("bar")
101 self.fs.create("foo/file.txt")
102 raw_info = {"details": {"modified": time.time() - 10000}}
103 self.fs.setinfo("foo/file.txt", raw_info)
104
105 namespaces = ("details", "modified")
106 src_info = self.fs.getinfo("foo/file.txt", namespaces)
107
108 self.fs.copy("foo/file.txt", "bar/file.txt", preserve_time=True)
109 self.assertTrue(self.fs.exists("bar/file.txt"))
110
111 dst_info = self.fs.getinfo("bar/file.txt", namespaces)
112 delta = dst_info.modified - src_info.modified
113 self.assertAlmostEqual(delta.total_seconds(), 0, places=2)
114
115 @unittest.skipUnless(osfs.sendfile, "sendfile not supported")
116 @unittest.skipIf(
92117 sys.version_info >= (3, 8),
93 reason="the copy function uses sendfile in Python 3.8+, "
118 "the copy function uses sendfile in Python 3.8+, "
94119 "making the patched implementation irrelevant",
95120 )
96121 def test_copy_sendfile(self):
138163 finally:
139164 shutil.rmtree(dir_path)
140165
141 @pytest.mark.skipif(not hasattr(os, "symlink"), reason="No symlink support")
166 @unittest.skipUnless(hasattr(os, "symlink"), "No symlink support")
142167 def test_symlinks(self):
143168 with open(self._get_real_path("foo"), "wb") as f:
144169 f.write(b"foobar")
0 from __future__ import absolute_import, unicode_literals, print_function
0 from __future__ import absolute_import, print_function, unicode_literals
11
22 """
33 fstests.test_path: testcases for the fs path functions
0 from __future__ import unicode_literals
1 from __future__ import print_function
0 from __future__ import print_function, unicode_literals
21
32 import unittest
4
53 from six import text_type
64
7 from fs.permissions import make_mode, Permissions
5 from fs.permissions import Permissions, make_mode
86
97
108 class TestPermissions(unittest.TestCase):
55 import unittest
66
77 from fs import osfs
8 from fs.subfs import SubFS
98 from fs.memoryfs import MemoryFS
109 from fs.path import relpath
10 from fs.subfs import SubFS
11
1112 from .test_osfs import TestOSFS
1213
1314
66 import tarfile
77 import tempfile
88 import unittest
9 import pytest
109
1110 from fs import tarfs
11 from fs.compress import write_tar
1212 from fs.enums import ResourceType
13 from fs.compress import write_tar
13 from fs.errors import NoURL
1414 from fs.opener import open_fs
1515 from fs.opener.errors import NotWriteable
16 from fs.errors import NoURL
1716 from fs.test import FSTestCases
1817
1918 from .test_archives import ArchiveTestCases
19
20 try:
21 from pytest import mark
22 except ImportError:
23 from . import mark
2024
2125
2226 class TestWriteReadTarFS(unittest.TestCase):
9397 del fs._tar_file
9498
9599
96 @pytest.mark.skipif(six.PY2, reason="Python2 does not support LZMA")
100 @mark.slow
101 @unittest.skipIf(six.PY2, "Python2 does not support LZMA")
97102 class TestWriteXZippedTarFS(FSTestCases, unittest.TestCase):
98103 def make_fs(self):
99104 fh, _tar_file = tempfile.mkstemp()
118123 tarfile.open(fs._tar_file, "r:{}".format(other_comps))
119124
120125
126 @mark.slow
121127 class TestWriteBZippedTarFS(FSTestCases, unittest.TestCase):
122128 def make_fs(self):
123129 fh, _tar_file = tempfile.mkstemp()
236242
237243
238244 class TestImplicitDirectories(unittest.TestCase):
239 """Regression tests for #160.
240 """
245 """Regression tests for #160."""
241246
242247 @classmethod
243248 def setUpClass(cls):
11
22 import os
33
4 from fs import errors
45 from fs.tempfs import TempFS
5 from fs import errors
66
77 from .test_osfs import TestOSFS
88
0 from __future__ import unicode_literals, print_function
0 from __future__ import print_function, unicode_literals
11
2 import unittest
23 from datetime import datetime
3 import unittest
4
5 import pytz
64
75 from fs.time import datetime_to_epoch, epoch_to_datetime
6
7 try:
8 from datetime import timezone
9 except ImportError:
10 from fs._tzcompat import timezone # type: ignore
811
912
1013 class TestEpoch(unittest.TestCase):
1114 def test_epoch_to_datetime(self):
1215 self.assertEqual(
13 epoch_to_datetime(142214400), datetime(1974, 7, 5, tzinfo=pytz.UTC)
16 epoch_to_datetime(142214400), datetime(1974, 7, 5, tzinfo=timezone.utc)
1417 )
1518
1619 def test_datetime_to_epoch(self):
1720 self.assertEqual(
18 datetime_to_epoch(datetime(1974, 7, 5, tzinfo=pytz.UTC)), 142214400
21 datetime_to_epoch(datetime(1974, 7, 5, tzinfo=timezone.utc)), 142214400
1922 )
11
22 import unittest
33
4 from fs.mode import validate_open_mode
5 from fs.mode import validate_openbin_mode
64 from fs import tools
5 from fs.mode import validate_open_mode, validate_openbin_mode
76 from fs.opener import open_fs
87
98
0 from __future__ import print_function
1 from __future__ import unicode_literals
0 from __future__ import print_function, unicode_literals
21
32 import io
43 import unittest
00 from __future__ import unicode_literals
11
2 import six
23 import unittest
34
5 from fs import walk
46 from fs.errors import FSError
57 from fs.memoryfs import MemoryFS
6 from fs import walk
78 from fs.wrap import read_only
8 import six
99
1010
1111 class TestWalker(unittest.TestCase):
00 from __future__ import unicode_literals
11
2 import operator
23 import unittest
34
4 from fs import errors
5 try:
6 from unittest import mock
7 except ImportError:
8 import mock
9
10 import six
11
12 import fs.copy
13 import fs.errors
14 import fs.mirror
15 import fs.move
16 import fs.wrap
517 from fs import open_fs
6 from fs import wrap
7
8
9 class TestWrap(unittest.TestCase):
10 def test_readonly(self):
11 mem_fs = open_fs("mem://")
12 fs = wrap.read_only(mem_fs)
13
14 with self.assertRaises(errors.ResourceReadOnly):
15 fs.open("foo", "w")
16
17 with self.assertRaises(errors.ResourceReadOnly):
18 fs.appendtext("foo", "bar")
19
20 with self.assertRaises(errors.ResourceReadOnly):
21 fs.appendbytes("foo", b"bar")
22
23 with self.assertRaises(errors.ResourceReadOnly):
24 fs.makedir("foo")
25
26 with self.assertRaises(errors.ResourceReadOnly):
27 fs.move("foo", "bar")
28
29 with self.assertRaises(errors.ResourceReadOnly):
30 fs.openbin("foo", "w")
31
32 with self.assertRaises(errors.ResourceReadOnly):
33 fs.remove("foo")
34
35 with self.assertRaises(errors.ResourceReadOnly):
36 fs.removedir("foo")
37
38 with self.assertRaises(errors.ResourceReadOnly):
39 fs.setinfo("foo", {})
40
41 with self.assertRaises(errors.ResourceReadOnly):
42 fs.settimes("foo", {})
43
44 with self.assertRaises(errors.ResourceReadOnly):
45 fs.copy("foo", "bar")
46
47 with self.assertRaises(errors.ResourceReadOnly):
48 fs.create("foo")
49
50 with self.assertRaises(errors.ResourceReadOnly):
51 fs.writetext("foo", "bar")
52
53 with self.assertRaises(errors.ResourceReadOnly):
54 fs.writebytes("foo", b"bar")
55
56 with self.assertRaises(errors.ResourceReadOnly):
57 fs.makedirs("foo/bar")
58
59 with self.assertRaises(errors.ResourceReadOnly):
60 fs.touch("foo")
61
62 with self.assertRaises(errors.ResourceReadOnly):
63 fs.upload("foo", None)
64
65 with self.assertRaises(errors.ResourceReadOnly):
66 fs.writefile("foo", None)
67
68 self.assertTrue(mem_fs.isempty("/"))
69 mem_fs.writebytes("file", b"read me")
70 with fs.openbin("file") as read_file:
18 from fs.info import Info
19
20
21 class TestWrapReadOnly(unittest.TestCase):
22 def setUp(self):
23 self.fs = open_fs("mem://")
24 self.ro = fs.wrap.read_only(self.fs)
25
26 def tearDown(self):
27 self.fs.close()
28
29 def assertReadOnly(self, func, *args, **kwargs):
30 self.assertRaises(fs.errors.ResourceReadOnly, func, *args, **kwargs)
31
32 def test_open_w(self):
33 self.assertReadOnly(self.ro.open, "foo", "w")
34
35 def test_appendtext(self):
36 self.assertReadOnly(self.ro.appendtext, "foo", "bar")
37
38 def test_appendbytes(self):
39 self.assertReadOnly(self.ro.appendbytes, "foo", b"bar")
40
41 def test_makedir(self):
42 self.assertReadOnly(self.ro.makedir, "foo")
43
44 def test_move(self):
45 self.assertReadOnly(self.ro.move, "foo", "bar")
46
47 def test_openbin_w(self):
48 self.assertReadOnly(self.ro.openbin, "foo", "w")
49
50 def test_remove(self):
51 self.assertReadOnly(self.ro.remove, "foo")
52
53 def test_removedir(self):
54 self.assertReadOnly(self.ro.removedir, "foo")
55
56 def test_removetree(self):
57 self.assertReadOnly(self.ro.removetree, "foo")
58
59 def test_setinfo(self):
60 self.assertReadOnly(self.ro.setinfo, "foo", {})
61
62 def test_settimes(self):
63 self.assertReadOnly(self.ro.settimes, "foo", {})
64
65 def test_copy(self):
66 self.assertReadOnly(self.ro.copy, "foo", "bar")
67
68 def test_create(self):
69 self.assertReadOnly(self.ro.create, "foo")
70
71 def test_writetext(self):
72 self.assertReadOnly(self.ro.writetext, "foo", "bar")
73
74 def test_writebytes(self):
75 self.assertReadOnly(self.ro.writebytes, "foo", b"bar")
76
77 def test_makedirs(self):
78 self.assertReadOnly(self.ro.makedirs, "foo/bar")
79
80 def test_touch(self):
81 self.assertReadOnly(self.ro.touch, "foo")
82
83 def test_upload(self):
84 self.assertReadOnly(self.ro.upload, "foo", six.BytesIO())
85
86 def test_writefile(self):
87 self.assertReadOnly(self.ro.writefile, "foo", six.StringIO())
88
89 def test_openbin_r(self):
90 self.fs.writebytes("file", b"read me")
91 with self.ro.openbin("file") as read_file:
7192 self.assertEqual(read_file.read(), b"read me")
7293
73 with fs.open("file", "rb") as read_file:
94 def test_open_r(self):
95 self.fs.writebytes("file", b"read me")
96 with self.ro.open("file", "rb") as read_file:
7497 self.assertEqual(read_file.read(), b"read me")
7598
76 def test_cachedir(self):
77 mem_fs = open_fs("mem://")
78 mem_fs.makedirs("foo/bar/baz")
79 mem_fs.touch("egg")
80
81 fs = wrap.cache_directory(mem_fs)
82 self.assertEqual(sorted(fs.listdir("/")), ["egg", "foo"])
83 self.assertEqual(sorted(fs.listdir("/")), ["egg", "foo"])
84 self.assertTrue(fs.isdir("foo"))
85 self.assertTrue(fs.isdir("foo"))
86 self.assertTrue(fs.isfile("egg"))
87 self.assertTrue(fs.isfile("egg"))
88
89 self.assertEqual(fs.getinfo("foo"), mem_fs.getinfo("foo"))
90 self.assertEqual(fs.getinfo("foo"), mem_fs.getinfo("foo"))
91
92 self.assertEqual(fs.getinfo("/"), mem_fs.getinfo("/"))
93 self.assertEqual(fs.getinfo("/"), mem_fs.getinfo("/"))
94
95 with self.assertRaises(errors.ResourceNotFound):
96 fs.getinfo("/foofoo")
99
100 class TestWrapReadOnlySyspath(unittest.TestCase):
101 # If the wrapped fs has a syspath, there is a chance that somewhere
102 # in fs.copy or fs.mirror we try to use it to our advantage, but
103 # we want to make sure these implementations don't circumvent the
104 # wrapper.
105
106 def setUp(self):
107 self.fs = open_fs("temp://")
108 self.ro = fs.wrap.read_only(self.fs)
109 self.src = open_fs("temp://")
110 self.src.touch("foo")
111 self.src.makedir("bar")
112
113 def tearDown(self):
114 self.fs.close()
115 self.src.close()
116
117 def assertReadOnly(self, func, *args, **kwargs):
118 self.assertRaises(fs.errors.ResourceReadOnly, func, *args, **kwargs)
119
120 def test_copy_fs(self):
121 self.assertReadOnly(fs.copy.copy_fs, self.src, self.ro)
122
123 def test_copy_fs_if_newer(self):
124 self.assertReadOnly(fs.copy.copy_fs_if_newer, self.src, self.ro)
125
126 def test_copy_file(self):
127 self.assertReadOnly(fs.copy.copy_file, self.src, "foo", self.ro, "foo")
128
129 def test_copy_file_if_newer(self):
130 self.assertReadOnly(fs.copy.copy_file_if_newer, self.src, "foo", self.ro, "foo")
131
132 def test_copy_structure(self):
133 self.assertReadOnly(fs.copy.copy_structure, self.src, self.ro)
134
135 def test_mirror(self):
136 self.assertReadOnly(fs.mirror.mirror, self.src, self.ro)
137 fs.mirror.mirror(self.src, self.fs)
138 self.fs.touch("baz")
139 self.assertReadOnly(fs.mirror.mirror, self.src, self.ro)
140
141 def test_move_fs(self):
142 self.assertReadOnly(fs.move.move_fs, self.src, self.ro)
143 self.src.removetree("/")
144 self.fs.touch("foo")
145 self.assertReadOnly(fs.move.move_fs, self.ro, self.src)
146
147 def test_move_file(self):
148 self.assertReadOnly(fs.move.move_file, self.src, "foo", self.ro, "foo")
149 self.fs.touch("baz")
150 self.assertReadOnly(fs.move.move_file, self.ro, "baz", self.src, "foo")
151
152 def test_move_dir(self):
153 self.assertReadOnly(fs.move.move_file, self.src, "bar", self.ro, "bar")
154 self.fs.makedir("baz")
155 self.assertReadOnly(fs.move.move_dir, self.ro, "baz", self.src, "baz")
156
157
158 class TestWrapCachedDir(unittest.TestCase):
159 def setUp(self):
160 self.fs = open_fs("mem://")
161 self.fs.makedirs("foo/bar/baz")
162 self.fs.touch("egg")
163 self.cached = fs.wrap.cache_directory(self.fs)
164
165 def tearDown(self):
166 self.fs.close()
167
168 def assertNotFound(self, func, *args, **kwargs):
169 self.assertRaises(fs.errors.ResourceNotFound, func, *args, **kwargs)
170
171 def test_scandir(self):
172 key = operator.attrgetter("name")
173 expected = [
174 Info({"basic": {"name": "egg", "is_dir": False}}),
175 Info({"basic": {"name": "foo", "is_dir": True}}),
176 ]
177 with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir:
178 self.assertEqual(sorted(self.cached.scandir("/"), key=key), expected)
179 scandir.assert_has_calls([mock.call("/", namespaces=None, page=None)])
180 with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir:
181 self.assertEqual(sorted(self.cached.scandir("/"), key=key), expected)
182 scandir.assert_not_called()
183
184 def test_isdir(self):
185 with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir:
186 self.assertTrue(self.cached.isdir("foo"))
187 self.assertFalse(self.cached.isdir("egg")) # is file
188 self.assertFalse(self.cached.isdir("spam")) # doesn't exist
189 scandir.assert_has_calls([mock.call("/", namespaces=None, page=None)])
190 with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir:
191 self.assertTrue(self.cached.isdir("foo"))
192 self.assertFalse(self.cached.isdir("egg"))
193 self.assertFalse(self.cached.isdir("spam"))
194 scandir.assert_not_called()
195
196 def test_isfile(self):
197 with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir:
198 self.assertTrue(self.cached.isfile("egg"))
199 self.assertFalse(self.cached.isfile("foo")) # is dir
200 self.assertFalse(self.cached.isfile("spam")) # doesn't exist
201 scandir.assert_has_calls([mock.call("/", namespaces=None, page=None)])
202 with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir:
203 self.assertTrue(self.cached.isfile("egg"))
204 self.assertFalse(self.cached.isfile("foo"))
205 self.assertFalse(self.cached.isfile("spam"))
206 scandir.assert_not_called()
207
208 def test_getinfo(self):
209 with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir:
210 self.assertEqual(self.cached.getinfo("foo"), self.fs.getinfo("foo"))
211 self.assertEqual(self.cached.getinfo("/"), self.fs.getinfo("/"))
212 self.assertNotFound(self.cached.getinfo, "spam")
213 scandir.assert_has_calls([mock.call("/", namespaces=None, page=None)])
214 with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir:
215 self.assertEqual(self.cached.getinfo("foo"), self.fs.getinfo("foo"))
216 self.assertEqual(self.cached.getinfo("/"), self.fs.getinfo("/"))
217 self.assertNotFound(self.cached.getinfo, "spam")
218 scandir.assert_not_called()
00 from __future__ import unicode_literals
11
22 import unittest
3
43 from six import text_type
54
65 from fs import wrapfs
00 # -*- encoding: UTF-8
11 from __future__ import unicode_literals
22
3 import sys
4
35 import os
4 import sys
6 import six
57 import tempfile
68 import unittest
79 import zipfile
810
9 import six
10
1111 from fs import zipfs
1212 from fs.compress import write_zip
13 from fs.enums import Seek
14 from fs.errors import NoURL
1315 from fs.opener import open_fs
1416 from fs.opener.errors import NotWriteable
15 from fs.errors import NoURL
1617 from fs.test import FSTestCases
17 from fs.enums import Seek
1818
1919 from .test_archives import ArchiveTestCases
2020
+0
-27
tox.ini less more
0 [tox]
1 envlist = {py27,py34,py35,py36,py37}{,-scandir},pypy,typecheck,lint
2 sitepackages = False
3 skip_missing_interpreters=True
4
5 [testenv]
6 deps = -r {toxinidir}/testrequirements.txt
7 commands = coverage run -m pytest --cov-append {posargs} {toxinidir}/tests
8
9 [testenv:typecheck]
10 python = python37
11 deps =
12 mypy==0.740
13 -r {toxinidir}/testrequirements.txt
14 commands = make typecheck
15 whitelist_externals = make
16
17 [testenv:lint]
18 python = python37
19 deps =
20 flake8
21 # flake8-builtins
22 flake8-bugbear
23 flake8-comprehensions
24 # flake8-isort
25 flake8-mutable
26 commands = flake8 fs tests