diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
new file mode 100644
index 00000000..fd623f58
--- /dev/null
+++ b/.github/FUNDING.yml
@@ -0,0 +1,5 @@
+# These are supported funding model platforms
+
+github: willmcgugan
+ko_fi: willmcgugan
+tidelift: "pypi/rich"
diff --git a/pull_request_template.md b/.github/PULL_REQUEST_TEMPLATE.md
similarity index 65%
rename from pull_request_template.md
rename to .github/PULL_REQUEST_TEMPLATE.md
index 98b95212..6f7fcbcf 100644
--- a/pull_request_template.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -1,17 +1,19 @@
## Type of changes
-- [ ] Bug fix
-- [ ] New feature
-- [ ] Documentation / docstrings
-- [ ] Tests
-- [ ] Other
+
+
+- Bug fix
+- New feature
+- Documentation / docstrings
+- Tests
+- Other
## Checklist
- [ ] I've run the latest [black](https://github.com/ambv/black) with default args on new code.
- [ ] I've updated CHANGELOG.md and CONTRIBUTORS.md where appropriate.
- [ ] I've added tests for new code.
-- [ ] I accept that @willmcgugan may be pedantic in the code review.
+- [ ] I accept that @PyFilesystem/maintainers may be pedantic in the code review.
## Description
diff --git a/.github/workflows/package.yml b/.github/workflows/package.yml
new file mode 100644
index 00000000..6e6903a4
--- /dev/null
+++ b/.github/workflows/package.yml
@@ -0,0 +1,145 @@
+name: Package
+
+on:
+ push:
+ tags:
+ - 'v2.*'
+
+jobs:
+
+ build-wheel:
+ runs-on: ubuntu-latest
+ name: Build wheel distribution
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v2
+ with:
+ submodules: true
+      - name: Set up Python 3.9
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.9
+ - name: Update build dependencies
+ run: python -m pip install -U pip wheel setuptools
+ - name: Build wheel distribution
+ run: python setup.py bdist_wheel
+ - name: Store built wheel
+ uses: actions/upload-artifact@v2
+ with:
+ name: dist
+ path: dist/*
+
+ build-sdist:
+ runs-on: ubuntu-latest
+ name: Build source distribution
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v2
+ with:
+ submodules: true
+ - name: Set up Python 3.9
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.9
+ - name: Update build dependencies
+ run: python -m pip install -U pip wheel setuptools
+ - name: Build source distribution
+ run: python setup.py sdist
+ - name: Store source distribution
+ uses: actions/upload-artifact@v2
+ with:
+ name: dist
+ path: dist/*
+
+ test-sdist:
+ runs-on: ubuntu-latest
+ name: Test source distribution
+ needs:
+ - build-sdist
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v2
+ with:
+ submodules: true
+ - name: Setup Python 3.9
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.9
+ - name: Download source distribution
+ uses: actions/download-artifact@v2
+ with:
+ name: dist
+ path: dist
+ - name: Install source distribution
+ run: python -m pip install dist/fs-*.tar.gz
+ - name: Remove source code
+ run: rm -rvd fs
+ - name: Install test requirements
+ run: python -m pip install -r tests/requirements.txt
+ - name: Test installed package
+ run: python -m unittest discover -vv
+
+ test-wheel:
+ runs-on: ubuntu-latest
+ name: Test wheel distribution
+ needs:
+ - build-wheel
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v2
+ with:
+ submodules: true
+ - name: Setup Python 3.9
+ uses: actions/setup-python@v2
+ with:
+ python-version: 3.9
+ - name: Download wheel distribution
+ uses: actions/download-artifact@v2
+ with:
+ name: dist
+ path: dist
+ - name: Install wheel distribution
+ run: python -m pip install dist/fs-*.whl
+ - name: Remove source code
+ run: rm -rvd fs
+ - name: Install test requirements
+ run: python -m pip install -r tests/requirements.txt
+ - name: Test installed package
+ run: python -m unittest discover -vv
+
+ upload:
+ environment: PyPI
+ runs-on: ubuntu-latest
+ name: Upload
+ needs:
+ - build-sdist
+ - build-wheel
+ - test-sdist
+ - test-wheel
+ steps:
+ - name: Download built distributions
+ uses: actions/download-artifact@v2
+ with:
+ name: dist
+ path: dist
+ - name: Publish distributions to PyPI
+ if: startsWith(github.ref, 'refs/tags/v')
+ uses: pypa/gh-action-pypi-publish@master
+ with:
+ user: __token__
+ password: ${{ secrets.PYPI_API_TOKEN }}
+ skip_existing: false
+
+ release:
+ environment: GitHub Releases
+ runs-on: ubuntu-latest
+ if: "startsWith(github.ref, 'refs/tags/v')"
+ name: Release
+ needs: upload
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v1
+ - name: Release a Changelog
+ uses: rasmus-saks/release-a-changelog-action@v1.0.1
+ with:
+ github-token: '${{ secrets.GITHUB_TOKEN }}'
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
new file mode 100644
index 00000000..4be9098a
--- /dev/null
+++ b/.github/workflows/test.yml
@@ -0,0 +1,91 @@
+name: Test
+
+on:
+ - push
+ - pull_request
+
+jobs:
+ test:
+ runs-on: ubuntu-latest
+ timeout-minutes: 10
+ strategy:
+ fail-fast: false
+ matrix:
+ python-version:
+ - 2.7
+ - 3.5
+ - 3.6
+ - 3.7
+ - 3.8
+ - 3.9
+ - '3.10'
+ - pypy-2.7
+ - pypy-3.6
+ - pypy-3.7
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v1
+ - name: Setup Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v2
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Update pip
+ run: python -m pip install -U pip wheel setuptools
+ - name: Install tox
+ run: python -m pip install tox tox-gh-actions
+ - name: Test with tox
+ run: python -m tox
+ - name: Store partial coverage reports
+ uses: actions/upload-artifact@v2
+ with:
+ name: coverage
+ path: .coverage.*
+
+ coveralls:
+ needs: test
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v1
+ - name: Setup Python 3.10
+ uses: actions/setup-python@v2
+ with:
+ python-version: '3.10'
+ - name: Install coverage package
+ run: python -m pip install -U coverage
+ - name: Download partial coverage reports
+ uses: actions/download-artifact@v2
+ with:
+ name: coverage
+ - name: Combine coverage
+ run: python -m coverage combine
+ - name: Report coverage
+ run: python -m coverage report
+ - name: Export coverage to XML
+ run: python -m coverage xml
+ - name: Upload coverage statistics to Coveralls
+ uses: AndreMiras/coveralls-python-action@develop
+
+ lint:
+ runs-on: ubuntu-latest
+ strategy:
+ fail-fast: false
+ matrix:
+ linter:
+ - typecheck
+ - codestyle
+ - docstyle
+ - codeformat
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v1
+ - name: Setup Python '3.10'
+ uses: actions/setup-python@v2
+ with:
+ python-version: '3.10'
+ - name: Update pip
+ run: python -m pip install -U pip wheel setuptools
+ - name: Install tox
+ run: python -m pip install tox tox-gh-actions
+ - name: Run ${{ matrix.linter }} linter
+ run: python -m tox -e ${{ matrix.linter }}
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index ad36cf4e..00000000
--- a/.travis.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-sudo: false
-language: python
-
-matrix:
- include:
- - python: "2.7"
- env:
- - SETUPTOOLS=setuptools PIP=pip
- - python: "3.7"
- env:
- - SETUPTOOLS=setuptools PIP=pip
- dist: xenial
- sudo: true
- - python: "3.6"
- env:
- - SETUPTOOLS=setuptools PIP=pip
- - python: "3.5"
- env:
- - SETUPTOOLS=setuptools PIP=pip
- - python: "3.4"
- env:
- - SETUPTOOLS=setuptools PIP=pip
-
-before_install:
- - pip install $SETUPTOOLS $PIP -U
- - pip --version
- - pip install -r testrequirements.txt
- - pip freeze
-
-install:
- - pip install -e .
-
-after_success:
- - coveralls
-
-# command to run tests
-script:
- - nosetests -v --with-coverage --cover-package=fs tests
diff --git a/CHANGELOG.md b/CHANGELOG.md
index f2be6711..ef16734e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,12 +5,212 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).
-## [2.4.9] - (Unreleased)
+
+## Unreleased
+
+
+### Added
+
+- Added `filter_glob` and `exclude_glob` parameters to `fs.walk.Walker`.
+ Closes [#459](https://github.com/PyFilesystem/pyfilesystem2/issues/459).
+
+### Fixed
+- Elaborated documentation of `filter_dirs` and `exclude_dirs` in `fs.walk.Walker`.
+ Closes [#371](https://github.com/PyFilesystem/pyfilesystem2/issues/371).
+- Fixes a backward incompatibility where `fs.move.move_file` raises `DestinationExists`
+ ([#535](https://github.com/PyFilesystem/pyfilesystem2/issues/535)).
+- Fixed a bug where files could be truncated or deleted when moved / copied onto itself.
+ Closes [#546](https://github.com/PyFilesystem/pyfilesystem2/issues/546)
+
+## [2.4.16] - 2022-05-02
+
+### Changed
+
+- Make `fs.zipfs._ZipExtFile` use the seeking mechanism implemented
+ in the Python standard library in Python version 3.7 and later
+ ([#527](https://github.com/PyFilesystem/pyfilesystem2/pull/527)).
+- Mark `fs.zipfs.ReadZipFS` as a case-sensitive filesystem
+ ([#527](https://github.com/PyFilesystem/pyfilesystem2/pull/527)).
+- Optimized moving files between filesystems with syspaths.
+ ([#523](https://github.com/PyFilesystem/pyfilesystem2/pull/523)).
+- Fixed `fs.move.move_file` to clean up the copy on the destination in case of errors.
+- `fs.opener.manage_fs` with `writeable=True` will now raise a `ResourceReadOnly`
+ exception if the managed filesystem is not writeable.
+- Marked filesystems wrapped with `fs.wrap.WrapReadOnly` as read-only.
+
+
+## [2.4.15] - 2022-02-07
+
+### Changed
+
+- Support more lenient usernames and group names in FTP servers
+ ([#507](https://github.com/PyFilesystem/pyfilesystem2/pull/507)).
+ Closes [#506](https://github.com/PyFilesystem/pyfilesystem2/issues/506).
+- Removed dependency on pytz ([#518](https://github.com/PyFilesystem/pyfilesystem2/pull/518)).
+  Closes [#516](https://github.com/PyFilesystem/pyfilesystem2/issues/516).
+
+### Fixed
+
+- Fixed `MemoryFS.move` and `MemoryFS.movedir` not updating the name of moved
+ resources, causing `MemoryFS.scandir` to use the old name.
+ ([#510](https://github.com/PyFilesystem/pyfilesystem2/pull/510)).
+ Closes [#509](https://github.com/PyFilesystem/pyfilesystem2/issues/509).
+- Make `WrapFS.move` and `WrapFS.movedir` use the delegate FS methods instead
+ of `fs.move` functions, which was causing optimized implementation of
+ `movedir` to be always skipped.
+ ([#511](https://github.com/PyFilesystem/pyfilesystem2/pull/511)).
+
+
+## [2.4.14] - 2021-11-16
+
+### Added
+
+- Added `fs.copy.copy_file_if`, `fs.copy.copy_dir_if`, and `fs.copy.copy_fs_if`.
+ Closes [#458](https://github.com/PyFilesystem/pyfilesystem2/issues/458).
+- Added `fs.base.FS.getmodified`.
+
+### Changed
+
+- FTP servers that do not support the MLST command now try to use the MDTM command to
+ retrieve the last modification timestamp of a resource.
+ Closes [#456](https://github.com/PyFilesystem/pyfilesystem2/pull/456).
+
+### Fixed
+
+- Fixed performance bugs in `fs.copy.copy_dir_if_newer`. Test cases were adapted to catch those bugs in the future.
+- Fixed precision bug for timestamps in `fs.OSFS.setinfo`.
+
+
+## [2.4.13] - 2021-03-27
+
+### Added
+
+- Added FTP over TLS (FTPS) support to FTPFS.
+ Closes [#437](https://github.com/PyFilesystem/pyfilesystem2/issues/437),
+ [#449](https://github.com/PyFilesystem/pyfilesystem2/pull/449).
+- `PathError` now supports wrapping an exception using the `exc` argument.
+ Closes [#453](https://github.com/PyFilesystem/pyfilesystem2/issues/453).
+- Better documentation of the `writable` parameter of `fs.open_fs`, and
+ hint about using `fs.wrap.read_only` when a read-only filesystem is
+ required. Closes [#441](https://github.com/PyFilesystem/pyfilesystem2/issues/441).
+- Copy and move operations now provide a parameter `preserve_time` that, when
+ passed as `True`, makes sure the "mtime" of the destination file will be
+ the same as that of the source file.
+
+### Changed
+
+- Make `FS.upload` explicit about the expected error when the parent directory of the destination does not exist.
+ Closes [#445](https://github.com/PyFilesystem/pyfilesystem2/pull/445).
+- Migrate continuous integration from Travis-CI to GitHub Actions and introduce several linters
+ again in the build steps ([#448](https://github.com/PyFilesystem/pyfilesystem2/pull/448)).
+ Closes [#446](https://github.com/PyFilesystem/pyfilesystem2/issues/446).
+- Stop requiring `pytest` to run tests, allowing any test runner supporting `unittest`-style
+ test suites.
+- `FSTestCases` now builds the large data required for `upload` and `download` tests only
+ once in order to reduce the total testing time.
+- `MemoryFS.move` and `MemoryFS.movedir` will now avoid copying data.
+ Closes [#452](https://github.com/PyFilesystem/pyfilesystem2/issues/452).
+- `FS.removetree("/")` behaviour has been standardized in all filesystems, and
+ is expected to clear the contents of the root folder without deleting it.
+ Closes [#471](https://github.com/PyFilesystem/pyfilesystem2/issues/471).
+- `FS.getbasic` is now deprecated, as it is redundant with `FS.getinfo`,
+ and `FS.getinfo` is now explicitly expected to return the *basic* info
+ namespace unconditionally. Closes [#469](https://github.com/PyFilesystem/pyfilesystem2/issues/469).
+
+### Fixed
+
+- Make `FTPFile`, `MemoryFile` and `RawWrapper` accept [`array.array`](https://docs.python.org/3/library/array.html)
+ arguments for the `write` and `writelines` methods, as expected by their base class [`io.RawIOBase`](https://docs.python.org/3/library/io.html#io.RawIOBase).
+- Various documentation issues, including `MemoryFS` docstring not rendering properly.
+- Avoid creating a new connection on every call of `FTPFS.upload`. Closes [#455](https://github.com/PyFilesystem/pyfilesystem2/issues/455).
+- `WrapReadOnly.removetree` not raising a `ResourceReadOnly` when called. Closes [#468](https://github.com/PyFilesystem/pyfilesystem2/issues/468).
+- `WrapCachedDir.isdir` and `WrapCachedDir.isfile` raising a `ResourceNotFound` error on non-existing path ([#470](https://github.com/PyFilesystem/pyfilesystem2/pull/470)).
+- `FTPFS` not listing certain entries with sticky/SUID/SGID permissions set by Linux server ([#473](https://github.com/PyFilesystem/pyfilesystem2/pull/473)).
+ Closes [#451](https://github.com/PyFilesystem/pyfilesystem2/issues/451).
+- `scandir` iterator not being closed explicitly in `OSFS.scandir`, occasionally causing a `ResourceWarning`
+ to be thrown. Closes [#311](https://github.com/PyFilesystem/pyfilesystem2/issues/311).
+- Incomplete type annotations for the `temp_fs` parameter of `WriteTarFS` and `WriteZipFS`.
+ Closes [#410](https://github.com/PyFilesystem/pyfilesystem2/issues/410).
+
+
+## [2.4.12] - 2021-01-14
+
+### Added
+
+- Missing `mode` attribute to `_MemoryFile` objects returned by `MemoryFS.openbin`.
+- Missing `readinto` method for `MemoryFS` and `FTPFS` file objects. Closes
+ [#380](https://github.com/PyFilesystem/pyfilesystem2/issues/380).
+- Added compatibility if a Windows FTP server returns file information to the
+ `LIST` command with 24-hour times. Closes [#438](https://github.com/PyFilesystem/pyfilesystem2/issues/438).
+- Added Python 3.9 support. Closes [#443](https://github.com/PyFilesystem/pyfilesystem2/issues/443).
+
+### Changed
+
+- Start testing on PyPy. Due to [#342](https://github.com/PyFilesystem/pyfilesystem2/issues/342)
+ we have to treat PyPy builds specially and allow them to fail, but at least we'll
+ be able to see if we break something aside from known issues with FTP tests.
+- Include docs in source distributions as well as the whole tests folder,
+ ensuring `conftest.py` is present, fixes [#364](https://github.com/PyFilesystem/pyfilesystem2/issues/364).
+- Stop patching copy with Python 3.8+ because it already uses `sendfile`
+ ([#424](https://github.com/PyFilesystem/pyfilesystem2/pull/424)).
+ Closes [#421](https://github.com/PyFilesystem/pyfilesystem2/issues/421).
+
+### Fixed
+
+- Fixed crash when CPython's -OO flag is used
+- Fixed error when parsing timestamps from a FTP directory served from a WindowsNT FTP Server.
+ Closes [#395](https://github.com/PyFilesystem/pyfilesystem2/issues/395).
+- Fixed documentation of `Mode.to_platform_bin`. Closes [#382](https://github.com/PyFilesystem/pyfilesystem2/issues/382).
+- Fixed the code example in the "Testing Filesystems" section of the
+ "Implementing Filesystems" guide. Closes [#407](https://github.com/PyFilesystem/pyfilesystem2/issues/407).
+- Fixed `FTPFS.openbin` not implicitly opening files in binary mode like expected
+ from `openbin`. Closes [#406](https://github.com/PyFilesystem/pyfilesystem2/issues/406).
+
+
+## [2.4.11] - 2019-09-07
+
+### Added
+
+- Added geturl for TarFS and ZipFS for 'fs' purpose. NoURL for 'download' purpose.
+- Added helpful root path in CreateFailed exception.
+ Closes [#340](https://github.com/PyFilesystem/pyfilesystem2/issues/340).
+- Added Python 3.8 support.
+
+### Fixed
+
+- Fixed tests leaving tmp files
+- Fixed typing issues
+- Fixed link namespace returning bytes
+- Fixed broken FSURL in windows [#329](https://github.com/PyFilesystem/pyfilesystem2/issues/329)
+- Fixed hidden exception at fs.close() when opening an absent zip/tar file URL [#333](https://github.com/PyFilesystem/pyfilesystem2/issues/333)
+- Fixed abstract class import from `collections` which would break on Python 3.8
+- Fixed incorrect imports of `mock` on Python 3
+- Removed some unused imports and unused `requirements.txt` file
+- Added mypy checks to Travis. Closes [#332](https://github.com/PyFilesystem/pyfilesystem2/issues/332).
+- Fixed missing `errno.ENOTSUP` on PyPy. Closes [#338](https://github.com/PyFilesystem/pyfilesystem2/issues/338).
+- Fixed bug in a decorator that would trigger an `AttributeError` when a class
+ was created that implemented a deprecated method and had no docstring of its
+ own.
+
+### Changed
+
+- Entire test suite has been migrated to [pytest](https://docs.pytest.org/en/latest/). Closes [#327](https://github.com/PyFilesystem/pyfilesystem2/issues/327).
+- Style checking is now enforced using `flake8`; this involved some code cleanup
+ such as removing unused imports.
+
+## [2.4.10] - 2019-07-29
+
+### Fixed
+
+- Fixed broken WrapFS.movedir [#322](https://github.com/PyFilesystem/pyfilesystem2/issues/322).
+
+## [2.4.9] - 2019-07-28
### Fixed
- Restored fs.path import
- Fixed potential race condition in makedirs. Fixes [#310](https://github.com/PyFilesystem/pyfilesystem2/issues/310)
+- Added missing methods to WrapFS. Fixed [#294](https://github.com/PyFilesystem/pyfilesystem2/issues/294)
### Changed
@@ -384,7 +584,7 @@ No changes, pushed wrong branch to PyPi.
### Added
-- New `copy_if_newer' functionality in`copy` module.
+- New `copy_if_newer` functionality in `copy` module.
### Fixed
@@ -395,17 +595,17 @@ No changes, pushed wrong branch to PyPi.
### Changed
- Improved FTP support for non-compliant servers
-- Fix for ZipFS implied directories
+- Fix for `ZipFS` implied directories
## [2.0.1] - 2017-03-11
### Added
-- TarFS contributed by Martin Larralde
+- `TarFS` contributed by Martin Larralde.
### Fixed
-- FTPFS bugs.
+- `FTPFS` bugs.
## [2.0.0] - 2016-12-07
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index e84f7586..5347176f 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -2,14 +2,122 @@
Pull Requests are very welcome for this project!
-For bug fixes or new features, please file an issue before submitting a pull request. If the change isn't trivial, it may be best to wait for feedback. For a quicker response, contact [Will McGugan](mailto:willmcgugan+pyfs@gmail.com) directly.
+For bug fixes or new features, please file an issue before submitting a pull
+request. If the change isn't trivial, it may be best to wait for feedback.
+For a quicker response, contact [Will McGugan](mailto:willmcgugan+pyfs@gmail.com)
+directly.
-## Coding Guidelines
-This project runs on Python2.7 and Python3.X. Python2.7 will be dropped at some point, but for now, please maintain compatibility.
+## `tox`
+
+Most of the guidelines that follow can be checked with a particular
+[`tox`](https://pypi.org/project/tox/) environment. Having it installed will
+help you develop and verify your code locally without having to wait for
+our Continuous Integration pipeline to finish.
-Please format new code with [black](https://github.com/ambv/black), using the default settings.
## Tests
-New code should have unit tests. We strive to have near 100% coverage. Get in touch, if you need assistance with the tests.
+New code should have unit tests. We strive to have near 100% coverage.
+Get in touch, if you need assistance with the tests. You shouldn't refrain
+from opening a Pull Request even if all the tests were not added yet, or if
+not all of them are passing yet.
+
+### Dependencies
+
+The dependency for running the tests can be found in the `tests/requirements.txt` file.
+If you're using `tox`, you won't have to install them manually. Otherwise,
+they can be installed with `pip`:
+```console
+$ pip install -r tests/requirements.txt
+```
+
+### Running (with `tox`)
+
+Simply run in the repository folder to execute the tests for all available
+environments:
+```console
+$ tox
+```
+
+Since this can take some time, you can use a single environment to run
+tests only once, for instance to run tests only with Python 3.9:
+```console
+$ tox -e py39
+```
+
+### Running (without `tox`)
+
+Tests are written using the standard [`unittest`](https://docs.python.org/3/library/unittest.html)
+framework. You should be able to run them using the standard library runner:
+```console
+$ python -m unittest discover -vv
+```
+
+
+## Coding Guidelines
+
+This project runs on Python2.7 and Python3.X. Python2.7 will be dropped at
+some point, but for now, please maintain compatibility. PyFilesystem2 uses
+the [`six`](https://pypi.org/project/six/) library to write version-agnostic
+Python code.
+
+### Style
+
+The code (including the tests) should follow PEP8. You can check for the
+code style with:
+```console
+$ tox -e codestyle
+```
+
+This will invoke [`flake8`](https://pypi.org/project/flake8/) with some common
+plugins such as [`flake8-comprehensions`](https://pypi.org/project/flake8-comprehensions/).
+
+### Format
+
+Please format new code with [black](https://github.com/ambv/black), using the
+default settings. You can check whether the code is well-formatted with:
+```console
+$ tox -e codeformat
+```
+
+### Type annotations
+
+The code is typechecked with [`mypy`](https://pypi.org/project/mypy/), and
+type annotations written as comments, to stay compatible with Python2. Run
+the typechecking with:
+```console
+$ tox -e typecheck
+```
+
+
+## Documentation
+
+### Dependencies
+
+The documentation is built with [Sphinx](https://pypi.org/project/Sphinx/),
+using the [ReadTheDocs](https://pypi.org/project/sphinx-rtd-theme/) theme.
+The dependencies are listed in `docs/requirements.txt` and can be installed with
+`pip`:
+```console
+$ pip install -r docs/requirements.txt
+```
+
+### Building
+
+Run the following command to build the HTML documentation:
+```console
+$ python setup.py build_sphinx
+```
+
+The documentation index will be written to the `build/sphinx/html/`
+directory.
+
+### Style
+
+The API reference is written in the Python source, using docstrings in
+[Google format](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html).
+The documentation style can be checked with:
+```console
+$ tox -e docstyle
+```
diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md
index ef1d3a09..78102487 100644
--- a/CONTRIBUTORS.md
+++ b/CONTRIBUTORS.md
@@ -2,9 +2,48 @@
Many thanks to the following developers for contributing to this project:
+- [Adrian Garcia Badaracco](https://github.com/adriangb)
+- [Alex Povel](https://github.com/alexpovel)
+- [Andreas Tollkötter](https://github.com/atollk)
+- [Andrew Scheller](https://github.com/lurch)
+- [Andrey Serov](https://github.com/zmej-serow)
+- [Ben Lindsay](https://github.com/benlindsay)
+- [Bernhard M. Wiedemann](https://github.com/bmwiedemann)
+- [@chfw](https://github.com/chfw)
+- [Dafna Hirschfeld](https://github.com/kamomil)
- [Diego Argueta](https://github.com/dargueta)
+- [Eelke van den Bos](https://github.com/eelkevdbos)
+- [Egor Namakonov](https://github.com/fresheed)
+- [Felix Yan](https://github.com/felixonmars)
+- [@FooBarQuaxx](https://github.com/FooBarQuaxx)
- [Geoff Jukes](https://github.com/geoffjukes)
-- [Giampaolo](https://github.com/gpcimino)
+- [George Macon](https://github.com/gmacon)
+- [Giampaolo Cimino](https://github.com/gpcimino)
+- [@Hoboneer](https://github.com/Hoboneer)
+- [Jen Hagg](https://github.com/jenhagg)
+- [Joseph Atkins-Turkish](https://github.com/Spacerat)
+- [Joshua Tauberer](https://github.com/JoshData)
+- [Justin Charlong](https://github.com/jcharlong)
+- [Louis Sautier](https://github.com/sbraz)
+- [Martin Durant](https://github.com/martindurant)
- [Martin Larralde](https://github.com/althonos)
+- [Masaya Nakamura](https://github.com/mashabow)
+- [Matthew Gamble](https://github.com/djmattyg007)
+- [Morten Engelhardt Olsen](https://github.com/xoriath)
+- [@mrg0029](https://github.com/mrg0029)
+- [Nathan Goldbaum](https://github.com/ngoldbaum)
+- [Nick Henderson](https://github.com/nwh)
+- [Oliver Galvin](https://github.com/odgalvin)
+- [Philipp Wiesner](https://github.com/birnbaum)
+- [Philippe Ombredanne](https://github.com/pombredanne)
+- [Rehan Khwaja](https://github.com/rkhwaja)
+- [Silvan Spross](https://github.com/sspross)
+- [@sqwishy](https://github.com/sqwishy)
+- [Sven Schliesing](https://github.com/muffl0n)
+- [Thomas Feldmann](https://github.com/tfeldmann)
+- [Tim Gates](https://github.com/timgates42/)
+- [@tkossak](https://github.com/tkossak)
+- [Todd Levi](https://github.com/televi)
+- [Vilius Grigaliūnas](https://github.com/vilius-g)
- [Will McGugan](https://github.com/willmcgugan)
- [Zmej Serow](https://github.com/zmej-serow)
diff --git a/LICENSE b/LICENSE
index 97a4b916..34845692 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,5 +1,6 @@
MIT License
+Copyright (c) 2017-2021 The PyFilesystem2 contributors
Copyright (c) 2016-2019 Will McGugan
Permission is hereby granted, free of charge, to any person obtaining a copy
diff --git a/MANIFEST.in b/MANIFEST.in
index 1aba38f6..cf5499b5 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1 +1,8 @@
+include CHANGELOG.md
+include CONTRIBUTING.md
+include CONTRIBUTORS.md
include LICENSE
+graft tests
+graft docs
+global-exclude __pycache__
+global-exclude *.py[co]
diff --git a/README.md b/README.md
index 27f02063..0f1326b1 100644
--- a/README.md
+++ b/README.md
@@ -2,17 +2,19 @@
Python's Filesystem abstraction layer.
-[](https://badge.fury.io/py/fs)
+[](https://pypi.org/project/fs/)
[](https://pypi.org/project/fs/)
-[](https://travis-ci.org/PyFilesystem/pyfilesystem2)
-[](https://coveralls.io/github/PyFilesystem/pyfilesystem2)
-[](https://www.codacy.com/app/will-mcgugan/pyfilesystem2?utm_source=github.com&utm_medium=referral&utm_content=PyFilesystem/pyfilesystem2&utm_campaign=Badge_Grade)
-[](https://landscape.io/github/PyFilesystem/pyfilesystem2/master)
+[](https://pepy.tech/project/fs/)
+[](https://github.com/PyFilesystem/pyfilesystem2/actions?query=branch%3Amaster)
+[](https://ci.appveyor.com/project/willmcgugan/pyfilesystem2)
+[](https://coveralls.io/github/PyFilesystem/pyfilesystem2)
+[](https://www.codacy.com/app/will-mcgugan/pyfilesystem2?utm_source=github.com&utm_medium=referral&utm_content=PyFilesystem/pyfilesystem2&utm_campaign=Badge_Grade)
+[](http://pyfilesystem2.readthedocs.io/en/stable/?badge=stable)
## Documentation
-- [Wiki](https://www.pyfilesystem.org)
-- [API Documentation](https://docs.pyfilesystem.org/)
+- ~~[Wiki](https://www.pyfilesystem.org)~~ (currently offline)
+- [API Documentation](https://pyfilesystem2.readthedocs.io/en/latest/)
- [GitHub Repository](https://github.com/PyFilesystem/pyfilesystem2)
- [Blog](https://www.willmcgugan.com/tag/fs/)
@@ -92,10 +94,11 @@ The following developers have contributed code and their time to this projects:
- [Will McGugan](https://github.com/willmcgugan)
- [Martin Larralde](https://github.com/althonos)
-- [Giampaolo](https://github.com/gpcimino)
+- [Giampaolo Cimino](https://github.com/gpcimino)
- [Geoff Jukes](https://github.com/geoffjukes)
-See CONTRIBUTORS.md for a full list of contributors.
+See [CONTRIBUTORS.md](https://github.com/PyFilesystem/pyfilesystem2/blob/master/CONTRIBUTORS.md)
+for a full list of contributors.
PyFilesystem2 owes a massive debt of gratitude to the following
developers who contributed code and ideas to the original version.
diff --git a/appveyor.yml b/appveyor.yml
index 6cdb3773..e55b138a 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -20,10 +20,12 @@ environment:
install:
# We need wheel installed to build wheels
- - "%PYTHON%\\python.exe -m pip install nose psutil pyftpdlib mock"
+ - "%PYTHON%\\python.exe -m pip install -U pip wheel setuptools"
+ - "%PYTHON%\\python.exe -m pip install pytest"
+ - "%PYTHON%\\python.exe -m pip install -r tests/requirements.txt"
- "%PYTHON%\\python.exe setup.py install"
build: off
test_script:
- - "%PYTHON%\\python.exe -m nose tests -v"
+ - "%PYTHON%\\python.exe -m pytest"
diff --git a/docs/requirements.txt b/docs/requirements.txt
new file mode 100644
index 00000000..ee590c37
--- /dev/null
+++ b/docs/requirements.txt
@@ -0,0 +1,4 @@
+# the bare requirements for building docs
+Sphinx ~=3.0
+sphinx-rtd-theme ~=0.5.1
+recommonmark ~=0.6
diff --git a/docs/source/conf.py b/docs/source/conf.py
index a5cc0d23..661cd0c5 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -13,9 +13,8 @@
# serve to show the default.
import sys
-import os
-
+import os
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
@@ -39,7 +38,8 @@
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
- 'sphinx.ext.intersphinx'
+ 'sphinx.ext.intersphinx',
+ "recommonmark",
]
# Add any paths that contain templates here, relative to this directory.
@@ -63,7 +63,7 @@
# General information about the project.
project = u'PyFilesystem'
-copyright = u'2016-2017, Will McGugan'
+copyright = u'2016-2021, Will McGugan and the PyFilesystem2 contributors'
author = u'Will McGugan'
# The version info for the project you're documenting, acts as replacement for
@@ -71,6 +71,7 @@
# built documents.
#
from fs import __version__
+
# The short X.Y version.
version = '.'.join(__version__.split('.')[:2])
# The full version, including alpha/beta/rc tags.
@@ -304,3 +305,14 @@
#texinfo_no_detailmenu = False
napoleon_include_special_with_doc = True
+
+
+# -- Options for autodoc -----------------------------------------------------
+
+# Configure autodoc so that it doesn't skip building the documentation for
+# __init__ methods, since the arguments to instantiate classes should be in
+# the __init__ docstring and not at the class-level.
+
+autodoc_default_options = {
+ 'special-members': '__init__',
+}
diff --git a/docs/source/contributing.md b/docs/source/contributing.md
new file mode 120000
index 00000000..f939e75f
--- /dev/null
+++ b/docs/source/contributing.md
@@ -0,0 +1 @@
+../../CONTRIBUTING.md
\ No newline at end of file
diff --git a/docs/source/extension.rst b/docs/source/extension.rst
index 3180561b..c9dc3f69 100644
--- a/docs/source/extension.rst
+++ b/docs/source/extension.rst
@@ -29,7 +29,8 @@ Here's an example taken from an Amazon S3 Filesystem::
__all__ = ['S3FSOpener']
- from fs.opener import Opener, OpenerError
+ from fs.opener import Opener
+ from fs.opener.errors import OpenerError
from ._s3fs import S3FS
diff --git a/docs/source/globbing.rst b/docs/source/globbing.rst
index c72392fc..ac81c185 100644
--- a/docs/source/globbing.rst
+++ b/docs/source/globbing.rst
@@ -15,7 +15,7 @@ Matching Files and Directories
In a glob pattern, A ``*`` means match anything text in a filename. A ``?``
matches any single character. A ``**`` matches any number of subdirectories,
-making the glob *recusrive*. If the glob pattern ends in a ``/``, it will
+making the glob *recursive*. If the glob pattern ends in a ``/``, it will
only match directory paths, otherwise it will match files and directories.
.. note::
diff --git a/docs/source/guide.rst b/docs/source/guide.rst
index e1f078ee..2c54655c 100644
--- a/docs/source/guide.rst
+++ b/docs/source/guide.rst
@@ -176,7 +176,7 @@ You can open a file from a FS object with :meth:`~fs.base.FS.open`, which is ver
In the case of a ``OSFS``, a standard file-like object will be returned. Other filesystems may return a different object supporting the same methods. For instance, :class:`~fs.memoryfs.MemoryFS` will return a ``io.BytesIO`` object.
-PyFilesystem also offers a number of shortcuts for common file related operations. For instance, :meth:`~fs.base.FS.readbytes` will return the file contents as a bytes, and :meth:`~fs.base.FS.readtext` will read unicode text. These methods is generally preferable to explicitly opening files, as the FS object may have an optimized implementation.
+PyFilesystem also offers a number of shortcuts for common file related operations. For instance, :meth:`~fs.base.FS.readbytes` will return the file contents as bytes, and :meth:`~fs.base.FS.readtext` will read unicode text. These methods are generally preferable to explicitly opening files, as the FS object may have an optimized implementation.
Other *shortcut* methods are :meth:`~fs.base.FS.download`, :meth:`~fs.base.FS.upload`, :meth:`~fs.base.FS.writebytes`, :meth:`~fs.base.FS.writetext`.
diff --git a/docs/source/implementers.rst b/docs/source/implementers.rst
index 1f23866b..bb055d69 100644
--- a/docs/source/implementers.rst
+++ b/docs/source/implementers.rst
@@ -40,9 +40,10 @@ To test your implementation, you can borrow the test suite used to test the buil
Here's the simplest possible example to test a filesystem class called ``MyFS``::
+ import unittest
from fs.test import FSTestCases
- class TestMyFS(FSTestCases):
+ class TestMyFS(FSTestCases, unittest.TestCase):
def make_fs(self):
# Return an instance of your FS object here
@@ -54,6 +55,13 @@ You may also want to override some of the methods in the test suite for more tar
.. autoclass:: fs.test.FSTestCases
:members:
+.. note::
+
+ As of version 2.4.11 this project uses `pytest <https://docs.pytest.org/>`_ to run its tests.
+ While it's completely compatible with ``unittest``-style tests, it's much more powerful and
+ feature-rich. We suggest you take advantage of it and its plugins in new tests you write, rather
+ than sticking to strict ``unittest`` features. For benefits and limitations, see `here <https://docs.pytest.org/en/latest/unittest.html>`_.
+
.. _essential-methods:
diff --git a/docs/source/index.rst b/docs/source/index.rst
index c3fb2eb1..a2b72ebf 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -25,7 +25,7 @@ Contents:
external.rst
interface.rst
reference.rst
-
+ contributing.md
Indices and tables
diff --git a/docs/source/info.rst b/docs/source/info.rst
index 51719dbf..82c3e076 100644
--- a/docs/source/info.rst
+++ b/docs/source/info.rst
@@ -46,7 +46,7 @@ file::
resource_info = fs.getinfo('myfile.txt', namespaces=['details', 'access'])
-In addition to the specified namespaces, the fileystem will also return
+In addition to the specified namespaces, the filesystem will also return
the ``basic`` namespace, which contains the name of the resource, and a
flag which indicates if the resource is a directory.
@@ -88,13 +88,13 @@ size int Number of bytes used to store the
the overhead (in bytes) used to store
the directory entry.
type ResourceType Resource type, one of the values
- defined in :class:`~fs.ResourceType`.
+ defined in :class:`~fs.enums.ResourceType`.
================ =================== ==========================================
The time values (``accessed_time``, ``created_time`` etc.) may be
``None`` if the filesystem doesn't store that information. The ``size``
and ``type`` keys are guaranteed to be available, although ``type`` may
-be :attr:`~fs.ResourceType.unknown` if the filesystem is unable to
+be :attr:`~fs.enums.ResourceType.unknown` if the filesystem is unable to
retrieve the resource type.
Access Namespace
diff --git a/docs/source/interface.rst b/docs/source/interface.rst
index e2da135b..924ac3fb 100644
--- a/docs/source/interface.rst
+++ b/docs/source/interface.rst
@@ -20,6 +20,7 @@ The following is a complete list of methods on PyFilesystem objects.
* :meth:`~fs.base.FS.getdetails` Get details info namespace for a resource.
* :meth:`~fs.base.FS.getinfo` Get info regarding a file or directory.
* :meth:`~fs.base.FS.getmeta` Get meta information for a resource.
+* :meth:`~fs.base.FS.getmodified` Get the last modified time of a resource.
* :meth:`~fs.base.FS.getospath` Get path with encoding expected by the OS.
* :meth:`~fs.base.FS.getsize` Get the size of a file.
* :meth:`~fs.base.FS.getsyspath` Get the system path of a resource, if one exists.
diff --git a/docs/source/openers.rst b/docs/source/openers.rst
index 1fa643e5..b5a2a823 100644
--- a/docs/source/openers.rst
+++ b/docs/source/openers.rst
@@ -56,3 +56,25 @@ To open a filesysem with a FS URL, you can use :meth:`~fs.opener.registry.Regist
from fs import open_fs
projects_fs = open_fs('osfs://~/projects')
+
+
+Manually registering Openers
+----------------------------
+
+The ``fs.opener`` registry uses an entry point to install external openers
+(see :ref:`extension`), and it does so once, when you import `fs` for the first
+time. In some rare cases where entry points are not available (for instance,
+when running an embedded interpreter) or when extensions are installed *after*
+the interpreter has started (for instance in a notebook, see
+`PyFilesystem2#485 `_).
+
+However, a new opener can be installed manually at any time with the
+`fs.opener.registry.install` method. For instance, here's how the opener for
+the `s3fs <https://github.com/PyFilesystem/s3fs>`_ extension can be added to
+the registry::
+
+ import fs.opener
+ from fs_s3fs.opener import S3FSOpener
+
+ fs.opener.registry.install(S3FSOpener)
+ # fs.open_fs("s3fs://...") should now work
diff --git a/examples/count_py.py b/examples/count_py.py
index 1f38d1e2..1a6dd670 100644
--- a/examples/count_py.py
+++ b/examples/count_py.py
@@ -11,7 +11,6 @@
from fs import open_fs
from fs.filesize import traditional
-
fs_url = sys.argv[1]
count = 0
diff --git a/examples/find_dups.py b/examples/find_dups.py
index adc8b2cc..269509f3 100644
--- a/examples/find_dups.py
+++ b/examples/find_dups.py
@@ -7,11 +7,11 @@
"""
-from collections import defaultdict
import sys
-from fs import open_fs
+from collections import defaultdict
+from fs import open_fs
hashes = defaultdict(list)
with open_fs(sys.argv[1]) as fs:
diff --git a/examples/rm_pyc.py b/examples/rm_pyc.py
index 71f46b17..9d95f5d7 100644
--- a/examples/rm_pyc.py
+++ b/examples/rm_pyc.py
@@ -11,7 +11,6 @@
from fs import open_fs
-
with open_fs(sys.argv[1]) as fs:
count = fs.glob("**/*.pyc").remove()
print(f"{count} .pyc files remove")
diff --git a/examples/upload.py b/examples/upload.py
index 04a0e152..77e5d401 100644
--- a/examples/upload.py
+++ b/examples/upload.py
@@ -12,9 +12,10 @@
"""
-import os
import sys
+import os
+
from fs import open_fs
_, file_path, fs_url = sys.argv
diff --git a/fs/__init__.py b/fs/__init__.py
index b60a61e7..97dc55ba 100644
--- a/fs/__init__.py
+++ b/fs/__init__.py
@@ -1,12 +1,12 @@
"""Python filesystem abstraction layer.
"""
-__import__("pkg_resources").declare_namespace(__name__)
+__import__("pkg_resources").declare_namespace(__name__) # type: ignore
+from . import path
+from ._fscompat import fsdecode, fsencode
from ._version import __version__
from .enums import ResourceType, Seek
from .opener import open_fs
-from ._fscompat import fsencode, fsdecode
-from . import path
__all__ = ["__version__", "ResourceType", "Seek", "open_fs"]
diff --git a/fs/_bulk.py b/fs/_bulk.py
index a11069e8..caba0c58 100644
--- a/fs/_bulk.py
+++ b/fs/_bulk.py
@@ -6,18 +6,21 @@
from __future__ import unicode_literals
-import threading
+import typing
+import threading
from six.moves.queue import Queue
-from .copy import copy_file_internal
+from .copy import copy_file_internal, copy_modified_time
from .errors import BulkCopyFailed
from .tools import copy_file_data
-if False: # typing.TYPE_CHECKING
- from .base import FS
+if typing.TYPE_CHECKING:
+ from typing import IO, List, Optional, Text, Tuple, Type
+
from types import TracebackType
- from typing import IO, Iterator, List, Optional, Mapping, Text, Type, Union
+
+ from .base import FS
class _Worker(threading.Thread):
@@ -74,11 +77,13 @@ def __call__(self):
class Copier(object):
"""Copy files in worker threads."""
- def __init__(self, num_workers=4):
- # type: (int) -> None
+ def __init__(self, num_workers=4, preserve_time=False):
+ # type: (int, bool) -> None
if num_workers < 0:
raise ValueError("num_workers must be >= 0")
self.num_workers = num_workers
+ self.preserve_time = preserve_time
+ self.all_tasks = [] # type: List[Tuple[FS, Text, FS, Text]]
self.queue = None # type: Optional[Queue[_Task]]
self.workers = [] # type: List[_Worker]
self.errors = [] # type: List[Exception]
@@ -96,10 +101,18 @@ def start(self):
def stop(self):
"""Stop the workers (will block until they are finished)."""
if self.running and self.num_workers:
- for worker in self.workers:
+ # Notify the workers that all tasks have arrived
+ # and wait for them to finish.
+ for _worker in self.workers:
self.queue.put(None)
for worker in self.workers:
worker.join()
+
+ # If the "last modified" time is to be preserved, do it now.
+ if self.preserve_time:
+ for args in self.all_tasks:
+ copy_modified_time(*args)
+
# Free up references held by workers
del self.workers[:]
self.queue.join()
@@ -123,13 +136,16 @@ def __exit__(
if traceback is None and self.errors:
raise BulkCopyFailed(self.errors)
- def copy(self, src_fs, src_path, dst_fs, dst_path):
- # type: (FS, Text, FS, Text) -> None
+ def copy(self, src_fs, src_path, dst_fs, dst_path, preserve_time=False):
+ # type: (FS, Text, FS, Text, bool) -> None
"""Copy a file from one fs to another."""
if self.queue is None:
# This should be the most performant for a single-thread
- copy_file_internal(src_fs, src_path, dst_fs, dst_path)
+ copy_file_internal(
+ src_fs, src_path, dst_fs, dst_path, preserve_time=self.preserve_time
+ )
else:
+ self.all_tasks.append((src_fs, src_path, dst_fs, dst_path))
src_file = src_fs.openbin(src_path, "r")
try:
dst_file = dst_fs.openbin(dst_path, "w")
diff --git a/fs/_fscompat.py b/fs/_fscompat.py
index 54717b7f..fa7d2c0b 100644
--- a/fs/_fscompat.py
+++ b/fs/_fscompat.py
@@ -1,17 +1,15 @@
-import sys
-
import six
try:
- from os import fsencode, fsdecode
+ from os import fsdecode, fsencode
except ImportError:
- from backports.os import fsencode, fsdecode # type: ignore
+ from backports.os import fsdecode, fsencode # type: ignore
try:
from os import fspath
except ImportError:
- def fspath(path):
+ def fspath(path): # type: ignore
"""Return the path representation of a path-like object.
If str or bytes is passed in, it is returned unchanged. Otherwise the
diff --git a/fs/_ftp_parse.py b/fs/_ftp_parse.py
index b50f75eb..16c581eb 100644
--- a/fs/_ftp_parse.py
+++ b/fs/_ftp_parse.py
@@ -1,31 +1,32 @@
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import absolute_import, print_function, unicode_literals
-import unicodedata
-import datetime
import re
import time
+import unicodedata
+from datetime import datetime
-from pytz import UTC
+try:
+ from datetime import timezone
+except ImportError:
+ from ._tzcompat import timezone # type: ignore
from .enums import ResourceType
from .permissions import Permissions
-
-EPOCH_DT = datetime.datetime.fromtimestamp(0, UTC)
+EPOCH_DT = datetime.fromtimestamp(0, timezone.utc)
RE_LINUX = re.compile(
r"""
^
- ([ldrwx-]{10})
+ ([-dlpscbD])
+ ([r-][w-][xsS-][r-][w-][xsS-][r-][w-][xtT-][\.\+]?)
\s+?
(\d+)
\s+?
- ([\w\-]+)
+ ([A-Za-z0-9][A-Za-z0-9\-\.\_\@]*\$?)
\s+?
- ([\w\-]+)
+ ([A-Za-z0-9][A-Za-z0-9\-\.\_\@]*\$?)
\s+?
(\d+)
\s+?
@@ -41,20 +42,21 @@
RE_WINDOWSNT = re.compile(
r"""
^
- (?P.*(AM|PM))
- \s*
- (?P(|\d*))
- \s*
+ (?P\S+)
+ \s+
+ (?P\S+(AM|PM)?)
+ \s+
+ (?P(|\d+))
+ \s+
(?P.*)
$
""",
- re.VERBOSE)
+ re.VERBOSE,
+)
def get_decoders():
- """
- Returns all available FTP LIST line decoders with their matching regexes.
- """
+ """Return all available FTP LIST line decoders with their matching regexes."""
decoders = [
(RE_LINUX, decode_linux),
(RE_WINDOWSNT, decode_windowsnt),
@@ -82,15 +84,13 @@ def parse_line(line):
def _parse_time(t, formats):
- t = " ".join(token.strip() for token in t.lower().split(" "))
-
- _t = None
for frmt in formats:
try:
_t = time.strptime(t, frmt)
+ break
except ValueError:
continue
- if not _t:
+ else:
return None
year = _t.tm_year if _t.tm_year != 1900 else time.localtime().tm_year
@@ -98,23 +98,27 @@ def _parse_time(t, formats):
day = _t.tm_mday
hour = _t.tm_hour
minutes = _t.tm_min
- dt = datetime.datetime(year, month, day, hour, minutes, tzinfo=UTC)
+ dt = datetime(year, month, day, hour, minutes, tzinfo=timezone.utc)
epoch_time = (dt - EPOCH_DT).total_seconds()
return epoch_time
+def _decode_linux_time(mtime):
+ return _parse_time(mtime, formats=["%b %d %Y", "%b %d %H:%M"])
+
+
def decode_linux(line, match):
- perms, links, uid, gid, size, mtime, name = match.groups()
- is_link = perms.startswith("l")
- is_dir = perms.startswith("d") or is_link
+ ty, perms, links, uid, gid, size, mtime, name = match.groups()
+ is_link = ty == "l"
+ is_dir = ty == "d" or is_link
if is_link:
name, _, _link_name = name.partition("->")
name = name.strip()
_link_name = _link_name.strip()
- permissions = Permissions.parse(perms[1:])
+ permissions = Permissions.parse(perms)
- mtime_epoch = _parse_time(mtime, formats=["%b %d %Y", "%b %d %H:%M"])
+ mtime_epoch = _decode_linux_time(mtime)
name = unicodedata.normalize("NFC", name)
@@ -138,12 +142,39 @@ def decode_linux(line, match):
return raw_info
+def _decode_windowsnt_time(mtime):
+ return _parse_time(mtime, formats=["%d-%m-%y %I:%M%p", "%d-%m-%y %H:%M"])
+
+
def decode_windowsnt(line, match):
- """
- Decodes a Windows NT FTP LIST line like these two:
+ """Decode a Windows NT FTP LIST line.
+
+ Examples:
+ Decode a directory line::
+
+ >>> line = "11-02-18 02:12PM images"
+ >>> match = RE_WINDOWSNT.match(line)
+ >>> pprint(decode_windowsnt(line, match))
+ {'basic': {'is_dir': True, 'name': 'images'},
+ 'details': {'modified': 1518358320.0, 'type': 1},
+ 'ftp': {'ls': '11-02-18 02:12PM images'}}
+
+ Decode a file line::
+
+ >>> line = "11-02-18 03:33PM 9276 logo.gif"
+ >>> match = RE_WINDOWSNT.match(line)
+ >>> pprint(decode_windowsnt(line, match))
+ {'basic': {'is_dir': False, 'name': 'logo.gif'},
+ 'details': {'modified': 1518363180.0, 'size': 9276, 'type': 2},
+ 'ftp': {'ls': '11-02-18 03:33PM 9276 logo.gif'}}
+
+ Alternatively, the time might also be present in 24-hour format::
+
+ >>> line = "11-02-18 15:33 9276 logo.gif"
+ >>> match = RE_WINDOWSNT.match(line)
+ >>> decode_windowsnt(line, match)["details"]["modified"]
+ 1518363180.0
- `11-02-18 02:12PM images`
- `11-02-18 03:33PM 9276 logo.gif`
"""
is_dir = match.group("size") == ""
@@ -161,7 +192,9 @@ def decode_windowsnt(line, match):
if not is_dir:
raw_info["details"]["size"] = int(match.group("size"))
- modified = _parse_time(match.group("modified"), formats=["%d-%m-%y %I:%M%p"])
+ modified = _decode_windowsnt_time(
+ match.group("modified_date") + " " + match.group("modified_time")
+ )
if modified is not None:
raw_info["details"]["modified"] = modified
diff --git a/fs/_pathcompat.py b/fs/_pathcompat.py
new file mode 100644
index 00000000..3d628662
--- /dev/null
+++ b/fs/_pathcompat.py
@@ -0,0 +1,41 @@
+# mypy: ignore-errors
+try:
+ from os.path import commonpath
+except ImportError:
+ # Return the longest common sub-path of the sequence of paths given as input.
+ # The paths are not normalized before comparing them (this is the
+ # responsibility of the caller). Any trailing separator is stripped from the
+ # returned path.
+
+ def commonpath(paths):
+ """Given a sequence of path names, returns the longest common sub-path."""
+
+ if not paths:
+ raise ValueError("commonpath() arg is an empty sequence")
+
+ paths = tuple(paths)
+ if isinstance(paths[0], bytes):
+ sep = b"/"
+ curdir = b"."
+ else:
+ sep = "/"
+ curdir = "."
+
+ split_paths = [path.split(sep) for path in paths]
+
+ try:
+ (isabs,) = set(p[:1] == sep for p in paths)
+ except ValueError:
+ raise ValueError("Can't mix absolute and relative paths")
+
+ split_paths = [[c for c in s if c and c != curdir] for s in split_paths]
+ s1 = min(split_paths)
+ s2 = max(split_paths)
+ common = s1
+ for i, c in enumerate(s1):
+ if c != s2[i]:
+ common = s1[:i]
+ break
+
+ prefix = sep if isabs else sep[:0]
+ return prefix + sep.join(common)
diff --git a/fs/_repr.py b/fs/_repr.py
index af51c28a..d313b0a8 100644
--- a/fs/_repr.py
+++ b/fs/_repr.py
@@ -5,7 +5,7 @@
import typing
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import Text, Tuple
@@ -27,7 +27,7 @@ def make_repr(class_name, *args, **kwargs):
>>> MyClass('Will')
MyClass('foo', name='Will')
>>> MyClass(None)
- MyClass()
+ MyClass('foo')
"""
arguments = [repr(arg) for arg in args]
diff --git a/fs/_typing.py b/fs/_typing.py
index 47d6c9b1..0c80b8ef 100644
--- a/fs/_typing.py
+++ b/fs/_typing.py
@@ -3,8 +3,8 @@
"""
import sys
-import six
+import six
_PY = sys.version_info
@@ -12,7 +12,7 @@
if _PY.major == 3 and _PY.minor == 5 and _PY.micro in (0, 1):
- def overload(func): # pragma: no cover
+ def overload(func): # pragma: no cover # noqa: F811
return func
diff --git a/fs/_tzcompat.py b/fs/_tzcompat.py
new file mode 100644
index 00000000..135a7b32
--- /dev/null
+++ b/fs/_tzcompat.py
@@ -0,0 +1,29 @@
+"""Compatibility shim for python2's lack of datetime.timezone.
+
+This is the example code from the Python 2 documentation:
+https://docs.python.org/2.7/library/datetime.html#tzinfo-objects
+"""
+
+from datetime import timedelta, tzinfo
+
+ZERO = timedelta(0)
+
+
+class UTC(tzinfo):
+ """UTC"""
+
+ def utcoffset(self, dt):
+ return ZERO
+
+ def tzname(self, dt):
+ return "UTC"
+
+ def dst(self, dt):
+ return ZERO
+
+
+utc = UTC()
+
+
+class timezone:
+ utc = utc
diff --git a/fs/_url_tools.py b/fs/_url_tools.py
new file mode 100644
index 00000000..cfd76a7a
--- /dev/null
+++ b/fs/_url_tools.py
@@ -0,0 +1,55 @@
+import typing
+
+import platform
+import re
+import six
+
+if typing.TYPE_CHECKING:
+ from typing import Text
+
+_WINDOWS_PLATFORM = platform.system() == "Windows"
+
+
+def url_quote(path_snippet):
+ # type: (Text) -> Text
+ """Quote a URL without quoting the Windows drive letter, if any.
+
+ On Windows, it will separate drive letter and quote Windows
+ path alone. No magic on Unix-like path, just pythonic
+ `~urllib.request.pathname2url`.
+
+ Arguments:
+ path_snippet (str): a file path, relative or absolute.
+
+ """
+ if _WINDOWS_PLATFORM and _has_drive_letter(path_snippet):
+ drive_letter, path = path_snippet.split(":", 1)
+ if six.PY2:
+ path = path.encode("utf-8")
+ path = six.moves.urllib.request.pathname2url(path)
+ path_snippet = "{}:{}".format(drive_letter, path)
+ else:
+ if six.PY2:
+ path_snippet = path_snippet.encode("utf-8")
+ path_snippet = six.moves.urllib.request.pathname2url(path_snippet)
+ return path_snippet
+
+
+def _has_drive_letter(path_snippet):
+ # type: (Text) -> bool
+ """Check whether a path contains a drive letter.
+
+ Arguments:
+ path_snippet (str): a file path, relative or absolute.
+
+ Example:
+ >>> _has_drive_letter("D:/Data")
+ True
+ >>> _has_drive_letter(r"C:\\System32\\ test")
+ True
+ >>> _has_drive_letter("/tmp/abc:test")
+ False
+
+ """
+ windows_drive_pattern = ".:[/\\\\].*$"
+ return re.match(windows_drive_pattern, path_snippet) is not None
diff --git a/fs/_version.py b/fs/_version.py
index 2d80d12d..c1ab7f71 100644
--- a/fs/_version.py
+++ b/fs/_version.py
@@ -1,3 +1,3 @@
"""Version, used in module and setup.py.
"""
-__version__ = "2.4.8"
+__version__ = "2.4.16"
diff --git a/fs/appfs.py b/fs/appfs.py
index 0657faf5..2fd45687 100644
--- a/fs/appfs.py
+++ b/fs/appfs.py
@@ -11,11 +11,14 @@
import typing
-from .osfs import OSFS
-from ._repr import make_repr
+import abc
+import six
from appdirs import AppDirs
-if False: # typing.TYPE_CHECKING
+from ._repr import make_repr
+from .osfs import OSFS
+
+if typing.TYPE_CHECKING:
from typing import Optional, Text
@@ -29,10 +32,25 @@
]
-class _AppFS(OSFS):
- """Abstract base class for an app FS.
+class _CopyInitMeta(abc.ABCMeta):
+ """A metaclass that performs a hard copy of the `__init__`.
+
+ This is a fix for Sphinx, which is a pain to configure in a way that
+ it documents the ``__init__`` method of a class when it is inherited.
+ Copying ``__init__`` makes it think it is not inherited, and let us
+ share the documentation between all the `_AppFS` subclasses.
+
"""
+ def __new__(mcls, classname, bases, cls_dict):
+ cls_dict.setdefault("__init__", bases[0].__init__)
+ return super(abc.ABCMeta, mcls).__new__(mcls, classname, bases, cls_dict)
+
+
+@six.add_metaclass(_CopyInitMeta)
+class _AppFS(OSFS):
+ """Abstract base class for an app FS."""
+
# FIXME(@althonos): replace by ClassVar[Text] once
# https://github.com/python/mypy/pull/4718 is accepted
# (subclass override will raise errors until then)
@@ -47,6 +65,19 @@ def __init__(
create=True, # type: bool
):
# type: (...) -> None
+ """Create a new application-specific filesystem.
+
+ Arguments:
+ appname (str): The name of the application.
+ author (str): The name of the author (used on Windows).
+ version (str): Optional version string, if a unique location
+ per version of the application is required.
+ roaming (bool): If `True`, use a *roaming* profile on
+ Windows.
+ create (bool): If `True` (the default) the directory
+ will be created if it does not exist.
+
+ """
self.app_dirs = AppDirs(appname, author, version, roaming)
self._create = create
super(_AppFS, self).__init__(
@@ -77,16 +108,6 @@ class UserDataFS(_AppFS):
May also be opened with
``open_fs('userdata://appname:author:version')``.
- Arguments:
- appname (str): The name of the application.
- author (str): The name of the author (used on Windows).
- version (str): Optional version string, if a unique location
- per version of the application is required.
- roaming (bool): If `True`, use a *roaming* profile on
- Windows.
- create (bool): If `True` (the default) the directory
- will be created if it does not exist.
-
"""
app_dir = "user_data_dir"
@@ -98,16 +119,6 @@ class UserConfigFS(_AppFS):
May also be opened with
``open_fs('userconf://appname:author:version')``.
- Arguments:
- appname (str): The name of the application.
- author (str): The name of the author (used on Windows).
- version (str): Optional version string, if a unique location
- per version of the application is required.
- roaming (bool): If `True`, use a *roaming* profile on
- Windows.
- create (bool): If `True` (the default) the directory
- will be created if it does not exist.
-
"""
app_dir = "user_config_dir"
@@ -119,16 +130,6 @@ class UserCacheFS(_AppFS):
May also be opened with
``open_fs('usercache://appname:author:version')``.
- Arguments:
- appname (str): The name of the application.
- author (str): The name of the author (used on Windows).
- version (str): Optional version string, if a unique location
- per version of the application is required.
- roaming (bool): If `True`, use a *roaming* profile on
- Windows.
- create (bool): If `True` (the default) the directory
- will be created if it does not exist.
-
"""
app_dir = "user_cache_dir"
@@ -140,16 +141,6 @@ class SiteDataFS(_AppFS):
May also be opened with
``open_fs('sitedata://appname:author:version')``.
- Arguments:
- appname (str): The name of the application.
- author (str): The name of the author (used on Windows).
- version (str): Optional version string, if a unique location
- per version of the application is required.
- roaming (bool): If `True`, use a *roaming* profile on
- Windows.
- create (bool): If `True` (the default) the directory
- will be created if it does not exist.
-
"""
app_dir = "site_data_dir"
@@ -161,16 +152,6 @@ class SiteConfigFS(_AppFS):
May also be opened with
``open_fs('siteconf://appname:author:version')``.
- Arguments:
- appname (str): The name of the application.
- author (str): The name of the author (used on Windows).
- version (str): Optional version string, if a unique location
- per version of the application is required.
- roaming (bool): If `True`, use a *roaming* profile on
- Windows.
- create (bool): If `True` (the default) the directory
- will be created if it does not exist.
-
"""
app_dir = "site_config_dir"
@@ -182,16 +163,6 @@ class UserLogFS(_AppFS):
May also be opened with
``open_fs('userlog://appname:author:version')``.
- Arguments:
- appname (str): The name of the application.
- author (str): The name of the author (used on Windows).
- version (str): Optional version string, if a unique location
- per version of the application is required.
- roaming (bool): If `True`, use a *roaming* profile on
- Windows.
- create (bool): If `True` (the default) the directory
- will be created if it does not exist.
-
"""
app_dir = "user_log_dir"
diff --git a/fs/base.py b/fs/base.py
index 297d1587..d42997d4 100644
--- a/fs/base.py
+++ b/fs/base.py
@@ -8,36 +8,35 @@
from __future__ import absolute_import, print_function, unicode_literals
+import typing
+
import abc
import hashlib
import itertools
import os
+import six
import threading
import time
-import typing
+import warnings
from contextlib import closing
from functools import partial, wraps
-import warnings
-
-import six
-from . import copy, errors, fsencode, iotools, move, tools, walk, wildcard
+from . import copy, errors, fsencode, glob, iotools, tools, walk, wildcard
+from .copy import copy_modified_time
from .glob import BoundGlobber
from .mode import validate_open_mode
-from .path import abspath, join, normpath
+from .path import abspath, isbase, join, normpath
from .time import datetime_to_epoch
from .walk import Walker
-if False: # typing.TYPE_CHECKING
- from datetime import datetime
- from threading import RLock
+if typing.TYPE_CHECKING:
from typing import (
+ IO,
Any,
BinaryIO,
Callable,
Collection,
Dict,
- IO,
Iterable,
Iterator,
List,
@@ -48,11 +47,15 @@
Type,
Union,
)
+
+ from datetime import datetime
+ from threading import RLock
from types import TracebackType
+
from .enums import ResourceType
from .info import Info, RawInfo
- from .subfs import SubFS
from .permissions import Permissions
+ from .subfs import SubFS
from .walk import BoundWalker
_F = typing.TypeVar("_F", bound="FS")
@@ -84,7 +87,7 @@ def _method(*args, **kwargs):
""".format(
method.__name__
)
- if getattr(_method, "__doc__"):
+ if getattr(_method, "__doc__", None) is not None:
_method.__doc__ += deprecated_msg
return _method
@@ -92,8 +95,7 @@ def _method(*args, **kwargs):
@six.add_metaclass(abc.ABCMeta)
class FS(object):
- """Base class for FS objects.
- """
+ """Base class for FS objects."""
# This is the "standard" meta namespace.
_meta = {} # type: Dict[Text, Union[Text, int, bool, None]]
@@ -106,8 +108,7 @@ class FS(object):
def __init__(self):
# type: (...) -> None
- """Create a filesystem. See help(type(self)) for accurate signature.
- """
+ """Create a filesystem. See help(type(self)) for accurate signature."""
self._closed = False
self._lock = threading.RLock()
super(FS, self).__init__()
@@ -118,8 +119,7 @@ def __del__(self):
def __enter__(self):
# type: (...) -> FS
- """Allow use of filesystem as a context manager.
- """
+ """Allow use of filesystem as a context manager."""
return self
def __exit__(
@@ -129,21 +129,18 @@ def __exit__(
traceback, # type: Optional[TracebackType]
):
# type: (...) -> None
- """Close filesystem on exit.
- """
+ """Close filesystem on exit."""
self.close()
@property
def glob(self):
- """`~fs.glob.BoundGlobber`: a globber object..
- """
+ """`~fs.glob.BoundGlobber`: a globber object."""
return BoundGlobber(self)
@property
def walk(self):
# type: (_F) -> BoundWalker[_F]
- """`~fs.walk.BoundWalker`: a walker bound to this filesystem.
- """
+ """`~fs.walk.BoundWalker`: a walker bound to this filesystem."""
return self.walker_class.bind(self)
# ---------------------------------------------------------------- #
@@ -158,12 +155,16 @@ def getinfo(self, path, namespaces=None):
Arguments:
path (str): A path to a resource on the filesystem.
- namespaces (list, optional): Info namespaces to query
- (defaults to *[basic]*).
+ namespaces (list, optional): Info namespaces to query. The
+ `"basic"` namespace is always included in the returned
+ info, whatever the value of `namespaces` may be.
Returns:
~fs.info.Info: resource information object.
+ Raises:
+ fs.errors.ResourceNotFound: If ``path`` does not exist.
+
For more information regarding resource information, see :ref:`info`.
"""
@@ -175,7 +176,7 @@ def listdir(self, path):
This method will return a list of the resources in a directory.
A *resource* is a file, directory, or one of the other types
- defined in `~fs.ResourceType`.
+ defined in `~fs.enums.ResourceType`.
Arguments:
path (str): A path to a directory on the filesystem
@@ -241,10 +242,12 @@ def openbin(
io.IOBase: a *file-like* object.
Raises:
- fs.errors.FileExpected: If the path is not a file.
- fs.errors.FileExists: If the file exists, and *exclusive mode*
- is specified (``x`` in the mode).
- fs.errors.ResourceNotFound: If the path does not exist.
+ fs.errors.FileExpected: If ``path`` exists and is not a file.
+ fs.errors.FileExists: If the ``path`` exists, and
+ *exclusive mode* is specified (``x`` in the mode).
+ fs.errors.ResourceNotFound: If ``path`` does not exist and
+ ``mode`` does not imply creating the file, or if any
+ ancestor of ``path`` does not exist.
"""
@@ -273,7 +276,7 @@ def removedir(self, path):
Raises:
fs.errors.DirectoryNotEmpty: If the directory is not empty (
see `~fs.base.FS.removetree` for a way to remove the
- directory contents.).
+ directory contents).
fs.errors.DirectoryExpected: If the path does not refer to
a directory.
fs.errors.ResourceNotFound: If no resource exists at the
@@ -393,8 +396,14 @@ def close(self):
"""
self._closed = True
- def copy(self, src_path, dst_path, overwrite=False):
- # type: (Text, Text, bool) -> None
+ def copy(
+ self,
+ src_path, # type: Text
+ dst_path, # type: Text
+ overwrite=False, # type: bool
+ preserve_time=False, # type: bool
+ ):
+ # type: (...) -> None
"""Copy file contents from ``src_path`` to ``dst_path``.
Arguments:
@@ -402,23 +411,38 @@ def copy(self, src_path, dst_path, overwrite=False):
dst_path (str): Path to destination file.
overwrite (bool): If `True`, overwrite the destination file
if it exists (defaults to `False`).
+ preserve_time (bool): If `True`, try to preserve mtime of the
+ resource (defaults to `False`).
Raises:
fs.errors.DestinationExists: If ``dst_path`` exists,
and ``overwrite`` is `False`.
fs.errors.ResourceNotFound: If a parent directory of
``dst_path`` does not exist.
+ fs.errors.FileExpected: If ``src_path`` is not a file.
"""
with self._lock:
- if not overwrite and self.exists(dst_path):
+ _src_path = self.validatepath(src_path)
+ _dst_path = self.validatepath(dst_path)
+ if not overwrite and self.exists(_dst_path):
raise errors.DestinationExists(dst_path)
- with closing(self.open(src_path, "rb")) as read_file:
+ if _src_path == _dst_path:
+ raise errors.IllegalDestination(dst_path)
+ with closing(self.open(_src_path, "rb")) as read_file:
# FIXME(@althonos): typing complains because open return IO
- self.upload(dst_path, read_file) # type: ignore
+ self.upload(_dst_path, read_file) # type: ignore
+ if preserve_time:
+ copy_modified_time(self, _src_path, self, _dst_path)
- def copydir(self, src_path, dst_path, create=False):
- # type: (Text, Text, bool) -> None
+ def copydir(
+ self,
+ src_path, # type: Text
+ dst_path, # type: Text
+ create=False, # type: bool
+ preserve_time=False, # type: bool
+ ):
+ # type: (...) -> None
"""Copy the contents of ``src_path`` to ``dst_path``.
Arguments:
@@ -426,18 +450,26 @@ def copydir(self, src_path, dst_path, create=False):
dst_path (str): Path to destination directory.
create (bool): If `True`, then ``dst_path`` will be created
if it doesn't exist already (defaults to `False`).
+ preserve_time (bool): If `True`, try to preserve mtime of the
+ resource (defaults to `False`).
Raises:
fs.errors.ResourceNotFound: If the ``dst_path``
does not exist, and ``create`` is not `True`.
+ fs.errors.DirectoryExpected: If ``src_path`` is not a
+ directory.
"""
with self._lock:
- if not create and not self.exists(dst_path):
+ _src_path = self.validatepath(src_path)
+ _dst_path = self.validatepath(dst_path)
+ if isbase(_src_path, _dst_path):
+ raise errors.IllegalDestination(dst_path)
+ if not create and not self.exists(_dst_path):
raise errors.ResourceNotFound(dst_path)
- if not self.getinfo(src_path).is_dir:
+ if not self.getinfo(_src_path).is_dir:
raise errors.DirectoryExpected(src_path)
- copy.copy_dir(self, src_path, self, dst_path)
+ copy.copy_dir(self, _src_path, self, _dst_path, preserve_time=preserve_time)
def create(self, path, wipe=False):
# type: (Text, bool) -> bool
@@ -473,6 +505,9 @@ def desc(self, path):
Returns:
str: a short description of the path.
+ Raises:
+ fs.errors.ResourceNotFound: If ``path`` does not exist.
+
"""
if not self.exists(path):
raise errors.ResourceNotFound(path)
@@ -544,26 +579,22 @@ def filterdir(
def match_dir(patterns, info):
# type: (Optional[Iterable[Text]], Info) -> bool
- """Pattern match info.name.
- """
+ """Pattern match info.name."""
return info.is_file or self.match(patterns, info.name)
def match_file(patterns, info):
# type: (Optional[Iterable[Text]], Info) -> bool
- """Pattern match info.name.
- """
+ """Pattern match info.name."""
return info.is_dir or self.match(patterns, info.name)
def exclude_dir(patterns, info):
# type: (Optional[Iterable[Text]], Info) -> bool
- """Pattern match info.name.
- """
+ """Pattern match info.name."""
return info.is_file or not self.match(patterns, info.name)
def exclude_file(patterns, info):
# type: (Optional[Iterable[Text]], Info) -> bool
- """Pattern match info.name.
- """
+ """Pattern match info.name."""
return info.is_dir or not self.match(patterns, info.name)
if files:
@@ -597,6 +628,7 @@ def readbytes(self, path):
bytes: the file contents.
Raises:
+ fs.errors.FileExpected: if ``path`` exists but is not a file.
fs.errors.ResourceNotFound: if ``path`` does not exist.
"""
@@ -608,11 +640,15 @@ def readbytes(self, path):
def download(self, path, file, chunk_size=None, **options):
# type: (Text, BinaryIO, Optional[int], **Any) -> None
- """Copies a file from the filesystem to a file-like object.
+ """Copy a file from the filesystem to a file-like object.
This may be more efficient that opening and copying files
manually if the filesystem supplies an optimized method.
+ Note that the file object ``file`` will *not* be closed by this
+ method. Take care to close it after this method completes
+ (ideally with a context manager).
+
Arguments:
path (str): Path to a resource.
file (file-like): A file-like object open for writing in
@@ -623,13 +659,12 @@ def download(self, path, file, chunk_size=None, **options):
**options: Implementation specific options required to open
the source file.
- Note that the file object ``file`` will *not* be closed by this
- method. Take care to close it after this method completes
- (ideally with a context manager).
-
Example:
>>> with open('starwars.mov', 'wb') as write_file:
- ... my_fs.download('/movies/starwars.mov', write_file)
+ ... my_fs.download('/Videos/starwars.mov', write_file)
+
+ Raises:
+ fs.errors.ResourceNotFound: if ``path`` does not exist.
"""
with self._lock:
@@ -672,6 +707,23 @@ def readtext(
gettext = _new_name(readtext, "gettext")
+ def getmodified(self, path):
+ # type: (Text) -> Optional[datetime]
+ """Get the timestamp of the last modifying access of a resource.
+
+ Arguments:
+ path (str): A path to a resource.
+
+ Returns:
+ datetime: The timestamp of the last modification.
+
+ The *modified timestamp* of a file is the point in time
+ that the file was last changed. Depending on the file system,
+ it might only have limited accuracy.
+
+ """
+ return self.getinfo(path, namespaces=["details"]).modified
+
def getmeta(self, namespace="standard"):
# type: (Text) -> Mapping[Text, object]
"""Get meta information regarding a filesystem.
@@ -736,6 +788,9 @@ def getsize(self, path):
Returns:
int: the *size* of the resource.
+ Raises:
+ fs.errors.ResourceNotFound: if ``path`` does not exist.
+
The *size* of a file is the total number of readable bytes,
which may not reflect the exact number of bytes of reserved
disk space (or other storage medium).
@@ -751,7 +806,7 @@ def getsyspath(self, path):
# type: (Text) -> Text
"""Get the *system path* of a resource.
- Parameters:
+ Arguments:
path (str): A path on the filesystem.
Returns:
@@ -787,10 +842,9 @@ def getsyspath(self, path):
def getospath(self, path):
# type: (Text) -> bytes
- """Get a *system path* to a resource, encoded in the operating
- system's prefered encoding.
+ """Get the *system path* to a resource, in the OS' preferred encoding.
- Parameters:
+ Arguments:
path (str): A path on the filesystem.
Returns:
@@ -807,7 +861,7 @@ def getospath(self, path):
Note:
If you want your code to work in Python2.7 and Python3 then
- use this method if you want to work will the OS filesystem
+ use this method if you want to work with the OS filesystem
outside of the OSFS interface.
"""
@@ -819,15 +873,18 @@ def gettype(self, path):
# type: (Text) -> ResourceType
"""Get the type of a resource.
- Parameters:
+ Arguments:
path (str): A path on the filesystem.
Returns:
- ~fs.ResourceType: the type of the resource.
+ ~fs.enums.ResourceType: the type of the resource.
+
+ Raises:
+ fs.errors.ResourceNotFound: if ``path`` does not exist.
A type of a resource is an integer that identifies the what
the resource references. The standard type integers may be one
- of the values in the `~fs.ResourceType` enumerations.
+ of the values in the `~fs.enums.ResourceType` enumerations.
The most common resource types, supported by virtually all
filesystems are ``directory`` (1) and ``file`` (2), but the
@@ -857,13 +914,14 @@ def geturl(self, path, purpose="download"):
# type: (Text, Text) -> Text
"""Get the URL to a given resource.
- Parameters:
+ Arguments:
path (str): A path on the filesystem
purpose (str): A short string that indicates which URL
to retrieve for the given path (if there is more than
one). The default is ``'download'``, which should return
a URL that serves the file. Other filesystems may support
- other values for ``purpose``.
+ other values for ``purpose``: for instance, `OSFS` supports
+ ``'fs'``, which returns a FS URL (see :ref:`fs-urls`).
Returns:
str: a URL.
@@ -878,7 +936,7 @@ def hassyspath(self, path):
# type: (Text) -> bool
"""Check if a path maps to a system path.
- Parameters:
+ Arguments:
path (str): A path on the filesystem.
Returns:
@@ -896,7 +954,7 @@ def hasurl(self, path, purpose="download"):
# type: (Text, Text) -> bool
"""Check if a path has a corresponding URL.
- Parameters:
+ Arguments:
path (str): A path on the filesystem.
purpose (str): A purpose parameter, as given in
`~fs.base.FS.geturl`.
@@ -914,15 +972,14 @@ def hasurl(self, path, purpose="download"):
def isclosed(self):
# type: () -> bool
- """Check if the filesystem is closed.
- """
+ """Check if the filesystem is closed."""
return getattr(self, "_closed", False)
def isdir(self, path):
# type: (Text) -> bool
"""Check if a path maps to an existing directory.
- Parameters:
+ Arguments:
path (str): A path on the filesystem.
Returns:
@@ -941,7 +998,7 @@ def isempty(self, path):
A directory is considered empty when it does not contain
any file or any directory.
- Parameters:
+ Arguments:
path (str): A path to a directory on the filesystem.
Returns:
@@ -958,7 +1015,7 @@ def isfile(self, path):
# type: (Text) -> bool
"""Check if a path maps to an existing file.
- Parameters:
+ Arguments:
path (str): A path on the filesystem.
Returns:
@@ -974,7 +1031,7 @@ def islink(self, path):
# type: (Text) -> bool
"""Check if a path maps to a symlink.
- Parameters:
+ Arguments:
path (str): A path on the filesystem.
Returns:
@@ -998,6 +1055,7 @@ def lock(self):
Example:
>>> with my_fs.lock(): # May block
... # code here has exclusive access to the filesystem
+ ... pass
It is a good idea to put a lock around any operations that you
would like to be *atomic*. For instance if you are copying
@@ -1016,25 +1074,37 @@ def lock(self):
"""
return self._lock
- def movedir(self, src_path, dst_path, create=False):
- # type: (Text, Text, bool) -> None
+ def movedir(self, src_path, dst_path, create=False, preserve_time=False):
+ # type: (Text, Text, bool, bool) -> None
"""Move directory ``src_path`` to ``dst_path``.
- Parameters:
+ Arguments:
src_path (str): Path of source directory on the filesystem.
dst_path (str): Path to destination directory.
create (bool): If `True`, then ``dst_path`` will be created
if it doesn't exist already (defaults to `False`).
+ preserve_time (bool): If `True`, try to preserve mtime of the
+ resources (defaults to `False`).
Raises:
fs.errors.ResourceNotFound: if ``dst_path`` does not exist,
and ``create`` is `False`.
+ fs.errors.DirectoryExpected: if ``src_path`` or one of its
+ ancestors is not a directory.
"""
+ from .move import move_dir
+
with self._lock:
+ _src_path = self.validatepath(src_path)
+ _dst_path = self.validatepath(dst_path)
+ if _src_path == _dst_path:
+ return
+ if isbase(_src_path, _dst_path):
+ raise errors.IllegalDestination(dst_path)
if not create and not self.exists(dst_path):
raise errors.ResourceNotFound(dst_path)
- move.move_dir(self, src_path, self, dst_path)
+ move_dir(self, src_path, self, dst_path, preserve_time=preserve_time)
def makedirs(
self,
@@ -1079,8 +1149,8 @@ def makedirs(
raise
return self.opendir(path)
- def move(self, src_path, dst_path, overwrite=False):
- # type: (Text, Text, bool) -> None
+ def move(self, src_path, dst_path, overwrite=False, preserve_time=False):
+ # type: (Text, Text, bool, bool) -> None
"""Move a file from ``src_path`` to ``dst_path``.
Arguments:
@@ -1089,6 +1159,8 @@ def move(self, src_path, dst_path, overwrite=False):
file will be written to.
overwrite (bool): If `True`, destination path will be
overwritten if it exists.
+ preserve_time (bool): If `True`, try to preserve mtime of the
+ resources (defaults to `False`).
Raises:
fs.errors.FileExpected: If ``src_path`` maps to a
@@ -1099,14 +1171,19 @@ def move(self, src_path, dst_path, overwrite=False):
``dst_path`` does not exist.
"""
- if not overwrite and self.exists(dst_path):
+ _src_path = self.validatepath(src_path)
+ _dst_path = self.validatepath(dst_path)
+ if not overwrite and self.exists(_dst_path):
raise errors.DestinationExists(dst_path)
- if self.getinfo(src_path).is_dir:
+ if self.getinfo(_src_path).is_dir:
raise errors.FileExpected(src_path)
+ if _src_path == _dst_path:
+ # early exit when moving a file onto itself
+ return
if self.getmeta().get("supports_rename", False):
try:
- src_sys_path = self.getsyspath(src_path)
- dst_sys_path = self.getsyspath(dst_path)
+ src_sys_path = self.getsyspath(_src_path)
+ dst_sys_path = self.getsyspath(_dst_path)
except errors.NoSysPath: # pragma: no cover
pass
else:
@@ -1115,12 +1192,16 @@ def move(self, src_path, dst_path, overwrite=False):
except OSError:
pass
else:
+ if preserve_time:
+ copy_modified_time(self, _src_path, self, _dst_path)
return
with self._lock:
- with self.open(src_path, "rb") as read_file:
+ with self.open(_src_path, "rb") as read_file:
# FIXME(@althonos): typing complains because open return IO
- self.upload(dst_path, read_file) # type: ignore
- self.remove(src_path)
+ self.upload(_dst_path, read_file) # type: ignore
+ if preserve_time:
+ copy_modified_time(self, _src_path, self, _dst_path)
+ self.remove(_src_path)
def open(
self,
@@ -1196,28 +1277,56 @@ def opendir(
~fs.subfs.SubFS: A filesystem representing a sub-directory.
Raises:
- fs.errors.DirectoryExpected: If ``dst_path`` does not
- exist or is not a directory.
+ fs.errors.ResourceNotFound: If ``path`` does not exist.
+ fs.errors.DirectoryExpected: If ``path`` is not a directory.
"""
from .subfs import SubFS
_factory = factory or self.subfs_class or SubFS
- if not self.getbasic(path).is_dir:
+ if not self.getinfo(path).is_dir:
raise errors.DirectoryExpected(path=path)
return _factory(self, path)
def removetree(self, dir_path):
# type: (Text) -> None
- """Recursively remove the contents of a directory.
+ """Recursively remove a directory and all its contents.
- This method is similar to `~fs.base.removedir`, but will
+ This method is similar to `~fs.base.FS.removedir`, but will
remove the contents of the directory if it is not empty.
Arguments:
dir_path (str): Path to a directory on the filesystem.
+ Raises:
+ fs.errors.ResourceNotFound: If ``dir_path`` does not exist.
+ fs.errors.DirectoryExpected: If ``dir_path`` is not a directory.
+
+ Caution:
+ A filesystem should never delete its root folder, so
+ ``FS.removetree("/")`` has different semantics: the
+ contents of the root folder will be deleted, but the
+ root will be untouched::
+
+ >>> home_fs = fs.open_fs("~")
+ >>> home_fs.removetree("/")
+ >>> home_fs.exists("/")
+ True
+ >>> home_fs.isempty("/")
+ True
+
+ Combined with `~fs.base.FS.opendir`, this can be used
+ to clear a directory without removing the directory
+ itself::
+
+ >>> home_fs = fs.open_fs("~")
+ >>> home_fs.opendir("/Videos").removetree("/")
+ >>> home_fs.exists("/Videos")
+ True
+ >>> home_fs.isempty("/Videos")
+ True
+
"""
_dir_path = abspath(normpath(dir_path))
with self._lock:
@@ -1309,6 +1418,10 @@ def upload(self, path, file, chunk_size=None, **options):
**options: Implementation specific options required to open
the source file.
+ Raises:
+ fs.errors.ResourceNotFound: If a parent directory of
+ ``path`` does not exist.
+
Note that the file object ``file`` will *not* be closed by this
method. Take care to close it after this method completes
(ideally with a context manager).
@@ -1409,7 +1522,7 @@ def writetext(
path (str): Destination path on the filesystem.
contents (str): Text to be written.
encoding (str, optional): Encoding of destination file
- (defaults to ``'ut-8'``).
+ (defaults to ``'utf-8'``).
errors (str, optional): How encoding errors should be treated
(same as `io.open`).
newline (str): Newline parameter (same as `io.open`).
@@ -1450,8 +1563,7 @@ def touch(self, path):
def validatepath(self, path):
# type: (Text) -> Text
- """Check if a path is valid, returning a normalized absolute
- path.
+ """Validate a path, returning a normalized absolute path on success.
Many filesystems have restrictions on the format of paths they
support. This method will check that ``path`` is valid on the
@@ -1465,11 +1577,10 @@ def validatepath(self, path):
str: A normalized, absolute path.
Raises:
+ fs.errors.InvalidPath: If the path is invalid.
+ fs.errors.FilesystemClosed: if the filesystem is closed.
fs.errors.InvalidCharsInPath: If the path contains
invalid characters.
- fs.errors.InvalidPath: If the path is invalid.
- fs.errors.FilesystemClosed: if the filesystem
- is closed.
"""
self.check()
@@ -1521,7 +1632,16 @@ def getbasic(self, path):
Returns:
~fs.info.Info: Resource information object for ``path``.
+ Note:
+ .. deprecated:: 2.4.13
+ Please use `~FS.getinfo` directly, which is
+ required to always return the *basic* namespace.
+
"""
+ warnings.warn(
+ "method 'getbasic' has been deprecated, please use 'getinfo'",
+ DeprecationWarning,
+ )
return self.getinfo(path, namespaces=["basic"])
def getdetails(self, path):
@@ -1556,23 +1676,81 @@ def match(self, patterns, name):
# type: (Optional[Iterable[Text]], Text) -> bool
"""Check if a name matches any of a list of wildcards.
+ If a filesystem is case *insensitive* (such as Windows) then
+ this method will perform a case insensitive match (i.e. ``*.py``
+ will match the same names as ``*.PY``). Otherwise the match will
+ be case sensitive (``*.py`` and ``*.PY`` will match different
+ names).
+
Arguments:
- patterns (list): A list of patterns, e.g. ``['*.py']``
+ patterns (list, optional): A list of patterns, e.g.
+ ``['*.py']``, or `None` to match everything.
name (str): A file or directory name (not a path)
Returns:
bool: `True` if ``name`` matches any of the patterns.
+ Raises:
+ TypeError: If ``patterns`` is a single string instead of
+ a list (or `None`).
+
+ Example:
+ >>> my_fs.match(['*.py'], '__init__.py')
+ True
+ >>> my_fs.match(['*.jpg', '*.png'], 'foo.gif')
+ False
+
+ Note:
+ If ``patterns`` is `None` (or ``['*']``), then this
+ method will always return `True`.
+
+ """
+ if patterns is None:
+ return True
+ if isinstance(patterns, six.text_type):
+ raise TypeError("patterns must be a list or sequence")
+ case_sensitive = not typing.cast(
+ bool, self.getmeta().get("case_insensitive", False)
+ )
+ matcher = wildcard.get_matcher(patterns, case_sensitive)
+ return matcher(name)
+
+ def match_glob(self, patterns, path, accept_prefix=False):
+ # type: (Optional[Iterable[Text]], Text, bool) -> bool
+ """Check if a path matches any of a list of glob patterns.
+
If a filesystem is case *insensitive* (such as Windows) then
this method will perform a case insensitive match (i.e. ``*.py``
will match the same names as ``*.PY``). Otherwise the match will
be case sensitive (``*.py`` and ``*.PY`` will match different
names).
+ Arguments:
+ patterns (list, optional): A list of patterns, e.g.
+ ``['*.py']``, or `None` to match everything.
+ path (str): A resource path, starting with "/".
+ accept_prefix (bool): If ``True``, the path is
+ not required to match the patterns themselves
+ but only needs to be a prefix of a string that does.
+
+ Returns:
+ bool: `True` if ``path`` matches any of the patterns.
+
+ Raises:
+ TypeError: If ``patterns`` is a single string instead of
+ a list (or `None`).
+ ValueError: If ``path`` is not a string starting with "/".
+
Example:
- >>> home_fs.match(['*.py'], '__init__.py')
+ >>> my_fs.match_glob(['*.py'], '/__init__.py')
+ True
+ >>> my_fs.match_glob(['*.jpg', '*.png'], '/foo.gif')
+ False
+ >>> my_fs.match_glob(['dir/file.txt'], '/dir/', accept_prefix=True)
True
- >>> home_fs.match(['*.jpg', '*.png'], 'foo.gif')
+ >>> my_fs.match_glob(['dir/file.txt'], '/dir/', accept_prefix=False)
+ False
+ >>> my_fs.match_glob(['dir/file.txt'], '/dir/gile.txt', accept_prefix=True)
False
Note:
@@ -1582,13 +1760,17 @@ def match(self, patterns, name):
"""
if patterns is None:
return True
+ if not path or path[0] != "/":
+ raise ValueError("%s needs to be a string starting with /" % path)
if isinstance(patterns, six.text_type):
raise TypeError("patterns must be a list or sequence")
case_sensitive = not typing.cast(
bool, self.getmeta().get("case_insensitive", False)
)
- matcher = wildcard.get_matcher(patterns, case_sensitive)
- return matcher(name)
+ matcher = glob.get_matcher(
+ patterns, case_sensitive, accept_prefix=accept_prefix
+ )
+ return matcher(path)
def tree(self, **kwargs):
# type: (**Any) -> None
@@ -1624,16 +1806,20 @@ def hash(self, path, name):
Arguments:
path(str): A path on the filesystem.
- name(str): One of the algorithms supported by the hashlib module, e.g. `"md5"`
+ name(str):
+ One of the algorithms supported by the `hashlib` module,
+ e.g. `"md5"` or `"sha256"`.
Returns:
str: The hex digest of the hash.
Raises:
fs.errors.UnsupportedHash: If the requested hash is not supported.
+ fs.errors.ResourceNotFound: If ``path`` does not exist.
+ fs.errors.FileExpected: If ``path`` exists but is not a file.
"""
- _path = self.validatepath(path)
+ self.validatepath(path)
try:
hash_object = hashlib.new(name)
except ValueError:
diff --git a/fs/compress.py b/fs/compress.py
index cf0f130a..a1b2e346 100644
--- a/fs/compress.py
+++ b/fs/compress.py
@@ -4,26 +4,25 @@
`tarfile` modules from the standard library.
"""
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import absolute_import, print_function, unicode_literals
-import time
-import tarfile
import typing
-import zipfile
-from datetime import datetime
import six
+import tarfile
+import time
+import zipfile
+from datetime import datetime
from .enums import ResourceType
+from .errors import MissingInfoNamespace, NoSysPath
from .path import relpath
from .time import datetime_to_epoch
-from .errors import NoSysPath, MissingInfoNamespace
from .walk import Walker
-if False: # typing.TYPE_CHECKING
- from typing import BinaryIO, Optional, Text, Tuple, Type, Union
+if typing.TYPE_CHECKING:
+ from typing import BinaryIO, Optional, Text, Tuple, Union
+
from .base import FS
ZipTime = Tuple[int, int, int, int, int, int]
@@ -46,9 +45,9 @@ def write_zip(
compression (int): Compression to use (one of the constants
defined in the `zipfile` module in the stdlib). Defaults
to `zipfile.ZIP_DEFLATED`.
- encoding (str):
- The encoding to use for filenames. The default is ``"utf-8"``,
- use ``"CP437"`` if compatibility with WinZip is desired.
+ encoding (str): The encoding to use for filenames. The default
+ is ``"utf-8"``, use ``"CP437"`` if compatibility with WinZip
+ is desired.
walker (~fs.walk.Walker, optional): A `Walker` instance, or `None`
to use default walker. You can use this to specify which files
you want to compress.
@@ -116,6 +115,7 @@ def write_tar(
"""Write the contents of a filesystem to a tar file.
Arguments:
+ src_fs (~fs.base.FS): The source filesystem to compress.
file (str or io.IOBase): Destination file, may be a file
name or an open file object.
compression (str, optional): Compression to use, or `None`
diff --git a/fs/constants.py b/fs/constants.py
index b9902a37..c12a9256 100644
--- a/fs/constants.py
+++ b/fs/constants.py
@@ -3,7 +3,6 @@
import io
-
DEFAULT_CHUNK_SIZE = io.DEFAULT_BUFFER_SIZE * 16
"""`int`: the size of a single chunk read from or written to a file.
"""
diff --git a/fs/copy.py b/fs/copy.py
index 9b171d32..154fe715 100644
--- a/fs/copy.py
+++ b/fs/copy.py
@@ -5,16 +5,18 @@
import typing
-from .errors import FSError
+import warnings
+
+from .errors import IllegalDestination, ResourceNotFound
from .opener import manage_fs
-from .path import abspath, combine, frombase, normpath
+from .path import abspath, combine, frombase, isbase, normpath
from .tools import is_thread_safe
from .walk import Walker
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import Callable, Optional, Text, Union
+
from .base import FS
- from .walk import Walker
_OnCopy = Callable[[FS, Text, FS, Text], object]
@@ -25,6 +27,7 @@ def copy_fs(
walker=None, # type: Optional[Walker]
on_copy=None, # type: Optional[_OnCopy]
workers=0, # type: int
+ preserve_time=False, # type: bool
):
# type: (...) -> None
"""Copy the contents of one filesystem to another.
@@ -40,10 +43,12 @@ def copy_fs(
dst_path)``.
workers (int): Use `worker` threads to copy data, or ``0`` (default) for
a single-threaded copy.
+ preserve_time (bool): If `True`, try to preserve mtime of the
+ resources (defaults to `False`).
"""
- return copy_dir(
- src_fs, "/", dst_fs, "/", walker=walker, on_copy=on_copy, workers=workers
+ return copy_fs_if(
+ src_fs, dst_fs, "always", walker, on_copy, workers, preserve_time=preserve_time
)
@@ -53,60 +58,66 @@ def copy_fs_if_newer(
walker=None, # type: Optional[Walker]
on_copy=None, # type: Optional[_OnCopy]
workers=0, # type: int
+ preserve_time=False, # type: bool
):
# type: (...) -> None
"""Copy the contents of one filesystem to another, checking times.
- If both source and destination files exist, the copy is executed
- only if the source file is newer than the destination file. In case
- modification times of source or destination files are not available,
- copy file is always executed.
+ .. deprecated:: 2.5.0
+ Use `~fs.copy.copy_fs_if` with ``condition="newer"`` instead.
+
+ """
+ warnings.warn(
+ "copy_fs_if_newer is deprecated. Use copy_fs_if instead.", DeprecationWarning
+ )
+ return copy_fs_if(
+ src_fs, dst_fs, "newer", walker, on_copy, workers, preserve_time=preserve_time
+ )
+
+
+def copy_fs_if(
+ src_fs, # type: Union[FS, Text]
+ dst_fs, # type: Union[FS, Text]
+ condition="always", # type: Text
+ walker=None, # type: Optional[Walker]
+ on_copy=None, # type: Optional[_OnCopy]
+ workers=0, # type: int
+ preserve_time=False, # type: bool
+):
+ # type: (...) -> None
+ """Copy the contents of one filesystem to another, depending on a condition.
Arguments:
src_fs (FS or str): Source filesystem (URL or instance).
dst_fs (FS or str): Destination filesystem (URL or instance).
+ condition (str): Name of the condition to check for each file.
walker (~fs.walk.Walker, optional): A walker object that will be
used to scan for files in ``src_fs``. Set this if you only want
to consider a sub-set of the resources in ``src_fs``.
on_copy (callable):A function callback called after a single file copy
is executed. Expected signature is ``(src_fs, src_path, dst_fs,
dst_path)``.
- workers (int): Use ``worker`` threads to copy data, or ``0`` (default) for
- a single-threaded copy.
-
- """
- return copy_dir_if_newer(
- src_fs, "/", dst_fs, "/", walker=walker, on_copy=on_copy, workers=workers
- )
+ workers (int): Use ``worker`` threads to copy data, or ``0`` (default)
+ for a single-threaded copy.
+ preserve_time (bool): If `True`, try to preserve mtime of the
+ resources (defaults to `False`).
-
-def _source_is_newer(src_fs, src_path, dst_fs, dst_path):
- # type: (FS, Text, FS, Text) -> bool
- """Determine if source file is newer than destination file.
-
- Arguments:
- src_fs (FS): Source filesystem (instance or URL).
- src_path (str): Path to a file on the source filesystem.
- dst_fs (FS): Destination filesystem (instance or URL).
- dst_path (str): Path to a file on the destination filesystem.
-
- Returns:
- bool: `True` if the source file is newer than the destination
- file or file modification time cannot be determined, `False`
- otherwise.
+ See Also:
+ `~fs.copy.copy_file_if` for the full list of supported values for the
+ ``condition`` argument.
"""
- try:
- if dst_fs.exists(dst_path):
- namespace = ("details", "modified")
- src_modified = src_fs.getinfo(src_path, namespace).modified
- if src_modified is not None:
- dst_modified = dst_fs.getinfo(dst_path, namespace).modified
- return dst_modified is None or src_modified > dst_modified
- return True
- except FSError: # pragma: no cover
- # todo: should log something here
- return True
+ return copy_dir_if(
+ src_fs,
+ "/",
+ dst_fs,
+ "/",
+ condition,
+ walker=walker,
+ on_copy=on_copy,
+ workers=workers,
+ preserve_time=preserve_time,
+ )
def copy_file(
@@ -114,6 +125,7 @@ def copy_file(
src_path, # type: Text
dst_fs, # type: Union[FS, Text]
dst_path, # type: Text
+ preserve_time=False, # type: bool
):
# type: (...) -> None
"""Copy a file from one filesystem to another.
@@ -125,78 +137,78 @@ def copy_file(
src_path (str): Path to a file on the source filesystem.
dst_fs (FS or str): Destination filesystem (instance or URL).
dst_path (str): Path to a file on the destination filesystem.
+ preserve_time (bool): If `True`, try to preserve mtime of the
+ resource (defaults to `False`).
"""
- with manage_fs(src_fs, writeable=False) as _src_fs:
- with manage_fs(dst_fs, create=True) as _dst_fs:
- if _src_fs is _dst_fs:
- # Same filesystem, so we can do a potentially optimized
- # copy
- _src_fs.copy(src_path, dst_path, overwrite=True)
- else:
- # Standard copy
- with _src_fs.lock(), _dst_fs.lock():
- if _dst_fs.hassyspath(dst_path):
- with _dst_fs.openbin(dst_path, "w") as write_file:
- _src_fs.download(src_path, write_file)
- else:
- with _src_fs.openbin(src_path) as read_file:
- _dst_fs.upload(dst_path, read_file)
+ copy_file_if(
+ src_fs, src_path, dst_fs, dst_path, "always", preserve_time=preserve_time
+ )
-def copy_file_internal(
- src_fs, # type: FS
+def copy_file_if_newer(
+ src_fs, # type: Union[FS, Text]
src_path, # type: Text
- dst_fs, # type: FS
+ dst_fs, # type: Union[FS, Text]
dst_path, # type: Text
+ preserve_time=False, # type: bool
):
- # type: (...) -> None
- """Low level copy, that doesn't call manage_fs or lock.
-
- If the destination exists, and is a file, it will be first truncated.
-
- This method exists to optimize copying in loops. In general you
- should prefer `copy_file`.
+ # type: (...) -> bool
+ """Copy a file from one filesystem to another, checking times.
- Arguments:
- src_fs (FS): Source filesystem.
- src_path (str): Path to a file on the source filesystem.
- dst_fs (FS: Destination filesystem.
- dst_path (str): Path to a file on the destination filesystem.
+ .. deprecated:: 2.5.0
+ Use `~fs.copy.copy_file_if` with ``condition="newer"`` instead.
"""
- if src_fs is dst_fs:
- # Same filesystem, so we can do a potentially optimized
- # copy
- src_fs.copy(src_path, dst_path, overwrite=True)
- elif dst_fs.hassyspath(dst_path):
- with dst_fs.openbin(dst_path, "w") as write_file:
- src_fs.download(src_path, write_file)
- else:
- with src_fs.openbin(src_path) as read_file:
- dst_fs.upload(dst_path, read_file)
+ warnings.warn(
+ "copy_file_if_newer is deprecated. Use copy_file_if instead.",
+ DeprecationWarning,
+ )
+ return copy_file_if(
+ src_fs, src_path, dst_fs, dst_path, "newer", preserve_time=preserve_time
+ )
-def copy_file_if_newer(
+def copy_file_if(
src_fs, # type: Union[FS, Text]
src_path, # type: Text
dst_fs, # type: Union[FS, Text]
dst_path, # type: Text
+ condition, # type: Text
+ preserve_time=False, # type: bool
):
# type: (...) -> bool
- """Copy a file from one filesystem to another, checking times.
-
- If the destination exists, and is a file, it will be first truncated.
- If both source and destination files exist, the copy is executed only
- if the source file is newer than the destination file. In case
- modification times of source or destination files are not available,
- copy is always executed.
+ """Copy a file from one filesystem to another, depending on a condition.
+
+ Depending on the value of ``condition``, certain requirements must
+ be fulfilled for a file to be copied to ``dst_fs``. The following
+ values are supported:
+
+ ``"always"``
+ The source file is always copied.
+ ``"newer"``
+ The last modification time of the source file must be newer than that
+ of the destination file. If either file has no modification time, the
+ copy is performed always.
+ ``"older"``
+ The last modification time of the source file must be older than that
+ of the destination file. If either file has no modification time, the
+ copy is performed always.
+ ``"exists"``
+ The source file is only copied if a file of the same path already
+ exists in ``dst_fs``.
+ ``"not_exists"``
+ The source file is only copied if no file of the same path already
+ exists in ``dst_fs``.
Arguments:
src_fs (FS or str): Source filesystem (instance or URL).
src_path (str): Path to a file on the source filesystem.
dst_fs (FS or str): Destination filesystem (instance or URL).
dst_path (str): Path to a file on the destination filesystem.
+ condition (str): Name of the condition to check for each file.
+ preserve_time (bool): If `True`, try to preserve mtime of the
+ resource (defaults to `False`).
Returns:
bool: `True` if the file copy was executed, `False` otherwise.
@@ -204,28 +216,81 @@ def copy_file_if_newer(
"""
with manage_fs(src_fs, writeable=False) as _src_fs:
with manage_fs(dst_fs, create=True) as _dst_fs:
- if _src_fs is _dst_fs:
- # Same filesystem, so we can do a potentially optimized
- # copy
- if _source_is_newer(_src_fs, src_path, _dst_fs, dst_path):
- _src_fs.copy(src_path, dst_path, overwrite=True)
- return True
- else:
- return False
- else:
- # Standard copy
- with _src_fs.lock(), _dst_fs.lock():
- if _source_is_newer(_src_fs, src_path, _dst_fs, dst_path):
- copy_file_internal(_src_fs, src_path, _dst_fs, dst_path)
- return True
- else:
- return False
+ do_copy = _copy_is_necessary(
+ _src_fs, src_path, _dst_fs, dst_path, condition
+ )
+ if do_copy:
+ copy_file_internal(
+ _src_fs,
+ src_path,
+ _dst_fs,
+ dst_path,
+ preserve_time=preserve_time,
+ lock=True,
+ )
+ return do_copy
+
+
+def copy_file_internal(
+ src_fs, # type: FS
+ src_path, # type: Text
+ dst_fs, # type: FS
+ dst_path, # type: Text
+ preserve_time=False, # type: bool
+ lock=False, # type: bool
+):
+ # type: (...) -> None
+ """Copy a file at low level, without calling `manage_fs` or locking.
+
+ If the destination exists, and is a file, it will be first truncated.
+
+ This method exists to optimize copying in loops. In general you
+ should prefer `copy_file`.
+
+ Arguments:
+ src_fs (FS): Source filesystem.
+ src_path (str): Path to a file on the source filesystem.
+ dst_fs (FS): Destination filesystem.
+ dst_path (str): Path to a file on the destination filesystem.
+ preserve_time (bool): If `True`, try to preserve mtime of the
+ resource (defaults to `False`).
+ lock (bool): Lock both filesystems before copying.
+
+ """
+ _src_path = src_fs.validatepath(src_path)
+ _dst_path = dst_fs.validatepath(dst_path)
+ if src_fs is dst_fs:
+ # It's not allowed to copy a file onto itself
+ if _src_path == _dst_path:
+ raise IllegalDestination(dst_path)
+ # Same filesystem, so we can do a potentially optimized copy
+ src_fs.copy(src_path, dst_path, overwrite=True, preserve_time=preserve_time)
+ return
+
+ def _copy_locked():
+ if dst_fs.hassyspath(dst_path):
+ with dst_fs.openbin(dst_path, "w") as write_file:
+ src_fs.download(src_path, write_file)
+ else:
+ with src_fs.openbin(src_path) as read_file:
+ dst_fs.upload(dst_path, read_file)
+
+ if preserve_time:
+ copy_modified_time(src_fs, src_path, dst_fs, dst_path)
+
+ if lock:
+ with src_fs.lock(), dst_fs.lock():
+ _copy_locked()
+ else:
+ _copy_locked()
def copy_structure(
src_fs, # type: Union[FS, Text]
dst_fs, # type: Union[FS, Text]
walker=None, # type: Optional[Walker]
+ src_root="/", # type: Text
+ dst_root="/", # type: Text
):
# type: (...) -> None
"""Copy directories (but not files) from ``src_fs`` to ``dst_fs``.
@@ -236,14 +301,27 @@ def copy_structure(
walker (~fs.walk.Walker, optional): A walker object that will be
used to scan for files in ``src_fs``. Set this if you only
want to consider a sub-set of the resources in ``src_fs``.
+ src_root (str): Path of the base directory to consider as the root
+ of the tree structure to copy.
+ dst_root (str): Path to the target root of the tree structure.
"""
walker = walker or Walker()
with manage_fs(src_fs) as _src_fs:
with manage_fs(dst_fs, create=True) as _dst_fs:
+ _src_root = _src_fs.validatepath(src_root)
+ _dst_root = _dst_fs.validatepath(dst_root)
+
+ # It's not allowed to copy a structure into itself
+ if _src_fs == _dst_fs and isbase(_src_root, _dst_root):
+ raise IllegalDestination(dst_root)
+
with _src_fs.lock(), _dst_fs.lock():
- for dir_path in walker.dirs(_src_fs):
- _dst_fs.makedir(dir_path, recreate=True)
+ _dst_fs.makedirs(_dst_root, recreate=True)
+ for dir_path in walker.dirs(_src_fs, _src_root):
+ _dst_fs.makedir(
+ combine(_dst_root, frombase(_src_root, dir_path)), recreate=True
+ )
def copy_dir(
@@ -254,6 +332,7 @@ def copy_dir(
walker=None, # type: Optional[Walker]
on_copy=None, # type: Optional[_OnCopy]
workers=0, # type: int
+ preserve_time=False, # type: bool
):
# type: (...) -> None
"""Copy a directory from one filesystem to another.
@@ -271,67 +350,90 @@ def copy_dir(
``(src_fs, src_path, dst_fs, dst_path)``.
workers (int): Use ``worker`` threads to copy data, or ``0`` (default) for
a single-threaded copy.
+ preserve_time (bool): If `True`, try to preserve mtime of the
+ resources (defaults to `False`).
"""
- on_copy = on_copy or (lambda *args: None)
- walker = walker or Walker()
- _src_path = abspath(normpath(src_path))
- _dst_path = abspath(normpath(dst_path))
+ copy_dir_if(
+ src_fs,
+ src_path,
+ dst_fs,
+ dst_path,
+ "always",
+ walker,
+ on_copy,
+ workers,
+ preserve_time=preserve_time,
+ )
- def src():
- return manage_fs(src_fs, writeable=False)
- def dst():
- return manage_fs(dst_fs, create=True)
+def copy_dir_if_newer(
+ src_fs, # type: Union[FS, Text]
+ src_path, # type: Text
+ dst_fs, # type: Union[FS, Text]
+ dst_path, # type: Text
+ walker=None, # type: Optional[Walker]
+ on_copy=None, # type: Optional[_OnCopy]
+ workers=0, # type: int
+ preserve_time=False, # type: bool
+):
+ # type: (...) -> None
+ """Copy a directory from one filesystem to another, checking times.
- from ._bulk import Copier
+ .. deprecated:: 2.5.0
+ Use `~fs.copy.copy_dir_if` with ``condition="newer"`` instead.
- with src() as _src_fs, dst() as _dst_fs:
- with _src_fs.lock(), _dst_fs.lock():
- _thread_safe = is_thread_safe(_src_fs, _dst_fs)
- with Copier(num_workers=workers if _thread_safe else 0) as copier:
- _dst_fs.makedir(_dst_path, recreate=True)
- for dir_path, dirs, files in walker.walk(_src_fs, _src_path):
- copy_path = combine(_dst_path, frombase(_src_path, dir_path))
- for info in dirs:
- _dst_fs.makedir(info.make_path(copy_path), recreate=True)
- for info in files:
- src_path = info.make_path(dir_path)
- dst_path = info.make_path(copy_path)
- copier.copy(_src_fs, src_path, _dst_fs, dst_path)
- on_copy(_src_fs, src_path, _dst_fs, dst_path)
+ """
+ warnings.warn(
+ "copy_dir_if_newer is deprecated. Use copy_dir_if instead.", DeprecationWarning
+ )
+ copy_dir_if(
+ src_fs,
+ src_path,
+ dst_fs,
+ dst_path,
+ "newer",
+ walker,
+ on_copy,
+ workers,
+ preserve_time=preserve_time,
+ )
-def copy_dir_if_newer(
+def copy_dir_if(
src_fs, # type: Union[FS, Text]
src_path, # type: Text
dst_fs, # type: Union[FS, Text]
dst_path, # type: Text
+ condition, # type: Text
walker=None, # type: Optional[Walker]
on_copy=None, # type: Optional[_OnCopy]
workers=0, # type: int
+ preserve_time=False, # type: bool
):
# type: (...) -> None
- """Copy a directory from one filesystem to another, checking times.
-
- If both source and destination files exist, the copy is executed only
- if the source file is newer than the destination file. In case
- modification times of source or destination files are not available,
- copy is always executed.
+ """Copy a directory from one filesystem to another, depending on a condition.
Arguments:
src_fs (FS or str): Source filesystem (instance or URL).
src_path (str): Path to a directory on the source filesystem.
dst_fs (FS or str): Destination filesystem (instance or URL).
dst_path (str): Path to a directory on the destination filesystem.
+ condition (str): Name of the condition to check for each file.
walker (~fs.walk.Walker, optional): A walker object that will be
- used to scan for files in ``src_fs``. Set this if you only
- want to consider a sub-set of the resources in ``src_fs``.
- on_copy (callable, optional): A function callback called after
- a single file copy is executed. Expected signature is
- ``(src_fs, src_path, dst_fs, dst_path)``.
+ used to scan for files in ``src_fs``. Set this if you only want
+ to consider a sub-set of the resources in ``src_fs``.
+ on_copy (callable): A function callback called after a single file copy
+ is executed. Expected signature is ``(src_fs, src_path, dst_fs,
+ dst_path)``.
workers (int): Use ``worker`` threads to copy data, or ``0`` (default) for
a single-threaded copy.
+ preserve_time (bool): If `True`, try to preserve mtime of the
+ resources (defaults to `False`).
+
+ See Also:
+ `~fs.copy.copy_file_if` for the full list of supported values for the
+ ``condition`` argument.
"""
on_copy = on_copy or (lambda *args: None)
@@ -339,48 +441,98 @@ def copy_dir_if_newer(
_src_path = abspath(normpath(src_path))
_dst_path = abspath(normpath(dst_path))
- def src():
- return manage_fs(src_fs, writeable=False)
-
- def dst():
- return manage_fs(dst_fs, create=True)
-
from ._bulk import Copier
- with src() as _src_fs, dst() as _dst_fs:
+ copy_structure(src_fs, dst_fs, walker, src_path, dst_path)
+
+ with manage_fs(src_fs, writeable=False) as _src_fs, manage_fs(
+ dst_fs, create=True
+ ) as _dst_fs:
with _src_fs.lock(), _dst_fs.lock():
_thread_safe = is_thread_safe(_src_fs, _dst_fs)
- with Copier(num_workers=workers if _thread_safe else 0) as copier:
- _dst_fs.makedir(_dst_path, recreate=True)
- namespace = ("details", "modified")
- dst_state = {
- path: info
- for path, info in walker.info(_dst_fs, _dst_path, namespace)
- if info.is_file
- }
- src_state = [
- (path, info)
- for path, info in walker.info(_src_fs, _src_path, namespace)
- ]
- for dir_path, copy_info in src_state:
+ with Copier(
+ num_workers=workers if _thread_safe else 0, preserve_time=preserve_time
+ ) as copier:
+ for dir_path in walker.files(_src_fs, _src_path):
copy_path = combine(_dst_path, frombase(_src_path, dir_path))
- if copy_info.is_dir:
- _dst_fs.makedir(copy_path, recreate=True)
- elif copy_info.is_file:
- # dst file is present, try to figure out if copy
- # is necessary
- try:
- src_modified = copy_info.modified
- dst_modified = dst_state[dir_path].modified
- except KeyError:
- do_copy = True
- else:
- do_copy = (
- src_modified is None
- or dst_modified is None
- or src_modified > dst_modified
- )
-
- if do_copy:
- copier.copy(_src_fs, dir_path, _dst_fs, copy_path)
- on_copy(_src_fs, dir_path, _dst_fs, copy_path)
+ if _copy_is_necessary(
+ _src_fs, dir_path, _dst_fs, copy_path, condition
+ ):
+ copier.copy(_src_fs, dir_path, _dst_fs, copy_path)
+ on_copy(_src_fs, dir_path, _dst_fs, copy_path)
+
+
+def _copy_is_necessary(
+ src_fs, # type: FS
+ src_path, # type: Text
+ dst_fs, # type: FS
+ dst_path, # type: Text
+ condition, # type: Text
+):
+ # type: (...) -> bool
+
+ if condition == "always":
+ return True
+
+ elif condition == "newer":
+ try:
+ src_modified = src_fs.getmodified(src_path)
+ dst_modified = dst_fs.getmodified(dst_path)
+ except ResourceNotFound:
+ return True
+ else:
+ return (
+ src_modified is None
+ or dst_modified is None
+ or src_modified > dst_modified
+ )
+
+ elif condition == "older":
+ try:
+ src_modified = src_fs.getmodified(src_path)
+ dst_modified = dst_fs.getmodified(dst_path)
+ except ResourceNotFound:
+ return True
+ else:
+ return (
+ src_modified is None
+ or dst_modified is None
+ or src_modified < dst_modified
+ )
+
+ elif condition == "exists":
+ return dst_fs.exists(dst_path)
+
+ elif condition == "not_exists":
+ return not dst_fs.exists(dst_path)
+
+ else:
+ raise ValueError("{} is not a valid copy condition.".format(condition))
+
+
+def copy_modified_time(
+ src_fs, # type: Union[FS, Text]
+ src_path, # type: Text
+ dst_fs, # type: Union[FS, Text]
+ dst_path, # type: Text
+):
+ # type: (...) -> None
+ """Copy modified time metadata from one file to another.
+
+ Arguments:
+ src_fs (FS or str): Source filesystem (instance or URL).
+ src_path (str): Path to a file on the source filesystem.
+ dst_fs (FS or str): Destination filesystem (instance or URL).
+ dst_path (str): Path to a file on the destination filesystem.
+
+ """
+ namespaces = ("details",)
+ with manage_fs(src_fs, writeable=False) as _src_fs:
+ with manage_fs(dst_fs, create=True) as _dst_fs:
+ src_meta = _src_fs.getinfo(src_path, namespaces)
+ src_details = src_meta.raw.get("details", {})
+ dst_details = {}
+ for value in ("metadata_changed", "modified"):
+ if value in src_details:
+ dst_details[value] = src_details[value]
+ _dst_fs.setinfo(dst_path, {"details": dst_details})
diff --git a/fs/enums.py b/fs/enums.py
index 3c7d3ed0..adc288dd 100644
--- a/fs/enums.py
+++ b/fs/enums.py
@@ -1,8 +1,7 @@
"""Enums used by PyFilesystem.
"""
-from __future__ import absolute_import
-from __future__ import unicode_literals
+from __future__ import absolute_import, unicode_literals
import os
from enum import IntEnum, unique
diff --git a/fs/error_tools.py b/fs/error_tools.py
index ba32c23c..bdb3818c 100644
--- a/fs/error_tools.py
+++ b/fs/error_tools.py
@@ -1,36 +1,34 @@
"""Tools for managing OS errors.
"""
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import print_function, unicode_literals
-import collections
-import errno
-import platform
import sys
import typing
-from contextlib import contextmanager
-from six import reraise, PY3
+import errno
+import platform
+from contextlib import contextmanager
+from six import reraise
from . import errors
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
+ from typing import Iterator, Optional, Text, Type, Union
+
from types import TracebackType
- from typing import Iterator, Optional, Mapping, Text, Type, Union
-if PY3:
+try:
from collections.abc import Mapping
-else:
- from collections import Mapping
+except ImportError:
+ from collections import Mapping # noqa: E811
_WINDOWS_PLATFORM = platform.system() == "Windows"
class _ConvertOSErrors(object):
- """Context manager to convert OSErrors in to FS Errors.
- """
+ """Context manager to convert OSErrors into FS Errors."""
FILE_ERRORS = {
64: errors.RemoteConnectionError, # ENONET
@@ -116,4 +114,4 @@ def unwrap_errors(path_replace):
e.path = path_replace.get(e.path, e.path)
else:
e.path = path_replace
- reraise(type(e), e)
+ raise
diff --git a/fs/errors.py b/fs/errors.py
index e5452e06..adc9afa9 100644
--- a/fs/errors.py
+++ b/fs/errors.py
@@ -8,16 +8,15 @@
"""
-from __future__ import unicode_literals
-from __future__ import print_function
+from __future__ import print_function, unicode_literals
-import functools
import typing
+import functools
import six
from six import text_type
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import Optional, Text
@@ -33,6 +32,7 @@
"FilesystemClosed",
"FSError",
"IllegalBackReference",
+ "IllegalDestination",
"InsufficientStorage",
"InvalidCharsInPath",
"InvalidPath",
@@ -42,6 +42,7 @@
"OperationFailed",
"OperationTimeout",
"PathError",
+ "PatternError",
"PermissionDenied",
"RemoteConnectionError",
"RemoveRootError",
@@ -51,14 +52,14 @@
"ResourceNotFound",
"ResourceReadOnly",
"Unsupported",
+ "UnsupportedHash",
]
class MissingInfoNamespace(AttributeError):
- """An expected namespace is missing.
- """
+ """An expected namespace is missing."""
- def __init__(self, namespace):
+ def __init__(self, namespace): # noqa: D107
# type: (Text) -> None
self.namespace = namespace
msg = "namespace '{}' is required for this attribute"
@@ -70,20 +71,18 @@ def __reduce__(self):
@six.python_2_unicode_compatible
class FSError(Exception):
- """Base exception for the `fs` module.
- """
+ """Base exception for the `fs` module."""
default_message = "Unspecified error"
- def __init__(self, msg=None):
+ def __init__(self, msg=None): # noqa: D107
# type: (Optional[Text]) -> None
self._msg = msg or self.default_message
super(FSError, self).__init__()
def __str__(self):
# type: () -> Text
- """Return the error message.
- """
+ """Return the error message."""
msg = self._msg.format(**self.__dict__)
return msg
@@ -94,8 +93,7 @@ def __repr__(self):
class FilesystemClosed(FSError):
- """Attempt to use a closed filesystem.
- """
+ """Attempt to use a closed filesystem."""
default_message = "attempt to use closed filesystem"
@@ -105,18 +103,17 @@ class BulkCopyFailed(FSError):
default_message = "One or more copy operations failed (see errors attribute)"
- def __init__(self, errors):
+ def __init__(self, errors): # noqa: D107
self.errors = errors
super(BulkCopyFailed, self).__init__()
class CreateFailed(FSError):
- """Filesystem could not be created.
- """
+ """Filesystem could not be created."""
default_message = "unable to create filesystem, {details}"
- def __init__(self, msg=None, exc=None):
+ def __init__(self, msg=None, exc=None): # noqa: D107
# type: (Optional[Text], Optional[Exception]) -> None
self._msg = msg or self.default_message
self.details = "" if exc is None else text_type(exc)
@@ -140,34 +137,32 @@ def __reduce__(self):
class PathError(FSError):
- """Base exception for errors to do with a path string.
- """
+ """Base exception for errors to do with a path string."""
default_message = "path '{path}' is invalid"
- def __init__(self, path, msg=None):
- # type: (Text, Optional[Text]) -> None
+ def __init__(self, path, msg=None, exc=None): # noqa: D107
+ # type: (Text, Optional[Text], Optional[Exception]) -> None
self.path = path
+ self.exc = exc
super(PathError, self).__init__(msg=msg)
def __reduce__(self):
- return type(self), (self.path, self._msg)
+ return type(self), (self.path, self._msg, self.exc)
class NoSysPath(PathError):
- """The filesystem does not provide *sys paths* to the resource.
- """
+ """The filesystem does not provide *sys paths* to the resource."""
default_message = "path '{path}' does not map to the local filesystem"
class NoURL(PathError):
- """The filesystem does not provide an URL for the resource.
- """
+ """The filesystem does not provide an URL for the resource."""
default_message = "path '{path}' has no '{purpose}' URL"
- def __init__(self, path, purpose, msg=None):
+ def __init__(self, path, purpose, msg=None): # noqa: D107
# type: (Text, Text, Optional[Text]) -> None
self.purpose = purpose
super(NoURL, self).__init__(path, msg=msg)
@@ -177,22 +172,19 @@ def __reduce__(self):
class InvalidPath(PathError):
- """Path can't be mapped on to the underlaying filesystem.
- """
+ """Path can't be mapped onto the underlying filesystem."""
default_message = "path '{path}' is invalid on this filesystem "
class InvalidCharsInPath(InvalidPath):
- """Path contains characters that are invalid on this filesystem.
- """
+ """Path contains characters that are invalid on this filesystem."""
default_message = "path '{path}' contains invalid characters"
class OperationFailed(FSError):
- """A specific operation failed.
- """
+ """A specific operation failed."""
default_message = "operation failed, {details}"
@@ -201,7 +193,7 @@ def __init__(
path=None, # type: Optional[Text]
exc=None, # type: Optional[Exception]
msg=None, # type: Optional[Text]
- ):
+ ): # noqa: D107
# type: (...) -> None
self.path = path
self.exc = exc
@@ -214,54 +206,57 @@ def __reduce__(self):
class Unsupported(OperationFailed):
- """Operation not supported by the filesystem.
- """
+ """Operation not supported by the filesystem."""
default_message = "not supported"
class RemoteConnectionError(OperationFailed):
- """Operations encountered remote connection trouble.
- """
+ """Operations encountered remote connection trouble."""
default_message = "remote connection error"
class InsufficientStorage(OperationFailed):
- """Storage is insufficient for requested operation.
- """
+ """Storage is insufficient for requested operation."""
default_message = "insufficient storage space"
class PermissionDenied(OperationFailed):
- """Not enough permissions.
- """
+ """Not enough permissions."""
default_message = "permission denied"
class OperationTimeout(OperationFailed):
- """Filesystem took too long.
- """
+ """Filesystem took too long."""
default_message = "operation timed out"
class RemoveRootError(OperationFailed):
- """Attempt to remove the root directory.
- """
+ """Attempt to remove the root directory."""
default_message = "root directory may not be removed"
-class ResourceError(FSError):
- """Base exception class for error associated with a specific resource.
+class IllegalDestination(OperationFailed):
+ """The given destination cannot be used for the operation.
+
+ This error will occur when attempting to move / copy a folder into itself or copying
+ a file onto itself.
"""
+ default_message = "'{path}' is not a legal destination"
+
+
+class ResourceError(FSError):
+ """Base exception class for error associated with a specific resource."""
+
default_message = "failed on path {path}"
- def __init__(self, path, exc=None, msg=None):
+ def __init__(self, path, exc=None, msg=None): # noqa: D107
# type: (Text, Optional[Exception], Optional[Text]) -> None
self.path = path
self.exc = exc
@@ -272,71 +267,61 @@ def __reduce__(self):
class ResourceNotFound(ResourceError):
- """Required resource not found.
- """
+ """Required resource not found."""
default_message = "resource '{path}' not found"
class ResourceInvalid(ResourceError):
- """Resource has the wrong type.
- """
+ """Resource has the wrong type."""
default_message = "resource '{path}' is invalid for this operation"
class FileExists(ResourceError):
- """File already exists.
- """
+ """File already exists."""
default_message = "resource '{path}' exists"
class FileExpected(ResourceInvalid):
- """Operation only works on files.
- """
+ """Operation only works on files."""
default_message = "path '{path}' should be a file"
class DirectoryExpected(ResourceInvalid):
- """Operation only works on directories.
- """
+ """Operation only works on directories."""
default_message = "path '{path}' should be a directory"
class DestinationExists(ResourceError):
- """Target destination already exists.
- """
+ """Target destination already exists."""
default_message = "destination '{path}' exists"
class DirectoryExists(ResourceError):
- """Directory already exists.
- """
+ """Directory already exists."""
default_message = "directory '{path}' exists"
class DirectoryNotEmpty(ResourceError):
- """Attempt to remove a non-empty directory.
- """
+ """Attempt to remove a non-empty directory."""
default_message = "directory '{path}' is not empty"
class ResourceLocked(ResourceError):
- """Attempt to use a locked resource.
- """
+ """Attempt to use a locked resource."""
default_message = "resource '{path}' is locked"
class ResourceReadOnly(ResourceError):
- """Attempting to modify a read-only resource.
- """
+ """Attempting to modify a read-only resource."""
default_message = "resource '{path}' is read only"
@@ -354,7 +339,7 @@ class IllegalBackReference(ValueError):
"""
- def __init__(self, path):
+ def __init__(self, path): # noqa: D107
# type: (Text) -> None
self.path = path
msg = ("path '{path}' contains back-references outside of filesystem").format(
@@ -373,3 +358,19 @@ class UnsupportedHash(ValueError):
not supported by hashlib.
"""
+
+
+class PatternError(ValueError):
+ """A string pattern with invalid syntax was given."""
+
+ default_message = "pattern '{pattern}' is invalid at position {position}"
+
+ def __init__(self, pattern, position, exc=None, msg=None): # noqa: D107
+ # type: (Text, int, Optional[Exception], Optional[Text]) -> None
+ self.pattern = pattern
+ self.position = position
+ self.exc = exc
+ super(ValueError, self).__init__()
+
+ def __reduce__(self):
+ return type(self), (self.pattern, self.position, self.exc)
diff --git a/fs/filesize.py b/fs/filesize.py
index ff2ecf63..ed113e88 100644
--- a/fs/filesize.py
+++ b/fs/filesize.py
@@ -11,12 +11,11 @@
"""
-from __future__ import division
-from __future__ import unicode_literals
+from __future__ import division, unicode_literals
import typing
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import Iterable, SupportsInt, Text
@@ -34,8 +33,9 @@ def _to_str(size, suffixes, base):
elif size < base:
return "{:,} bytes".format(size)
- for i, suffix in enumerate(suffixes, 2):
- unit = base ** i
+ # TODO (dargueta): Don't rely on unit or suffix being defined in the loop.
+ for i, suffix in enumerate(suffixes, 2): # noqa: B007
+ unit = base**i
if size < unit:
break
return "{:,.1f} {}".format((base * size / unit), suffix)
@@ -60,7 +60,7 @@ def traditional(size):
`str`: A string containing an abbreviated file size and units.
Example:
- >>> filesize.traditional(30000)
+ >>> fs.filesize.traditional(30000)
'29.3 KB'
"""
@@ -86,7 +86,7 @@ def binary(size):
`str`: A string containing a abbreviated file size and units.
Example:
- >>> filesize.binary(30000)
+ >>> fs.filesize.binary(30000)
'29.3 KiB'
"""
@@ -111,7 +111,7 @@ def decimal(size):
`str`: A string containing a abbreviated file size and units.
Example:
- >>> filesize.decimal(30000)
+ >>> fs.filesize.decimal(30000)
'30.0 kB'
"""
diff --git a/fs/ftpfs.py b/fs/ftpfs.py
index 21e98a10..50d8a0d5 100644
--- a/fs/ftpfs.py
+++ b/fs/ftpfs.py
@@ -1,53 +1,51 @@
"""Manage filesystems on remote FTP servers.
"""
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import print_function, unicode_literals
+import typing
+
+import array
import calendar
-import ftplib
+import datetime
import io
import itertools
import socket
import threading
-import typing
from collections import OrderedDict
from contextlib import contextmanager
from ftplib import FTP
-from ftplib import error_perm
-from ftplib import error_temp
+
+try:
+ from ftplib import FTP_TLS
+except ImportError as err:
+ FTP_TLS = err # type: ignore
from typing import cast
-from six import PY2
-from six import text_type
+from ftplib import error_perm, error_temp
+from six import PY2, raise_from, text_type
+from . import _ftp_parse as ftp_parse
from . import errors
from .base import FS
from .constants import DEFAULT_CHUNK_SIZE
-from .enums import ResourceType
-from .enums import Seek
+from .enums import ResourceType, Seek
from .info import Info
from .iotools import line_iterator
from .mode import Mode
-from .path import abspath
-from .path import dirname
-from .path import basename
-from .path import normpath
-from .path import split
-from . import _ftp_parse as ftp_parse
+from .path import abspath, basename, dirname, normpath, split
+from .time import epoch_to_datetime
-if False: # typing.TYPE_CHECKING
- import ftplib
+if typing.TYPE_CHECKING:
from typing import (
Any,
BinaryIO,
ByteString,
+ Container,
ContextManager,
+ Dict,
Iterable,
Iterator,
- Collection,
- Container,
- Dict,
List,
Optional,
SupportsInt,
@@ -55,6 +53,10 @@
Tuple,
Union,
)
+
+ import ftplib
+ import mmap
+
from .base import _OpendirFactory
from .info import RawInfo
from .permissions import Permissions
@@ -103,7 +105,7 @@ def manage_ftp(ftp):
finally:
try:
ftp.quit()
- except: # pragma: no cover
+ except Exception: # pragma: no cover
pass
@@ -124,7 +126,6 @@ def _decode(st, encoding):
# type: (Union[Text, bytes], Text) -> Text
return st.decode(encoding, "replace") if isinstance(st, bytes) else st
-
else:
def _encode(st, _):
@@ -237,8 +238,18 @@ def read(self, size=-1):
remaining -= len(chunk)
return b"".join(chunks)
- def readline(self, size=-1):
- # type: (int) -> bytes
+ def readinto(self, buffer):
+ # type: (Union[bytearray, memoryview, array.array[Any], mmap.mmap]) -> int
+ data = self.read(len(buffer))
+ bytes_read = len(data)
+ if isinstance(buffer, array.array):
+ buffer[:bytes_read] = array.array(buffer.typecode, data)
+ else:
+ buffer[:bytes_read] = data # type: ignore
+ return bytes_read
+
+ def readline(self, size=None):
+ # type: (Optional[int]) -> bytes
return next(line_iterator(self, size)) # type: ignore
def readlines(self, hint=-1):
@@ -257,10 +268,13 @@ def writable(self):
return self.mode.writing
def write(self, data):
- # type: (bytes) -> int
+ # type: (Union[bytes, memoryview, array.array[Any], mmap.mmap]) -> int
if not self.mode.writing:
raise IOError("File not open for writing")
+ if isinstance(data, array.array):
+ data = data.tobytes()
+
with self._lock:
conn = self.write_conn
data_pos = 0
@@ -276,8 +290,16 @@ def write(self, data):
return data_pos
def writelines(self, lines):
- # type: (Iterable[bytes]) -> None
- self.write(b"".join(lines))
+ # type: (Iterable[Union[bytes, memoryview, array.array[Any], mmap.mmap]]) -> None # noqa: E501
+ if not self.mode.writing:
+ raise IOError("File not open for writing")
+ data = bytearray()
+ for line in lines:
+ if isinstance(line, array.array):
+ data.extend(line.tobytes())
+ else:
+ data.extend(line) # type: ignore
+ self.write(data)
def truncate(self, size=None):
# type: (Optional[int]) -> int
@@ -327,16 +349,35 @@ def seek(self, pos, whence=Seek.set):
class FTPFS(FS):
"""A FTP (File Transport Protocol) Filesystem.
- Arguments:
- host (str): A FTP host, e.g. ``'ftp.mirror.nl'``.
- user (str): A username (default is ``'anonymous'``).
- passwd (str): Password for the server, or `None` for anon.
- acct (str): FTP account.
- timeout (int): Timeout for contacting server (in seconds,
- defaults to 10).
- port (int): FTP port number (default 21).
- proxy (str, optional): An FTP proxy, or ``None`` (default)
- for no proxy.
+ Optionally, the connection can be made securely via TLS. This is known as
+ FTPS, or FTP Secure. TLS will be enabled when using the ftps:// protocol,
+ or when setting the `tls` argument to True in the constructor.
+
+ Examples:
+ Create with the constructor::
+
+ >>> from fs.ftpfs import FTPFS
+ >>> ftp_fs = FTPFS("demo.wftpserver.com")
+
+ Or via an FS URL::
+
+ >>> ftp_fs = fs.open_fs('ftp://test.rebex.net')
+
+ Or via an FS URL, using TLS::
+
+ >>> ftp_fs = fs.open_fs('ftps://demo.wftpserver.com')
+
+ You can also use a non-anonymous username, and optionally a
+ password, even within a FS URL::
+
+ >>> ftp_fs = FTPFS("test.rebex.net", user="demo", passwd="password")
+ >>> ftp_fs = fs.open_fs('ftp://demo:password@test.rebex.net')
+
+ Connecting via a proxy is supported. If using a FS URL, the proxy
+ URL will need to be added as a URL parameter::
+
+ >>> ftp_fs = FTPFS("ftp.ebi.ac.uk", proxy="test.rebex.net")
+ >>> ftp_fs = fs.open_fs('ftp://ftp.ebi.ac.uk/?proxy=test.rebex.net')
"""
@@ -358,8 +399,24 @@ def __init__(
timeout=10, # type: int
port=21, # type: int
proxy=None, # type: Optional[Text]
+ tls=False, # type: bool
):
# type: (...) -> None
+ """Create a new `FTPFS` instance.
+
+ Arguments:
+ host (str): A FTP host, e.g. ``'ftp.mirror.nl'``.
+ user (str): A username (default is ``'anonymous'``).
+ passwd (str): Password for the server, or `None` for anon.
+ acct (str): FTP account.
+ timeout (int): Timeout for contacting server (in seconds,
+ defaults to 10).
+ port (int): FTP port number (default 21).
+ proxy (str, optional): An FTP proxy, or ``None`` (default)
+ for no proxy.
+ tls (bool): Attempt to use FTP over TLS (FTPS) (default: False)
+
+ """
super(FTPFS, self).__init__()
self._host = host
self._user = user
@@ -368,6 +425,10 @@ def __init__(
self.timeout = timeout
self.port = port
self.proxy = proxy
+ self.tls = tls
+
+ if self.tls and isinstance(FTP_TLS, Exception):
+ raise_from(errors.CreateFailed("FTP over TLS not supported"), FTP_TLS)
self.encoding = "latin-1"
self._ftp = None # type: Optional[FTP]
@@ -398,8 +459,7 @@ def host(self):
@classmethod
def _parse_features(cls, feat_response):
# type: (Text) -> Dict[Text, Text]
- """Parse a dict of features from FTP feat response.
- """
+ """Parse a dict of features from FTP feat response."""
features = {}
if feat_response.split("-")[0] == "211":
for line in feat_response.splitlines():
@@ -410,13 +470,16 @@ def _parse_features(cls, feat_response):
def _open_ftp(self):
# type: () -> FTP
- """Open a new ftp object.
- """
- _ftp = FTP()
+ """Open a new ftp object."""
+ _ftp = FTP_TLS() if self.tls else FTP()
_ftp.set_debuglevel(0)
with ftp_errors(self):
_ftp.connect(self.host, self.port, self.timeout)
_ftp.login(self.user, self.passwd, self.acct)
+ try:
+ _ftp.prot_p() # type: ignore
+ except AttributeError:
+ pass
self._features = {}
try:
feat_response = _decode(_ftp.sendcmd("FEAT"), "latin-1")
@@ -442,16 +505,24 @@ def _manage_ftp(self):
def ftp_url(self):
# type: () -> Text
"""Get the FTP url this filesystem will open."""
- _host_part = self.host if self.port == 21 else "{}:{}".format(self.host, self.port)
- _user_part = "" if self.user == "anonymous" or self.user is None else "{}:{}@".format(self.user, self.passwd)
- url = "ftp://{}{}".format(_user_part, _host_part)
+ if self.port == 21:
+ _host_part = self.host
+ else:
+ _host_part = "{}:{}".format(self.host, self.port)
+
+ if self.user == "anonymous" or self.user is None:
+ _user_part = ""
+ else:
+ _user_part = "{}:{}@".format(self.user, self.passwd)
+
+ scheme = "ftps" if self.tls else "ftp"
+ url = "{}://{}{}".format(scheme, _user_part, _host_part)
return url
@property
def ftp(self):
# type: () -> FTP
- """~ftplib.FTP: the underlying FTP client.
- """
+ """~ftplib.FTP: the underlying FTP client."""
return self._get_ftp()
def geturl(self, path, purpose="download"):
@@ -469,10 +540,9 @@ def _get_ftp(self):
return self._ftp
@property
- def features(self):
+ def features(self): # noqa: D401
# type: () -> Dict[Text, Text]
- """dict: features of the remote FTP server.
- """
+ """`dict`: Features of the remote FTP server."""
self._get_ftp()
return self._features
@@ -494,10 +564,15 @@ def _read_dir(self, path):
@property
def supports_mlst(self):
# type: () -> bool
- """bool: whether the server supports MLST feature.
- """
+ """bool: whether the server supports MLST feature."""
return "MLST" in self.features
+ @property
+ def supports_mdtm(self):
+ # type: () -> bool
+ """bool: whether the server supports the MDTM feature."""
+ return "MDTM" in self.features
+
def create(self, path, wipe=False):
# type: (Text, bool) -> bool
_path = self.validatepath(path)
@@ -513,8 +588,7 @@ def create(self, path, wipe=False):
@classmethod
def _parse_ftp_time(cls, time_text):
# type: (Text) -> Optional[int]
- """Parse a time from an ftp directory listing.
- """
+ """Parse a time from an ftp directory listing."""
try:
tm_year = int(time_text[0:4])
tm_month = int(time_text[4:6])
@@ -575,7 +649,7 @@ def _parse_mlsx(cls, lines):
details["created"] = cls._parse_ftp_time(facts["create"])
yield raw_info
- if False: # typing.TYPE_CHECKING
+ if typing.TYPE_CHECKING:
def opendir(self, path, factory=None):
# type: (_F, Text, Optional[_OpendirFactory]) -> SubFS[_F]
@@ -619,8 +693,21 @@ def getmeta(self, namespace="standard"):
if namespace == "standard":
_meta = self._meta.copy()
_meta["unicode_paths"] = "UTF8" in self.features
+ _meta["supports_mtime"] = "MDTM" in self.features
return _meta
+ def getmodified(self, path):
+ # type: (Text) -> Optional[datetime.datetime]
+ if self.supports_mdtm:
+ _path = self.validatepath(path)
+ with self._lock:
+ with ftp_errors(self, path=path):
+ cmd = "MDTM " + _encode(_path, self.ftp.encoding)
+ response = self.ftp.sendcmd(cmd)
+ mtime = self._parse_ftp_time(response.split()[1])
+ return epoch_to_datetime(mtime)
+ return super(FTPFS, self).getmodified(path)
+
def listdir(self, path):
# type: (Text) -> List[Text]
_path = self.validatepath(path)
@@ -677,7 +764,7 @@ def openbin(self, path, mode="r", buffering=-1, **options):
raise errors.FileExpected(path)
if _mode.exclusive:
raise errors.FileExists(path)
- ftp_file = FTPFile(self, _path, mode)
+ ftp_file = FTPFile(self, _path, _mode.to_platform_bin())
return ftp_file # type: ignore
def remove(self, path):
@@ -728,9 +815,8 @@ def _scandir(self, path, namespaces=None):
for raw_info in self._parse_mlsx(lines):
yield Info(raw_info)
return
- with self._lock:
- for info in self._read_dir(_path).values():
- yield info
+ for info in self._read_dir(_path).values():
+ yield info
def scandir(
self,
@@ -751,11 +837,10 @@ def upload(self, path, file, chunk_size=None, **options):
# type: (Text, BinaryIO, Optional[int], **Any) -> None
_path = self.validatepath(path)
with self._lock:
- with self._manage_ftp() as ftp:
- with ftp_errors(self, path):
- ftp.storbinary(
- str("STOR ") + _encode(_path, self.ftp.encoding), file
- )
+ with ftp_errors(self, path):
+ self.ftp.storbinary(
+ str("STOR ") + _encode(_path, self.ftp.encoding), file
+ )
def writebytes(self, path, contents):
# type: (Text, ByteString) -> None
@@ -765,8 +850,32 @@ def writebytes(self, path, contents):
def setinfo(self, path, info):
# type: (Text, RawInfo) -> None
- if not self.exists(path):
- raise errors.ResourceNotFound(path)
+ use_mfmt = False
+ if "MFMT" in self.features:
+ info_details = None
+ if "modified" in info:
+ info_details = info["modified"]
+ elif "details" in info:
+ info_details = info["details"]
+ if info_details and "modified" in info_details:
+ use_mfmt = True
+ mtime = cast(float, info_details["modified"])
+
+ if use_mfmt:
+ with ftp_errors(self, path):
+ cmd = (
+ "MFMT "
+ + datetime.datetime.utcfromtimestamp(mtime).strftime("%Y%m%d%H%M%S")
+ + " "
+ + _encode(path, self.ftp.encoding)
+ )
+ try:
+ self.ftp.sendcmd(cmd)
+ except error_perm:
+ pass
+ else:
+ if not self.exists(path):
+ raise errors.ResourceNotFound(path)
def readbytes(self, path):
# type: (Text) -> bytes
diff --git a/fs/glob.py b/fs/glob.py
index 09927952..4e783652 100644
--- a/fs/glob.py
+++ b/fs/glob.py
@@ -1,48 +1,138 @@
+"""Useful functions for working with glob patterns.
+"""
+
from __future__ import unicode_literals
-from collections import namedtuple
-from typing import Iterator, List
+import typing
+from functools import partial
+
import re
+from collections import namedtuple
-from .lrucache import LRUCache
from ._repr import make_repr
+from .lrucache import LRUCache
from .path import iteratepath
-from . import wildcard
-_PATTERN_CACHE = LRUCache(
- 1000
-) # type: LRUCache[Tuple[Text, bool], Tuple[int, bool, Pattern]]
-
-GlobMatch = namedtuple('GlobMatch', ["path", "info"])
+GlobMatch = namedtuple("GlobMatch", ["path", "info"])
Counts = namedtuple("Counts", ["files", "directories", "data"])
LineCounts = namedtuple("LineCounts", ["lines", "non_blank"])
-if False: # typing.TYPE_CHECKING
- from typing import Iterator, List, Optional, Tuple
+if typing.TYPE_CHECKING:
+ from typing import (
+ Iterator,
+ List,
+ Optional,
+ Pattern,
+ Text,
+ Tuple,
+ Iterable,
+ Callable,
+ )
from .base import FS
- from .info import Info
-def _translate_glob(pattern, case_sensitive=True):
- levels = 0
+_PATTERN_CACHE = LRUCache(
+ 1000
+) # type: LRUCache[Tuple[Text, bool], Tuple[Optional[int], Pattern]]
+
+
+def _split_pattern_by_sep(pattern):
+ # type: (Text) -> List[Text]
+ """Split a glob pattern at its directory separators (/).
+
+ Takes into account escaped cases like [/].
+ """
+ indices = [-1]
+ bracket_open = False
+ for i, c in enumerate(pattern):
+ if c == "/" and not bracket_open:
+ indices.append(i)
+ elif c == "[":
+ bracket_open = True
+ elif c == "]":
+ bracket_open = False
+
+ indices.append(len(pattern))
+ return [pattern[i + 1 : j] for i, j in zip(indices[:-1], indices[1:])]
+
+
+def _translate(pattern):
+ # type: (Text) -> Text
+ """Translate a glob pattern without '**' to a regular expression.
+
+ There is no way to quote meta-characters.
+
+ Arguments:
+ pattern (str): A glob pattern.
+
+ Returns:
+ str: A regex equivalent to the given pattern.
+
+ """
+ i, n = 0, len(pattern)
+ res = []
+ while i < n:
+ c = pattern[i]
+ i = i + 1
+ if c == "*":
+ if i < n and pattern[i] == "*":
+ raise ValueError("glob._translate does not support '**' patterns.")
+ res.append("[^/]*")
+ elif c == "?":
+ res.append("[^/]")
+ elif c == "[":
+ j = i
+ if j < n and pattern[j] == "!":
+ j = j + 1
+ if j < n and pattern[j] == "]":
+ j = j + 1
+ while j < n and pattern[j] != "]":
+ j = j + 1
+ if j >= n:
+ res.append("\\[")
+ else:
+ stuff = pattern[i:j].replace("\\", "\\\\")
+ i = j + 1
+ if stuff[0] == "!":
+ stuff = "^/" + stuff[1:]
+ elif stuff[0] == "^":
+ stuff = "\\" + stuff
+ res.append("[%s]" % stuff)
+ else:
+ res.append(re.escape(c))
+ return "".join(res)
+
+
+def _translate_glob(pattern):
+ # type: (Text) -> Tuple[Optional[int], Text]
+ """Translate a glob pattern to a regular expression.
+
+ There is no way to quote meta-characters.
+
+ Arguments:
+ pattern (str): A glob pattern.
+
+ Returns:
+ Tuple[Optional[int], Text]: The first component describes the levels
+ of depth this glob pattern goes to; basically the number of "/" in
+ the pattern. If there is a "**" in the glob pattern, the depth is
+ basically unbounded, and this component is `None` instead.
+ The second component is the regular expression.
+
+ """
recursive = False
re_patterns = [""]
for component in iteratepath(pattern):
- if component == "**":
- re_patterns.append(".*/?")
+ if "**" in component:
recursive = True
+ split = component.split("**")
+ split_re = [_translate(s) for s in split]
+ re_patterns.append("/?" + ".*/?".join(split_re))
else:
- re_patterns.append(
- "/" + wildcard._translate(component, case_sensitive=case_sensitive)
- )
- levels += 1
+ re_patterns.append("/" + _translate(component))
re_glob = "(?ms)^" + "".join(re_patterns) + ("/$" if pattern.endswith("/") else "$")
- return (
- levels,
- recursive,
- re.compile(re_glob, 0 if case_sensitive else re.IGNORECASE),
- )
+ return pattern.count("/") + 1 if not recursive else None, re_glob
def match(pattern, path):
@@ -64,10 +154,13 @@ def match(pattern, path):
"""
try:
- levels, recursive, re_pattern = _PATTERN_CACHE[(pattern, True)]
+ levels, re_pattern = _PATTERN_CACHE[(pattern, True)]
except KeyError:
- levels, recursive, re_pattern = _translate_glob(pattern, case_sensitive=True)
- _PATTERN_CACHE[(pattern, True)] = (levels, recursive, re_pattern)
+ levels, re_str = _translate_glob(pattern)
+ re_pattern = re.compile(re_str)
+ _PATTERN_CACHE[(pattern, True)] = (levels, re_pattern)
+ if path and path[0] != "/":
+ path = "/" + path
return bool(re_pattern.match(path))
@@ -84,28 +177,103 @@ def imatch(pattern, path):
"""
try:
- levels, recursive, re_pattern = _PATTERN_CACHE[(pattern, False)]
+ levels, re_pattern = _PATTERN_CACHE[(pattern, False)]
except KeyError:
- levels, recursive, re_pattern = _translate_glob(pattern, case_sensitive=True)
- _PATTERN_CACHE[(pattern, False)] = (levels, recursive, re_pattern)
+ levels, re_str = _translate_glob(pattern)
+ re_pattern = re.compile(re_str, re.IGNORECASE)
+ _PATTERN_CACHE[(pattern, False)] = (levels, re_pattern)
+ if path and path[0] != "/":
+ path = "/" + path
return bool(re_pattern.match(path))
-class Globber(object):
- """A generator of glob results.
+def match_any(patterns, path):
+ # type: (Iterable[Text], Text) -> bool
+ """Test if a path matches any of a list of patterns.
- Arguments:
- fs (~fs.base.FS): A filesystem object
- pattern (str): A glob pattern, e.g. ``"**/*.py"``
- path (str): A path to a directory in the filesystem.
- namespaces (list): A list of additional info namespaces.
- case_sensitive (bool): If ``True``, the path matching will be
- case *sensitive* i.e. ``"FOO.py"`` and ``"foo.py"`` will
- be different, otherwise path matching will be case *insensitive*.
- exclude_dirs (list): A list of patterns to exclude when searching,
- e.g. ``["*.git"]``.
+ Will return `True` if ``patterns`` is an empty list.
+
+ Arguments:
+ patterns (list): A list of wildcard patterns, e.g. ``["*.py",
+ "*.pyc"]``
+ path (str): A resource path.
+
+ Returns:
+ bool: `True` if the path matches at least one of the patterns.
+
+ """
+ if not patterns:
+ return True
+ return any(match(pattern, path) for pattern in patterns)
+
+
+def imatch_any(patterns, path):
+ # type: (Iterable[Text], Text) -> bool
+ """Test if a path matches any of a list of patterns (case insensitive).
+
+ Will return `True` if ``patterns`` is an empty list.
+
+ Arguments:
+ patterns (list): A list of wildcard patterns, e.g. ``["*.py",
+ "*.pyc"]``
+ path (str): A resource path.
+
+ Returns:
+ bool: `True` if the path matches at least one of the patterns.
+
+ """
+ if not patterns:
+ return True
+ return any(imatch(pattern, path) for pattern in patterns)
+
+
+def get_matcher(patterns, case_sensitive, accept_prefix=False):
+ # type: (Iterable[Text], bool, bool) -> Callable[[Text], bool]
+ """Get a callable that matches paths against the given patterns.
+
+ Arguments:
+ patterns (list): A list of wildcard patterns, e.g. ``["*.py",
+ "*.pyc"]``
+ case_sensitive (bool): If ``True``, then the callable will be case
+ sensitive, otherwise it will be case insensitive.
+ accept_prefix (bool): If ``True``, the name is
+ not required to match the patterns themselves
+ but only need to be a prefix of a string that does.
+
+ Returns:
+ callable: a matcher that will return `True` if the paths given as
+ an argument matches any of the given patterns, or if no patterns
+ exist.
+
+ Example:
+ >>> from fs import glob
+ >>> is_python = glob.get_matcher(['*.py'], True)
+ >>> is_python('__init__.py')
+ True
+ >>> is_python('foo.txt')
+ False
"""
+ if not patterns:
+ return lambda path: True
+
+ if accept_prefix:
+ new_patterns = []
+ for pattern in patterns:
+ split = _split_pattern_by_sep(pattern)
+ for i in range(1, len(split)):
+ new_pattern = "/".join(split[:i])
+ new_patterns.append(new_pattern)
+ new_patterns.append(new_pattern + "/")
+ new_patterns.append(pattern)
+ patterns = new_patterns
+
+ matcher = match_any if case_sensitive else imatch_any
+ return partial(matcher, patterns)
+
+
+class Globber(object):
+ """A generator of glob results."""
def __init__(
self,
@@ -117,6 +285,20 @@ def __init__(
exclude_dirs=None,
):
# type: (FS, str, str, Optional[List[str]], bool, Optional[List[str]]) -> None
+ """Create a new Globber instance.
+
+ Arguments:
+ fs (~fs.base.FS): A filesystem object
+ pattern (str): A glob pattern, e.g. ``"**/*.py"``
+ path (str): A path to a directory in the filesystem.
+ namespaces (list): A list of additional info namespaces.
+ case_sensitive (bool): If ``True``, the path matching will be
+ case *sensitive* i.e. ``"FOO.py"`` and ``"foo.py"`` will be
+ different, otherwise path matching will be case *insensitive*.
+ exclude_dirs (list): A list of patterns to exclude when searching,
+ e.g. ``["*.git"]``.
+
+ """
self.fs = fs
self.pattern = pattern
self.path = path
@@ -138,18 +320,15 @@ def __repr__(self):
def _make_iter(self, search="breadth", namespaces=None):
# type: (str, List[str]) -> Iterator[GlobMatch]
try:
- levels, recursive, re_pattern = _PATTERN_CACHE[
- (self.pattern, self.case_sensitive)
- ]
+ levels, re_pattern = _PATTERN_CACHE[(self.pattern, self.case_sensitive)]
except KeyError:
- levels, recursive, re_pattern = _translate_glob(
- self.pattern, case_sensitive=self.case_sensitive
- )
+ levels, re_str = _translate_glob(self.pattern)
+ re_pattern = re.compile(re_str, 0 if self.case_sensitive else re.IGNORECASE)
for path, info in self.fs.walk.info(
path=self.path,
namespaces=namespaces or self.namespaces,
- max_depth=None if recursive else levels,
+ max_depth=levels,
search=search,
exclude_dirs=self.exclude_dirs,
):
@@ -160,7 +339,7 @@ def _make_iter(self, search="breadth", namespaces=None):
def __iter__(self):
# type: () -> Iterator[GlobMatch]
- """An iterator of :class:`fs.glob.GlobMatch` objects."""
+ """Get an iterator of :class:`fs.glob.GlobMatch` objects."""
return self._make_iter()
def count(self):
@@ -168,9 +347,8 @@ def count(self):
"""Count files / directories / data in matched paths.
Example:
- >>> import fs
- >>> fs.open_fs('~/projects').glob('**/*.py').count()
- Counts(files=18519, directories=0, data=206690458)
+ >>> my_fs.glob('**/*.py').count()
+ Counts(files=2, directories=0, data=55)
Returns:
`~Counts`: A named tuple containing results.
@@ -179,7 +357,7 @@ def count(self):
directories = 0
files = 0
data = 0
- for path, info in self._make_iter(namespaces=["details"]):
+ for _path, info in self._make_iter(namespaces=["details"]):
if info.is_dir:
directories += 1
else:
@@ -195,12 +373,10 @@ def count_lines(self):
`~LineCounts`: A named tuple containing line counts.
Example:
- >>> import fs
- >>> fs.open_fs('~/projects').glob('**/*.py').count_lines()
- LineCounts(lines=5767102, non_blank=4915110)
+ >>> my_fs.glob('**/*.py').count_lines()
+ LineCounts(lines=4, non_blank=3)
"""
-
lines = 0
non_blank = 0
for path, info in self._make_iter():
@@ -213,15 +389,14 @@ def count_lines(self):
def remove(self):
# type: () -> int
- """Removed all matched paths.
+ """Remove all matched paths.
Returns:
int: Number of file and directories removed.
Example:
- >>> import fs
- >>> fs.open_fs('~/projects/my_project').glob('**/*.pyc').remove()
- 29
+ >>> my_fs.glob('**/*.pyc').remove()
+ 2
"""
removes = 0
@@ -235,13 +410,10 @@ def remove(self):
class BoundGlobber(object):
- """A :class:`~Globber` object bound to a filesystem.
+ """A `~fs.glob.Globber` object bound to a filesystem.
An instance of this object is available on every Filesystem object
- as ``.glob``.
-
- Arguments:
- fs (FS): A filesystem object.
+ as the `~fs.base.FS.glob` property.
"""
@@ -249,6 +421,12 @@ class BoundGlobber(object):
def __init__(self, fs):
# type: (FS) -> None
+ """Create a new bound Globber.
+
+ Arguments:
+ fs (FS): A filesystem object to bind to.
+
+ """
self.fs = fs
def __repr__(self):
@@ -270,9 +448,7 @@ def __call__(
e.g. ``["*.git"]``.
Returns:
- `~Globber`:
- An object that may be queried for the glob matches.
-
+ `Globber`: An object that may be queried for the glob matches.
"""
return Globber(
diff --git a/fs/info.py b/fs/info.py
index d01c4c44..21bb1498 100644
--- a/fs/info.py
+++ b/fs/info.py
@@ -1,27 +1,26 @@
"""Container for filesystem resource informations.
"""
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import absolute_import, print_function, unicode_literals
import typing
from typing import cast
-from copy import deepcopy
import six
+from copy import deepcopy
-from .path import join
+from ._typing import Text, overload
from .enums import ResourceType
from .errors import MissingInfoNamespace
+from .path import join
from .permissions import Permissions
from .time import epoch_to_datetime
-from ._typing import overload, Text
-if False: # typing.TYPE_CHECKING
- from datetime import datetime
+if typing.TYPE_CHECKING:
from typing import Any, Callable, List, Mapping, Optional, Union
+ from datetime import datetime
+
RawInfo = Mapping[Text, Mapping[Text, object]]
ToDatetime = Callable[[int], datetime]
T = typing.TypeVar("T")
@@ -41,7 +40,7 @@ class Info(object):
raw_info (dict): A dict containing resource info.
to_datetime (callable): A callable that converts an
epoch time to a datetime object. The default uses
- :func:`~fs.time.epoch_to_datetime`.
+ `~fs.time.epoch_to_datetime`.
"""
@@ -49,8 +48,7 @@ class Info(object):
def __init__(self, raw_info, to_datetime=epoch_to_datetime):
# type: (RawInfo, ToDatetime) -> None
- """Create a resource info object from a raw info dict.
- """
+ """Create a resource info object from a raw info dict."""
self.raw = raw_info
self._to_datetime = to_datetime
self.namespaces = frozenset(self.raw.keys())
@@ -69,16 +67,16 @@ def __eq__(self, other):
return self.raw == getattr(other, "raw", None)
@overload
- def _make_datetime(self, t): # pragma: no cover
+ def _make_datetime(self, t):
# type: (None) -> None
pass
@overload
- def _make_datetime(self, t): # pragma: no cover
+ def _make_datetime(self, t): # noqa: F811
# type: (int) -> datetime
pass
- def _make_datetime(self, t):
+ def _make_datetime(self, t): # noqa: F811
# type: (Optional[int]) -> Optional[datetime]
if t is not None:
return self._to_datetime(t)
@@ -86,16 +84,16 @@ def _make_datetime(self, t):
return None
@overload
- def get(self, namespace, key): # pragma: no cover
+ def get(self, namespace, key):
# type: (Text, Text) -> Any
pass
- @overload
- def get(self, namespace, key, default): # pragma: no cover
+ @overload # noqa: F811
+ def get(self, namespace, key, default): # noqa: F811
# type: (Text, Text, T) -> Union[Any, T]
pass
- def get(self, namespace, key, default=None):
+ def get(self, namespace, key, default=None): # noqa: F811
# type: (Text, Text, Optional[Any]) -> Optional[Any]
"""Get a raw info value.
@@ -107,8 +105,9 @@ def get(self, namespace, key, default=None):
is not found.
Example:
- >>> info.get('access', 'permissions')
- ['u_r', 'u_w', '_wx']
+ >>> info = my_fs.getinfo("foo.py", namespaces=["details"])
+ >>> info.get('details', 'type')
+ 2
"""
try:
@@ -132,7 +131,11 @@ def is_writeable(self, namespace, key):
# type: (Text, Text) -> bool
"""Check if a given key in a namespace is writable.
- Uses `~fs.base.FS.setinfo`.
+ When creating an `Info` object, you can add a ``_write`` key to
+ each raw namespace that lists which keys are writable or not.
+
+ In general, this means they are compatible with the `setinfo`
+ function of filesystem objects.
Arguments:
namespace (str): A namespace identifier.
@@ -141,6 +144,24 @@ def is_writeable(self, namespace, key):
Returns:
bool: `True` if the key can be modified, `False` otherwise.
+ Example:
+ Create an `Info` object that marks only the ``modified`` key
+ as writable in the ``details`` namespace::
+
+ >>> now = time.time()
+ >>> info = Info({
+ ... "basic": {"name": "foo", "is_dir": False},
+ ... "details": {
+ ... "modified": now,
+ ... "created": now,
+ ... "_write": ["modified"],
+ ... }
+ ... })
+ >>> info.is_writeable("details", "created")
+ False
+ >>> info.is_writeable("details", "modified")
+ True
+
"""
_writeable = self.get(namespace, "_write", ())
return key in _writeable
@@ -160,8 +181,7 @@ def has_namespace(self, namespace):
def copy(self, to_datetime=None):
# type: (Optional[ToDatetime]) -> Info
- """Create a copy of this resource info object.
- """
+ """Create a copy of this resource info object."""
return Info(deepcopy(self.raw), to_datetime=to_datetime or self._to_datetime)
def make_path(self, dir_path):
@@ -180,21 +200,24 @@ def make_path(self, dir_path):
@property
def name(self):
# type: () -> Text
- """`str`: the resource name.
- """
+ """`str`: the resource name."""
return cast(Text, self.get("basic", "name"))
@property
def suffix(self):
# type: () -> Text
- """`str`: the last component of the name (including dot), or an
- empty string if there is no suffix.
+ """`str`: the last component of the name (with dot).
+
+ In case there is no suffix, an empty string is returned.
Example:
- >>> info
-
+ >>> info = my_fs.getinfo("foo.py")
>>> info.suffix
'.py'
+ >>> info2 = my_fs.getinfo("bar")
+ >>> info2.suffix
+ ''
+
"""
name = self.get("basic", "name")
if name.startswith(".") and name.count(".") == 1:
@@ -208,10 +231,10 @@ def suffixes(self):
"""`List`: a list of any suffixes in the name.
Example:
- >>> info
-
+ >>> info = my_fs.getinfo("foo.tar.gz")
>>> info.suffixes
['.tar', '.gz']
+
"""
name = self.get("basic", "name")
if name.startswith(".") and name.count(".") == 1:
@@ -224,8 +247,7 @@ def stem(self):
"""`str`: the name minus any suffixes.
Example:
- >>> info
-
+ >>> info = my_fs.getinfo("foo.tar.gz")
>>> info.stem
'foo'
@@ -238,29 +260,26 @@ def stem(self):
@property
def is_dir(self):
# type: () -> bool
- """`bool`: `True` if the resource references a directory.
- """
+ """`bool`: `True` if the resource references a directory."""
return cast(bool, self.get("basic", "is_dir"))
@property
def is_file(self):
# type: () -> bool
- """`bool`: `True` if the resource references a file.
- """
+ """`bool`: `True` if the resource references a file."""
return not cast(bool, self.get("basic", "is_dir"))
@property
def is_link(self):
# type: () -> bool
- """`bool`: `True` if the resource is a symlink.
- """
+ """`bool`: `True` if the resource is a symlink."""
self._require_namespace("link")
return self.get("link", "target", None) is not None
@property
def type(self):
# type: () -> ResourceType
- """`~fs.ResourceType`: the type of the resource.
+ """`~fs.enums.ResourceType`: the type of the resource.
Requires the ``"details"`` namespace.
diff --git a/fs/iotools.py b/fs/iotools.py
index 87e6997a..fbef6fef 100644
--- a/fs/iotools.py
+++ b/fs/iotools.py
@@ -1,35 +1,27 @@
"""Compatibility tools between Python 2 and Python 3 I/O interfaces.
"""
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import print_function, unicode_literals
-import io
import typing
-from io import SEEK_SET, SEEK_CUR
+
+import array
+import io
+from io import SEEK_CUR, SEEK_SET
from .mode import Mode
-if False: # typing.TYPE_CHECKING
- from io import RawIOBase, IOBase
- from typing import (
- Any,
- BinaryIO,
- Iterable,
- Iterator,
- IO,
- List,
- Optional,
- Text,
- Union,
- )
+if typing.TYPE_CHECKING:
+ from typing import IO, Any, Iterable, Iterator, List, Optional, Text, Union
+
+ import mmap
+ from io import RawIOBase
class RawWrapper(io.RawIOBase):
- """Convert a Python 2 style file-like object in to a IO object.
- """
+ """Convert a Python 2 style file-like object into an IO object."""
- def __init__(self, f, mode=None, name=None):
+ def __init__(self, f, mode=None, name=None): # noqa: D107
# type: (IO[bytes], Optional[Text], Optional[Text]) -> None
self._f = f
self.mode = mode or getattr(f, "mode", None)
@@ -90,8 +82,11 @@ def truncate(self, size=None):
return self._f.truncate(size)
def write(self, data):
- # type: (bytes) -> int
- count = self._f.write(data)
+ # type: (Union[bytes, memoryview, array.array[Any], mmap.mmap]) -> int
+ if isinstance(data, array.array):
+ count = self._f.write(data.tobytes())
+ else:
+ count = self._f.write(data) # type: ignore
return len(data) if count is None else count
@typing.no_type_check
@@ -118,7 +113,7 @@ def readinto(self, b):
except AttributeError:
data = self._f.read(len(b))
bytes_read = len(data)
- b[: len(data)] = data
+ b[:bytes_read] = data
return bytes_read
@typing.no_type_check
@@ -129,20 +124,23 @@ def readinto1(self, b):
except AttributeError:
data = self._f.read1(len(b))
bytes_read = len(data)
- b[: len(data)] = data
+ b[:bytes_read] = data
return bytes_read
- def readline(self, limit=-1):
- # type: (int) -> bytes
- return self._f.readline(limit)
+ def readline(self, limit=None):
+ # type: (Optional[int]) -> bytes
+ return self._f.readline(-1 if limit is None else limit)
- def readlines(self, hint=-1):
- # type: (int) -> List[bytes]
- return self._f.readlines(hint)
+ def readlines(self, hint=None):
+ # type: (Optional[int]) -> List[bytes]
+ return self._f.readlines(-1 if hint is None else hint)
- def writelines(self, sequence):
- # type: (Iterable[Union[bytes, bytearray]]) -> None
- return self._f.writelines(sequence)
+ def writelines(self, lines):
+ # type: (Iterable[Union[bytes, memoryview, array.array[Any], mmap.mmap]]) -> None # noqa: E501
+ _lines = (
+ line.tobytes() if isinstance(line, array.array) else line for line in lines
+ )
+ return self._f.writelines(typing.cast("Iterable[bytes]", _lines))
def __iter__(self):
# type: () -> Iterator[bytes]
@@ -162,8 +160,7 @@ def make_stream(
**kwargs # type: Any
):
# type: (...) -> IO
- """Take a Python 2.x binary file and return an IO Stream.
- """
+ """Take a Python 2.x binary file and return an IO Stream."""
reading = "r" in mode
writing = "w" in mode
appending = "a" in mode
diff --git a/fs/lrucache.py b/fs/lrucache.py
index 490d2700..8ae26de5 100644
--- a/fs/lrucache.py
+++ b/fs/lrucache.py
@@ -1,12 +1,11 @@
"""Least Recently Used cache mapping.
"""
-from __future__ import absolute_import
-from __future__ import unicode_literals
+from __future__ import absolute_import, unicode_literals
import typing
-from collections import OrderedDict
+from collections import OrderedDict
_K = typing.TypeVar("_K")
_V = typing.TypeVar("_V")
@@ -22,13 +21,13 @@ class LRUCache(OrderedDict, typing.Generic[_K, _V]):
def __init__(self, cache_size):
# type: (int) -> None
+ """Create a new LRUCache with the given size."""
self.cache_size = cache_size
super(LRUCache, self).__init__()
def __setitem__(self, key, value):
# type: (_K, _V) -> None
- """Store a new views, potentially discarding an old value.
- """
+ """Store a new value, potentially discarding an old value."""
if key not in self:
if len(self) >= self.cache_size:
self.popitem(last=False)
@@ -36,8 +35,7 @@ def __setitem__(self, key, value):
def __getitem__(self, key):
# type: (_K) -> _V
- """Get the item, but also makes it most recent.
- """
+ """Get the item, but also make it the most recent."""
_super = typing.cast(OrderedDict, super(LRUCache, self))
value = _super.__getitem__(key)
_super.__delitem__(key)
diff --git a/fs/memoryfs.py b/fs/memoryfs.py
index dcee49db..0ca5ce16 100644
--- a/fs/memoryfs.py
+++ b/fs/memoryfs.py
@@ -1,41 +1,45 @@
"""Manage a volatile in-memory filesystem.
"""
-from __future__ import absolute_import
-from __future__ import unicode_literals
+from __future__ import absolute_import, unicode_literals
+
+import typing
import contextlib
import io
import os
+import six
import time
-import typing
from collections import OrderedDict
from threading import RLock
-import six
-
from . import errors
+from ._typing import overload
from .base import FS
+from .copy import copy_modified_time
from .enums import ResourceType, Seek
from .info import Info
from .mode import Mode
-from .path import iteratepath
-from .path import normpath
-from .path import split
-from ._typing import overload
+from .path import isbase, iteratepath, normpath, split
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import (
Any,
BinaryIO,
Collection,
Dict,
+ Iterable,
Iterator,
List,
Optional,
SupportsInt,
- Union,
Text,
+ Tuple,
+ Union,
)
+
+ import array
+ import mmap
+
from .base import _OpendirFactory
from .info import RawInfo
from .permissions import Permissions
@@ -75,6 +79,11 @@ def __str__(self):
_template = ""
return _template.format(path=self._path, mode=self._mode)
+ @property
+ def mode(self):
+ # type: () -> Text
+ return self._mode.to_platform_bin()
+
@contextlib.contextmanager
def _seek_lock(self):
# type: () -> Iterator[None]
@@ -85,14 +94,12 @@ def _seek_lock(self):
def on_modify(self): # noqa: D401
# type: () -> None
- """Called when file data is modified.
- """
+ """Called when file data is modified."""
self._dir_entry.modified_time = self.modified_time = time.time()
def on_access(self): # noqa: D401
# type: () -> None
- """Called when file is accessed.
- """
+ """Called when file is accessed."""
self._dir_entry.accessed_time = self.accessed_time = time.time()
def flush(self):
@@ -108,12 +115,15 @@ def __iter__(self):
def next(self):
# type: () -> bytes
with self._seek_lock():
+ self.on_access()
return next(self._bytes_io)
__next__ = next
- def readline(self, size=-1):
- # type: (int) -> bytes
+ def readline(self, size=None):
+ # type: (Optional[int]) -> bytes
+ if not self._mode.reading:
+ raise IOError("File not open for reading")
with self._seek_lock():
self.on_access()
return self._bytes_io.readline(size)
@@ -125,7 +135,7 @@ def close(self):
self._dir_entry.remove_open_file(self)
super(_MemoryFile, self).close()
- def read(self, size=-1):
+ def read(self, size=None):
# type: (Optional[int]) -> bytes
if not self._mode.reading:
raise IOError("File not open for reading")
@@ -137,9 +147,20 @@ def readable(self):
# type: () -> bool
return self._mode.reading
+ def readinto(self, buffer):
+ # type: (bytearray) -> Optional[int]
+ if not self._mode.reading:
+ raise IOError("File not open for reading")
+ with self._seek_lock():
+ self.on_access()
+ return self._bytes_io.readinto(buffer)
+
def readlines(self, hint=-1):
# type: (int) -> List[bytes]
+ if not self._mode.reading:
+ raise IOError("File not open for reading")
with self._seek_lock():
+ self.on_access()
return self._bytes_io.readlines(hint)
def seekable(self):
@@ -173,17 +194,15 @@ def writable(self):
return self._mode.writing
def write(self, data):
- # type: (bytes) -> int
+ # type: (Union[bytes, memoryview, array.array[Any], mmap.mmap]) -> int
if not self._mode.writing:
raise IOError("File not open for writing")
with self._seek_lock():
self.on_modify()
return self._bytes_io.write(data)
- def writelines(self, sequence): # type: ignore
- # type: (List[bytes]) -> None
- # FIXME(@althonos): For some reason the stub for IOBase.writelines
- # is List[Any] ?! It should probably be Iterable[ByteString]
+ def writelines(self, sequence):
+ # type: (Iterable[Union[bytes, memoryview, array.array[Any], mmap.mmap]]) -> None # noqa: E501
with self._seek_lock():
self.on_modify()
self._bytes_io.writelines(sequence)
@@ -229,21 +248,21 @@ def size(self):
return _bytes_file.tell()
@overload
- def get_entry(self, name, default): # pragma: no cover
+ def get_entry(self, name, default): # noqa: F811
# type: (Text, _DirEntry) -> _DirEntry
pass
@overload
- def get_entry(self, name): # pragma: no cover
+ def get_entry(self, name): # noqa: F811
# type: (Text) -> Optional[_DirEntry]
pass
@overload
- def get_entry(self, name, default): # pragma: no cover
+ def get_entry(self, name, default): # noqa: F811
# type: (Text, None) -> Optional[_DirEntry]
pass
- def get_entry(self, name, default=None):
+ def get_entry(self, name, default=None): # noqa: F811
# type: (Text, Optional[_DirEntry]) -> Optional[_DirEntry]
assert self.is_dir, "must be a directory"
return self._dir.get(name, default)
@@ -256,6 +275,10 @@ def remove_entry(self, name):
# type: (Text) -> None
del self._dir[name]
+ def clear(self):
+ # type: () -> None
+ self._dir.clear()
+
def __contains__(self, name):
# type: (object) -> bool
return name in self._dir
@@ -276,6 +299,21 @@ def remove_open_file(self, memory_file):
# type: (_MemoryFile) -> None
self._open_files.remove(memory_file)
+ def to_info(self, namespaces=None):
+ # type: (Optional[Collection[Text]]) -> Info
+ namespaces = namespaces or ()
+ info = {"basic": {"name": self.name, "is_dir": self.is_dir}}
+ if "details" in namespaces:
+ info["details"] = {
+ "_write": ["accessed", "modified"],
+ "type": int(self.resource_type),
+ "size": self.size,
+ "accessed": self.accessed_time,
+ "modified": self.modified_time,
+ "created": self.created_time,
+ }
+ return Info(info)
+
@six.python_2_unicode_compatible
class MemoryFS(FS):
@@ -286,12 +324,16 @@ class MemoryFS(FS):
fast, but non-permanent. The `MemoryFS` constructor takes no
arguments.
- Example:
- >>> mem_fs = MemoryFS()
+ Examples:
+ Create with the constructor::
+
+ >>> from fs.memoryfs import MemoryFS
+ >>> mem_fs = MemoryFS()
- Or via an FS URL:
- >>> import fs
- >>> mem_fs = fs.open_fs('mem://')
+ Or via an FS URL::
+
+ >>> import fs
+ >>> mem_fs = fs.open_fs('mem://')
"""
@@ -307,8 +349,7 @@ class MemoryFS(FS):
def __init__(self):
# type: () -> None
- """Create an in-memory filesystem.
- """
+ """Create an in-memory filesystem."""
self._meta = self._meta.copy()
self.root = self._make_dir_entry(ResourceType.directory, "")
super(MemoryFS, self).__init__()
@@ -327,8 +368,7 @@ def _make_dir_entry(self, resource_type, name):
def _get_dir_entry(self, dir_path):
# type: (Text) -> Optional[_DirEntry]
- """Get a directory entry, or `None` if one doesn't exist.
- """
+ """Get a directory entry, or `None` if one doesn't exist."""
with self._lock:
dir_path = normpath(dir_path)
current_entry = self.root # type: Optional[_DirEntry]
@@ -342,41 +382,33 @@ def _get_dir_entry(self, dir_path):
def close(self):
# type: () -> None
- self.root = None
+ if not self._closed:
+ del self.root
return super(MemoryFS, self).close()
def getinfo(self, path, namespaces=None):
# type: (Text, Optional[Collection[Text]]) -> Info
- namespaces = namespaces or ()
_path = self.validatepath(path)
dir_entry = self._get_dir_entry(_path)
if dir_entry is None:
raise errors.ResourceNotFound(path)
- info = {"basic": {"name": dir_entry.name, "is_dir": dir_entry.is_dir}}
- if "details" in namespaces:
- info["details"] = {
- "_write": ["accessed", "modified"],
- "type": int(dir_entry.resource_type),
- "size": dir_entry.size,
- "accessed": dir_entry.accessed_time,
- "modified": dir_entry.modified_time,
- "created": dir_entry.created_time,
- }
- return Info(info)
+ return dir_entry.to_info(namespaces=namespaces)
def listdir(self, path):
# type: (Text) -> List[Text]
self.check()
_path = self.validatepath(path)
with self._lock:
+ # locate and validate the entry corresponding to the given path
dir_entry = self._get_dir_entry(_path)
if dir_entry is None:
raise errors.ResourceNotFound(path)
if not dir_entry.is_dir:
raise errors.DirectoryExpected(path)
+ # return the filenames in the order they were created
return dir_entry.list()
- if False: # typing.TYPE_CHECKING
+ if typing.TYPE_CHECKING:
def opendir(self, path, factory=None):
# type: (_M, Text, Optional[_OpendirFactory]) -> SubFS[_M]
@@ -412,6 +444,74 @@ def makedir(
parent_dir.set_entry(dir_name, new_dir)
return self.opendir(path)
+ def move(self, src_path, dst_path, overwrite=False, preserve_time=False):
+ src_dir, src_name = split(self.validatepath(src_path))
+ dst_dir, dst_name = split(self.validatepath(dst_path))
+
+ with self._lock:
+ src_dir_entry = self._get_dir_entry(src_dir)
+ if src_dir_entry is None or src_name not in src_dir_entry:
+ raise errors.ResourceNotFound(src_path)
+ src_entry = src_dir_entry.get_entry(src_name)
+ if src_entry.is_dir:
+ raise errors.FileExpected(src_path)
+
+ dst_dir_entry = self._get_dir_entry(dst_dir)
+ if dst_dir_entry is None:
+ raise errors.ResourceNotFound(dst_path)
+ elif not overwrite and dst_name in dst_dir_entry:
+ raise errors.DestinationExists(dst_path)
+
+ # handle moving a file onto itself
+ if src_dir == dst_dir and src_name == dst_name:
+ if overwrite:
+ return
+ raise errors.DestinationExists(dst_path)
+
+ # move the entry from the src folder to the dst folder
+ dst_dir_entry.set_entry(dst_name, src_entry)
+ src_dir_entry.remove_entry(src_name)
+ # make sure to update the entry name itself (see #509)
+ src_entry.name = dst_name
+
+ if preserve_time:
+ copy_modified_time(self, src_path, self, dst_path)
+
+ def movedir(self, src_path, dst_path, create=False, preserve_time=False):
+ _src_path = self.validatepath(src_path)
+ _dst_path = self.validatepath(dst_path)
+ dst_dir, dst_name = split(_dst_path)
+ src_dir, src_name = split(_src_path)
+
+ # move a dir onto itself
+ if _src_path == _dst_path:
+ return
+ # move a dir into itself
+ if isbase(_src_path, _dst_path):
+ raise errors.IllegalDestination(dst_path)
+
+ with self._lock:
+ src_dir_entry = self._get_dir_entry(src_dir)
+ if src_dir_entry is None or src_name not in src_dir_entry:
+ raise errors.ResourceNotFound(src_path)
+ src_entry = src_dir_entry.get_entry(src_name)
+ if not src_entry.is_dir:
+ raise errors.DirectoryExpected(src_path)
+
+ # locate and validate the destination folder entry
+ dst_dir_entry = self._get_dir_entry(dst_dir)
+ if dst_dir_entry is None or (not create and dst_name not in dst_dir_entry):
+ raise errors.ResourceNotFound(dst_path)
+
+ # move the entry from the src folder to the dst folder
+ dst_dir_entry.set_entry(dst_name, src_entry)
+ src_dir_entry.remove_entry(src_name)
+ # make sure to update the entry name itself (see #509)
+ src_entry.name = dst_name
+
+ if preserve_time:
+ copy_modified_time(self, src_path, self, dst_path)
+
def openbin(self, path, mode="r", buffering=-1, **options):
# type: (Text, Text, int, **Any) -> BinaryIO
_mode = Mode(mode)
@@ -478,12 +578,29 @@ def remove(self, path):
def removedir(self, path):
# type: (Text) -> None
+ # make sure we are not removing root
_path = self.validatepath(path)
-
if _path == "/":
raise errors.RemoveRootError()
+ # make sure the directory is empty
+ if not self.isempty(path):
+ raise errors.DirectoryNotEmpty(path)
+ # we can now delegate to removetree since we confirmed that
+ # * path exists (isempty)
+ # * path is a folder (isempty)
+ # * path is not root
+ self.removetree(_path)
+
+ def removetree(self, path):
+ # type: (Text) -> None
+ _path = self.validatepath(path)
with self._lock:
+
+ if _path == "/":
+ self.root.clear()
+ return
+
dir_path, file_name = split(_path)
parent_dir_entry = self._get_dir_entry(dir_path)
@@ -494,11 +611,34 @@ def removedir(self, path):
if not dir_dir_entry.is_dir:
raise errors.DirectoryExpected(path)
- if len(dir_dir_entry):
- raise errors.DirectoryNotEmpty(path)
-
parent_dir_entry.remove_entry(file_name)
+ def scandir(
+ self,
+ path, # type: Text
+ namespaces=None, # type: Optional[Collection[Text]]
+ page=None, # type: Optional[Tuple[int, int]]
+ ):
+ # type: (...) -> Iterator[Info]
+ self.check()
+ _path = self.validatepath(path)
+ with self._lock:
+ # locate and validate the entry corresponding to the given path
+ dir_entry = self._get_dir_entry(_path)
+ if dir_entry is None:
+ raise errors.ResourceNotFound(path)
+ if not dir_entry.is_dir:
+ raise errors.DirectoryExpected(path)
+ # if paging was requested, slice the filenames
+ filenames = dir_entry.list()
+ if page is not None:
+ start, end = page
+ filenames = filenames[start:end]
+ # yield info with the right namespaces
+ for name in filenames:
+ entry = typing.cast(_DirEntry, dir_entry.get_entry(name))
+ yield entry.to_info(namespaces=namespaces)
+
def setinfo(self, path, info):
# type: (Text, RawInfo) -> None
_path = self.validatepath(path)
diff --git a/fs/mirror.py b/fs/mirror.py
index 98f3d5f1..70b2dc5f 100644
--- a/fs/mirror.py
+++ b/fs/mirror.py
@@ -16,10 +16,8 @@
"""
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import print_function, unicode_literals
-from contextlib import contextmanager
import typing
from ._bulk import Copier
@@ -29,8 +27,9 @@
from .tools import is_thread_safe
from .walk import Walker
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import Callable, Optional, Text, Union
+
from .base import FS
from .info import Info
@@ -58,6 +57,7 @@ def mirror(
walker=None, # type: Optional[Walker]
copy_if_newer=True, # type: bool
workers=0, # type: int
+ preserve_time=False, # type: bool
):
# type: (...) -> None
"""Mirror files / directories from one filesystem to another.
@@ -74,6 +74,9 @@ def mirror(
workers (int): Number of worker threads used
(0 for single threaded). Set to a relatively low number
for network filesystems, 4 would be a good start.
+ preserve_time (bool): If `True`, try to preserve mtime of the
+ resources (defaults to `False`).
+
"""
def src():
@@ -83,22 +86,30 @@ def dst():
return manage_fs(dst_fs, create=True)
with src() as _src_fs, dst() as _dst_fs:
- with _src_fs.lock(), _dst_fs.lock():
- _thread_safe = is_thread_safe(_src_fs, _dst_fs)
- with Copier(num_workers=workers if _thread_safe else 0) as copier:
+ _thread_safe = is_thread_safe(_src_fs, _dst_fs)
+ with Copier(
+ num_workers=workers if _thread_safe else 0, preserve_time=preserve_time
+ ) as copier:
+ with _src_fs.lock(), _dst_fs.lock():
_mirror(
_src_fs,
_dst_fs,
walker=walker,
copy_if_newer=copy_if_newer,
copy_file=copier.copy,
+ preserve_time=preserve_time,
)
def _mirror(
- src_fs, dst_fs, walker=None, copy_if_newer=True, copy_file=copy_file_internal
+ src_fs, # type: FS
+ dst_fs, # type: FS
+ walker=None, # type: Optional[Walker]
+ copy_if_newer=True, # type: bool
+ copy_file=copy_file_internal, # type: Callable[[FS, str, FS, str, bool], None]
+ preserve_time=False, # type: bool
):
- # type: (FS, FS, Optional[Walker], bool, Callable[[FS, str, FS, str], None]) -> None
+ # type: (...) -> None
walker = walker or Walker()
walk = walker.walk(src_fs, namespaces=["details"])
for path, dirs, files in walk:
@@ -122,7 +133,7 @@ def _mirror(
# Compare file info
if copy_if_newer and not _compare(_file, dst_file):
continue
- copy_file(src_fs, _path, dst_fs, _path)
+ copy_file(src_fs, _path, dst_fs, _path, preserve_time)
# Make directories
for _dir in dirs:
diff --git a/fs/mode.py b/fs/mode.py
index 96e00566..c719340c 100644
--- a/fs/mode.py
+++ b/fs/mode.py
@@ -5,8 +5,7 @@
"""
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import print_function, unicode_literals
import typing
@@ -14,9 +13,8 @@
from ._typing import Text
-
-if False: # typing.TYPE_CHECKING
- from typing import Container, FrozenSet, Set, Union
+if typing.TYPE_CHECKING:
+ from typing import FrozenSet, Set, Union
__all__ = ["Mode", "check_readable", "check_writable", "validate_openbin_mode"]
@@ -31,12 +29,6 @@ class Mode(typing.Container[Text]):
`mode strings `_
used when opening files.
- Arguments:
- mode (str): A *mode* string, as used by `io.open`.
-
- Raises:
- ValueError: If the mode string is invalid.
-
Example:
>>> mode = Mode('rb')
>>> mode.reading
@@ -52,6 +44,15 @@ class Mode(typing.Container[Text]):
def __init__(self, mode):
# type: (Text) -> None
+ """Create a new `Mode` instance.
+
+ Arguments:
+ mode (str): A *mode* string, as used by `io.open`.
+
+ Raises:
+ ValueError: If the mode string is invalid.
+
+ """
self._mode = mode
self.validate()
@@ -65,8 +66,8 @@ def __str__(self):
def __contains__(self, character):
# type: (object) -> bool
- """Check if a mode contains a given character.
- """
+ """Check if a mode contains a given character."""
+ assert isinstance(character, Text)
return character in self._mode
def to_platform(self):
@@ -83,8 +84,7 @@ def to_platform_bin(self):
# type: () -> Text
"""Get a *binary* mode string for the current platform.
- Currently, this just removes the 'x' on PY2 because PY2 doesn't
- support exclusive mode.
+ This removes the 't' and adds a 'b' if needed.
"""
_mode = self.to_platform().replace("t", "")
@@ -123,64 +123,55 @@ def validate_bin(self):
@property
def create(self):
# type: () -> bool
- """`bool`: `True` if the mode would create a file.
- """
+ """`bool`: `True` if the mode would create a file."""
return "a" in self or "w" in self or "x" in self
@property
def reading(self):
# type: () -> bool
- """`bool`: `True` if the mode permits reading.
- """
+ """`bool`: `True` if the mode permits reading."""
return "r" in self or "+" in self
@property
def writing(self):
# type: () -> bool
- """`bool`: `True` if the mode permits writing.
- """
+ """`bool`: `True` if the mode permits writing."""
return "w" in self or "a" in self or "+" in self or "x" in self
@property
def appending(self):
# type: () -> bool
- """`bool`: `True` if the mode permits appending.
- """
+ """`bool`: `True` if the mode permits appending."""
return "a" in self
@property
def updating(self):
# type: () -> bool
- """`bool`: `True` if the mode permits both reading and writing.
- """
+ """`bool`: `True` if the mode permits both reading and writing."""
return "+" in self
@property
def truncate(self):
# type: () -> bool
- """`bool`: `True` if the mode would truncate an existing file.
- """
+ """`bool`: `True` if the mode would truncate an existing file."""
return "w" in self or "x" in self
@property
def exclusive(self):
# type: () -> bool
- """`bool`: `True` if the mode require exclusive creation.
- """
+ """`bool`: `True` if the mode requires exclusive creation."""
return "x" in self
@property
def binary(self):
# type: () -> bool
- """`bool`: `True` if a mode specifies binary.
- """
+ """`bool`: `True` if a mode specifies binary."""
return "b" in self
@property
def text(self):
# type: () -> bool
- """`bool`: `True` if a mode specifies text.
- """
+ """`bool`: `True` if a mode specifies text."""
return "t" in self or "b" not in self
diff --git a/fs/mountfs.py b/fs/mountfs.py
index aa314ed5..92fba6d7 100644
--- a/fs/mountfs.py
+++ b/fs/mountfs.py
@@ -1,9 +1,7 @@
"""Manage other filesystems as a folder hierarchy.
"""
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import absolute_import, print_function, unicode_literals
import typing
@@ -12,19 +10,16 @@
from . import errors
from .base import FS
from .memoryfs import MemoryFS
-from .path import abspath
-from .path import forcedir
-from .path import normpath
-from .mode import validate_open_mode
-from .mode import validate_openbin_mode
+from .mode import validate_open_mode, validate_openbin_mode
+from .path import abspath, forcedir, normpath
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import (
+ IO,
Any,
BinaryIO,
Collection,
Iterator,
- IO,
List,
MutableSequence,
Optional,
@@ -32,6 +27,7 @@
Tuple,
Union,
)
+
from .enums import ResourceType
from .info import Info, RawInfo
from .permissions import Permissions
@@ -41,18 +37,11 @@
class MountError(Exception):
- """Thrown when mounts conflict.
- """
+ """Thrown when mounts conflict."""
class MountFS(FS):
- """A virtual filesystem that maps directories on to other file-systems.
-
- Arguments:
- auto_close (bool): If `True` (the default), the child
- filesystems will be closed when `MountFS` is closed.
-
- """
+ """A virtual filesystem that maps directories on to other file-systems."""
_meta = {
"virtual": True,
@@ -64,6 +53,13 @@ class MountFS(FS):
def __init__(self, auto_close=True):
# type: (bool) -> None
+ """Create a new `MountFS` instance.
+
+ Arguments:
+ auto_close (bool): If `True` (the default), the child
+ filesystems will be closed when `MountFS` is closed.
+
+ """
super(MountFS, self).__init__()
self.auto_close = auto_close
self.default_fs = MemoryFS() # type: FS
diff --git a/fs/move.py b/fs/move.py
index 5da6b82d..752b5816 100644
--- a/fs/move.py
+++ b/fs/move.py
@@ -1,22 +1,30 @@
"""Functions for moving files between filesystems.
"""
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import print_function, unicode_literals
import typing
-from .copy import copy_dir
-from .copy import copy_file
+from ._pathcompat import commonpath
+from .copy import copy_dir, copy_file
+from .errors import FSError
from .opener import manage_fs
+from .osfs import OSFS
+from .path import frombase
-if False: # typing.TYPE_CHECKING
- from .base import FS
+if typing.TYPE_CHECKING:
from typing import Text, Union
+ from .base import FS
+
-def move_fs(src_fs, dst_fs, workers=0):
- # type: (Union[Text, FS], Union[Text, FS], int) -> None
+def move_fs(
+ src_fs, # type: Union[Text, FS]
+ dst_fs, # type: Union[Text, FS]
+ workers=0, # type: int
+ preserve_time=False, # type: bool
+):
+ # type: (...) -> None
"""Move the contents of a filesystem to another filesystem.
Arguments:
@@ -24,9 +32,11 @@ def move_fs(src_fs, dst_fs, workers=0):
dst_fs (FS or str): Destination filesystem (instance or URL).
workers (int): Use `worker` threads to copy data, or ``0`` (default) for
a single-threaded copy.
+ preserve_time (bool): If `True`, try to preserve mtime of the
+ resources (defaults to `False`).
"""
- move_dir(src_fs, "/", dst_fs, "/", workers=workers)
+ move_dir(src_fs, "/", dst_fs, "/", workers=workers, preserve_time=preserve_time)
def move_file(
@@ -34,6 +44,8 @@ def move_file(
src_path, # type: Text
dst_fs, # type: Union[Text, FS]
dst_path, # type: Text
+ preserve_time=False, # type: bool
+ cleanup_dst_on_error=True, # type: bool
):
# type: (...) -> None
"""Move a file from one filesystem to another.
@@ -41,20 +53,64 @@ def move_file(
Arguments:
src_fs (FS or str): Source filesystem (instance or URL).
src_path (str): Path to a file on ``src_fs``.
- dst_fs (FS or str); Destination filesystem (instance or URL).
+ dst_fs (FS or str): Destination filesystem (instance or URL).
dst_path (str): Path to a file on ``dst_fs``.
+ preserve_time (bool): If `True`, try to preserve mtime of the
+ resources (defaults to `False`).
+ cleanup_dst_on_error (bool): If `True`, tries to delete the file copied to
+ ``dst_fs`` if deleting the file from ``src_fs`` fails (defaults to `True`).
"""
- with manage_fs(src_fs) as _src_fs:
- with manage_fs(dst_fs, create=True) as _dst_fs:
+ with manage_fs(src_fs, writeable=True) as _src_fs:
+ with manage_fs(dst_fs, writeable=True, create=True) as _dst_fs:
if _src_fs is _dst_fs:
# Same filesystem, may be optimized
- _src_fs.move(src_path, dst_path, overwrite=True)
- else:
- # Standard copy and delete
- with _src_fs.lock(), _dst_fs.lock():
- copy_file(_src_fs, src_path, _dst_fs, dst_path)
+ _src_fs.move(
+ src_path, dst_path, overwrite=True, preserve_time=preserve_time
+ )
+ return
+
+ if _src_fs.hassyspath(src_path) and _dst_fs.hassyspath(dst_path):
+ # if both filesystems have a syspath we create a new OSFS from a
+ # common parent folder and use it to move the file.
+ try:
+ src_syspath = _src_fs.getsyspath(src_path)
+ dst_syspath = _dst_fs.getsyspath(dst_path)
+ common = commonpath([src_syspath, dst_syspath])
+ if common:
+ rel_src = frombase(common, src_syspath)
+ rel_dst = frombase(common, dst_syspath)
+ with _src_fs.lock(), _dst_fs.lock():
+ with OSFS(common) as base:
+ base.move(
+ rel_src,
+ rel_dst,
+ overwrite=True,
+ preserve_time=preserve_time,
+ )
+ return # optimization worked, exit early
+ except ValueError:
+ # This is raised if we cannot find a common base folder.
+ # In this case just fall through to the standard method.
+ pass
+
+ # Standard copy and delete
+ with _src_fs.lock(), _dst_fs.lock():
+ copy_file(
+ _src_fs,
+ src_path,
+ _dst_fs,
+ dst_path,
+ preserve_time=preserve_time,
+ )
+ try:
_src_fs.remove(src_path)
+ except FSError as e:
+ # if the source cannot be removed we delete the copy on the
+ # destination
+ if cleanup_dst_on_error:
+ _dst_fs.remove(dst_path)
+ raise e
def move_dir(
@@ -63,6 +119,7 @@ def move_dir(
dst_fs, # type: Union[Text, FS]
dst_path, # type: Text
workers=0, # type: int
+ preserve_time=False, # type: bool
):
# type: (...) -> None
"""Move a directory from one filesystem to another.
@@ -72,19 +129,22 @@ def move_dir(
src_path (str): Path to a directory on ``src_fs``
dst_fs (FS or str): Destination filesystem (instance or URL).
dst_path (str): Path to a directory on ``dst_fs``.
- workers (int): Use `worker` threads to copy data, or ``0`` (default) for
- a single-threaded copy.
+ workers (int): Use ``worker`` threads to copy data, or ``0``
+ (default) for a single-threaded copy.
+ preserve_time (bool): If `True`, try to preserve mtime of the
+ resources (defaults to `False`).
"""
-
- def src():
- return manage_fs(src_fs, writeable=False)
-
- def dst():
- return manage_fs(dst_fs, create=True)
-
- with src() as _src_fs, dst() as _dst_fs:
- with _src_fs.lock(), _dst_fs.lock():
- _dst_fs.makedir(dst_path, recreate=True)
- copy_dir(src_fs, src_path, dst_fs, dst_path, workers=workers)
- _src_fs.removetree(src_path)
+ with manage_fs(src_fs, writeable=True) as _src_fs:
+ with manage_fs(dst_fs, writeable=True, create=True) as _dst_fs:
+ with _src_fs.lock(), _dst_fs.lock():
+ _dst_fs.makedir(dst_path, recreate=True)
+ copy_dir(
+ src_fs,
+ src_path,
+ dst_fs,
+ dst_path,
+ workers=workers,
+ preserve_time=preserve_time,
+ )
+ _src_fs.removetree(src_path)
diff --git a/fs/multifs.py b/fs/multifs.py
index 60a3ad40..125d0de4 100644
--- a/fs/multifs.py
+++ b/fs/multifs.py
@@ -1,14 +1,12 @@
"""Manage several filesystems through a single view.
"""
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from __future__ import print_function
+from __future__ import absolute_import, print_function, unicode_literals
import typing
-from collections import namedtuple, OrderedDict
-from operator import itemgetter
+from collections import OrderedDict, namedtuple
+from operator import itemgetter
from six import text_type
from . import errors
@@ -17,20 +15,21 @@
from .opener import open_fs
from .path import abspath, normpath
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import (
+ IO,
Any,
BinaryIO,
Collection,
Iterator,
- IO,
- MutableMapping,
List,
+ MutableMapping,
MutableSet,
Optional,
Text,
Tuple,
)
+
from .enums import ResourceType
from .info import Info, RawInfo
from .permissions import Permissions
@@ -55,6 +54,13 @@ class MultiFS(FS):
def __init__(self, auto_close=True):
# type: (bool) -> None
+ """Create a new MultiFS.
+
+ Arguments:
+ auto_close (bool): If `True` (the default), the child
+ filesystems will be closed when `MultiFS` is closed.
+
+ """
super(MultiFS, self).__init__()
self._auto_close = auto_close
@@ -127,14 +133,12 @@ def get_fs(self, name):
def _resort(self):
# type: () -> None
- """Force `iterate_fs` to re-sort on next reference.
- """
+ """Force `iterate_fs` to re-sort on next reference."""
self._fs_sequence = None
def iterate_fs(self):
# type: () -> Iterator[Tuple[Text, FS]]
- """Get iterator that returns (name, fs) in priority order.
- """
+ """Get iterator that returns (name, fs) in priority order."""
if self._fs_sequence is None:
self._fs_sequence = [
(name, fs)
@@ -146,8 +150,7 @@ def iterate_fs(self):
def _delegate(self, path):
# type: (Text) -> Optional[FS]
- """Get a filesystem which has a given path.
- """
+ """Get a filesystem which has a given path."""
for _name, fs in self.iterate_fs():
if fs.exists(path):
return fs
@@ -155,8 +158,7 @@ def _delegate(self, path):
def _delegate_required(self, path):
# type: (Text) -> FS
- """Check that there is a filesystem with the given ``path``.
- """
+ """Check that there is a filesystem with the given ``path``."""
fs = self._delegate(path)
if fs is None:
raise errors.ResourceNotFound(path)
@@ -164,8 +166,7 @@ def _delegate_required(self, path):
def _writable_required(self, path):
# type: (Text) -> FS
- """Check that ``path`` is writeable.
- """
+ """Check that ``path`` is writeable."""
if self.write_fs is None:
raise errors.ResourceReadOnly(path)
return self.write_fs
diff --git a/fs/opener/__init__.py b/fs/opener/__init__.py
index 91870e7a..651a630b 100644
--- a/fs/opener/__init__.py
+++ b/fs/opener/__init__.py
@@ -3,16 +3,16 @@
"""
# Declare fs.opener as a namespace package
-__import__("pkg_resources").declare_namespace(__name__)
+__import__("pkg_resources").declare_namespace(__name__) # type: ignore
+
+# Import opener modules so that `registry.install` is called on each opener
+from . import appfs, ftpfs, memoryfs, osfs, tarfs, tempfs, zipfs
# Import objects into fs.opener namespace
from .base import Opener
from .parse import parse_fs_url as parse
from .registry import registry
-# Import opener modules so that `registry.install` if called on each opener
-from . import appfs, ftpfs, memoryfs, osfs, tarfs, tempfs, zipfs
-
# Alias functions defined as Registry methods
open_fs = registry.open_fs
open = registry.open
diff --git a/fs/opener/appfs.py b/fs/opener/appfs.py
index 93cffef0..db8bd34f 100644
--- a/fs/opener/appfs.py
+++ b/fs/opener/appfs.py
@@ -2,27 +2,25 @@
"""``AppFS`` opener definition.
"""
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import absolute_import, print_function, unicode_literals
import typing
from .base import Opener
-from .registry import registry
from .errors import OpenerError
+from .registry import registry
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import Text, Union
- from .parse import ParseResult
+
from ..appfs import _AppFS
from ..subfs import SubFS
+ from .parse import ParseResult
@registry.install
class AppFSOpener(Opener):
- """``AppFS`` opener.
- """
+ """``AppFS`` opener."""
protocols = ["userdata", "userconf", "sitedata", "siteconf", "usercache", "userlog"]
_protocol_mapping = None
@@ -37,8 +35,8 @@ def open_fs(
):
# type: (...) -> Union[_AppFS, SubFS[_AppFS]]
- from ..subfs import ClosingSubFS
from .. import appfs
+ from ..subfs import ClosingSubFS
if self._protocol_mapping is None:
self._protocol_mapping = {
diff --git a/fs/opener/base.py b/fs/opener/base.py
index 952eea53..5facaaae 100644
--- a/fs/opener/base.py
+++ b/fs/opener/base.py
@@ -2,13 +2,14 @@
"""`Opener` abstract base class.
"""
-import abc
import typing
+import abc
import six
-if False: # typing.TYPE_CHECKING
- from typing import List, Text, Union
+if typing.TYPE_CHECKING:
+ from typing import List, Text
+
from ..base import FS
from .parse import ParseResult
diff --git a/fs/opener/errors.py b/fs/opener/errors.py
index 593eb168..7c8ae8a5 100644
--- a/fs/opener/errors.py
+++ b/fs/opener/errors.py
@@ -4,25 +4,20 @@
class ParseError(ValueError):
- """Attempt to parse an invalid FS URL.
- """
+ """Attempt to parse an invalid FS URL."""
class OpenerError(Exception):
- """Base exception for opener related errors.
- """
+ """Base exception for opener related errors."""
class UnsupportedProtocol(OpenerError):
- """No opener found for the given protocol.
- """
+ """No opener found for the given protocol."""
class EntryPointError(OpenerError):
- """An entry point could not be loaded.
- """
+ """An entry point could not be loaded."""
class NotWriteable(OpenerError):
- """A writable FS could not be created.
- """
+ """A writable FS could not be created."""
diff --git a/fs/opener/ftpfs.py b/fs/opener/ftpfs.py
index 4c30c962..6729ee1e 100644
--- a/fs/opener/ftpfs.py
+++ b/fs/opener/ftpfs.py
@@ -2,31 +2,27 @@
"""`FTPFS` opener definition.
"""
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
-
-import six
+from __future__ import absolute_import, print_function, unicode_literals
import typing
+from ..errors import CreateFailed
from .base import Opener
from .registry import registry
-from ..errors import FSError, CreateFailed
-if False: # typing.TYPE_CHECKING
- from typing import List, Text, Union
- from ..ftpfs import FTPFS
+if typing.TYPE_CHECKING:
+ from typing import Text, Union
+
+ from ..ftpfs import FTPFS # noqa: F401
from ..subfs import SubFS
from .parse import ParseResult
@registry.install
class FTPOpener(Opener):
- """`FTPFS` opener.
- """
+ """`FTPFS` opener."""
- protocols = ["ftp"]
+ protocols = ["ftp", "ftps"]
@CreateFailed.catch_all
def open_fs(
@@ -51,6 +47,7 @@ def open_fs(
passwd=parse_result.password,
proxy=parse_result.params.get("proxy"),
timeout=int(parse_result.params.get("timeout", "10")),
+ tls=bool(parse_result.protocol == "ftps"),
)
if dir_path:
if create:
diff --git a/fs/opener/memoryfs.py b/fs/opener/memoryfs.py
index 8b8976c3..400d2d9c 100644
--- a/fs/opener/memoryfs.py
+++ b/fs/opener/memoryfs.py
@@ -2,25 +2,23 @@
"""`MemoryFS` opener definition.
"""
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import absolute_import, print_function, unicode_literals
import typing
from .base import Opener
from .registry import registry
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import Text
+
+ from ..memoryfs import MemoryFS # noqa: F401
from .parse import ParseResult
- from ..memoryfs import MemoryFS
@registry.install
class MemOpener(Opener):
- """`MemoryFS` opener.
- """
+ """`MemoryFS` opener."""
protocols = ["mem"]
diff --git a/fs/opener/osfs.py b/fs/opener/osfs.py
index 986de249..e9c3fc45 100644
--- a/fs/opener/osfs.py
+++ b/fs/opener/osfs.py
@@ -2,25 +2,23 @@
"""`OSFS` opener definition.
"""
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import absolute_import, print_function, unicode_literals
import typing
from .base import Opener
from .registry import registry
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import Text
+
+ from ..osfs import OSFS # noqa: F401
from .parse import ParseResult
- from ..osfs import OSFS
@registry.install
class OSFSOpener(Opener):
- """`OSFS` opener.
- """
+ """`OSFS` opener."""
protocols = ["file", "osfs"]
@@ -33,8 +31,9 @@ def open_fs(
cwd, # type: Text
):
# type: (...) -> OSFS
+ from os.path import abspath, expanduser, join, normpath
+
from ..osfs import OSFS
- from os.path import abspath, expanduser, normpath, join
_path = abspath(join(cwd, expanduser(parse_result.resource)))
path = normpath(_path)
diff --git a/fs/opener/parse.py b/fs/opener/parse.py
index 61f99f2c..f554bf38 100644
--- a/fs/opener/parse.py
+++ b/fs/opener/parse.py
@@ -1,29 +1,27 @@
"""Function to parse FS URLs in to their constituent parts.
"""
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import absolute_import, print_function, unicode_literals
-import collections
-import re
import typing
+import collections
+import re
import six
from six.moves.urllib.parse import parse_qs, unquote
from .errors import ParseError
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import Optional, Text
-_ParseResult = collections.namedtuple(
- "ParseResult", ["protocol", "username", "password", "resource", "params", "path"]
-)
-
-
-class ParseResult(_ParseResult):
+class ParseResult(
+ collections.namedtuple(
+ "ParseResult",
+ ["protocol", "username", "password", "resource", "params", "path"],
+ )
+):
"""A named tuple containing fields of a parsed FS URL.
Attributes:
diff --git a/fs/opener/registry.py b/fs/opener/registry.py
index 80183295..19547234 100644
--- a/fs/opener/registry.py
+++ b/fs/opener/registry.py
@@ -2,39 +2,27 @@
"""`Registry` class mapping protocols and FS URLs to their `Opener`.
"""
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import absolute_import, print_function, unicode_literals
-import collections
-import contextlib
import typing
-import six
+import collections
+import contextlib
import pkg_resources
+from ..errors import ResourceReadOnly
from .base import Opener
-from .errors import UnsupportedProtocol, EntryPointError
+from .errors import EntryPointError, UnsupportedProtocol
from .parse import parse_fs_url
-if False: # typing.TYPE_CHECKING
- from typing import (
- Callable,
- Dict,
- Iterator,
- List,
- Optional,
- Text,
- Type,
- Tuple,
- Union,
- )
+if typing.TYPE_CHECKING:
+ from typing import Callable, Dict, Iterator, List, Text, Tuple, Type, Union
+
from ..base import FS
class Registry(object):
- """A registry for `Opener` instances.
- """
+ """A registry for `Opener` instances."""
def __init__(self, default_opener="osfs", load_extern=False):
# type: (Text, bool) -> None
@@ -57,7 +45,7 @@ def __repr__(self):
        return "<registry {!r}>".format(self.protocols)
def install(self, opener):
- # type: (Union[Type[Opener], Opener, Callable[[], Opener]]) -> None
+ # type: (Union[Type[Opener], Opener, Callable[[], Opener]]) -> Opener
"""Install an opener.
Arguments:
@@ -66,24 +54,24 @@ def install(self, opener):
Note:
May be used as a class decorator. For example::
+
registry = Registry()
@registry.install
class ArchiveOpener(Opener):
protocols = ['zip', 'tar']
+
"""
_opener = opener if isinstance(opener, Opener) else opener()
assert isinstance(_opener, Opener), "Opener instance required"
assert _opener.protocols, "must list one or more protocols"
for protocol in _opener.protocols:
self._protocols[protocol] = _opener
- return opener
+ return _opener
@property
def protocols(self):
# type: () -> List[Text]
- """`list`: the list of supported protocols.
- """
-
+ """`list`: the list of supported protocols."""
_protocols = list(self._protocols)
if self.load_extern:
_protocols.extend(
@@ -201,7 +189,8 @@ def open_fs(
"""Open a filesystem from a FS URL (ignoring the path component).
Arguments:
- fs_url (str): A filesystem URL.
+ fs_url (str): A filesystem URL. If a filesystem instance is
+ given instead, it will be returned transparently.
writeable (bool, optional): `True` if the filesystem must
be writeable.
create (bool, optional): `True` if the filesystem should be
@@ -214,6 +203,14 @@ def open_fs(
Returns:
~fs.base.FS: A filesystem instance.
+ Caution:
+ The ``writeable`` parameter only controls whether the
+ filesystem *needs* to be writable, which is relevant for
+ some archive filesystems. Passing ``writeable=False`` will
+ **not** make the return filesystem read-only. For this,
+ consider using `fs.wrap.read_only` to wrap the returned
+ instance.
+
"""
from ..base import FS
@@ -254,10 +251,13 @@ def manage_fs(
required logic for that.
Example:
- >>> def print_ls(list_fs):
- ... '''List a directory.'''
- ... with manage_fs(list_fs) as fs:
- ... print(' '.join(fs.listdir()))
+ The `~Registry.manage_fs` method can be used to define a small
+ utility function::
+
+ >>> def print_ls(list_fs):
+ ... '''List a directory.'''
+ ... with manage_fs(list_fs) as fs:
+ ... print(' '.join(fs.listdir()))
This function may be used in two ways. You may either pass
a ``str``, as follows::
@@ -273,14 +273,20 @@ def manage_fs(
"""
from ..base import FS
+ def assert_writeable(fs):
+ if fs.getmeta().get("read_only", True):
+ raise ResourceReadOnly(path="/")
+
if isinstance(fs_url, FS):
+ if writeable:
+ assert_writeable(fs_url)
yield fs_url
else:
_fs = self.open_fs(fs_url, create=create, writeable=writeable, cwd=cwd)
+ if writeable:
+ assert_writeable(_fs)
try:
yield _fs
- except:
- raise
finally:
_fs.close()
diff --git a/fs/opener/tarfs.py b/fs/opener/tarfs.py
index 867ca80f..e53a51b9 100644
--- a/fs/opener/tarfs.py
+++ b/fs/opener/tarfs.py
@@ -2,26 +2,24 @@
"""`TarFS` opener definition.
"""
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import absolute_import, print_function, unicode_literals
import typing
from .base import Opener
-from .registry import registry
from .errors import NotWriteable
+from .registry import registry
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import Text
+
+ from ..tarfs import TarFS # noqa: F401
from .parse import ParseResult
- from ..tarfs import TarFS
@registry.install
class TarOpener(Opener):
- """`TarFS` opener.
- """
+ """`TarFS` opener."""
protocols = ["tar"]
diff --git a/fs/opener/tempfs.py b/fs/opener/tempfs.py
index 5fe47a08..f48eb099 100644
--- a/fs/opener/tempfs.py
+++ b/fs/opener/tempfs.py
@@ -2,25 +2,23 @@
"""`TempFS` opener definition.
"""
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import absolute_import, print_function, unicode_literals
import typing
from .base import Opener
from .registry import registry
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import Text
+
+ from ..tempfs import TempFS # noqa: F401
from .parse import ParseResult
- from ..tempfs import TempFS
@registry.install
class TempOpener(Opener):
- """`TempFS` opener.
- """
+ """`TempFS` opener."""
protocols = ["temp"]
diff --git a/fs/opener/zipfs.py b/fs/opener/zipfs.py
index 714fe384..10c979cc 100644
--- a/fs/opener/zipfs.py
+++ b/fs/opener/zipfs.py
@@ -2,26 +2,24 @@
"""`ZipFS` opener definition.
"""
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import absolute_import, print_function, unicode_literals
import typing
from .base import Opener
-from .registry import registry
from .errors import NotWriteable
+from .registry import registry
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import Text
+
+ from ..zipfs import ZipFS # noqa: F401
from .parse import ParseResult
- from ..zipfs import ZipFS
@registry.install
class ZipOpener(Opener):
- """`ZipFS` opener.
- """
+ """`ZipFS` opener."""
protocols = ["zip"]
diff --git a/fs/osfs.py b/fs/osfs.py
index b891380e..7a095f7f 100644
--- a/fs/osfs.py
+++ b/fs/osfs.py
@@ -4,9 +4,10 @@
of the Python standard library.
"""
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import absolute_import, print_function, unicode_literals
+
+import sys
+import typing
import errno
import io
@@ -15,12 +16,9 @@
import os
import platform
import shutil
+import six
import stat
-import sys
import tempfile
-import typing
-
-import six
try:
from os import scandir
@@ -28,7 +26,7 @@
try:
from scandir import scandir # type: ignore
except ImportError: # pragma: no cover
- scandir = None # pragma: no cover
+ scandir = None # type: ignore # pragma: no cover
try:
from os import sendfile
@@ -36,35 +34,36 @@
try:
from sendfile import sendfile # type: ignore
except ImportError:
- sendfile = None # pragma: no cover
+ sendfile = None # type: ignore # pragma: no cover
from . import errors
-from .errors import FileExists
+from ._fscompat import fsdecode, fsencode, fspath
+from ._url_tools import url_quote
from .base import FS
+from .copy import copy_modified_time
from .enums import ResourceType
-from ._fscompat import fsencode, fsdecode, fspath
+from .error_tools import convert_os_errors
+from .errors import FileExpected, NoURL
from .info import Info
+from .mode import Mode, validate_open_mode
from .path import basename, dirname
from .permissions import Permissions
-from .error_tools import convert_os_errors
-from .mode import Mode, validate_open_mode
-from .errors import FileExpected, NoURL
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import (
+ IO,
Any,
BinaryIO,
- Callable,
Collection,
Dict,
Iterator,
- IO,
List,
Optional,
SupportsInt,
Text,
Tuple,
)
+
from .base import _OpendirFactory
from .info import RawInfo
from .subfs import SubFS
@@ -82,22 +81,6 @@
class OSFS(FS):
"""Create an OSFS.
- Arguments:
- root_path (str or ~os.PathLike): An OS path or path-like object to
- the location on your HD you wish to manage.
- create (bool): Set to `True` to create the root directory if it
- does not already exist, otherwise the directory should exist
- prior to creating the ``OSFS`` instance (defaults to `False`).
- create_mode (int): The permissions that will be used to create
- the directory if ``create`` is `True` and the path doesn't
- exist, defaults to ``0o777``.
- expand_vars(bool): If `True` (the default) environment variables of
- the form $name or ${name} will be expanded.
-
- Raises:
- `fs.errors.CreateFailed`: If ``root_path`` does not
- exist, or could not be created.
-
Examples:
>>> current_directory_fs = OSFS('.')
>>> home_fs = OSFS('~/')
@@ -114,6 +97,23 @@ def __init__(
):
# type: (...) -> None
"""Create an OSFS instance.
+
+ Arguments:
+ root_path (str or ~os.PathLike): An OS path or path-like object
+ to the location on your HD you wish to manage.
+ create (bool): Set to `True` to create the root directory if it
+ does not already exist, otherwise the directory should exist
+ prior to creating the ``OSFS`` instance (defaults to `False`).
+ create_mode (int): The permissions that will be used to create
+ the directory if ``create`` is `True` and the path doesn't
+ exist, defaults to ``0o777``.
+ expand_vars(bool): If `True` (the default) environment variables
+ of the form ``~``, ``$name`` or ``${name}`` will be expanded.
+
+ Raises:
+ `fs.errors.CreateFailed`: If ``root_path`` does not
+ exist, or could not be created.
+
"""
super(OSFS, self).__init__()
if isinstance(root_path, bytes):
@@ -137,7 +137,8 @@ def __init__(
)
else:
if not os.path.isdir(_root_path):
- raise errors.CreateFailed("root path does not exist")
+ message = "root path '{}' does not exist".format(_root_path)
+ raise errors.CreateFailed(message)
_meta = self._meta = {
"network": False,
@@ -150,7 +151,8 @@ def __init__(
try:
# https://stackoverflow.com/questions/7870041/check-if-file-system-is-case-insensitive-in-python
- # I don't know of a better way of detecting case insensitivity of a filesystem
+ # I don't know of a better way of detecting case insensitivity of a
+ # filesystem
with tempfile.NamedTemporaryFile(prefix="TmP") as _tmp_file:
_meta["case_insensitive"] = os.path.exists(_tmp_file.name.lower())
except Exception:
@@ -186,9 +188,8 @@ def __str__(self):
return fmt.format(_class_name.lower(), self.root_path)
def _to_sys_path(self, path):
- # type: (Text) -> Text
- """Convert a FS path to a path on the OS.
- """
+ # type: (Text) -> bytes
+ """Convert a FS path to a path on the OS."""
sys_path = fsencode(
os.path.join(self._root_path, path.lstrip("/").replace("/", os.sep))
)
@@ -197,8 +198,7 @@ def _to_sys_path(self, path):
@classmethod
def _make_details_from_stat(cls, stat_result):
# type: (os.stat_result) -> Dict[Text, object]
- """Make a *details* info dict from an `os.stat_result` object.
- """
+ """Make a *details* info dict from an `os.stat_result` object."""
details = {
"_write": ["accessed", "modified"],
"accessed": stat_result.st_atime,
@@ -217,8 +217,7 @@ def _make_details_from_stat(cls, stat_result):
@classmethod
def _make_access_from_stat(cls, stat_result):
# type: (os.stat_result) -> Dict[Text, object]
- """Make an *access* info dict from an `os.stat_result` object.
- """
+ """Make an *access* info dict from an `os.stat_result` object."""
access = {} # type: Dict[Text, object]
access["permissions"] = Permissions(mode=stat_result.st_mode).dump()
access["gid"] = gid = stat_result.st_gid
@@ -251,8 +250,7 @@ def _make_access_from_stat(cls, stat_result):
@classmethod
def _get_type_from_stat(cls, _stat):
# type: (os.stat_result) -> ResourceType
- """Get the resource type from an `os.stat_result` object.
- """
+ """Get the resource type from an `os.stat_result` object."""
st_mode = _stat.st_mode
st_type = stat.S_IFMT(st_mode)
return cls.STAT_TO_RESOURCE_TYPE.get(st_type, ResourceType.unknown)
@@ -266,13 +264,11 @@ def _gettarget(self, sys_path):
if hasattr(os, "readlink"):
try:
if _WINDOWS_PLATFORM: # pragma: no cover
- target = os.readlink(sys_path)
+ return os.readlink(sys_path)
else:
- target = os.readlink(fsencode(sys_path))
+ return fsdecode(os.readlink(fsencode(sys_path)))
except OSError:
pass
- else:
- return target
return None
def _make_link_info(self, sys_path):
@@ -397,7 +393,7 @@ def removedir(self, path):
# --- Type hint for opendir ------------------------------
- if False: # typing.TYPE_CHECKING
+ if typing.TYPE_CHECKING:
def opendir(self, path, factory=None):
# type: (_O, Text, Optional[_OpendirFactory]) -> SubFS[_O]
@@ -415,6 +411,9 @@ def _check_copy(self, src_path, dst_path, overwrite=False):
# check dst_path does not exist if we are not overwriting
if not overwrite and self.exists(_dst_path):
raise errors.DestinationExists(dst_path)
+ # it's not allowed to copy a file onto itself
+ if _src_path == _dst_path:
+ raise errors.IllegalDestination(dst_path)
# check parent dir of _dst_path exists and is a directory
if self.gettype(dirname(dst_path)) is not ResourceType.directory:
raise errors.DirectoryExpected(dirname(dst_path))
@@ -422,20 +421,21 @@ def _check_copy(self, src_path, dst_path, overwrite=False):
if sys.version_info[:2] < (3, 8) and sendfile is not None:
- _sendfile_error_codes = frozenset(
- {
- errno.EIO,
- errno.EINVAL,
- errno.ENOSYS,
- errno.ENOTSUP, # type: ignore
- errno.EBADF,
- errno.ENOTSOCK,
- errno.EOPNOTSUPP,
- }
- )
+ _sendfile_error_codes = {
+ errno.EIO,
+ errno.EINVAL,
+ errno.ENOSYS,
+ errno.EBADF,
+ errno.ENOTSOCK,
+ errno.EOPNOTSUPP,
+ }
+
+ # PyPy doesn't define ENOTSUP so we have to add it conditionally.
+ if hasattr(errno, "ENOTSUP"):
+ _sendfile_error_codes.add(errno.ENOTSUP)
- def copy(self, src_path, dst_path, overwrite=False):
- # type: (Text, Text, bool) -> None
+ def copy(self, src_path, dst_path, overwrite=False, preserve_time=False):
+ # type: (Text, Text, bool, bool) -> None
with self._lock:
# validate and canonicalise paths
_src_path, _dst_path = self._check_copy(src_path, dst_path, overwrite)
@@ -455,6 +455,8 @@ def copy(self, src_path, dst_path, overwrite=False):
while sent > 0:
sent = sendfile(fd_dst, fd_src, offset, maxsize)
offset += sent
+ if preserve_time:
+ copy_modified_time(self, src_path, self, dst_path)
except OSError as e:
# the error is not a simple "sendfile not supported" error
if e.errno not in self._sendfile_error_codes:
@@ -464,8 +466,8 @@ def copy(self, src_path, dst_path, overwrite=False):
else:
- def copy(self, src_path, dst_path, overwrite=False):
- # type: (Text, Text, bool) -> None
+ def copy(self, src_path, dst_path, overwrite=False, preserve_time=False):
+ # type: (Text, Text, bool, bool) -> None
with self._lock:
_src_path, _dst_path = self._check_copy(src_path, dst_path, overwrite)
shutil.copy2(self.getsyspath(_src_path), self.getsyspath(_dst_path))
@@ -478,47 +480,56 @@ def _scandir(self, path, namespaces=None):
# type: (Text, Optional[Collection[Text]]) -> Iterator[Info]
self.check()
namespaces = namespaces or ()
+ requires_stat = not {"details", "stat", "access"}.isdisjoint(namespaces)
_path = self.validatepath(path)
if _WINDOWS_PLATFORM:
sys_path = os.path.join(
self._root_path, path.lstrip("/").replace("/", os.sep)
)
else:
- sys_path = self._to_sys_path(_path)
+ sys_path = self._to_sys_path(_path) # type: ignore
with convert_os_errors("scandir", path, directory=True):
- for dir_entry in scandir(sys_path):
- info = {
- "basic": {
- "name": fsdecode(dir_entry.name),
- "is_dir": dir_entry.is_dir(),
- }
- }
- if "details" in namespaces:
- stat_result = dir_entry.stat()
- info["details"] = self._make_details_from_stat(stat_result)
- if "stat" in namespaces:
- stat_result = dir_entry.stat()
- info["stat"] = {
- k: getattr(stat_result, k)
- for k in dir(stat_result)
- if k.startswith("st_")
- }
- if "lstat" in namespaces:
- lstat_result = dir_entry.stat(follow_symlinks=False)
- info["lstat"] = {
- k: getattr(lstat_result, k)
- for k in dir(lstat_result)
- if k.startswith("st_")
+ scandir_iter = scandir(sys_path)
+ try:
+ for dir_entry in scandir_iter:
+ info = {
+ "basic": {
+ "name": fsdecode(dir_entry.name),
+ "is_dir": dir_entry.is_dir(),
+ }
}
- if "link" in namespaces:
- info["link"] = self._make_link_info(
- os.path.join(sys_path, dir_entry.name)
- )
- if "access" in namespaces:
- stat_result = dir_entry.stat()
- info["access"] = self._make_access_from_stat(stat_result)
-
- yield Info(info)
+ if requires_stat:
+ stat_result = dir_entry.stat()
+ if "details" in namespaces:
+ info["details"] = self._make_details_from_stat(
+ stat_result
+ )
+ if "stat" in namespaces:
+ info["stat"] = {
+ k: getattr(stat_result, k)
+ for k in dir(stat_result)
+ if k.startswith("st_")
+ }
+ if "access" in namespaces:
+ info["access"] = self._make_access_from_stat(
+ stat_result
+ )
+ if "lstat" in namespaces:
+ lstat_result = dir_entry.stat(follow_symlinks=False)
+ info["lstat"] = {
+ k: getattr(lstat_result, k)
+ for k in dir(lstat_result)
+ if k.startswith("st_")
+ }
+ if "link" in namespaces:
+ info["link"] = self._make_link_info(
+ os.path.join(sys_path, dir_entry.name)
+ )
+
+ yield Info(info)
+ finally:
+ if sys.version_info >= (3, 6):
+ scandir_iter.close()
else:
@@ -528,7 +539,6 @@ def _scandir(self, path, namespaces=None):
namespaces = namespaces or ()
_path = self.validatepath(path)
sys_path = self.getsyspath(_path)
- _sys_path = fsencode(sys_path)
with convert_os_errors("scandir", path, directory=True):
for entry_name in os.listdir(sys_path):
_entry_name = fsdecode(entry_name)
@@ -586,9 +596,14 @@ def getsyspath(self, path):
def geturl(self, path, purpose="download"):
# type: (Text, Text) -> Text
- if purpose != "download":
+ sys_path = self.getsyspath(path)
+ if purpose == "download":
+ return "file://" + sys_path
+ elif purpose == "fs":
+ url_path = url_quote(sys_path)
+ return "osfs://" + url_path
+ else:
raise NoURL(path, purpose)
- return "file://" + self.getsyspath(path)
def gettype(self, path):
# type: (Text) -> ResourceType
@@ -652,10 +667,10 @@ def setinfo(self, path, info):
if "details" in info:
details = info["details"]
if "accessed" in details or "modified" in details:
- _accessed = typing.cast(int, details.get("accessed"))
- _modified = typing.cast(int, details.get("modified", _accessed))
- accessed = int(_modified if _accessed is None else _accessed)
- modified = int(_modified)
+ _accessed = typing.cast(float, details.get("accessed"))
+ _modified = typing.cast(float, details.get("modified", _accessed))
+ accessed = float(_modified if _accessed is None else _accessed)
+ modified = float(_modified)
if accessed is not None or modified is not None:
with convert_os_errors("setinfo", path):
os.utime(sys_path, (accessed, modified))
@@ -668,8 +683,7 @@ def validatepath(self, path):
except UnicodeEncodeError as error:
raise errors.InvalidCharsInPath(
path,
- msg="path '{path}' could not be encoded for the filesystem (check LANG env var); {error}".format(
- path=path, error=error
- ),
+ msg="path '{path}' could not be encoded for the filesystem (check LANG"
+ " env var); {error}".format(path=path, error=error),
)
return super(OSFS, self).validatepath(path)
diff --git a/fs/path.py b/fs/path.py
index 3783dd13..0bfa5149 100644
--- a/fs/path.py
+++ b/fs/path.py
@@ -8,15 +8,15 @@
"""
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import print_function, unicode_literals
-import re
import typing
+import re
+
from .errors import IllegalBackReference
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import List, Text, Tuple
@@ -64,11 +64,11 @@ def normpath(path):
>>> normpath("/foo//bar/frob/../baz")
'/foo/bar/baz'
>>> normpath("foo/../../bar")
- Traceback (most recent call last)
+ Traceback (most recent call last):
...
- IllegalBackReference: path 'foo/../../bar' contains back-references outside of filesystem"
+ fs.errors.IllegalBackReference: path 'foo/../../bar' contains back-references outside of filesystem
- """
+ """ # noqa: E501
if path in "/":
return path
@@ -86,6 +86,7 @@ def normpath(path):
else:
components.append(component)
except IndexError:
+ # FIXME (@althonos): should be raised from the IndexError
raise IllegalBackReference(path)
return prefix + "/".join(components)
@@ -509,7 +510,7 @@ def forcedir(path):
>>> forcedir("foo/bar/")
'foo/bar/'
>>> forcedir("foo/spam.txt")
- 'foo/spam.txt'
+ 'foo/spam.txt/'
"""
if not path.endswith("/"):
diff --git a/fs/permissions.py b/fs/permissions.py
index 7c8b2030..3fee3352 100644
--- a/fs/permissions.py
+++ b/fs/permissions.py
@@ -1,31 +1,27 @@
"""Abstract permissions container.
"""
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import print_function, unicode_literals
import typing
-from typing import Container, Iterable
+from typing import Iterable
import six
from ._typing import Text
-
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import Iterator, List, Optional, Tuple, Type, Union
def make_mode(init):
# type: (Union[int, Iterable[Text], None]) -> int
- """Make a mode integer from an initial value.
- """
+ """Make a mode integer from an initial value."""
return Permissions.get_mode(init)
class _PermProperty(object):
- """Creates simple properties to get/set permissions.
- """
+ """Creates simple properties to get/set permissions."""
def __init__(self, name):
# type: (Text) -> None
@@ -52,19 +48,6 @@ class Permissions(object):
on a resource. It supports Linux permissions, but is generic enough
to manage permission information from almost any filesystem.
- Arguments:
- names (list, optional): A list of permissions.
- mode (int, optional): A mode integer.
- user (str, optional): A triplet of *user* permissions, e.g.
- ``"rwx"`` or ``"r--"``
- group (str, optional): A triplet of *group* permissions, e.g.
- ``"rwx"`` or ``"r--"``
- other (str, optional): A triplet of *other* permissions, e.g.
- ``"rwx"`` or ``"r--"``
- sticky (bool, optional): A boolean for the *sticky* bit.
- setuid (bool, optional): A boolean for the *setuid* bit.
- setguid (bool, optional): A boolean for the *setguid* bit.
-
Example:
>>> from fs.permissions import Permissions
>>> p = Permissions(user='rwx', group='rw-', other='r--')
@@ -73,7 +56,7 @@ class Permissions(object):
>>> p.mode
500
>>> oct(p.mode)
- '0764'
+ '0o764'
"""
@@ -105,6 +88,22 @@ def __init__(
setguid=None, # type: Optional[bool]
):
# type: (...) -> None
+ """Create a new `Permissions` instance.
+
+ Arguments:
+ names (list, optional): A list of permissions.
+ mode (int, optional): A mode integer.
+ user (str, optional): A triplet of *user* permissions, e.g.
+ ``"rwx"`` or ``"r--"``
+ group (str, optional): A triplet of *group* permissions, e.g.
+ ``"rwx"`` or ``"r--"``
+ other (str, optional): A triplet of *other* permissions, e.g.
+ ``"rwx"`` or ``"r--"``
+ sticky (bool, optional): A boolean for the *sticky* bit.
+ setuid (bool, optional): A boolean for the *setuid* bit.
+ setguid (bool, optional): A boolean for the *setguid* bit.
+
+ """
if names is not None:
self._perms = set(names)
elif mode is not None:
@@ -174,8 +173,7 @@ def __ne__(self, other):
@classmethod
def parse(cls, ls):
# type: (Text) -> Permissions
- """Parse permissions in Linux notation.
- """
+ """Parse permissions in Linux notation."""
user = ls[:3]
group = ls[3:6]
other = ls[6:9]
@@ -184,8 +182,7 @@ def parse(cls, ls):
@classmethod
def load(cls, permissions):
# type: (List[Text]) -> Permissions
- """Load a serialized permissions object.
- """
+ """Load a serialized permissions object."""
return cls(names=permissions)
@classmethod
@@ -222,26 +219,22 @@ def create(cls, init=None):
@classmethod
def get_mode(cls, init):
# type: (Union[int, Iterable[Text], None]) -> int
- """Convert an initial value to a mode integer.
- """
+ """Convert an initial value to a mode integer."""
return cls.create(init).mode
def copy(self):
# type: () -> Permissions
- """Make a copy of this permissions object.
- """
+ """Make a copy of this permissions object."""
return Permissions(names=list(self._perms))
def dump(self):
# type: () -> List[Text]
- """Get a list suitable for serialization.
- """
+ """Get a list suitable for serialization."""
return sorted(self._perms)
def as_str(self):
# type: () -> Text
- """Get a Linux-style string representation of permissions.
- """
+ """Get a Linux-style string representation of permissions."""
perms = [
c if name in self._perms else "-"
for name, c in zip(self._LINUX_PERMS_NAMES[-9:], "rwxrwxrwx")
@@ -259,8 +252,7 @@ def as_str(self):
@property
def mode(self):
# type: () -> int
- """`int`: mode integer.
- """
+ """`int`: mode integer."""
mode = 0
for name, mask in self._LINUX_PERMS:
if name in self._perms:
diff --git a/fs/subfs.py b/fs/subfs.py
index d0d0d386..9bc6167b 100644
--- a/fs/subfs.py
+++ b/fs/subfs.py
@@ -1,19 +1,19 @@
"""Manage a directory in a *parent* filesystem.
"""
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import print_function, unicode_literals
import typing
import six
-from .wrapfs import WrapFS
from .path import abspath, join, normpath, relpath
+from .wrapfs import WrapFS
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import Text, Tuple
- from .base import FS
+
+ from .base import FS # noqa: F401
_F = typing.TypeVar("_F", bound="FS", covariant=True)
@@ -21,7 +21,7 @@
@six.python_2_unicode_compatible
class SubFS(WrapFS[_F], typing.Generic[_F]):
- """A sub-directory on another filesystem.
+ """A sub-directory on a parent filesystem.
A SubFS is a filesystem object that maps to a sub-directory of
another filesystem. This is the object that is returned by
@@ -29,7 +29,7 @@ class SubFS(WrapFS[_F], typing.Generic[_F]):
"""
- def __init__(self, parent_fs, path):
+ def __init__(self, parent_fs, path): # noqa: D107
# type: (_F, Text) -> None
super(SubFS, self).__init__(parent_fs)
self._sub_dir = abspath(normpath(path))
@@ -55,8 +55,7 @@ def delegate_path(self, path):
class ClosingSubFS(SubFS[_F], typing.Generic[_F]):
- """A version of `SubFS` which closes its parent when closed.
- """
+ """A version of `SubFS` which closes its parent when closed."""
def close(self):
# type: () -> None
diff --git a/fs/tarfs.py b/fs/tarfs.py
index ce2109c2..e699f86a 100644
--- a/fs/tarfs.py
+++ b/fs/tarfs.py
@@ -1,36 +1,32 @@
"""Manage the filesystem in a Tar archive.
"""
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import print_function, unicode_literals
-import copy
-import os
-import tarfile
import typing
-from collections import OrderedDict
-from typing import cast, IO
+from typing import IO, cast
+import os
import six
+import tarfile
+from collections import OrderedDict
from . import errors
+from ._url_tools import url_quote
from .base import FS
from .compress import write_tar
from .enums import ResourceType
-from .errors import IllegalBackReference
+from .errors import IllegalBackReference, NoURL
from .info import Info
from .iotools import RawWrapper
from .opener import open_fs
-from .path import dirname, relpath, basename, isbase, normpath, parts, frombase
-from .wrapfs import WrapFS
+from .path import basename, frombase, isbase, normpath, parts, relpath
from .permissions import Permissions
+from .wrapfs import WrapFS
-
-if False: # typing.TYPE_CHECKING
- from tarfile import TarInfo
+if typing.TYPE_CHECKING:
from typing import (
Any,
- AnyStr,
BinaryIO,
Collection,
Dict,
@@ -40,8 +36,10 @@
Tuple,
Union,
)
- from .info import Info, RawInfo
- from .permissions import Permissions
+
+ from tarfile import TarInfo
+
+ from .info import RawInfo
from .subfs import SubFS
T = typing.TypeVar("T", bound="ReadTarFS")
@@ -56,7 +54,6 @@ def _get_member_info(member, encoding):
# type: (TarInfo, Text) -> Dict[Text, object]
return member.get_info(encoding, None)
-
else:
def _get_member_info(member, encoding):
@@ -69,10 +66,10 @@ def _get_member_info(member, encoding):
class TarFS(WrapFS):
"""Read and write tar files.
- There are two ways to open a TarFS for the use cases of reading
+ There are two ways to open a `TarFS` for the use cases of reading
a tar file, and creating a new one.
- If you open the TarFS with ``write`` set to `False` (the
+ If you open the `TarFS` with ``write`` set to `False` (the
default), then the filesystem will be a read only filesystem which
maps to the files and directories within the tar file. Files are
decompressed on the fly when you open them.
@@ -82,9 +79,9 @@ class TarFS(WrapFS):
with TarFS('foo.tar.gz') as tar_fs:
readme = tar_fs.readtext('readme.txt')
- If you open the TarFS with ``write`` set to `True`, then the TarFS
+ If you open the TarFS with ``write`` set to `True`, then the `TarFS`
will be a empty temporary filesystem. Any files / directories you
- create in the TarFS will be written in to a tar file when the TarFS
+ create in the `TarFS` will be written in to a tar file when the `TarFS`
is closed. The compression is set from the new file name but may be
set manually with the ``compression`` argument.
@@ -103,8 +100,9 @@ class TarFS(WrapFS):
use default (`False`) to read an existing tar file.
compression (str, optional): Compression to use (one of the formats
supported by `tarfile`: ``xz``, ``gz``, ``bz2``, or `None`).
- temp_fs (str): An FS URL for the temporary filesystem
- used to store data prior to tarring.
+ temp_fs (str): An FS URL or an FS instance to use to store
+ data prior to tarring. Defaults to creating a new
+ `~fs.tempfs.TempFS`.
"""
@@ -115,13 +113,13 @@ class TarFS(WrapFS):
"gz": (".tar.gz", ".tgz"),
}
- def __new__(
+ def __new__( # type: ignore
cls,
file, # type: Union[Text, BinaryIO]
write=False, # type: bool
compression=None, # type: Optional[Text]
encoding="utf-8", # type: Text
- temp_fs="temp://__tartemp__", # type: Text
+ temp_fs="temp://__tartemp__", # type: Union[Text, FS]
):
# type: (...) -> FS
if isinstance(file, (six.text_type, six.binary_type)):
@@ -144,7 +142,7 @@ def __new__(
else:
return ReadTarFS(file, encoding=encoding)
- if False: # typing.TYPE_CHECKING
+ if typing.TYPE_CHECKING:
def __init__(
self,
@@ -153,23 +151,22 @@ def __init__(
compression=None, # type: Optional[Text]
encoding="utf-8", # type: Text
temp_fs="temp://__tartemp__", # type: Text
- ):
+ ): # noqa: D107
# type: (...) -> None
pass
@six.python_2_unicode_compatible
class WriteTarFS(WrapFS):
- """A writable tar file.
- """
+ """A writable tar file."""
def __init__(
self,
file, # type: Union[Text, BinaryIO]
compression=None, # type: Optional[Text]
encoding="utf-8", # type: Text
- temp_fs="temp://__tartemp__", # type: Text
- ):
+ temp_fs="temp://__tartemp__", # type: Union[Text, FS]
+ ): # noqa: D107
# type: (...) -> None
self._file = file # type: Union[Text, BinaryIO]
self.compression = compression
@@ -225,6 +222,7 @@ def write_tar(
Note:
This is called automatically when the TarFS is closed.
+
"""
if not self.isclosed():
write_tar(
@@ -237,8 +235,7 @@ def write_tar(
@six.python_2_unicode_compatible
class ReadTarFS(FS):
- """A readable tar file.
- """
+ """A readable tar file."""
_meta = {
"case_insensitive": True,
@@ -263,7 +260,7 @@ class ReadTarFS(FS):
}
@errors.CreateFailed.catch_all
- def __init__(self, file, encoding="utf-8"):
+ def __init__(self, file, encoding="utf-8"): # noqa: D107
# type: (Union[Text, BinaryIO], Text) -> None
super(ReadTarFS, self).__init__()
self._file = file
@@ -461,16 +458,25 @@ def removedir(self, path):
def close(self):
# type: () -> None
super(ReadTarFS, self).close()
- self._tar.close()
+ if hasattr(self, "_tar"):
+ self._tar.close()
def isclosed(self):
# type: () -> bool
return self._tar.closed # type: ignore
+ def geturl(self, path, purpose="download"):
+ # type: (Text, Text) -> Text
+ if purpose == "fs" and isinstance(self._file, six.string_types):
+ quoted_file = url_quote(self._file)
+ quoted_path = url_quote(path)
+ return "tar://{}!/{}".format(quoted_file, quoted_path)
+ else:
+ raise NoURL(path, purpose)
+
if __name__ == "__main__": # pragma: no cover
from fs.tree import render
- from fs.opener import open_fs
with TarFS("tests.tar") as tar_fs:
print(tar_fs.listdir("/"))
diff --git a/fs/tempfs.py b/fs/tempfs.py
index 04af76f9..3f32c8c6 100644
--- a/fs/tempfs.py
+++ b/fs/tempfs.py
@@ -9,19 +9,18 @@
"""
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import print_function, unicode_literals
-import shutil
-import tempfile
import typing
+import shutil
import six
+import tempfile
from . import errors
from .osfs import OSFS
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import Optional, Text
@@ -29,16 +28,27 @@
class TempFS(OSFS):
"""A temporary filesystem on the OS.
- Arguments:
- identifier (str): A string to distinguish the directory within
- the OS temp location, used as part of the directory name.
- temp_dir (str, optional): An OS path to your temp directory
- (leave as `None` to auto-detect)
- auto_clean (bool): If `True` (the default), the directory
- contents will be wiped on close.
- ignore_clean_errors (bool): If `True` (the default), any errors
- in the clean process will be suppressed. If `False`, they
- will be raised.
+ Temporary filesystems are created using the `tempfile.mkdtemp`
+ function to obtain a temporary folder in an OS-specific location.
+ You can provide an alternative location with the ``temp_dir``
+ argument of the constructor.
+
+ Examples:
+ Create with the constructor::
+
+ >>> from fs.tempfs import TempFS
+ >>> tmp_fs = TempFS()
+
+ Or via an FS URL::
+
+ >>> import fs
+ >>> tmp_fs = fs.open_fs("temp://")
+
+ Use a specific identifier for the temporary folder to better
+ illustrate its purpose::
+
+ >>> named_tmp_fs = fs.open_fs("temp://local_copy")
+ >>> named_tmp_fs = TempFS(identifier="local_copy")
"""
@@ -50,6 +60,20 @@ def __init__(
ignore_clean_errors=True, # type: bool
):
# type: (...) -> None
+ """Create a new `TempFS` instance.
+
+ Arguments:
+ identifier (str): A string to distinguish the directory within
+ the OS temp location, used as part of the directory name.
+ temp_dir (str, optional): An OS path to your temp directory
+ (leave as `None` to auto-detect).
+ auto_clean (bool): If `True` (the default), the directory
+ contents will be wiped on close.
+ ignore_clean_errors (bool): If `True` (the default), any errors
+ in the clean process will be suppressed. If `False`, they
+ will be raised.
+
+ """
self.identifier = identifier
self._auto_clean = auto_clean
self._ignore_clean_errors = ignore_clean_errors
@@ -70,14 +94,35 @@ def __str__(self):
def close(self):
# type: () -> None
+ """Close the filesystem and release any resources.
+
+ It is important to call this method when you have finished
+ working with the filesystem. Some filesystems may not finalize
+ changes until they are closed (archives for example). You may
+ call this method explicitly (it is safe to call close multiple
+ times), or you can use the filesystem as a context manager to
+ automatically close.
+
+ Hint:
+ Depending on the value of ``auto_clean`` passed when creating
+ the `TempFS`, the underlying temporary folder may be removed
+ or not.
+
+ Example:
+ >>> tmp_fs = TempFS(auto_clean=False)
+ >>> syspath = tmp_fs.getsyspath("/")
+ >>> tmp_fs.close()
+ >>> os.path.exists(syspath)
+ True
+
+ """
if self._auto_clean:
self.clean()
super(TempFS, self).close()
def clean(self):
# type: () -> None
- """Clean (delete) temporary files created by this filesystem.
- """
+ """Clean (delete) temporary files created by this filesystem."""
if self._cleaned:
return
@@ -86,6 +131,7 @@ def clean(self):
except Exception as error:
if not self._ignore_clean_errors:
raise errors.OperationFailed(
- msg="failed to remove temporary directory", exc=error
+ msg="failed to remove temporary directory; {}".format(error),
+ exc=error,
)
self._cleaned = True
diff --git a/fs/test.py b/fs/test.py
index a39ecc94..32e6ea5c 100644
--- a/fs/test.py
+++ b/fs/test.py
@@ -5,30 +5,34 @@
"""
-from __future__ import absolute_import
-from __future__ import unicode_literals
+from __future__ import absolute_import, unicode_literals
-import collections
-from datetime import datetime
import io
import itertools
import json
-import math
import os
+import six
import time
+import unittest
+import warnings
+from datetime import datetime
+from six import text_type
import fs.copy
import fs.move
-from fs import ResourceType, Seek
-from fs import errors
-from fs import walk
-from fs import glob
+from fs import ResourceType, Seek, errors, glob, walk
from fs.opener import open_fs
from fs.subfs import ClosingSubFS, SubFS
-import pytz
-import six
-from six import text_type
+if six.PY2:
+ import collections as collections_abc
+else:
+ import collections.abc as collections_abc
+
+try:
+ from datetime import timezone
+except ImportError:
+ from ._tzcompat import timezone # type: ignore
UNICODE_TEXT = """
@@ -240,13 +244,15 @@
class FSTestCases(object):
- """Basic FS tests.
- """
+ """Basic FS tests."""
- def make_fs(self):
- """Return an FS instance.
+ data1 = b"foo" * 256 * 1024
+ data2 = b"bar" * 2 * 256 * 1024
+ data3 = b"baz" * 3 * 256 * 1024
+ data4 = b"egg" * 7 * 256 * 1024
- """
+ def make_fs(self):
+ """Return an FS instance."""
raise NotImplementedError("implement me")
def destroy_fs(self, fs):
@@ -284,6 +290,15 @@ def assert_not_exists(self, path):
"""
self.assertFalse(self.fs.exists(path))
+ def assert_isempty(self, path):
+ """Assert a path is an empty directory.
+
+ Arguments:
+ path (str): A path on the filesystem.
+
+ """
+ self.assertTrue(self.fs.isempty(path))
+
def assert_isfile(self, path):
"""Assert a path is a file.
@@ -425,15 +440,13 @@ def test_geturl(self):
self.fs.hasurl("a/b/c/foo/bar")
def test_geturl_purpose(self):
- """Check an unknown purpose raises a NoURL error.
- """
+ """Check an unknown purpose raises a NoURL error."""
self.fs.create("foo")
with self.assertRaises(errors.NoURL):
self.fs.geturl("foo", purpose="__nosuchpurpose__")
def test_validatepath(self):
- """Check validatepath returns an absolute path.
- """
+ """Check validatepath returns an absolute path."""
path = self.fs.validatepath("foo")
self.assertEqual(path, "/foo")
@@ -451,6 +464,7 @@ def test_getinfo(self):
root_info = self.fs.getinfo("/")
self.assertEqual(root_info.name, "")
self.assertTrue(root_info.is_dir)
+ self.assertIn("basic", root_info.namespaces)
# Make a file of known size
self.fs.writebytes("foo", b"bar")
@@ -458,17 +472,20 @@ def test_getinfo(self):
# Check basic namespace
info = self.fs.getinfo("foo").raw
+ self.assertIn("basic", info)
self.assertIsInstance(info["basic"]["name"], text_type)
self.assertEqual(info["basic"]["name"], "foo")
self.assertFalse(info["basic"]["is_dir"])
# Check basic namespace dir
info = self.fs.getinfo("dir").raw
+ self.assertIn("basic", info)
self.assertEqual(info["basic"]["name"], "dir")
self.assertTrue(info["basic"]["is_dir"])
# Get the info
info = self.fs.getinfo("foo", namespaces=["details"]).raw
+ self.assertIn("basic", info)
self.assertIsInstance(info, dict)
self.assertEqual(info["details"]["size"], 3)
self.assertEqual(info["details"]["type"], int(ResourceType.file))
@@ -479,8 +496,8 @@ def test_getinfo(self):
# Raw info should be serializable
try:
json.dumps(info)
- except:
- assert False, "info should be JSON serializable"
+ except (TypeError, ValueError):
+ raise AssertionError("info should be JSON serializable")
# Non existant namespace is not an error
no_info = self.fs.getinfo("foo", "__nosuchnamespace__").raw
@@ -769,6 +786,7 @@ def test_openbin_rw(self):
with self.fs.openbin("foo/hello", "w") as f:
repr(f)
+ self.assertIn("b", f.mode)
self.assertIsInstance(f, io.IOBase)
self.assertTrue(f.writable())
self.assertFalse(f.readable())
@@ -782,6 +800,7 @@ def test_openbin_rw(self):
# Read it back
with self.fs.openbin("foo/hello", "r") as f:
+ self.assertIn("b", f.mode)
self.assertIsInstance(f, io.IOBase)
self.assertTrue(f.readable())
self.assertFalse(f.writable())
@@ -863,6 +882,11 @@ def test_open_files(self):
self.assertTrue(f.readable())
self.assertFalse(f.closed)
self.assertEqual(f.readlines(8), [b"Hello\n", b"World\n"])
+ self.assertEqual(f.tell(), 12)
+ buffer = bytearray(4)
+ self.assertEqual(f.readinto(buffer), 4)
+ self.assertEqual(f.tell(), 16)
+ self.assertEqual(buffer, b"foo\n")
with self.assertRaises(IOError):
f.write(b"no")
self.assertTrue(f.closed)
@@ -872,8 +896,9 @@ def test_open_files(self):
self.assertFalse(f.closed)
self.assertTrue(f.closed)
- iter_lines = iter(self.fs.open("text"))
- self.assertEqual(next(iter_lines), "Hello\n")
+ with self.fs.open("text") as f:
+ iter_lines = iter(f)
+ self.assertEqual(next(iter_lines), "Hello\n")
with self.fs.open("unicode", "w") as f:
self.assertEqual(12, f.write("Héllo\nWörld\n"))
@@ -922,6 +947,7 @@ def test_openbin(self):
with self.fs.openbin("file.bin", "wb") as write_file:
repr(write_file)
text_type(write_file)
+ self.assertIn("b", write_file.mode)
self.assertIsInstance(write_file, io.IOBase)
self.assertTrue(write_file.writable())
self.assertFalse(write_file.readable())
@@ -933,6 +959,7 @@ def test_openbin(self):
with self.fs.openbin("file.bin", "rb") as read_file:
repr(write_file)
text_type(write_file)
+ self.assertIn("b", read_file.mode)
self.assertIsInstance(read_file, io.IOBase)
self.assertTrue(read_file.readable())
self.assertFalse(read_file.writable())
@@ -1055,7 +1082,8 @@ def test_remove(self):
self.fs.makedirs("foo/bar/baz/")
error_msg = "resource 'foo/bar/egg/test.txt' not found"
- with self.assertRaisesRegexp(errors.ResourceNotFound, error_msg):
+ assertRaisesRegex = getattr(self, "assertRaisesRegex", self.assertRaisesRegexp)
+ with assertRaisesRegex(errors.ResourceNotFound, error_msg):
self.fs.remove("foo/bar/egg/test.txt")
def test_removedir(self):
@@ -1084,6 +1112,7 @@ def test_removedir(self):
self.fs.removedir("foo/bar")
def test_removetree(self):
+ self.fs.makedirs("spam")
self.fs.makedirs("foo/bar/baz")
self.fs.makedirs("foo/egg")
self.fs.makedirs("foo/a/b/c/d/e")
@@ -1099,18 +1128,62 @@ def test_removetree(self):
self.fs.removetree("foo")
self.assert_not_exists("foo")
+ self.assert_exists("spam")
+
+ # Errors on files
+ self.fs.create("bar")
+ with self.assertRaises(errors.DirectoryExpected):
+ self.fs.removetree("bar")
+
+ # Errors on non-existing path
+ with self.assertRaises(errors.ResourceNotFound):
+ self.fs.removetree("foofoo")
+
+ def test_removetree_root(self):
+ self.fs.makedirs("foo/bar/baz")
+ self.fs.makedirs("foo/egg")
+ self.fs.makedirs("foo/a/b/c/d/e")
+ self.fs.create("foo/egg.txt")
+ self.fs.create("foo/bar/egg.bin")
+ self.fs.create("foo/a/b/c/1.txt")
+ self.fs.create("foo/a/b/c/2.txt")
+ self.fs.create("foo/a/b/c/3.txt")
+
+ self.assert_exists("foo/egg.txt")
+ self.assert_exists("foo/bar/egg.bin")
+
+ # removetree("/") removes the contents,
+ # but not the root folder itself
+ self.fs.removetree("/")
+ self.assert_exists("/")
+ self.assert_isempty("/")
+
+ # we check we can create a file after
+ # to catch potential issues with the
+ # root folder being deleted on faulty
+ # implementations
+ self.fs.create("egg")
+ self.fs.makedir("yolk")
+ self.assert_exists("egg")
+ self.assert_exists("yolk")
def test_setinfo(self):
self.fs.create("birthday.txt")
- now = math.floor(time.time())
+ now = time.time()
change_info = {"details": {"accessed": now + 60, "modified": now + 60 * 60}}
self.fs.setinfo("birthday.txt", change_info)
- new_info = self.fs.getinfo("birthday.txt", namespaces=["details"]).raw
- if "accessed" in new_info.get("_write", []):
- self.assertEqual(new_info["details"]["accessed"], now + 60)
- if "modified" in new_info.get("_write", []):
- self.assertEqual(new_info["details"]["modified"], now + 60 * 60)
+ new_info = self.fs.getinfo("birthday.txt", namespaces=["details"])
+ can_write_access = new_info.is_writeable("details", "accessed")
+ can_write_modified = new_info.is_writeable("details", "modified")
+ if can_write_access:
+ self.assertAlmostEqual(
+ new_info.get("details", "accessed"), now + 60, places=4
+ )
+ if can_write_modified:
+ self.assertAlmostEqual(
+ new_info.get("details", "modified"), now + 60 * 60, places=4
+ )
with self.assertRaises(errors.ResourceNotFound):
self.fs.setinfo("nothing", {})
@@ -1119,11 +1192,12 @@ def test_settimes(self):
self.fs.create("birthday.txt")
self.fs.settimes("birthday.txt", accessed=datetime(2016, 7, 5))
info = self.fs.getinfo("birthday.txt", namespaces=["details"])
- writeable = info.get("details", "_write", [])
- if "accessed" in writeable:
- self.assertEqual(info.accessed, datetime(2016, 7, 5, tzinfo=pytz.UTC))
- if "modified" in writeable:
- self.assertEqual(info.modified, datetime(2016, 7, 5, tzinfo=pytz.UTC))
+ can_write_access = info.is_writeable("details", "accessed")
+ can_write_modified = info.is_writeable("details", "modified")
+ if can_write_access:
+ self.assertEqual(info.accessed, datetime(2016, 7, 5, tzinfo=timezone.utc))
+ if can_write_modified:
+ self.assertEqual(info.modified, datetime(2016, 7, 5, tzinfo=timezone.utc))
def test_touch(self):
self.fs.touch("new.txt")
@@ -1131,7 +1205,7 @@ def test_touch(self):
self.fs.settimes("new.txt", datetime(2016, 7, 5))
info = self.fs.getinfo("new.txt", namespaces=["details"])
if info.is_writeable("details", "accessed"):
- self.assertEqual(info.accessed, datetime(2016, 7, 5, tzinfo=pytz.UTC))
+ self.assertEqual(info.accessed, datetime(2016, 7, 5, tzinfo=timezone.utc))
now = time.time()
self.fs.touch("new.txt")
accessed = self.fs.getinfo("new.txt", namespaces=["details"]).raw[
@@ -1181,22 +1255,17 @@ def test_copy(self):
def _test_upload(self, workers):
"""Test fs.copy with varying number of worker threads."""
- data1 = b"foo" * 256 * 1024
- data2 = b"bar" * 2 * 256 * 1024
- data3 = b"baz" * 3 * 256 * 1024
- data4 = b"egg" * 7 * 256 * 1024
-
with open_fs("temp://") as src_fs:
- src_fs.writebytes("foo", data1)
- src_fs.writebytes("bar", data2)
- src_fs.makedir("dir1").writebytes("baz", data3)
- src_fs.makedirs("dir2/dir3").writebytes("egg", data4)
+ src_fs.writebytes("foo", self.data1)
+ src_fs.writebytes("bar", self.data2)
+ src_fs.makedir("dir1").writebytes("baz", self.data3)
+ src_fs.makedirs("dir2/dir3").writebytes("egg", self.data4)
dst_fs = self.fs
fs.copy.copy_fs(src_fs, dst_fs, workers=workers)
- self.assertEqual(dst_fs.readbytes("foo"), data1)
- self.assertEqual(dst_fs.readbytes("bar"), data2)
- self.assertEqual(dst_fs.readbytes("dir1/baz"), data3)
- self.assertEqual(dst_fs.readbytes("dir2/dir3/egg"), data4)
+ self.assertEqual(dst_fs.readbytes("foo"), self.data1)
+ self.assertEqual(dst_fs.readbytes("bar"), self.data2)
+ self.assertEqual(dst_fs.readbytes("dir1/baz"), self.data3)
+ self.assertEqual(dst_fs.readbytes("dir2/dir3/egg"), self.data4)
def test_upload_0(self):
self._test_upload(0)
@@ -1212,21 +1281,17 @@ def test_upload_4(self):
def _test_download(self, workers):
"""Test fs.copy with varying number of worker threads."""
- data1 = b"foo" * 256 * 1024
- data2 = b"bar" * 2 * 256 * 1024
- data3 = b"baz" * 3 * 256 * 1024
- data4 = b"egg" * 7 * 256 * 1024
src_fs = self.fs
with open_fs("temp://") as dst_fs:
- src_fs.writebytes("foo", data1)
- src_fs.writebytes("bar", data2)
- src_fs.makedir("dir1").writebytes("baz", data3)
- src_fs.makedirs("dir2/dir3").writebytes("egg", data4)
+ src_fs.writebytes("foo", self.data1)
+ src_fs.writebytes("bar", self.data2)
+ src_fs.makedir("dir1").writebytes("baz", self.data3)
+ src_fs.makedirs("dir2/dir3").writebytes("egg", self.data4)
fs.copy.copy_fs(src_fs, dst_fs, workers=workers)
- self.assertEqual(dst_fs.readbytes("foo"), data1)
- self.assertEqual(dst_fs.readbytes("bar"), data2)
- self.assertEqual(dst_fs.readbytes("dir1/baz"), data3)
- self.assertEqual(dst_fs.readbytes("dir2/dir3/egg"), data4)
+ self.assertEqual(dst_fs.readbytes("foo"), self.data1)
+ self.assertEqual(dst_fs.readbytes("bar"), self.data2)
+ self.assertEqual(dst_fs.readbytes("dir1/baz"), self.data3)
+ self.assertEqual(dst_fs.readbytes("dir2/dir3/egg"), self.data4)
def test_download_0(self):
self._test_download(0)
@@ -1280,12 +1345,12 @@ def test_desc(self):
def test_scandir(self):
# Check exception for scanning dir that doesn't exist
with self.assertRaises(errors.ResourceNotFound):
- for info in self.fs.scandir("/foobar"):
+ for _info in self.fs.scandir("/foobar"):
pass
# Check scandir returns an iterable
iter_scandir = self.fs.scandir("/")
- self.assertTrue(isinstance(iter_scandir, collections.Iterable))
+ self.assertTrue(isinstance(iter_scandir, collections_abc.Iterable))
self.assertEqual(list(iter_scandir), [])
# Check scanning
@@ -1298,10 +1363,10 @@ def test_scandir(self):
self.fs.create("bar")
self.fs.makedir("dir")
iter_scandir = self.fs.scandir("/")
- self.assertTrue(isinstance(iter_scandir, collections.Iterable))
+ self.assertTrue(isinstance(iter_scandir, collections_abc.Iterable))
scandir = sorted(
- [r.raw for r in iter_scandir], key=lambda info: info["basic"]["name"]
+ (r.raw for r in iter_scandir), key=lambda info: info["basic"]["name"]
)
# Filesystems may send us more than we ask for
@@ -1331,7 +1396,7 @@ def test_scandir(self):
self.assertEqual(len(page2), 1)
page3 = list(self.fs.scandir("/", page=(4, 6)))
self.assertEqual(len(page3), 0)
- paged = set(r.name for r in itertools.chain(page1, page2))
+ paged = {r.name for r in itertools.chain(page1, page2)}
self.assertEqual(paged, {"foo", "bar", "dir"})
def test_filterdir(self):
@@ -1480,6 +1545,10 @@ def test_upload(self):
data = f.read()
self.assertEqual(data, b"bar")
+ # upload to non-existing path (/spam/eggs)
+ with self.assertRaises(errors.ResourceNotFound):
+ self.fs.upload("/spam/eggs", bytes_file)
+
def test_upload_chunk_size(self):
test_data = b"bar" * 128
bytes_file = io.BytesIO(test_data)
@@ -1575,8 +1644,10 @@ def test_files(self):
self.assert_bytes("foo2", b"help")
# Test __del__ doesn't throw traceback
- f = self.fs.open("foo2", "r")
- del f
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore")
+ f = self.fs.open("foo2", "r")
+ del f
with self.assertRaises(IOError):
with self.fs.open("foo2", "r") as f:
@@ -1666,6 +1737,24 @@ def test_copy_dir_temp(self):
self._test_copy_dir("temp://")
self._test_copy_dir_write("temp://")
+ def test_move_dir_same_fs(self):
+ self.fs.makedirs("foo/bar/baz")
+ self.fs.makedir("egg")
+ self.fs.writetext("top.txt", "Hello, World")
+ self.fs.writetext("/foo/bar/baz/test.txt", "Goodbye, World")
+
+ fs.move.move_dir(self.fs, "foo", self.fs, "foo2")
+
+ expected = {"/egg", "/foo2", "/foo2/bar", "/foo2/bar/baz"}
+ self.assertEqual(set(walk.walk_dirs(self.fs)), expected)
+ self.assert_text("top.txt", "Hello, World")
+ self.assert_text("/foo2/bar/baz/test.txt", "Goodbye, World")
+
+ self.assertEqual(sorted(self.fs.listdir("/")), ["egg", "foo2", "top.txt"])
+ self.assertEqual(
+ sorted(x.name for x in self.fs.scandir("/")), ["egg", "foo2", "top.txt"]
+ )
+
def _test_move_dir_write(self, protocol):
# Test moving to this filesystem from another.
other_fs = open_fs(protocol)
@@ -1688,19 +1777,6 @@ def test_move_dir_mem(self):
def test_move_dir_temp(self):
self._test_move_dir_write("temp://")
- def test_move_same_fs(self):
- self.fs.makedirs("foo/bar/baz")
- self.fs.makedir("egg")
- self.fs.writetext("top.txt", "Hello, World")
- self.fs.writetext("/foo/bar/baz/test.txt", "Goodbye, World")
-
- fs.move.move_dir(self.fs, "foo", self.fs, "foo2")
-
- expected = {"/egg", "/foo2", "/foo2/bar", "/foo2/bar/baz"}
- self.assertEqual(set(walk.walk_dirs(self.fs)), expected)
- self.assert_text("top.txt", "Hello, World")
- self.assert_text("/foo2/bar/baz/test.txt", "Goodbye, World")
-
def test_move_file_same_fs(self):
text = "Hello, World"
self.fs.makedir("foo").writetext("test.txt", text)
@@ -1710,6 +1786,9 @@ def test_move_file_same_fs(self):
self.assert_not_exists("foo/test.txt")
self.assert_text("foo/test2.txt", text)
+ self.assertEqual(self.fs.listdir("foo"), ["test2.txt"])
+ self.assertEqual(next(self.fs.scandir("foo")).name, "test2.txt")
+
def _test_move_file(self, protocol):
other_fs = open_fs(protocol)
@@ -1732,6 +1811,40 @@ def test_move_file_mem(self):
def test_move_file_temp(self):
self._test_move_file("temp://")
+ def test_move_file_onto_itself(self):
+ self.fs.writetext("file.txt", "Hello")
+ self.fs.move("file.txt", "file.txt", overwrite=True)
+ self.assert_text("file.txt", "Hello")
+
+ with self.assertRaises(errors.DestinationExists):
+ self.fs.move("file.txt", "file.txt", overwrite=False)
+
+ def test_move_file_onto_itself_relpath(self):
+ subdir = self.fs.makedir("sub")
+ subdir.writetext("file.txt", "Hello")
+ self.fs.move("sub/file.txt", "sub/../sub/file.txt", overwrite=True)
+ self.assert_text("sub/file.txt", "Hello")
+
+ with self.assertRaises(errors.DestinationExists):
+ self.fs.move("sub/file.txt", "sub/../sub/file.txt", overwrite=False)
+
+ def test_copy_file_onto_itself(self):
+ self.fs.writetext("file.txt", "Hello")
+ with self.assertRaises(errors.IllegalDestination):
+ self.fs.copy("file.txt", "file.txt", overwrite=True)
+ with self.assertRaises(errors.DestinationExists):
+ self.fs.copy("file.txt", "file.txt", overwrite=False)
+ self.assert_text("file.txt", "Hello")
+
+ def test_copy_file_onto_itself_relpath(self):
+ subdir = self.fs.makedir("sub")
+ subdir.writetext("file.txt", "Hello")
+ with self.assertRaises(errors.IllegalDestination):
+ self.fs.copy("sub/file.txt", "sub/../sub/file.txt", overwrite=True)
+ with self.assertRaises(errors.DestinationExists):
+ self.fs.copy("sub/file.txt", "sub/../sub/file.txt", overwrite=False)
+ self.assert_text("sub/file.txt", "Hello")
+
def test_copydir(self):
self.fs.makedirs("foo/bar/baz/egg")
self.fs.writetext("foo/bar/foofoo.txt", "Hello")
@@ -1749,6 +1862,27 @@ def test_copydir(self):
with self.assertRaises(errors.DirectoryExpected):
self.fs.copydir("foo2/foofoo.txt", "foofoo.txt", create=True)
+ def test_copydir_onto_itself(self):
+ folder = self.fs.makedir("folder")
+ folder.writetext("file1.txt", "Hello1")
+ sub = folder.makedir("sub")
+ sub.writetext("file2.txt", "Hello2")
+
+ with self.assertRaises(errors.IllegalDestination):
+ self.fs.copydir("folder", "folder")
+ self.assert_text("folder/file1.txt", "Hello1")
+ self.assert_text("folder/sub/file2.txt", "Hello2")
+
+ def test_copydir_into_its_own_subfolder(self):
+ folder = self.fs.makedir("folder")
+ folder.writetext("file1.txt", "Hello1")
+ sub = folder.makedir("sub")
+ sub.writetext("file2.txt", "Hello2")
+ with self.assertRaises(errors.IllegalDestination):
+ self.fs.copydir("folder", "folder/sub/")
+ self.assert_text("folder/file1.txt", "Hello1")
+ self.assert_text("folder/sub/file2.txt", "Hello2")
+
def test_movedir(self):
self.fs.makedirs("foo/bar/baz/egg")
self.fs.writetext("foo/bar/foofoo.txt", "Hello")
@@ -1772,6 +1906,27 @@ def test_movedir(self):
with self.assertRaises(errors.DirectoryExpected):
self.fs.movedir("foo2/foofoo.txt", "foo2/baz/egg")
+ def test_movedir_onto_itself(self):
+ folder = self.fs.makedir("folder")
+ folder.writetext("file1.txt", "Hello1")
+ sub = folder.makedir("sub")
+ sub.writetext("file2.txt", "Hello2")
+
+ self.fs.movedir("folder", "folder")
+ self.assert_text("folder/file1.txt", "Hello1")
+ self.assert_text("folder/sub/file2.txt", "Hello2")
+
+ def test_movedir_into_its_own_subfolder(self):
+ folder = self.fs.makedir("folder")
+ folder.writetext("file1.txt", "Hello1")
+ sub = folder.makedir("sub")
+ sub.writetext("file2.txt", "Hello2")
+
+ with self.assertRaises(errors.IllegalDestination):
+ self.fs.movedir("folder", "folder/sub/")
+ self.assert_text("folder/file1.txt", "Hello1")
+ self.assert_text("folder/sub/file2.txt", "Hello2")
+
def test_match(self):
self.assertTrue(self.fs.match(["*.py"], "foo.py"))
self.assertEqual(
@@ -1790,7 +1945,7 @@ def test_tree(self):
def test_unicode_path(self):
if not self.fs.getmeta().get("unicode_paths", False):
- self.skipTest("the filesystem does not support unicode paths.")
+ raise unittest.SkipTest("the filesystem does not support unicode paths.")
self.fs.makedir("földér")
self.fs.writetext("☭.txt", "Smells like communism.")
@@ -1813,10 +1968,10 @@ def test_unicode_path(self):
def test_case_sensitive(self):
meta = self.fs.getmeta()
if "case_insensitive" not in meta:
- self.skipTest("case sensitivity not known")
+ raise unittest.SkipTest("case sensitivity not known")
if meta.get("case_insensitive", False):
- self.skipTest("the filesystem is not case sensitive.")
+ raise unittest.SkipTest("the filesystem is not case sensitive.")
self.fs.makedir("foo")
self.fs.makedir("Foo")
@@ -1846,4 +2001,3 @@ def test_hash(self):
self.assertEqual(
foo_fs.hash("hashme.txt", "md5"), "9fff4bb103ab8ce4619064109c54cb9c"
)
-
diff --git a/fs/time.py b/fs/time.py
index 5af60578..b462c47f 100644
--- a/fs/time.py
+++ b/fs/time.py
@@ -1,28 +1,43 @@
"""Time related tools.
"""
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import print_function, unicode_literals
+
+import typing
from calendar import timegm
from datetime import datetime
-from pytz import UTC, timezone
+try:
+ from datetime import timezone
+except ImportError:
+ from ._tzcompat import timezone # type: ignore
-utcfromtimestamp = datetime.utcfromtimestamp
-utclocalize = UTC.localize
-GMT = timezone("GMT")
+if typing.TYPE_CHECKING:
+ from typing import Optional
def datetime_to_epoch(d):
# type: (datetime) -> int
- """Convert datetime to epoch.
- """
+ """Convert datetime to epoch."""
return timegm(d.utctimetuple())
-def epoch_to_datetime(t):
+@typing.overload
+def epoch_to_datetime(t): # noqa: D103
+ # type: (None) -> None
+ pass
+
+
+@typing.overload
+def epoch_to_datetime(t): # noqa: D103
# type: (int) -> datetime
- """Convert epoch time to a UTC datetime.
- """
- return utclocalize(utcfromtimestamp(t)) if t is not None else None
+ pass
+
+
+def epoch_to_datetime(t):
+ # type: (Optional[int]) -> Optional[datetime]
+ """Convert epoch time to a UTC datetime."""
+ if t is None:
+ return None
+ return datetime.fromtimestamp(t, tz=timezone.utc)
diff --git a/fs/tools.py b/fs/tools.py
index 4b842029..ca3058e4 100644
--- a/fs/tools.py
+++ b/fs/tools.py
@@ -1,22 +1,17 @@
"""Miscellaneous tools for operating on filesystems.
"""
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import print_function, unicode_literals
-import io
import typing
from . import errors
-from .errors import DirectoryNotEmpty
-from .errors import ResourceNotFound
-from .path import abspath
-from .path import dirname
-from .path import normpath
-from .path import recursepath
-
-if False: # typing.TYPE_CHECKING
- from typing import IO, List, Optional, Text
+from .errors import DirectoryNotEmpty, ResourceNotFound
+from .path import abspath, dirname, normpath, recursepath
+
+if typing.TYPE_CHECKING:
+ from typing import IO, List, Optional, Text, Union
+
from .base import FS
@@ -53,7 +48,9 @@ def copy_file_data(src_file, dst_file, chunk_size=None):
read = src_file.read
write = dst_file.write
# The 'or None' is so that it works with binary and text files
- for chunk in iter(lambda: read(_chunk_size) or None, None):
+ for chunk in iter(
+ lambda: read(_chunk_size) or None, None
+ ): # type: Optional[Union[bytes, str]]
write(chunk)
diff --git a/fs/tree.py b/fs/tree.py
index a10beca9..598f2212 100644
--- a/fs/tree.py
+++ b/fs/tree.py
@@ -4,16 +4,16 @@
Color is supported on UNIX terminals.
"""
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import print_function, unicode_literals
import sys
import typing
from fs.path import abspath, join, normpath
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import List, Optional, Text, TextIO, Tuple
+
from .base import FS
from .info import Info
@@ -79,8 +79,7 @@ def render(
def write(line):
# type: (Text) -> None
- """Write a line to the output.
- """
+ """Write a line to the output."""
print(line, file=file)
# FIXME(@althonos): define functions using `with_color` and
@@ -88,32 +87,28 @@ def write(line):
def format_prefix(prefix):
# type: (Text) -> Text
- """Format the prefix lines.
- """
+ """Format the prefix lines."""
if not with_color:
return prefix
return "\x1b[32m%s\x1b[0m" % prefix
def format_dirname(dirname):
# type: (Text) -> Text
- """Format a directory name.
- """
+ """Format a directory name."""
if not with_color:
return dirname
return "\x1b[1;34m%s\x1b[0m" % dirname
def format_error(msg):
# type: (Text) -> Text
- """Format an error.
- """
+ """Format an error."""
if not with_color:
return msg
return "\x1b[31m%s\x1b[0m" % msg
def format_filename(fname):
# type: (Text) -> Text
- """Format a filename.
- """
+ """Format a filename."""
if not with_color:
return fname
if fname.startswith("."):
@@ -122,26 +117,23 @@ def format_filename(fname):
def sort_key_dirs_first(info):
# type: (Info) -> Tuple[bool, Text]
- """Get the info sort function with directories first.
- """
+ """Get the info sort function with directories first."""
return (not info.is_dir, info.name.lower())
def sort_key(info):
# type: (Info) -> Text
- """Get the default info sort function using resource name.
- """
+ """Get the default info sort function using resource name."""
return info.name.lower()
counts = {"dirs": 0, "files": 0}
def format_directory(path, levels):
# type: (Text, List[bool]) -> None
- """Recursive directory function.
- """
+ """Recursive directory function."""
try:
directory = sorted(
fs.filterdir(path, exclude_dirs=exclude, files=filter),
- key=sort_key_dirs_first if dirs_first else sort_key,
+ key=sort_key_dirs_first if dirs_first else sort_key, # type: ignore
)
except Exception as error:
prefix = (
diff --git a/fs/walk.py b/fs/walk.py
index 36ef8446..b743e6f2 100644
--- a/fs/walk.py
+++ b/fs/walk.py
@@ -8,31 +8,27 @@
from __future__ import unicode_literals
import typing
-from collections import defaultdict
-from collections import deque
-from collections import namedtuple
-import six
+from collections import defaultdict, deque, namedtuple
from ._repr import make_repr
from .errors import FSError
-from .path import abspath
-from .path import combine
-from .path import normpath
+from .path import abspath, combine, normpath
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import (
Any,
Callable,
Collection,
Iterator,
List,
- Optional,
MutableMapping,
+ Optional,
Text,
Tuple,
Type,
)
+
from .base import FS
from .info import Info
@@ -52,34 +48,7 @@
class Walker(object):
- """A walker object recursively lists directories in a filesystem.
-
- Arguments:
- ignore_errors (bool): If `True`, any errors reading a
- directory will be ignored, otherwise exceptions will
- be raised.
- on_error (callable, optional): If ``ignore_errors`` is `False`,
- then this callable will be invoked for a path and the exception
- object. It should return `True` to ignore the error, or `False`
- to re-raise it.
- search (str): If ``'breadth'`` then the directory will be
- walked *top down*. Set to ``'depth'`` to walk *bottom up*.
- filter (list, optional): If supplied, this parameter should be
- a list of filename patterns, e.g. ``['*.py']``. Files will
- only be returned if the final component matches one of the
- patterns.
- exclude (list, optional): If supplied, this parameter should be
- a list of filename patterns, e.g. ``['~*']``. Files matching
- any of these patterns will be removed from the walk.
- filter_dirs (list, optional): A list of patterns that will be used
- to match directories paths. The walk will only open directories
- that match at least one of these patterns.
- exclude_dirs (list, optional): A list of patterns that will be
- used to filter out directories from the walk. e.g.
- ``['*.svn', '*.git']``.
- max_depth (int, optional): Maximum directory depth to walk.
-
- """
+ """A walker object recursively lists directories in a filesystem."""
def __init__(
self,
@@ -91,8 +60,49 @@ def __init__(
filter_dirs=None, # type: Optional[List[Text]]
exclude_dirs=None, # type: Optional[List[Text]]
max_depth=None, # type: Optional[int]
+ filter_glob=None, # type: Optional[List[Text]]
+ exclude_glob=None, # type: Optional[List[Text]]
):
# type: (...) -> None
+ """Create a new `Walker` instance.
+
+ Arguments:
+ ignore_errors (bool): If `True`, any errors reading a
+ directory will be ignored, otherwise exceptions will
+ be raised.
+ on_error (callable, optional): If ``ignore_errors`` is `False`,
+ then this callable will be invoked for a path and the
+ exception object. It should return `True` to ignore the error,
+ or `False` to re-raise it.
+ search (str): If ``"breadth"`` then the directory will be
+ walked *top down*. Set to ``"depth"`` to walk *bottom up*.
+ filter (list, optional): If supplied, this parameter should be
+ a list of filename patterns, e.g. ``["*.py"]``. Files will
+ only be returned if the final component matches one of the
+ patterns.
+ exclude (list, optional): If supplied, this parameter should be
+ a list of filename patterns, e.g. ``["~*"]``. Files matching
+ any of these patterns will be removed from the walk.
+ filter_dirs (list, optional): A list of patterns that will be used
+ to match directories names. The walk will only open directories
+ that match at least one of these patterns. Directories will
+ only be returned if the final component matches one of the
+ patterns.
+ exclude_dirs (list, optional): A list of patterns that will be
+ used to filter out directories from the walk. e.g.
+ ``['*.svn', '*.git']``. Directory names matching any of these
+ patterns will be removed from the walk.
+ max_depth (int, optional): Maximum directory depth to walk.
+ filter_glob (list, optional): If supplied, this parameter
+ should be a list of path patterns e.g. ``["foo/**/*.py"]``.
+ Resources will only be returned if their global path or
+ an extension of it matches one of the patterns.
+ exclude_glob (list, optional): If supplied, this parameter
+ should be a list of path patterns e.g. ``["foo/**/*.pyc"]``.
+ Resources will not be returned if their global path or
+ an extension of it matches one of the patterns.
+
+ """
if search not in ("breadth", "depth"):
raise ValueError("search must be 'breadth' or 'depth'")
self.ignore_errors = ignore_errors
@@ -110,27 +120,27 @@ def __init__(
self.exclude = exclude
self.filter_dirs = filter_dirs
self.exclude_dirs = exclude_dirs
+ self.filter_glob = filter_glob
+ self.exclude_glob = exclude_glob
self.max_depth = max_depth
super(Walker, self).__init__()
@classmethod
def _ignore_errors(cls, path, error):
# type: (Text, Exception) -> bool
- """Default on_error callback."""
+ """Ignore dir scan errors when called."""
return True
@classmethod
def _raise_errors(cls, path, error):
# type: (Text, Exception) -> bool
- """Callback to re-raise dir scan errors."""
+ """Re-raise dir scan errors when called."""
return False
@classmethod
def _calculate_depth(cls, path):
# type: (Text) -> int
- """Calculate the 'depth' of a directory path (number of
- components).
- """
+ """Calculate the 'depth' of a directory path (i.e. count components)."""
_path = path.strip("/")
return _path.count("/") + 1 if _path else 0
@@ -149,24 +159,24 @@ def bind(cls, fs):
Returns:
~fs.walk.BoundWalker: a bound walker.
- Example:
- >>> from fs import open_fs
- >>> from fs.walk import Walker
- >>> home_fs = open_fs('~/')
- >>> walker = Walker.bind(home_fs)
- >>> for path in walker.files(filter=['*.py']):
- ... print(path)
-
- Unless you have written a customized walker class, you will be
- unlikely to need to call this explicitly, as filesystem objects
- already have a ``walk`` attribute which is a bound walker
- object.
+ Examples:
+ Use this method to explicitly bind a filesystem instance::
- Example:
- >>> from fs import open_fs
- >>> home_fs = open_fs('~/')
- >>> for path in home_fs.walk.files(filter=['*.py']):
- ... print(path)
+ >>> walker = Walker.bind(my_fs)
+ >>> for path in walker.files(filter=['*.py']):
+ ... print(path)
+ /foo.py
+ /bar.py
+
+ Unless you have written a customized walker class, you will
+ be unlikely to need to call this explicitly, as filesystem
+ objects already have a ``walk`` attribute which is a bound
+ walker object::
+
+ >>> for path in my_fs.walk.files(filter=['*.py']):
+ ... print(path)
+ /foo.py
+ /bar.py
"""
return BoundWalker(fs)
@@ -183,6 +193,8 @@ def __repr__(self):
filter_dirs=(self.filter_dirs, None),
exclude_dirs=(self.exclude_dirs, None),
max_depth=(self.max_depth, None),
+ filter_glob=(self.filter_glob, None),
+ exclude_glob=(self.exclude_glob, None),
)
def _iter_walk(
@@ -200,12 +212,20 @@ def _iter_walk(
def _check_open_dir(self, fs, path, info):
# type: (FS, Text, Info) -> bool
- """Check if a directory should be considered in the walk.
- """
+ """Check if a directory should be considered in the walk."""
+ full_path = combine(path, info.name)
if self.exclude_dirs is not None and fs.match(self.exclude_dirs, info.name):
return False
+ if self.exclude_glob is not None and fs.match_glob(
+ self.exclude_glob, full_path
+ ):
+ return False
if self.filter_dirs is not None and not fs.match(self.filter_dirs, info.name):
return False
+ if self.filter_glob is not None and not fs.match_glob(
+ self.filter_glob, full_path, accept_prefix=True
+ ):
+ return False
return self.check_open_dir(fs, path, info)
def check_open_dir(self, fs, path, info):
@@ -251,6 +271,26 @@ def check_scan_dir(self, fs, path, info):
"""
return True
+ def _check_file(self, fs, dir_path, info):
+ # type: (FS, Text, Info) -> bool
+ """Check if a filename should be included."""
+ # Weird check required for backwards compatibility,
+ # when _check_file did not exist.
+ if Walker._check_file == type(self)._check_file:
+ if self.exclude is not None and fs.match(self.exclude, info.name):
+ return False
+ if self.exclude_glob is not None and fs.match_glob(
+ self.exclude_glob, dir_path + "/" + info.name
+ ):
+ return False
+ if self.filter is not None and not fs.match(self.filter, info.name):
+ return False
+ if self.filter_glob is not None and not fs.match_glob(
+ self.filter_glob, dir_path + "/" + info.name, accept_prefix=True
+ ):
+ return False
+ return self.check_file(fs, info)
+
def check_file(self, fs, info):
# type: (FS, Info) -> bool
"""Check if a filename should be included.
@@ -265,10 +305,7 @@ def check_file(self, fs, info):
bool: `True` if the file should be included.
"""
-
- if self.exclude is not None and fs.match(self.exclude, info.name):
- return False
- return fs.match(self.filter, info.name)
+ return True
def _scan(
self,
@@ -295,7 +332,7 @@ def _scan(
yield info
except FSError as error:
if not self.on_error(dir_path, error):
- six.reraise(type(error), error)
+ raise
def walk(
self,
@@ -321,14 +358,16 @@ def walk(
`~fs.info.Info` objects for directories and files in ````.
Example:
- >>> home_fs = open_fs('~/')
>>> walker = Walker(filter=['*.py'])
- >>> namespaces = ['details']
- >>> for path, dirs, files in walker.walk(home_fs, namespaces)
+ >>> for path, dirs, files in walker.walk(my_fs, namespaces=["details"]):
... print("[{}]".format(path))
... print("{} directories".format(len(dirs)))
... total = sum(info.size for info in files)
- ... print("{} bytes {}".format(total))
+ ... print("{} bytes".format(total))
+ [/]
+ 2 directories
+ 55 bytes
+ ...
"""
_path = abspath(normpath(path))
@@ -413,8 +452,7 @@ def _walk_breadth(
namespaces=None, # type: Optional[Collection[Text]]
):
# type: (...) -> Iterator[Tuple[Text, Optional[Info]]]
- """Walk files using a *breadth first* search.
- """
+ """Walk files using a *breadth first* search."""
queue = deque([path])
push = queue.appendleft
pop = queue.pop
@@ -424,7 +462,7 @@ def _walk_breadth(
_calculate_depth = self._calculate_depth
_check_open_dir = self._check_open_dir
_check_scan_dir = self._check_scan_dir
- _check_file = self.check_file
+ _check_file = self._check_file
depth = _calculate_depth(path)
@@ -438,7 +476,7 @@ def _walk_breadth(
if _check_scan_dir(fs, dir_path, info, _depth):
push(_combine(dir_path, info.name))
else:
- if _check_file(fs, info):
+ if _check_file(fs, dir_path, info):
yield dir_path, info # Found a file
yield dir_path, None # End of directory
@@ -449,8 +487,7 @@ def _walk_depth(
namespaces=None, # type: Optional[Collection[Text]]
):
# type: (...) -> Iterator[Tuple[Text, Optional[Info]]]
- """Walk files using a *depth first* search.
- """
+ """Walk files using a *depth first* search."""
# No recursion!
_combine = combine
@@ -458,7 +495,7 @@ def _walk_depth(
_calculate_depth = self._calculate_depth
_check_open_dir = self._check_open_dir
_check_scan_dir = self._check_scan_dir
- _check_file = self.check_file
+ _check_file = self._check_file
depth = _calculate_depth(path)
stack = [
@@ -490,35 +527,37 @@ def _walk_depth(
else:
yield dir_path, info
else:
- if _check_file(fs, info):
+ if _check_file(fs, dir_path, info):
yield dir_path, info
class BoundWalker(typing.Generic[_F]):
"""A class that binds a `Walker` instance to a `FS` instance.
- Arguments:
- fs (FS): A filesystem instance.
- walker_class (type): A `~fs.walk.WalkerBase`
- sub-class. The default uses `~fs.walk.Walker`.
-
You will typically not need to create instances of this class
explicitly. Filesystems have a `~FS.walk` property which returns a
`BoundWalker` object.
Example:
- >>> import fs
- >>> home_fs = fs.open_fs('~/')
- >>> home_fs.walk
- BoundWalker(OSFS('/Users/will', encoding='utf-8'))
+ >>> tmp_fs = fs.tempfs.TempFS()
+ >>> tmp_fs.walk
+ BoundWalker(TempFS())
- A `BoundWalker` is callable. Calling it is an alias for
- `~fs.walk.BoundWalker.walk`.
+ A `BoundWalker` is callable. Calling it is an alias for the
+ `~fs.walk.BoundWalker.walk` method.
"""
def __init__(self, fs, walker_class=Walker):
# type: (_F, Type[Walker]) -> None
+ """Create a new walker bound to the given filesystem.
+
+ Arguments:
+ fs (FS): A filesystem instance.
+ walker_class (type): A `~fs.walk.WalkerBase`
+ sub-class. The default uses `~fs.walk.Walker`.
+
+ """
self.fs = fs
self.walker_class = walker_class
@@ -528,8 +567,7 @@ def __repr__(self):
def _make_walker(self, *args, **kwargs):
# type: (*Any, **Any) -> Walker
- """Create a walker instance.
- """
+ """Create a walker instance."""
walker = self.walker_class(*args, **kwargs)
return walker
@@ -580,13 +618,16 @@ def walk(
`~fs.info.Info` objects for directories and files in ````.
Example:
- >>> home_fs = open_fs('~/')
>>> walker = Walker(filter=['*.py'])
- >>> for path, dirs, files in walker.walk(home_fs, namespaces=['details']):
+ >>> for path, dirs, files in walker.walk(my_fs, namespaces=['details']):
... print("[{}]".format(path))
... print("{} directories".format(len(dirs)))
... total = sum(info.size for info in files)
- ... print("{} bytes {}".format(total))
+ ... print("{} bytes".format(total))
+ [/]
+ 2 directories
+ 55 bytes
+ ...
This method invokes `Walker.walk` with bound `FS` object.
diff --git a/fs/wildcard.py b/fs/wildcard.py
index b9f58591..cc6c0530 100644
--- a/fs/wildcard.py
+++ b/fs/wildcard.py
@@ -2,17 +2,17 @@
"""
# Adapted from https://hg.python.org/cpython/file/2.7/Lib/fnmatch.py
-from __future__ import unicode_literals, print_function
+from __future__ import print_function, unicode_literals
-import re
import typing
+
+import re
from functools import partial
from .lrucache import LRUCache
-from . import path
-if False: # typing.TYPE_CHECKING
- from typing import Callable, Iterable, MutableMapping, Text, Tuple, Pattern
+if typing.TYPE_CHECKING:
+ from typing import Callable, Iterable, Pattern, Text, Tuple
_PATTERN_CACHE = LRUCache(1000) # type: LRUCache[Tuple[Text, bool], Pattern]
@@ -33,7 +33,7 @@ def match(pattern, name):
try:
re_pat = _PATTERN_CACHE[(pattern, True)]
except KeyError:
- res = "(?ms)" + _translate(pattern) + r'\Z'
+ res = "(?ms)" + _translate(pattern) + r"\Z"
_PATTERN_CACHE[(pattern, True)] = re_pat = re.compile(res)
return re_pat.match(name) is not None
@@ -53,7 +53,7 @@ def imatch(pattern, name):
try:
re_pat = _PATTERN_CACHE[(pattern, False)]
except KeyError:
- res = "(?ms)" + _translate(pattern, case_sensitive=False) + r'\Z'
+ res = "(?ms)" + _translate(pattern, case_sensitive=False) + r"\Z"
_PATTERN_CACHE[(pattern, False)] = re_pat = re.compile(res, re.IGNORECASE)
return re_pat.match(name) is not None
@@ -147,14 +147,14 @@ def _translate(pattern, case_sensitive=True):
if not case_sensitive:
pattern = pattern.lower()
i, n = 0, len(pattern)
- res = ""
+ res = []
while i < n:
c = pattern[i]
i = i + 1
if c == "*":
- res = res + "[^/]*"
+ res.append("[^/]*")
elif c == "?":
- res = res + "."
+ res.append(".")
elif c == "[":
j = i
if j < n and pattern[j] == "!":
@@ -164,7 +164,7 @@ def _translate(pattern, case_sensitive=True):
while j < n and pattern[j] != "]":
j = j + 1
if j >= n:
- res = res + "\\["
+ res.append("\\[")
else:
stuff = pattern[i:j].replace("\\", "\\\\")
i = j + 1
@@ -172,7 +172,7 @@ def _translate(pattern, case_sensitive=True):
stuff = "^" + stuff[1:]
elif stuff[0] == "^":
stuff = "\\" + stuff
- res = "%s[%s]" % (res, stuff)
+ res.append("[%s]" % stuff)
else:
- res = res + re.escape(c)
- return res
+ res.append(re.escape(c))
+ return "".join(res)
diff --git a/fs/wrap.py b/fs/wrap.py
index d8aa7054..de38d083 100644
--- a/fs/wrap.py
+++ b/fs/wrap.py
@@ -2,45 +2,45 @@
Here's an example that opens a filesystem then makes it *read only*::
- >>> from fs import open_fs
- >>> from fs.wrap import read_only
- >>> projects_fs = open_fs('~/projects')
- >>> read_only_projects_fs = read_only(projects_fs)
- >>> read_only_projects_fs.remove('__init__.py')
+ >>> home_fs = fs.open_fs('~')
+ >>> read_only_home_fs = fs.wrap.read_only(home_fs)
+ >>> read_only_home_fs.removedir('Desktop')
Traceback (most recent call last):
...
- fs.errors.ResourceReadOnly: resource '__init__.py' is read only
+ fs.errors.ResourceReadOnly: resource 'Desktop' is read only
"""
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import print_function, unicode_literals
import typing
-from .wrapfs import WrapFS
-from .path import abspath, normpath, split
-from .errors import ResourceReadOnly, ResourceNotFound
+from .errors import ResourceNotFound, ResourceReadOnly
from .info import Info
from .mode import check_writable
+from .path import abspath, normpath, split
+from .wrapfs import WrapFS
-if False: # typing.TYPE_CHECKING
- from datetime import datetime
+if typing.TYPE_CHECKING:
from typing import (
+ IO,
Any,
BinaryIO,
Collection,
Dict,
Iterator,
- IO,
+ Mapping,
Optional,
Text,
Tuple,
)
- from .base import FS
- from .info import Info, RawInfo
- from .subfs import SubFS
+
+ from datetime import datetime
+
+ from .base import FS # noqa: F401
+ from .info import RawInfo
from .permissions import Permissions
+ from .subfs import SubFS
_W = typing.TypeVar("_W", bound="WrapFS")
@@ -92,9 +92,25 @@ class WrapCachedDir(WrapFS[_F], typing.Generic[_F]):
"""
+ # FIXME (@althonos): The caching data structure can very likely be
+ # improved. With the current implementation, if `scandir` result was
+ # cached for `namespaces=["details", "access"]`, calling `scandir`
+ # again only with `names=["details"]` will miss the cache, even though
+ # we are already storing the totality of the required metadata.
+ #
+ # A possible solution would be to replaced the cached with a
+ # Dict[Text, Dict[Text, Dict[Text, Info]]]
+ # ^ ^ ^ ^-- the actual info object
+ # | | \-- the path of the directory entry
+ # | \-- the namespace of the info
+ # \-- the cached directory entry
+ #
+ # Furthermore, `listdir` and `filterdir` calls should be cached as well,
+ # since they can be written as wrappers of `scandir`.
+
wrap_name = "cached-dir"
- def __init__(self, wrap_fs):
+ def __init__(self, wrap_fs): # noqa: D107
# type: (_F) -> None
super(WrapCachedDir, self).__init__(wrap_fs)
self._cache = {} # type: Dict[Tuple[Text, frozenset], Dict[Text, Info]]
@@ -135,13 +151,17 @@ def getinfo(self, path, namespaces=None):
def isdir(self, path):
# type: (Text) -> bool
- # FIXME(@althonos): this raises an error on non-existing file !
- return self.getinfo(path).is_dir
+ try:
+ return self.getinfo(path).is_dir
+ except ResourceNotFound:
+ return False
def isfile(self, path):
# type: (Text) -> bool
- # FIXME(@althonos): this raises an error on non-existing file !
- return not self.getinfo(path).is_dir
+ try:
+ return not self.getinfo(path).is_dir
+ except ResourceNotFound:
+ return False
class WrapReadOnly(WrapFS[_F], typing.Generic[_F]):
@@ -181,8 +201,8 @@ def makedir(
self.check()
raise ResourceReadOnly(path)
- def move(self, src_path, dst_path, overwrite=False):
- # type: (Text, Text, bool) -> None
+ def move(self, src_path, dst_path, overwrite=False, preserve_time=False):
+ # type: (Text, Text, bool, bool) -> None
self.check()
raise ResourceReadOnly(dst_path)
@@ -203,6 +223,11 @@ def removedir(self, path):
self.check()
raise ResourceReadOnly(path)
+ def removetree(self, path):
+ # type: (Text) -> None
+ self.check()
+ raise ResourceReadOnly(path)
+
def setinfo(self, path, info):
# type: (Text, RawInfo) -> None
self.check()
@@ -225,8 +250,8 @@ def settimes(self, path, accessed=None, modified=None):
self.check()
raise ResourceReadOnly(path)
- def copy(self, src_path, dst_path, overwrite=False):
- # type: (Text, Text, bool) -> None
+ def copy(self, src_path, dst_path, overwrite=False, preserve_time=False):
+ # type: (Text, Text, bool, bool) -> None
self.check()
raise ResourceReadOnly(dst_path)
@@ -297,3 +322,10 @@ def touch(self, path):
# type: (Text) -> None
self.check()
raise ResourceReadOnly(path)
+
+ def getmeta(self, namespace="standard"):
+ # type: (Text) -> Mapping[Text, object]
+ self.check()
+ meta = dict(self.delegate_fs().getmeta(namespace=namespace))
+ meta.update(read_only=True, supports_rename=False)
+ return meta
diff --git a/fs/wrapfs.py b/fs/wrapfs.py
index 5ff7d3d2..abbbe4e3 100644
--- a/fs/wrapfs.py
+++ b/fs/wrapfs.py
@@ -3,40 +3,38 @@
from __future__ import unicode_literals
-import copy
import typing
import six
from . import errors
from .base import FS
-from .copy import copy_file
-from .info import Info
-from .move import move_file
-from .path import abspath, normpath
+from .copy import copy_dir, copy_file
from .error_tools import unwrap_errors
+from .info import Info
+from .path import abspath, join, normpath
-if False: # typing.TYPE_CHECKING
- from datetime import datetime
- from threading import RLock
+if typing.TYPE_CHECKING:
from typing import (
+ IO,
Any,
AnyStr,
BinaryIO,
Callable,
Collection,
- Dict,
- Iterator,
Iterable,
- IO,
+ Iterator,
List,
Mapping,
Optional,
Text,
- TextIO,
Tuple,
Union,
)
+
+ from datetime import datetime
+ from threading import RLock
+
from .enums import ResourceType
from .info import RawInfo
from .permissions import Permissions
@@ -63,7 +61,7 @@ class WrapFS(FS, typing.Generic[_F]):
wrap_name = None # type: Optional[Text]
- def __init__(self, wrap_fs):
+ def __init__(self, wrap_fs): # noqa: D107
# type: (_F) -> None
self._wrap_fs = wrap_fs
super(WrapFS, self).__init__()
@@ -170,15 +168,23 @@ def makedir(
with unwrap_errors(path):
return _fs.makedir(_path, permissions=permissions, recreate=recreate)
- def move(self, src_path, dst_path, overwrite=False):
- # type: (Text, Text, bool) -> None
- # A custom move permits a potentially optimized code path
- src_fs, _src_path = self.delegate_path(src_path)
- dst_fs, _dst_path = self.delegate_path(dst_path)
+ def move(self, src_path, dst_path, overwrite=False, preserve_time=False):
+ # type: (Text, Text, bool, bool) -> None
+ _fs, _src_path = self.delegate_path(src_path)
+ _, _dst_path = self.delegate_path(dst_path)
with unwrap_errors({_src_path: src_path, _dst_path: dst_path}):
- if not overwrite and dst_fs.exists(_dst_path):
- raise errors.DestinationExists(_dst_path)
- move_file(src_fs, _src_path, dst_fs, _dst_path)
+ _fs.move(
+ _src_path, _dst_path, overwrite=overwrite, preserve_time=preserve_time
+ )
+
+ def movedir(self, src_path, dst_path, create=False, preserve_time=False):
+ # type: (Text, Text, bool, bool) -> None
+ _fs, _src_path = self.delegate_path(src_path)
+ _, _dst_path = self.delegate_path(dst_path)
+ with unwrap_errors({_src_path: src_path, _dst_path: dst_path}):
+ _fs.movedir(
+ _src_path, _dst_path, create=create, preserve_time=preserve_time
+ )
def openbin(self, path, mode="r", buffering=-1, **options):
# type: (Text, Text, int, **Any) -> BinaryIO
@@ -205,6 +211,25 @@ def removedir(self, path):
with unwrap_errors(path):
_fs.removedir(_path)
+ def removetree(self, dir_path):
+ # type: (Text) -> None
+ self.check()
+ _path = abspath(normpath(dir_path))
+ _delegate_fs, _delegate_path = self.delegate_path(dir_path)
+ with unwrap_errors(dir_path):
+ if _path == "/":
+ # with root path, we must remove the contents but
+ # not the directory itself, so we can't just directly
+ # delegate
+ for info in _delegate_fs.scandir(_delegate_path):
+ info_path = join(_delegate_path, info.name)
+ if info.is_dir:
+ _delegate_fs.removetree(info_path)
+ else:
+ _delegate_fs.remove(info_path)
+ else:
+ _delegate_fs.removetree(_delegate_path)
+
def scandir(
self,
path, # type: Text
@@ -238,14 +263,25 @@ def touch(self, path):
with unwrap_errors(path):
_fs.touch(_path)
- def copy(self, src_path, dst_path, overwrite=False):
- # type: (Text, Text, bool) -> None
+ def copy(self, src_path, dst_path, overwrite=False, preserve_time=False):
+ # type: (Text, Text, bool, bool) -> None
src_fs, _src_path = self.delegate_path(src_path)
dst_fs, _dst_path = self.delegate_path(dst_path)
with unwrap_errors({_src_path: src_path, _dst_path: dst_path}):
if not overwrite and dst_fs.exists(_dst_path):
raise errors.DestinationExists(_dst_path)
- copy_file(src_fs, _src_path, dst_fs, _dst_path)
+ copy_file(src_fs, _src_path, dst_fs, _dst_path, preserve_time=preserve_time)
+
+ def copydir(self, src_path, dst_path, create=False, preserve_time=False):
+ # type: (Text, Text, bool, bool) -> None
+ src_fs, _src_path = self.delegate_path(src_path)
+ dst_fs, _dst_path = self.delegate_path(dst_path)
+ with unwrap_errors({_src_path: src_path, _dst_path: dst_path}):
+ if not create and not dst_fs.exists(_dst_path):
+ raise errors.ResourceNotFound(dst_path)
+ if not src_fs.getinfo(_src_path).is_dir:
+ raise errors.DirectoryExpected(src_path)
+ copy_dir(src_fs, _src_path, dst_fs, _dst_path, preserve_time=preserve_time)
def create(self, path, wipe=False):
# type: (Text, bool) -> bool
@@ -262,6 +298,13 @@ def desc(self, path):
desc = _fs.desc(_path)
return desc
+ def download(self, path, file, chunk_size=None, **options):
+ # type: (Text, BinaryIO, Optional[int], **Any) -> None
+ self.check()
+ _fs, _path = self.delegate_path(path)
+ with unwrap_errors(path):
+ _fs.download(_path, file, chunk_size=chunk_size, **options)
+
def exists(self, path):
# type: (Text) -> bool
self.check()
diff --git a/fs/zipfs.py b/fs/zipfs.py
index 1fdf463b..87e41f5e 100644
--- a/fs/zipfs.py
+++ b/fs/zipfs.py
@@ -1,29 +1,30 @@
"""Manage the filesystem in a Zip archive.
"""
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import print_function, unicode_literals
+import sys
import typing
-import zipfile
-from datetime import datetime
import six
+import zipfile
+from datetime import datetime
from . import errors
+from ._url_tools import url_quote
from .base import FS
from .compress import write_zip
from .enums import ResourceType, Seek
from .info import Info
from .iotools import RawWrapper
-from .permissions import Permissions
from .memoryfs import MemoryFS
from .opener import open_fs
from .path import dirname, forcedir, normpath, relpath
+from .permissions import Permissions
from .time import datetime_to_epoch
from .wrapfs import WrapFS
-if False: # typing.TYPE_CHECKING
+if typing.TYPE_CHECKING:
from typing import (
Any,
BinaryIO,
@@ -36,6 +37,7 @@
Tuple,
Union,
)
+
from .info import RawInfo
from .subfs import SubFS
@@ -43,91 +45,146 @@
class _ZipExtFile(RawWrapper):
- def __init__(self, fs, name):
+ def __init__(self, fs, name): # noqa: D107
# type: (ReadZipFS, Text) -> None
self._zip = _zip = fs._zip
self._end = _zip.getinfo(name).file_size
self._pos = 0
super(_ZipExtFile, self).__init__(_zip.open(name), "r", name)
- def read(self, size=-1):
- # type: (int) -> bytes
- buf = self._f.read(-1 if size is None else size)
- self._pos += len(buf)
- return buf
-
- def read1(self, size=-1):
- # type: (int) -> bytes
- buf = self._f.read1(-1 if size is None else size) # type: ignore
- self._pos += len(buf)
- return buf
-
- def seek(self, offset, whence=Seek.set):
- # type: (int, SupportsInt) -> int
- """Change stream position.
-
- Change the stream position to the given byte offset. The
- offset is interpreted relative to the position indicated by
- ``whence``.
-
- Arguments:
- offset (int): the offset to the new position, in bytes.
- whence (int): the position reference. Possible values are:
- * `Seek.set`: start of stream (the default).
- * `Seek.current`: current position; offset may be negative.
- * `Seek.end`: end of stream; offset must be negative.
-
- Returns:
- int: the new absolute position.
-
- Raises:
- ValueError: when ``whence`` is not known, or ``offset``
- is invalid.
-
- Note:
- Zip compression does not support seeking, so the seeking
- is emulated. Seeking somewhere else than the current position
- will need to either:
- * reopen the file and restart decompression
- * read and discard data to advance in the file
-
- """
- _whence = int(whence)
- if _whence == Seek.current:
- offset += self._pos
- if _whence == Seek.current or _whence == Seek.set:
- if offset < 0:
- raise ValueError("Negative seek position {}".format(offset))
- elif _whence == Seek.end:
- if offset > 0:
- raise ValueError("Positive seek position {}".format(offset))
- offset += self._end
- else:
- raise ValueError(
- "Invalid whence ({}, should be {}, {} or {})".format(
- _whence, Seek.set, Seek.current, Seek.end
+ # NOTE(@althonos): Starting from Python 3.7, files inside a Zip archive are
+ # seekable provided they were opened from a seekable file
+ # handle. Before that, we can emulate a seek using the
+ # read method, although it adds a ton of overhead and is
+ # way less efficient than extracting once to a BytesIO.
+ if sys.version_info < (3, 7):
+
+ def read(self, size=-1):
+ # type: (int) -> bytes
+ buf = self._f.read(-1 if size is None else size)
+ self._pos += len(buf)
+ return buf
+
+ def read1(self, size=-1):
+ # type: (int) -> bytes
+ buf = self._f.read1(-1 if size is None else size) # type: ignore
+ self._pos += len(buf)
+ return buf
+
+ def tell(self):
+ # type: () -> int
+ return self._pos
+
+ def seekable(self):
+ return True
+
+ def seek(self, offset, whence=Seek.set):
+ # type: (int, SupportsInt) -> int
+ """Change stream position.
+
+ Change the stream position to the given byte offset. The
+ offset is interpreted relative to the position indicated by
+ ``whence``.
+
+ Arguments:
+ offset (int): the offset to the new position, in bytes.
+ whence (int): the position reference. Possible values are:
+ * `Seek.set`: start of stream (the default).
+ * `Seek.current`: current position; offset may be negative.
+ * `Seek.end`: end of stream; offset must be negative.
+
+ Returns:
+ int: the new absolute position.
+
+ Raises:
+ ValueError: when ``whence`` is not known, or ``offset``
+ is invalid.
+
+ Note:
+ Zip compression does not support seeking, so the seeking
+ is emulated. Seeking somewhere else than the current position
+ will need to either:
+ * reopen the file and restart decompression
+ * read and discard data to advance in the file
+
+ """
+ _whence = int(whence)
+ if _whence == Seek.current:
+ offset += self._pos
+ if _whence == Seek.current or _whence == Seek.set:
+ if offset < 0:
+ raise ValueError("Negative seek position {}".format(offset))
+ elif _whence == Seek.end:
+ if offset > 0:
+ raise ValueError("Positive seek position {}".format(offset))
+ offset += self._end
+ else:
+ raise ValueError(
+ "Invalid whence ({}, should be {}, {} or {})".format(
+ _whence, Seek.set, Seek.current, Seek.end
+ )
)
- )
- if offset < self._pos:
- self._f = self._zip.open(self.name) # type: ignore
- self._pos = 0
- self.read(offset - self._pos)
- return self._pos
+ if offset < self._pos:
+ self._f = self._zip.open(self.name) # type: ignore
+ self._pos = 0
+ self.read(offset - self._pos)
+ return self._pos
+
+ else:
+
+ def seek(self, offset, whence=Seek.set):
+ # type: (int, SupportsInt) -> int
+ """Change stream position.
+
+ Change the stream position to the given byte offset. The
+ offset is interpreted relative to the position indicated by
+ ``whence``.
+
+ Arguments:
+ offset (int): the offset to the new position, in bytes.
+ whence (int): the position reference. Possible values are:
+ * `Seek.set`: start of stream (the default).
+ * `Seek.current`: current position; offset may be negative.
+ * `Seek.end`: end of stream; offset must be negative.
+
+ Returns:
+ int: the new absolute position.
+
+ Raises:
+ ValueError: when ``whence`` is not known, or ``offset``
+ is invalid.
+
+ """
+ _whence = int(whence)
+ _pos = self.tell()
+ if _whence == Seek.set:
+ if offset < 0:
+ raise ValueError("Negative seek position {}".format(offset))
+ elif _whence == Seek.current:
+ if _pos + offset < 0:
+ raise ValueError("Negative seek position {}".format(offset))
+ elif _whence == Seek.end:
+ if offset > 0:
+ raise ValueError("Positive seek position {}".format(offset))
+ else:
+ raise ValueError(
+ "Invalid whence ({}, should be {}, {} or {})".format(
+ _whence, Seek.set, Seek.current, Seek.end
+ )
+ )
- def tell(self):
- # type: () -> int
- return self._pos
+ return self._f.seek(offset, _whence)
class ZipFS(WrapFS):
"""Read and write zip files.
- There are two ways to open a ZipFS for the use cases of reading
+ There are two ways to open a `ZipFS` for the use cases of reading
a zip file, and creating a new one.
- If you open the ZipFS with ``write`` set to `False` (the default)
- then the filesystem will be a read only filesystem which maps to
+ If you open the `ZipFS` with ``write`` set to `False` (the default)
+ then the filesystem will be a read-only filesystem which maps to
the files and directories within the zip file. Files are
decompressed on the fly when you open them.
@@ -136,12 +193,12 @@ class ZipFS(WrapFS):
with ZipFS('foo.zip') as zip_fs:
readme = zip_fs.readtext('readme.txt')
- If you open the ZipFS with ``write`` set to `True`, then the ZipFS
- will be a empty temporary filesystem. Any files / directories you
- create in the ZipFS will be written in to a zip file when the ZipFS
+ If you open the `ZipFS` with ``write`` set to `True`, then the `ZipFS`
+ will be an empty temporary filesystem. Any files / directories you
+ create in the `ZipFS` will be written in to a zip file when the `ZipFS`
is closed.
- Here's how you might write a new zip file containing a readme.txt
+ Here's how you might write a new zip file containing a ``readme.txt``
file::
with ZipFS('foo.zip', write=True) as new_zip:
@@ -157,18 +214,20 @@ class ZipFS(WrapFS):
(default) to read an existing zip file.
compression (int): Compression to use (one of the constants
defined in the `zipfile` module in the stdlib).
- temp_fs (str): An FS URL for the temporary filesystem used to
- store data prior to zipping.
+ temp_fs (str or FS): An FS URL or an FS instance to use to
+ store data prior to zipping. Defaults to creating a new
+ `~fs.tempfs.TempFS`.
"""
- def __new__(
+ # TODO: __new__ returning different types may be too 'magical'
+ def __new__( # type: ignore
cls,
file, # type: Union[Text, BinaryIO]
write=False, # type: bool
compression=zipfile.ZIP_DEFLATED, # type: int
encoding="utf-8", # type: Text
- temp_fs="temp://__ziptemp__", # type: Text
+ temp_fs="temp://__ziptemp__", # type: Union[Text, FS]
):
# type: (...) -> FS
# This magic returns a different class instance based on the
@@ -180,7 +239,7 @@ def __new__(
else:
return ReadZipFS(file, encoding=encoding)
- if False: # typing.TYPE_CHECKING
+ if typing.TYPE_CHECKING:
def __init__(
self,
@@ -189,23 +248,22 @@ def __init__(
compression=zipfile.ZIP_DEFLATED, # type: int
encoding="utf-8", # type: Text
temp_fs="temp://__ziptemp__", # type: Text
- ):
+ ): # noqa: D107
# type: (...) -> None
pass
@six.python_2_unicode_compatible
class WriteZipFS(WrapFS):
- """A writable zip file.
- """
+ """A writable zip file."""
def __init__(
self,
file, # type: Union[Text, BinaryIO]
compression=zipfile.ZIP_DEFLATED, # type: int
encoding="utf-8", # type: Text
- temp_fs="temp://__ziptemp__", # type: Text
- ):
+ temp_fs="temp://__ziptemp__", # type: Union[Text, FS]
+ ): # noqa: D107
# type: (...) -> None
self._file = file
self.compression = compression
@@ -274,11 +332,10 @@ def write_zip(
@six.python_2_unicode_compatible
class ReadZipFS(FS):
- """A readable zip file.
- """
+ """A readable zip file."""
_meta = {
- "case_insensitive": True,
+ "case_insensitive": False,
"network": False,
"read_only": True,
"supports_rename": False,
@@ -288,7 +345,7 @@ class ReadZipFS(FS):
}
@errors.CreateFailed.catch_all
- def __init__(self, file, encoding="utf-8"):
+ def __init__(self, file, encoding="utf-8"): # noqa: D107
# type: (Union[BinaryIO, Text], Text) -> None
super(ReadZipFS, self).__init__()
self._file = file
@@ -306,8 +363,7 @@ def __str__(self):
def _path_to_zip_name(self, path):
# type: (Text) -> str
- """Convert a path to a zip file name.
- """
+ """Convert a path to a zip file name."""
path = relpath(normpath(path))
if self._directory.isdir(path):
path = forcedir(path)
@@ -318,8 +374,7 @@ def _path_to_zip_name(self, path):
@property
def _directory(self):
# type: () -> MemoryFS
- """`MemoryFS`: a filesystem with the same folder hierarchy as the zip.
- """
+ """`MemoryFS`: a filesystem with the same folder hierarchy as the zip."""
self.check()
with self._lock:
if self._directory_fs is None:
@@ -434,7 +489,8 @@ def removedir(self, path):
def close(self):
# type: () -> None
super(ReadZipFS, self).close()
- self._zip.close()
+ if hasattr(self, "_zip"):
+ self._zip.close()
def readbytes(self, path):
# type: (Text) -> bytes
@@ -444,3 +500,12 @@ def readbytes(self, path):
zip_name = self._path_to_zip_name(path)
zip_bytes = self._zip.read(zip_name)
return zip_bytes
+
+ def geturl(self, path, purpose="download"):
+ # type: (Text, Text) -> Text
+ if purpose == "fs" and isinstance(self._file, six.string_types):
+ quoted_file = url_quote(self._file)
+ quoted_path = url_quote(path)
+ return "zip://{}!/{}".format(quoted_file, quoted_path)
+ else:
+ raise errors.NoURL(path, purpose)
diff --git a/requirements-readthedocs.txt b/requirements-readthedocs.txt
new file mode 100644
index 00000000..3e63add2
--- /dev/null
+++ b/requirements-readthedocs.txt
@@ -0,0 +1,2 @@
+# requirements for readthedocs.io
+-e .
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 0b0438f9..00000000
--- a/requirements.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-appdirs~=1.4.3
-backports.os==0.1.1; python_version == '2.7'
-enum34==1.1.6 ; python_version < '3.4'
-pytz
-setuptools
-six==1.10.0
-typing==3.6.4 ; python_version < '3.5'
diff --git a/setup.cfg b/setup.cfg
index d3766f29..57c6f40b 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,15 +1,69 @@
-[bdist_wheel]
-universal = 1
+# --- Project configuration -------------------------------------------------
[metadata]
+version = attr: fs._version.__version__
+name = fs
+author = Will McGugan
+author_email = will@willmcgugan.com
+maintainer = Martin Larralde
+maintainer_email = martin.larralde@embl.de
+url = https://github.com/PyFilesystem/pyfilesystem2
+license = MIT
license_file = LICENSE
+description = Python's filesystem abstraction layer
long_description = file: README.md
long_description_content_type = text/markdown
+platform = any
+classifiers =
+ Development Status :: 5 - Production/Stable
+ Intended Audience :: Developers
+ License :: OSI Approved :: MIT License
+ Operating System :: OS Independent
+ Programming Language :: Python
+ Programming Language :: Python :: 2.7
+ Programming Language :: Python :: 3.5
+ Programming Language :: Python :: 3.6
+ Programming Language :: Python :: 3.7
+ Programming Language :: Python :: 3.8
+ Programming Language :: Python :: 3.9
+ Programming Language :: Python :: 3.10
+ Programming Language :: Python :: Implementation :: CPython
+ Programming Language :: Python :: Implementation :: PyPy
+ Topic :: System :: Filesystems
+ Typing :: Typed
project_urls =
Bug Reports = https://github.com/PyFilesystem/pyfilesystem2/issues
Documentation = https://pyfilesystem2.readthedocs.io/en/latest/
Wiki = https://www.pyfilesystem.org/
+[options]
+zip_safe = false
+packages = find:
+setup_requires =
+ setuptools >=38.3.0
+install_requires =
+ appdirs~=1.4.3
+ setuptools
+ six ~=1.10
+ enum34 ~=1.1.6 ; python_version < '3.4'
+ typing ~=3.6 ; python_version < '3.6'
+ backports.os ~=0.1 ; python_version < '3.0'
+
+[options.extras_require]
+scandir =
+ scandir~=1.5 ; python_version < '3.5'
+
+[options.packages.find]
+exclude = tests
+
+[options.package_data]
+fs = py.typed
+
+[bdist_wheel]
+universal = 1
+
+# --- Individual linter configuration ---------------------------------------
+
[pydocstyle]
inherit = false
ignore = D102,D105,D200,D203,D213,D406,D407
@@ -33,13 +87,105 @@ warn_return_any = false
[mypy-fs.test]
disallow_untyped_defs = false
+[flake8]
+extend-ignore = E203,E402,W503
+max-line-length = 88
+per-file-ignores =
+ fs/__init__.py:F401
+ fs/*/__init__.py:F401
+ tests/*:E501
+ fs/opener/*:F811
+ fs/_fscompat.py:F401
+ fs/_pathcompat.py:C401
+
+[isort]
+default_section = THIRDPARTY
+known_first_party = fs
+known_standard_library = sys, typing
+line_length = 88
+profile = black
+skip_gitignore = true
+
+# --- Test and coverage configuration ------------------------------------------
+
[coverage:run]
+branch = true
omit = fs/test.py
+source = fs
+relative_files = true
+parallel = true
[coverage:report]
show_missing = true
+skip_covered = true
exclude_lines =
pragma: no cover
if False:
+ if typing.TYPE_CHECKING:
@typing.overload
+ @overload
+
+[tool:pytest]
+markers =
+ slow: marks tests as slow (deselect with '-m "not slow"')
+
+# --- Tox automation configuration ---------------------------------------------
+
+[tox:tox]
+envlist = py{27,34}{,-scandir}, py{35,36,37,38,39,310}, pypy{27,36,37}, typecheck, codestyle, docstyle, codeformat
+sitepackages = false
+skip_missing_interpreters = true
+requires =
+ setuptools >=38.3.0
+
+[testenv]
+commands = python -m coverage run --rcfile {toxinidir}/setup.cfg -m pytest {posargs} {toxinidir}/tests
+deps =
+ -rtests/requirements.txt
+ coverage~=5.0
+ py{35,36,37,38,39,310,py36,py37}: pytest~=6.0
+ py{27,34,py27}: pytest~=4.6
+ py{35,36,37,38,39,310,py36,py37}: pytest-randomly~=3.5
+ py{27,34,py27}: pytest-randomly~=1.2
+ scandir: .[scandir]
+ !scandir: .
+
+[testenv:typecheck]
+commands = mypy --config-file {toxinidir}/setup.cfg {toxinidir}/fs
+deps =
+ .
+ mypy==0.800
+
+[testenv:codestyle]
+commands = flake8 --config={toxinidir}/setup.cfg {toxinidir}/fs {toxinidir}/tests
+deps =
+ flake8==3.7.9
+ #flake8-builtins==1.5.3
+ flake8-bugbear==19.8.0
+ flake8-comprehensions==3.1.4
+ flake8-mutable==1.2.0
+ flake8-tuple==0.4.0
+
+[testenv:codeformat]
+commands = black --check {toxinidir}/fs
+deps =
+ black==22.3.0
+
+[testenv:docstyle]
+commands = pydocstyle --config={toxinidir}/setup.cfg {toxinidir}/fs
+deps =
+ pydocstyle==5.1.1
+[gh-actions]
+python =
+ 2.7: py27, py27-scandir
+ 3.4: py34, py34-scandir
+ 3.5: py35
+ 3.6: py36
+ 3.7: py37
+ 3.8: py38
+ 3.9: py39
+ 3.10: py310
+ pypy-2.7: pypy27
+ pypy-3.6: pypy36
+ pypy-3.7: pypy37
diff --git a/setup.py b/setup.py
index c1bcec3f..c4e2465a 100644
--- a/setup.py
+++ b/setup.py
@@ -1,46 +1,10 @@
#!/usr/bin/env python
-from setuptools import setup, find_packages
+import os
-with open("fs/_version.py") as f:
+with open(os.path.join("fs", "_version.py")) as f:
exec(f.read())
-CLASSIFIERS = [
- "Development Status :: 5 - Production/Stable",
- "Intended Audience :: Developers",
- "License :: OSI Approved :: MIT License",
- "Operating System :: OS Independent",
- "Programming Language :: Python",
- "Programming Language :: Python :: 2.7",
- "Programming Language :: Python :: 3.4",
- "Programming Language :: Python :: 3.5",
- "Programming Language :: Python :: 3.6",
- "Programming Language :: Python :: 3.7",
- "Topic :: System :: Filesystems",
-]
+from setuptools import setup
-REQUIREMENTS = ["appdirs~=1.4.3", "pytz", "setuptools", "six~=1.10"]
-
-setup(
- author="Will McGugan",
- author_email="will@willmcgugan.com",
- classifiers=CLASSIFIERS,
- description="Python's filesystem abstraction layer",
- install_requires=REQUIREMENTS,
- extras_require={
- "scandir :python_version < '3.5'": ["scandir~=1.5"],
- ":python_version < '3.4'": ["enum34~=1.1.6"],
- ":python_version < '3.6'": ["typing~=3.6"],
- ":python_version < '3.0'": ["backports.os~=0.1"],
- },
- license="MIT",
- name="fs",
- packages=find_packages(exclude=("tests",)),
- package_data={"fs": ["py.typed"]},
- zip_safe=False,
- platforms=["any"],
- test_suite="nose.collector",
- tests_require=["appdirs", "mock", "pytz", "pyftpdlib"],
- url="https://github.com/PyFilesystem/pyfilesystem2",
- version=__version__,
-)
+setup(version=__version__)
diff --git a/testrequirements.txt b/testrequirements.txt
deleted file mode 100644
index e465d4ca..00000000
--- a/testrequirements.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-appdirs~=1.4.0
-coverage
-mock
-pyftpdlib==1.5.2
-python-coveralls
-pytz==2016.7
-nose
diff --git a/tests/mark.py b/tests/mark.py
new file mode 100644
index 00000000..5bd8f12d
--- /dev/null
+++ b/tests/mark.py
@@ -0,0 +1,2 @@
+def slow(cls):
+ return cls
diff --git a/tests/requirements.txt b/tests/requirements.txt
new file mode 100644
index 00000000..b7ff3ce4
--- /dev/null
+++ b/tests/requirements.txt
@@ -0,0 +1,18 @@
+# the bare requirements for running tests
+
+# pyftpdlib is needed to spawn a FTP server for the
+# FTPFS test suite
+pyftpdlib ~=1.5
+
+# these are optional dependencies for pyftpdlib that
+# are not explicitly listed, we need to install these
+# ourselves
+psutil ~=5.0
+pysendfile ~=2.0 ; python_version <= "3.3"
+
+# mock is only available from Python 3.3 onward, and
+# mock v4+ doesn't support Python 2.7 anymore
+mock ~=3.0 ; python_version < "3.3"
+
+# parameterized is used to prevent code duplication in tests.
+parameterized ~=0.8
\ No newline at end of file
diff --git a/tests/test_appfs.py b/tests/test_appfs.py
index 99234258..2d421482 100644
--- a/tests/test_appfs.py
+++ b/tests/test_appfs.py
@@ -1,26 +1,86 @@
from __future__ import unicode_literals
+import shutil
+import six
+import tempfile
import unittest
-import six
+try:
+ from unittest import mock
+except ImportError:
+ import mock
+
+import fs.test
+from fs import appfs
+
+
+class _TestAppFS(fs.test.FSTestCases):
+
+ AppFS = None
-from fs.appfs import UserDataFS
+ @classmethod
+ def setUpClass(cls):
+ super(_TestAppFS, cls).setUpClass()
+ cls.tmpdir = tempfile.mkdtemp()
+ @classmethod
+ def tearDownClass(cls):
+ shutil.rmtree(cls.tmpdir)
-class TestAppFS(unittest.TestCase):
- """Test Application FS."""
+ def make_fs(self):
+ with mock.patch(
+ "appdirs.{}".format(self.AppFS.app_dir),
+ autospec=True,
+ spec_set=True,
+ return_value=tempfile.mkdtemp(dir=self.tmpdir),
+ ):
+ return self.AppFS("fstest", "willmcgugan", "1.0")
- def test_user_data(self):
- """Test UserDataFS."""
- user_data_fs = UserDataFS("fstest", "willmcgugan", "1.0")
- if six.PY2:
+ if six.PY2:
+
+ def test_repr(self):
self.assertEqual(
- repr(user_data_fs),
- "UserDataFS(u'fstest', author=u'willmcgugan', version=u'1.0')",
+ repr(self.fs),
+ "{}(u'fstest', author=u'willmcgugan', version=u'1.0')".format(
+ self.AppFS.__name__
+ ),
)
- else:
+
+ else:
+
+ def test_repr(self):
self.assertEqual(
- repr(user_data_fs),
- "UserDataFS('fstest', author='willmcgugan', version='1.0')",
+ repr(self.fs),
+ "{}('fstest', author='willmcgugan', version='1.0')".format(
+ self.AppFS.__name__
+ ),
)
- self.assertEqual(str(user_data_fs), "")
+
+ def test_str(self):
+ self.assertEqual(
+ str(self.fs), "<{} 'fstest'>".format(self.AppFS.__name__.lower())
+ )
+
+
+class TestUserDataFS(_TestAppFS, unittest.TestCase):
+ AppFS = appfs.UserDataFS
+
+
+class TestUserConfigFS(_TestAppFS, unittest.TestCase):
+ AppFS = appfs.UserConfigFS
+
+
+class TestUserCacheFS(_TestAppFS, unittest.TestCase):
+ AppFS = appfs.UserCacheFS
+
+
+class TestSiteDataFS(_TestAppFS, unittest.TestCase):
+ AppFS = appfs.SiteDataFS
+
+
+class TestSiteConfigFS(_TestAppFS, unittest.TestCase):
+ AppFS = appfs.SiteConfigFS
+
+
+class TestUserLogFS(_TestAppFS, unittest.TestCase):
+ AppFS = appfs.UserLogFS
diff --git a/tests/test_archives.py b/tests/test_archives.py
index c0bfff3b..8b5397a2 100644
--- a/tests/test_archives.py
+++ b/tests/test_archives.py
@@ -3,14 +3,11 @@
import os
import stat
-
from six import text_type
-from fs.opener import open_fs
+from fs import errors, walk
from fs.enums import ResourceType
-from fs import walk
-from fs import errors
-from fs.memoryfs import MemoryFS
+from fs.opener import open_fs
from fs.test import UNICODE_TEXT
diff --git a/tests/test_base.py b/tests/test_base.py
index 66708517..6bcb6639 100644
--- a/tests/test_base.py
+++ b/tests/test_base.py
@@ -4,17 +4,11 @@
import unittest
-try:
- import mock
-except ImportError:
- from unittest import mock
-
-
-from fs.base import FS
from fs import errors
+from fs.base import FS
-class TestFS(FS):
+class DummyFS(FS):
def getinfo(self, path, namespaces=None):
pass
@@ -39,7 +33,7 @@ def setinfo(self, path, info):
class TestBase(unittest.TestCase):
def setUp(self):
- self.fs = TestFS()
+ self.fs = DummyFS()
def test_validatepath(self):
"""Test validatepath method."""
diff --git a/tests/test_copy.py b/tests/test_copy.py
index 160820fa..8e527648 100644
--- a/tests/test_copy.py
+++ b/tests/test_copy.py
@@ -1,34 +1,90 @@
from __future__ import unicode_literals
-import errno
+import calendar
import datetime
+import errno
import os
-import unittest
-import tempfile
import shutil
-import calendar
-
-from six import PY2
+import tempfile
+import unittest
+from parameterized import parameterized
import fs.copy
from fs import open_fs
-class TestCopy(unittest.TestCase):
- def test_copy_fs(self):
- for workers in (0, 1, 2, 4):
- src_fs = open_fs("mem://")
- src_fs.makedirs("foo/bar")
- src_fs.makedirs("foo/empty")
- src_fs.touch("test.txt")
- src_fs.touch("foo/bar/baz.txt")
+def _create_sandbox_dir(prefix="pyfilesystem2_sandbox_", home=None):
+ if home is None:
+ return tempfile.mkdtemp(prefix=prefix)
+ else:
+ sandbox_path = os.path.join(home, prefix)
+ mkdirp(sandbox_path)
+ return sandbox_path
+
+
+def _touch(root, filepath):
+ # create abs filename
+ abs_filepath = os.path.join(root, filepath)
+ # create dir
+ dirname = os.path.dirname(abs_filepath)
+ mkdirp(dirname)
+ # touch file
+ with open(abs_filepath, "a"):
+ os.utime(
+ abs_filepath, None
+ ) # update the mtime in case the file exists, same as touch
+
+ return abs_filepath
+
+
+def _write_file(filepath, write_chars=1024):
+ with open(filepath, "w") as f:
+ f.write("1" * write_chars)
+ return filepath
+
+
+def _delay_file_utime(filepath, delta_sec):
+ utcnow = datetime.datetime.utcnow()
+ unix_timestamp = calendar.timegm(utcnow.timetuple())
+ times = unix_timestamp + delta_sec, unix_timestamp + delta_sec
+ os.utime(filepath, times)
+
+
+def mkdirp(path):
+ # os.makedirs(path, exist_ok=True) is only available on Python 3.2+
+ try:
+ os.makedirs(path)
+ except OSError as exc:
+ if exc.errno == errno.EEXIST and os.path.isdir(path):
+ pass
+ else:
+ raise
+
+
+class TestCopySimple(unittest.TestCase):
+ @parameterized.expand([(0,), (1,), (2,), (4,)])
+ def test_copy_fs(self, workers):
+ namespaces = ("details", "modified")
+
+ src_fs = open_fs("mem://")
+ src_fs.makedirs("foo/bar")
+ src_fs.makedirs("foo/empty")
+ src_fs.touch("test.txt")
+ src_fs.touch("foo/bar/baz.txt")
+ src_file1_info = src_fs.getinfo("test.txt", namespaces)
+ src_file2_info = src_fs.getinfo("foo/bar/baz.txt", namespaces)
+
+ dst_fs = open_fs("mem://")
+ fs.copy.copy_fs(src_fs, dst_fs, workers=workers, preserve_time=True)
- dst_fs = open_fs("mem://")
- fs.copy.copy_fs(src_fs, dst_fs, workers=workers)
+ self.assertTrue(dst_fs.isdir("foo/empty"))
+ self.assertTrue(dst_fs.isdir("foo/bar"))
+ self.assertTrue(dst_fs.isfile("test.txt"))
- self.assertTrue(dst_fs.isdir("foo/empty"))
- self.assertTrue(dst_fs.isdir("foo/bar"))
- self.assertTrue(dst_fs.isfile("test.txt"))
+ dst_file1_info = dst_fs.getinfo("test.txt", namespaces)
+ dst_file2_info = dst_fs.getinfo("foo/bar/baz.txt", namespaces)
+ self.assertEqual(dst_file1_info.modified, src_file1_info.modified)
+ self.assertEqual(dst_file2_info.modified, src_file2_info.modified)
def test_copy_value_error(self):
src_fs = open_fs("mem://")
@@ -36,18 +92,46 @@ def test_copy_value_error(self):
with self.assertRaises(ValueError):
fs.copy.copy_fs(src_fs, dst_fs, workers=-1)
- def test_copy_dir(self):
+ def test_copy_dir0(self):
+ namespaces = ("details", "modified")
+
+ src_fs = open_fs("mem://")
+ src_fs.makedirs("foo/bar")
+ src_fs.makedirs("foo/empty")
+ src_fs.touch("test.txt")
+ src_fs.touch("foo/bar/baz.txt")
+ src_file2_info = src_fs.getinfo("foo/bar/baz.txt", namespaces)
+
+ with open_fs("mem://") as dst_fs:
+ fs.copy.copy_dir(src_fs, "/foo", dst_fs, "/", workers=0, preserve_time=True)
+ self.assertTrue(dst_fs.isdir("bar"))
+ self.assertTrue(dst_fs.isdir("empty"))
+ self.assertTrue(dst_fs.isfile("bar/baz.txt"))
+
+ dst_file2_info = dst_fs.getinfo("bar/baz.txt", namespaces)
+ self.assertEqual(dst_file2_info.modified, src_file2_info.modified)
+
+ @parameterized.expand([(0,), (1,), (2,), (4,)])
+ def test_copy_dir(self, workers):
+ namespaces = ("details", "modified")
+
src_fs = open_fs("mem://")
src_fs.makedirs("foo/bar")
src_fs.makedirs("foo/empty")
src_fs.touch("test.txt")
src_fs.touch("foo/bar/baz.txt")
- for workers in (0, 1, 2, 4):
- with open_fs("mem://") as dst_fs:
- fs.copy.copy_dir(src_fs, "/foo", dst_fs, "/", workers=workers)
- self.assertTrue(dst_fs.isdir("bar"))
- self.assertTrue(dst_fs.isdir("empty"))
- self.assertTrue(dst_fs.isfile("bar/baz.txt"))
+ src_file2_info = src_fs.getinfo("foo/bar/baz.txt", namespaces)
+
+ with open_fs("mem://") as dst_fs:
+ fs.copy.copy_dir(
+ src_fs, "/foo", dst_fs, "/", workers=workers, preserve_time=True
+ )
+ self.assertTrue(dst_fs.isdir("bar"))
+ self.assertTrue(dst_fs.isdir("empty"))
+ self.assertTrue(dst_fs.isfile("bar/baz.txt"))
+
+ dst_file2_info = dst_fs.getinfo("bar/baz.txt", namespaces)
+ self.assertEqual(dst_file2_info.modified, src_file2_info.modified)
def test_copy_large(self):
data1 = b"foo" * 512 * 1024
@@ -80,50 +164,11 @@ def on_copy(*args):
fs.copy.copy_dir(src_fs, "/", dst_fs, "/", on_copy=on_copy)
self.assertEqual(on_copy_calls, [(src_fs, "/baz.txt", dst_fs, "/baz.txt")])
- def mkdirp(self, path):
- # os.makedirs(path, exist_ok=True) only for python3.?
- try:
- os.makedirs(path)
- except OSError as exc:
- if exc.errno == errno.EEXIST and os.path.isdir(path):
- pass
- else:
- raise
-
- def _create_sandbox_dir(self, prefix="pyfilesystem2_sandbox_", home=None):
- if home is None:
- return tempfile.mkdtemp(prefix=prefix)
- else:
- sandbox_path = os.path.join(home, prefix)
- self.mkdirp(sandbox_path)
- return sandbox_path
-
- def _touch(self, root, filepath):
- # create abs filename
- abs_filepath = os.path.join(root, filepath)
- # create dir
- dirname = os.path.dirname(abs_filepath)
- self.mkdirp(dirname)
- # touch file
- with open(abs_filepath, "a"):
- os.utime(
- abs_filepath, None
- ) # update the mtime in case the file exists, same as touch
-
- return abs_filepath
-
- def _write_file(self, filepath, write_chars=1024):
- with open(filepath, "w") as f:
- f.write("1" * write_chars)
- return filepath
-
- def _delay_file_utime(self, filepath, delta_sec):
- utcnow = datetime.datetime.utcnow()
- unix_timestamp = calendar.timegm(utcnow.timetuple())
- times = unix_timestamp + delta_sec, unix_timestamp + delta_sec
- os.utime(filepath, times)
-
- def test_copy_file_if_newer_same_fs(self):
+
+class TestCopyIfNewer(unittest.TestCase):
+ copy_if_condition = "newer"
+
+ def test_copy_file_if_same_fs(self):
src_fs = open_fs("mem://")
src_fs.makedir("foo2").touch("exists")
src_fs.makedir("foo1").touch("test1.txt")
@@ -131,35 +176,42 @@ def test_copy_file_if_newer_same_fs(self):
"foo2/exists", datetime.datetime.utcnow() + datetime.timedelta(hours=1)
)
self.assertTrue(
- fs.copy.copy_file_if_newer(
- src_fs, "foo1/test1.txt", src_fs, "foo2/test1.txt.copy"
+ fs.copy.copy_file_if(
+ src_fs,
+ "foo1/test1.txt",
+ src_fs,
+ "foo2/test1.txt.copy",
+ self.copy_if_condition,
)
)
self.assertFalse(
- fs.copy.copy_file_if_newer(src_fs, "foo1/test1.txt", src_fs, "foo2/exists")
+ fs.copy.copy_file_if(
+ src_fs, "foo1/test1.txt", src_fs, "foo2/exists", self.copy_if_condition
+ )
)
self.assertTrue(src_fs.exists("foo2/test1.txt.copy"))
- def test_copy_file_if_newer_dst_older(self):
+ def test_copy_file_if_dst_is_older(self):
try:
# create first dst ==> dst is older the src ==> file should be copied
- dst_dir = self._create_sandbox_dir()
- dst_file1 = self._touch(dst_dir, "file1.txt")
- self._write_file(dst_file1)
+ dst_dir = _create_sandbox_dir()
+ dst_file1 = _touch(dst_dir, "file1.txt")
+ _write_file(dst_file1)
+
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "file1.txt")
+ _write_file(src_file1)
- src_dir = self._create_sandbox_dir()
- src_file1 = self._touch(src_dir, "file1.txt")
- self._write_file(src_file1)
# ensure src file is newer than dst, changing its modification time
- self._delay_file_utime(src_file1, delta_sec=60)
+ _delay_file_utime(src_file1, delta_sec=60)
src_fs = open_fs("osfs://" + src_dir)
dst_fs = open_fs("osfs://" + dst_dir)
self.assertTrue(dst_fs.exists("/file1.txt"))
- copied = fs.copy.copy_file_if_newer(
- src_fs, "/file1.txt", dst_fs, "/file1.txt"
+ copied = fs.copy.copy_file_if(
+ src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
)
self.assertTrue(copied)
@@ -168,19 +220,19 @@ def test_copy_file_if_newer_dst_older(self):
shutil.rmtree(src_dir)
shutil.rmtree(dst_dir)
- def test_copy_file_if_newer_dst_doesnt_exists(self):
+ def test_copy_file_if_dst_doesnt_exists(self):
try:
- src_dir = self._create_sandbox_dir()
- src_file1 = self._touch(src_dir, "file1.txt")
- self._write_file(src_file1)
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "file1.txt")
+ _write_file(src_file1)
- dst_dir = self._create_sandbox_dir()
+ dst_dir = _create_sandbox_dir()
src_fs = open_fs("osfs://" + src_dir)
dst_fs = open_fs("osfs://" + dst_dir)
- copied = fs.copy.copy_file_if_newer(
- src_fs, "/file1.txt", dst_fs, "/file1.txt"
+ copied = fs.copy.copy_file_if(
+ src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
)
self.assertTrue(copied)
@@ -189,57 +241,320 @@ def test_copy_file_if_newer_dst_doesnt_exists(self):
shutil.rmtree(src_dir)
shutil.rmtree(dst_dir)
- def test_copy_file_if_newer_dst_is_newer(self):
+ def test_copy_file_if_dst_is_newer(self):
try:
- src_dir = self._create_sandbox_dir()
- src_file1 = self._touch(src_dir, "file1.txt")
- self._write_file(src_file1)
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "file1.txt")
+ _write_file(src_file1)
+
+ dst_dir = _create_sandbox_dir()
+ dst_file1 = _touch(dst_dir, "file1.txt")
+ _write_file(dst_file1)
+
+ # ensure dst file is newer than src, changing its modification time
+ _delay_file_utime(dst_file1, delta_sec=60)
+
+ src_fs = open_fs("osfs://" + src_dir)
+ dst_fs = open_fs("osfs://" + dst_dir)
+
+ self.assertTrue(dst_fs.exists("/file1.txt"))
- dst_dir = self._create_sandbox_dir()
- dst_file1 = self._touch(dst_dir, "file1.txt")
- self._write_file(dst_file1)
+ copied = fs.copy.copy_file_if(
+ src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
+ )
+
+ self.assertFalse(copied)
+ finally:
+ shutil.rmtree(src_dir)
+ shutil.rmtree(dst_dir)
+
+ def test_copy_fs_if(self):
+ try:
+ dst_dir = _create_sandbox_dir()
+ dst_file1 = _touch(dst_dir, "file1.txt")
+ dst_file2 = _touch(dst_dir, "file2.txt")
+ _write_file(dst_file1)
+ _write_file(dst_file2)
+
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "file1.txt")
+ src_file2 = _touch(src_dir, "file2.txt")
+ src_file3 = _touch(src_dir, "file3.txt")
+ _write_file(src_file1)
+ _write_file(src_file2)
+ _write_file(src_file3)
+
+ # ensure src_file1 is newer than dst_file1, changing its modification time
+ # ensure dst_file2 is newer than src_file2, changing its modification time
+ _delay_file_utime(src_file1, delta_sec=60)
+ _delay_file_utime(dst_file2, delta_sec=60)
src_fs = open_fs("osfs://" + src_dir)
dst_fs = open_fs("osfs://" + dst_dir)
self.assertTrue(dst_fs.exists("/file1.txt"))
+ self.assertTrue(dst_fs.exists("/file2.txt"))
+
+ copied = []
+
+ def on_copy(src_fs, src_path, dst_fs, dst_path):
+ copied.append(dst_path)
+
+ fs.copy.copy_fs_if(
+ src_fs, dst_fs, on_copy=on_copy, condition=self.copy_if_condition
+ )
+
+ self.assertTrue("/file1.txt" in copied)
+ self.assertTrue("/file2.txt" not in copied)
+ self.assertTrue("/file3.txt" in copied)
+ self.assertTrue(dst_fs.exists("/file1.txt"))
+ self.assertTrue(dst_fs.exists("/file2.txt"))
+ self.assertTrue(dst_fs.exists("/file3.txt"))
+
+ src_fs.close()
+ dst_fs.close()
+
+ finally:
+ shutil.rmtree(src_dir)
+ shutil.rmtree(dst_dir)
+
+ def test_copy_dir_if(self):
+ try:
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "file1.txt")
+ _write_file(src_file1)
+
+ src_file2 = _touch(src_dir, os.path.join("one_level_down", "file2.txt"))
+ _write_file(src_file2)
+
+ dst_dir = _create_sandbox_dir()
+ mkdirp(os.path.join(dst_dir, "target_dir"))
+ dst_file1 = _touch(dst_dir, os.path.join("target_dir", "file1.txt"))
+ _write_file(dst_file1)
+
+ # ensure dst file is newer than src, changing its modification time
+ _delay_file_utime(dst_file1, delta_sec=60)
+
+ src_fs = open_fs("osfs://" + src_dir)
+ dst_fs = open_fs("osfs://" + dst_dir)
+
+ copied = []
+
+ def on_copy(src_fs, src_path, dst_fs, dst_path):
+ copied.append(dst_path)
+
+ fs.copy.copy_dir_if(
+ src_fs,
+ "/",
+ dst_fs,
+ "/target_dir/",
+ on_copy=on_copy,
+ condition=self.copy_if_condition,
+ )
+
+ self.assertEqual(copied, ["/target_dir/one_level_down/file2.txt"])
+ self.assertTrue(dst_fs.exists("/target_dir/one_level_down/file2.txt"))
+
+ src_fs.close()
+ dst_fs.close()
+ finally:
+ shutil.rmtree(src_dir)
+ shutil.rmtree(dst_dir)
+
+ def test_copy_dir_if_same_fs(self):
+ try:
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "src" + os.sep + "file1.txt")
+ _write_file(src_file1)
+
+ _create_sandbox_dir(home=src_dir)
+
+ src_fs = open_fs("osfs://" + src_dir)
- copied = fs.copy.copy_file_if_newer(
- src_fs, "/file1.txt", dst_fs, "/file1.txt"
+ copied = []
+
+ def on_copy(src_fs, src_path, dst_fs, dst_path):
+ copied.append(dst_path)
+
+ fs.copy.copy_dir_if(
+ src_fs, "/src", src_fs, "/dst", on_copy=on_copy, condition="newer"
)
- self.assertEqual(copied, False)
+ self.assertEqual(copied, ["/dst/file1.txt"])
+ self.assertTrue(src_fs.exists("/dst/file1.txt"))
+
+ src_fs.close()
+
+ finally:
+ shutil.rmtree(src_dir)
+
+ def test_copy_dir_if_multiple_files(self):
+ try:
+ src_dir = _create_sandbox_dir()
+ src_fs = open_fs("osfs://" + src_dir)
+ src_fs.makedirs("foo/bar")
+ src_fs.makedirs("foo/empty")
+ src_fs.touch("test.txt")
+ src_fs.touch("foo/bar/baz.txt")
+
+ dst_dir = _create_sandbox_dir()
+ dst_fs = open_fs("osfs://" + dst_dir)
+
+ fs.copy.copy_dir_if(src_fs, "/foo", dst_fs, "/", condition="newer")
+
+ self.assertTrue(dst_fs.isdir("bar"))
+ self.assertTrue(dst_fs.isdir("empty"))
+ self.assertTrue(dst_fs.isfile("bar/baz.txt"))
finally:
shutil.rmtree(src_dir)
shutil.rmtree(dst_dir)
- def test_copy_fs_if_newer_dst_older(self):
+
+class TestCopyIfOlder(unittest.TestCase):
+ copy_if_condition = "older"
+
+ def test_copy_file_if_same_fs(self):
+ src_fs = open_fs("mem://")
+ src_fs.makedir("foo2").touch("exists")
+ src_fs.makedir("foo1").touch("test1.txt")
+ src_fs.settimes(
+ "foo2/exists", datetime.datetime.utcnow() - datetime.timedelta(hours=1)
+ )
+ self.assertTrue(
+ fs.copy.copy_file_if(
+ src_fs,
+ "foo1/test1.txt",
+ src_fs,
+ "foo2/test1.txt.copy",
+ self.copy_if_condition,
+ )
+ )
+ self.assertFalse(
+ fs.copy.copy_file_if(
+ src_fs, "foo1/test1.txt", src_fs, "foo2/exists", self.copy_if_condition
+ )
+ )
+ self.assertTrue(src_fs.exists("foo2/test1.txt.copy"))
+
+ def test_copy_file_if_dst_is_older(self):
try:
# create first dst ==> dst is older the src ==> file should be copied
- dst_dir = self._create_sandbox_dir()
- dst_file1 = self._touch(dst_dir, "file1.txt")
- self._write_file(dst_file1)
+ dst_dir = _create_sandbox_dir()
+ dst_file1 = _touch(dst_dir, "file1.txt")
+ _write_file(dst_file1)
+
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "file1.txt")
+ _write_file(src_file1)
- src_dir = self._create_sandbox_dir()
- src_file1 = self._touch(src_dir, "file1.txt")
- self._write_file(src_file1)
# ensure src file is newer than dst, changing its modification time
- self._delay_file_utime(src_file1, delta_sec=60)
+ _delay_file_utime(src_file1, delta_sec=60)
+
+ src_fs = open_fs("osfs://" + src_dir)
+ dst_fs = open_fs("osfs://" + dst_dir)
+
+ self.assertTrue(dst_fs.exists("/file1.txt"))
+
+ copied = fs.copy.copy_file_if(
+ src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
+ )
+
+ self.assertFalse(copied)
+ self.assertTrue(dst_fs.exists("/file1.txt"))
+ finally:
+ shutil.rmtree(src_dir)
+ shutil.rmtree(dst_dir)
+
+ def test_copy_file_if_dst_doesnt_exists(self):
+ try:
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "file1.txt")
+ _write_file(src_file1)
+
+ dst_dir = _create_sandbox_dir()
+
+ src_fs = open_fs("osfs://" + src_dir)
+ dst_fs = open_fs("osfs://" + dst_dir)
+
+ copied = fs.copy.copy_file_if(
+ src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
+ )
+
+ self.assertTrue(copied)
+ self.assertTrue(dst_fs.exists("/file1.txt"))
+ finally:
+ shutil.rmtree(src_dir)
+ shutil.rmtree(dst_dir)
+
+ def test_copy_file_if_dst_is_newer(self):
+ try:
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "file1.txt")
+ _write_file(src_file1)
+
+ dst_dir = _create_sandbox_dir()
+ dst_file1 = _touch(dst_dir, "file1.txt")
+ _write_file(dst_file1)
+
+ # ensure dst file is newer than src, changing its modification time
+ _delay_file_utime(dst_file1, delta_sec=60)
+
+ src_fs = open_fs("osfs://" + src_dir)
+ dst_fs = open_fs("osfs://" + dst_dir)
+
+ self.assertTrue(dst_fs.exists("/file1.txt"))
+
+ copied = fs.copy.copy_file_if(
+ src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
+ )
+
+ self.assertTrue(copied)
+ finally:
+ shutil.rmtree(src_dir)
+ shutil.rmtree(dst_dir)
+
+ def test_copy_fs_if(self):
+ try:
+ dst_dir = _create_sandbox_dir()
+ dst_file1 = _touch(dst_dir, "file1.txt")
+ dst_file2 = _touch(dst_dir, "file2.txt")
+ _write_file(dst_file1)
+ _write_file(dst_file2)
+
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "file1.txt")
+ src_file2 = _touch(src_dir, "file2.txt")
+ src_file3 = _touch(src_dir, "file3.txt")
+ _write_file(src_file1)
+ _write_file(src_file2)
+ _write_file(src_file3)
+
+ # ensure src_file1 is newer than dst_file1, changing its modification time
+ # ensure dst_file2 is newer than src_file2, changing its modification time
+ _delay_file_utime(src_file1, delta_sec=60)
+ _delay_file_utime(dst_file2, delta_sec=60)
src_fs = open_fs("osfs://" + src_dir)
dst_fs = open_fs("osfs://" + dst_dir)
self.assertTrue(dst_fs.exists("/file1.txt"))
+ self.assertTrue(dst_fs.exists("/file2.txt"))
copied = []
def on_copy(src_fs, src_path, dst_fs, dst_path):
copied.append(dst_path)
- fs.copy.copy_fs_if_newer(src_fs, dst_fs, on_copy=on_copy)
+ fs.copy.copy_fs_if(
+ src_fs, dst_fs, on_copy=on_copy, condition=self.copy_if_condition
+ )
- self.assertEqual(copied, ["/file1.txt"])
+ self.assertTrue("/file1.txt" not in copied)
+ self.assertTrue("/file2.txt" in copied)
+ self.assertTrue("/file3.txt" in copied)
self.assertTrue(dst_fs.exists("/file1.txt"))
+ self.assertTrue(dst_fs.exists("/file2.txt"))
+ self.assertTrue(dst_fs.exists("/file3.txt"))
src_fs.close()
dst_fs.close()
@@ -248,16 +563,22 @@ def on_copy(src_fs, src_path, dst_fs, dst_path):
shutil.rmtree(src_dir)
shutil.rmtree(dst_dir)
- def test_copy_fs_if_newer_when_dst_doesnt_exists(self):
+ def test_copy_dir_if(self):
try:
- src_dir = self._create_sandbox_dir()
- src_file1 = self._touch(src_dir, "file1.txt")
- self._write_file(src_file1)
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "file1.txt")
+ _write_file(src_file1)
+
+ src_file2 = _touch(src_dir, os.path.join("one_level_down", "file2.txt"))
+ _write_file(src_file2)
- src_file2 = self._touch(src_dir, "one_level_down" + os.sep + "file2.txt")
- self._write_file(src_file2)
+ dst_dir = _create_sandbox_dir()
+ mkdirp(os.path.join(dst_dir, "target_dir"))
+ dst_file1 = _touch(dst_dir, os.path.join("target_dir", "file1.txt"))
+ _write_file(dst_file1)
- dst_dir = self._create_sandbox_dir()
+ # ensure src file is newer than dst, changing its modification time
+ _delay_file_utime(src_file1, delta_sec=60)
src_fs = open_fs("osfs://" + src_dir)
dst_fs = open_fs("osfs://" + dst_dir)
@@ -267,31 +588,152 @@ def test_copy_fs_if_newer_when_dst_doesnt_exists(self):
def on_copy(src_fs, src_path, dst_fs, dst_path):
copied.append(dst_path)
- fs.copy.copy_fs_if_newer(src_fs, dst_fs, on_copy=on_copy)
+ fs.copy.copy_dir_if(
+ src_fs,
+ "/",
+ dst_fs,
+ "/target_dir/",
+ on_copy=on_copy,
+ condition=self.copy_if_condition,
+ )
- self.assertEqual(copied, ["/file1.txt", "/one_level_down/file2.txt"])
- self.assertTrue(dst_fs.exists("/file1.txt"))
- self.assertTrue(dst_fs.exists("/one_level_down/file2.txt"))
+ self.assertEqual(copied, ["/target_dir/one_level_down/file2.txt"])
+ self.assertTrue(dst_fs.exists("/target_dir/one_level_down/file2.txt"))
src_fs.close()
dst_fs.close()
+ finally:
+ shutil.rmtree(src_dir)
+ shutil.rmtree(dst_dir)
+
+ def test_copy_dir_if_same_fs(self):
+ try:
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "src" + os.sep + "file1.txt")
+ _write_file(src_file1)
+
+ _create_sandbox_dir(home=src_dir)
+
+ src_fs = open_fs("osfs://" + src_dir)
+
+ copied = []
+
+ def on_copy(src_fs, src_path, dst_fs, dst_path):
+ copied.append(dst_path)
+
+ fs.copy.copy_dir_if(
+ src_fs, "/src", src_fs, "/dst", on_copy=on_copy, condition="newer"
+ )
+
+ self.assertEqual(copied, ["/dst/file1.txt"])
+ self.assertTrue(src_fs.exists("/dst/file1.txt"))
+
+ src_fs.close()
+
+ finally:
+ shutil.rmtree(src_dir)
+
+ def test_copy_dir_if_multiple_files(self):
+ try:
+ src_dir = _create_sandbox_dir()
+ src_fs = open_fs("osfs://" + src_dir)
+ src_fs.makedirs("foo/bar")
+ src_fs.makedirs("foo/empty")
+ src_fs.touch("test.txt")
+ src_fs.touch("foo/bar/baz.txt")
+ dst_dir = _create_sandbox_dir()
+ dst_fs = open_fs("osfs://" + dst_dir)
+
+ fs.copy.copy_dir_if(src_fs, "/foo", dst_fs, "/", condition="newer")
+
+ self.assertTrue(dst_fs.isdir("bar"))
+ self.assertTrue(dst_fs.isdir("empty"))
+ self.assertTrue(dst_fs.isfile("bar/baz.txt"))
finally:
shutil.rmtree(src_dir)
shutil.rmtree(dst_dir)
- def test_copy_fs_if_newer_dont_copy_when_dst_exists(self):
+
+class TestCopyIfExists(unittest.TestCase):
+ copy_if_condition = "exists"
+
+ def test_copy_file_if_same_fs(self):
+ src_fs = open_fs("mem://")
+ src_fs.makedir("foo2").touch("exists")
+ src_fs.makedir("foo1").touch("test1.txt")
+ self.assertFalse(
+ fs.copy.copy_file_if(
+ src_fs,
+ "foo1/test1.txt",
+ src_fs,
+ "foo2/test1.txt.copy",
+ self.copy_if_condition,
+ )
+ )
+ self.assertTrue(
+ fs.copy.copy_file_if(
+ src_fs, "foo1/test1.txt", src_fs, "foo2/exists", self.copy_if_condition
+ )
+ )
+ self.assertFalse(src_fs.exists("foo2/test1.txt.copy"))
+
+ def test_copy_file_if_dst_doesnt_exists(self):
try:
- # src is older than dst => no copy should be necessary
- src_dir = self._create_sandbox_dir()
- src_file1 = self._touch(src_dir, "file1.txt")
- self._write_file(src_file1)
-
- dst_dir = self._create_sandbox_dir()
- dst_file1 = self._touch(dst_dir, "file1.txt")
- self._write_file(dst_file1)
- # ensure dst file is newer than src, changing its modification time
- self._delay_file_utime(dst_file1, delta_sec=60)
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "file1.txt")
+ _write_file(src_file1)
+
+ dst_dir = _create_sandbox_dir()
+
+ src_fs = open_fs("osfs://" + src_dir)
+ dst_fs = open_fs("osfs://" + dst_dir)
+
+ copied = fs.copy.copy_file_if(
+ src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
+ )
+
+ self.assertFalse(copied)
+ self.assertFalse(dst_fs.exists("/file1.txt"))
+ finally:
+ shutil.rmtree(src_dir)
+ shutil.rmtree(dst_dir)
+
+ def test_copy_file_if_dst_exists(self):
+ try:
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "file1.txt")
+ _write_file(src_file1)
+
+ dst_dir = _create_sandbox_dir()
+ dst_file1 = _touch(dst_dir, "file1.txt")
+ _write_file(dst_file1)
+
+ src_fs = open_fs("osfs://" + src_dir)
+ dst_fs = open_fs("osfs://" + dst_dir)
+
+ self.assertTrue(dst_fs.exists("/file1.txt"))
+
+ copied = fs.copy.copy_file_if(
+ src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
+ )
+
+ self.assertTrue(copied)
+ finally:
+ shutil.rmtree(src_dir)
+ shutil.rmtree(dst_dir)
+
+ def test_copy_fs_if(self):
+ try:
+ dst_dir = _create_sandbox_dir()
+ dst_file1 = _touch(dst_dir, "file1.txt")
+ _write_file(dst_file1)
+
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "file1.txt")
+ src_file2 = _touch(src_dir, "file2.txt")
+ _write_file(src_file1)
+ _write_file(src_file2)
src_fs = open_fs("osfs://" + src_dir)
dst_fs = open_fs("osfs://" + dst_dir)
@@ -303,10 +745,13 @@ def test_copy_fs_if_newer_dont_copy_when_dst_exists(self):
def on_copy(src_fs, src_path, dst_fs, dst_path):
copied.append(dst_path)
- fs.copy.copy_fs_if_newer(src_fs, dst_fs, on_copy=on_copy)
+ fs.copy.copy_fs_if(
+ src_fs, dst_fs, on_copy=on_copy, condition=self.copy_if_condition
+ )
- self.assertEqual(copied, [])
+ self.assertEqual(copied, ["/file1.txt"])
self.assertTrue(dst_fs.exists("/file1.txt"))
+ self.assertFalse(dst_fs.exists("/file2.txt"))
src_fs.close()
dst_fs.close()
@@ -315,57 +760,260 @@ def on_copy(src_fs, src_path, dst_fs, dst_path):
shutil.rmtree(src_dir)
shutil.rmtree(dst_dir)
- def test_copy_dir_if_newer_one_dst_doesnt_exist(self):
+ def test_copy_dir_if(self):
try:
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "file1.txt")
+ _write_file(src_file1)
- src_dir = self._create_sandbox_dir()
- src_file1 = self._touch(src_dir, "file1.txt")
- self._write_file(src_file1)
+ src_file2 = _touch(src_dir, os.path.join("one_level_down", "file2.txt"))
+ _write_file(src_file2)
- src_file2 = self._touch(src_dir, "one_level_down" + os.sep + "file2.txt")
- self._write_file(src_file2)
+ dst_dir = _create_sandbox_dir()
+ mkdirp(os.path.join(dst_dir, "target_dir"))
+ dst_file1 = _touch(dst_dir, os.path.join("target_dir", "file1.txt"))
+ _write_file(dst_file1)
- dst_dir = self._create_sandbox_dir()
- dst_file1 = self._touch(dst_dir, "file1.txt")
- self._write_file(dst_file1)
- # ensure dst file is newer than src, changing its modification time
- self._delay_file_utime(dst_file1, delta_sec=60)
+ src_fs = open_fs("osfs://" + src_dir)
+ dst_fs = open_fs("osfs://" + dst_dir)
+
+ copied = []
+
+ def on_copy(src_fs, src_path, dst_fs, dst_path):
+ copied.append(dst_path)
+
+ fs.copy.copy_dir_if(
+ src_fs,
+ "/",
+ dst_fs,
+ "/target_dir/",
+ on_copy=on_copy,
+ condition=self.copy_if_condition,
+ )
+
+ self.assertEqual(copied, ["/target_dir/file1.txt"])
+ self.assertFalse(dst_fs.exists("/target_dir/one_level_down/file2.txt"))
+
+ src_fs.close()
+ dst_fs.close()
+ finally:
+ shutil.rmtree(src_dir)
+ shutil.rmtree(dst_dir)
+
+ def test_copy_dir_if_same_fs(self):
+ try:
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "src" + os.sep + "file1.txt")
+ _write_file(src_file1)
+
+ _create_sandbox_dir(home=src_dir)
+
+ src_fs = open_fs("osfs://" + src_dir)
+
+ copied = []
+
+ def on_copy(src_fs, src_path, dst_fs, dst_path):
+ copied.append(dst_path)
+
+ fs.copy.copy_dir_if(
+ src_fs, "/src", src_fs, "/dst", on_copy=on_copy, condition="newer"
+ )
+
+ self.assertEqual(copied, ["/dst/file1.txt"])
+ self.assertTrue(src_fs.exists("/dst/file1.txt"))
+
+ src_fs.close()
+
+ finally:
+ shutil.rmtree(src_dir)
+
+ def test_copy_dir_if_multiple_files(self):
+ try:
+ src_dir = _create_sandbox_dir()
+ src_fs = open_fs("osfs://" + src_dir)
+ src_fs.makedirs("foo/bar")
+ src_fs.makedirs("foo/empty")
+ src_fs.touch("test.txt")
+ src_fs.touch("foo/bar/baz.txt")
+
+ dst_dir = _create_sandbox_dir()
+ dst_fs = open_fs("osfs://" + dst_dir)
+
+ fs.copy.copy_dir_if(src_fs, "/foo", dst_fs, "/", condition="newer")
+
+ self.assertTrue(dst_fs.isdir("bar"))
+ self.assertTrue(dst_fs.isdir("empty"))
+ self.assertTrue(dst_fs.isfile("bar/baz.txt"))
+ finally:
+ shutil.rmtree(src_dir)
+ shutil.rmtree(dst_dir)
+
+
+class TestCopyIfNotExists(unittest.TestCase):
+ copy_if_condition = "not_exists"
+
+ def test_copy_file_if_same_fs(self):
+ src_fs = open_fs("mem://")
+ src_fs.makedir("foo2").touch("exists")
+ src_fs.makedir("foo1").touch("test1.txt")
+ self.assertTrue(
+ fs.copy.copy_file_if(
+ src_fs,
+ "foo1/test1.txt",
+ src_fs,
+ "foo2/test1.txt.copy",
+ self.copy_if_condition,
+ )
+ )
+ self.assertFalse(
+ fs.copy.copy_file_if(
+ src_fs, "foo1/test1.txt", src_fs, "foo2/exists", self.copy_if_condition
+ )
+ )
+ self.assertTrue(src_fs.exists("foo2/test1.txt.copy"))
+
+ def test_copy_file_if_dst_doesnt_exists(self):
+ try:
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "file1.txt")
+ _write_file(src_file1)
+
+ dst_dir = _create_sandbox_dir()
src_fs = open_fs("osfs://" + src_dir)
dst_fs = open_fs("osfs://" + dst_dir)
+ copied = fs.copy.copy_file_if(
+ src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
+ )
+
+ self.assertTrue(copied)
+ self.assertTrue(dst_fs.exists("/file1.txt"))
+ finally:
+ shutil.rmtree(src_dir)
+ shutil.rmtree(dst_dir)
+
+ def test_copy_file_if_dst_exists(self):
+ try:
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "file1.txt")
+ _write_file(src_file1)
+
+ dst_dir = _create_sandbox_dir()
+ dst_file1 = _touch(dst_dir, "file1.txt")
+ _write_file(dst_file1)
+
+ src_fs = open_fs("osfs://" + src_dir)
+ dst_fs = open_fs("osfs://" + dst_dir)
+
+ self.assertTrue(dst_fs.exists("/file1.txt"))
+
+ copied = fs.copy.copy_file_if(
+ src_fs, "/file1.txt", dst_fs, "/file1.txt", self.copy_if_condition
+ )
+
+ self.assertFalse(copied)
+ self.assertTrue(dst_fs.exists("/file1.txt"))
+ finally:
+ shutil.rmtree(src_dir)
+ shutil.rmtree(dst_dir)
+
+ def test_copy_fs_if(self):
+ try:
+ dst_dir = _create_sandbox_dir()
+ dst_file1 = _touch(dst_dir, "file1.txt")
+ _write_file(dst_file1)
+
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "file1.txt")
+ src_file2 = _touch(src_dir, "file2.txt")
+ _write_file(src_file1)
+ _write_file(src_file2)
+
+ src_fs = open_fs("osfs://" + src_dir)
+ dst_fs = open_fs("osfs://" + dst_dir)
+
+ self.assertTrue(dst_fs.exists("/file1.txt"))
+
copied = []
def on_copy(src_fs, src_path, dst_fs, dst_path):
copied.append(dst_path)
- fs.copy.copy_dir_if_newer(src_fs, "/", dst_fs, "/", on_copy=on_copy)
+ fs.copy.copy_fs_if(
+ src_fs, dst_fs, on_copy=on_copy, condition=self.copy_if_condition
+ )
- self.assertEqual(copied, ["/one_level_down/file2.txt"])
- self.assertTrue(dst_fs.exists("/one_level_down/file2.txt"))
+ self.assertEqual(copied, ["/file2.txt"])
+ self.assertTrue(dst_fs.exists("/file1.txt"))
+ self.assertTrue(dst_fs.exists("/file2.txt"))
src_fs.close()
dst_fs.close()
+
finally:
shutil.rmtree(src_dir)
shutil.rmtree(dst_dir)
- def test_copy_dir_if_newer_same_fs(self):
+ def test_copy_dir_if(self):
try:
- src_dir = self._create_sandbox_dir()
- src_file1 = self._touch(src_dir, "src" + os.sep + "file1.txt")
- self._write_file(src_file1)
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "file1.txt")
+ _write_file(src_file1)
+
+ src_file2 = _touch(src_dir, os.path.join("one_level_down", "file2.txt"))
+ _write_file(src_file2)
- dst_dir = self._create_sandbox_dir(home=src_dir)
+ dst_dir = _create_sandbox_dir()
+ mkdirp(os.path.join(dst_dir, "target_dir"))
+ dst_file1 = _touch(dst_dir, os.path.join("target_dir", "file1.txt"))
+ _write_file(dst_file1)
src_fs = open_fs("osfs://" + src_dir)
+ dst_fs = open_fs("osfs://" + dst_dir)
copied = []
def on_copy(src_fs, src_path, dst_fs, dst_path):
copied.append(dst_path)
- fs.copy.copy_dir_if_newer(src_fs, "/src", src_fs, "/dst", on_copy=on_copy)
+ fs.copy.copy_dir_if(
+ src_fs,
+ "/",
+ dst_fs,
+ "/target_dir/",
+ on_copy=on_copy,
+ condition=self.copy_if_condition,
+ )
+
+ self.assertEqual(copied, ["/target_dir/one_level_down/file2.txt"])
+ self.assertTrue(dst_fs.exists("/target_dir/file1.txt"))
+ self.assertTrue(dst_fs.exists("/target_dir/one_level_down/file2.txt"))
+
+ src_fs.close()
+ dst_fs.close()
+ finally:
+ shutil.rmtree(src_dir)
+ shutil.rmtree(dst_dir)
+
+ def test_copy_dir_if_same_fs(self):
+ try:
+ src_dir = _create_sandbox_dir()
+ src_file1 = _touch(src_dir, "src" + os.sep + "file1.txt")
+ _write_file(src_file1)
+
+ _create_sandbox_dir(home=src_dir)
+
+ src_fs = open_fs("osfs://" + src_dir)
+
+ copied = []
+
+ def on_copy(src_fs, src_path, dst_fs, dst_path):
+ copied.append(dst_path)
+
+ fs.copy.copy_dir_if(
+ src_fs, "/src", src_fs, "/dst", on_copy=on_copy, condition="newer"
+ )
self.assertEqual(copied, ["/dst/file1.txt"])
self.assertTrue(src_fs.exists("/dst/file1.txt"))
@@ -375,19 +1023,19 @@ def on_copy(src_fs, src_path, dst_fs, dst_path):
finally:
shutil.rmtree(src_dir)
- def test_copy_dir_if_newer_multiple_files(self):
+ def test_copy_dir_if_multiple_files(self):
try:
- src_dir = self._create_sandbox_dir()
+ src_dir = _create_sandbox_dir()
src_fs = open_fs("osfs://" + src_dir)
src_fs.makedirs("foo/bar")
src_fs.makedirs("foo/empty")
src_fs.touch("test.txt")
src_fs.touch("foo/bar/baz.txt")
- dst_dir = self._create_sandbox_dir()
+ dst_dir = _create_sandbox_dir()
dst_fs = open_fs("osfs://" + dst_dir)
- fs.copy.copy_dir_if_newer(src_fs, "/foo", dst_fs, "/")
+ fs.copy.copy_dir_if(src_fs, "/foo", dst_fs, "/", condition="newer")
self.assertTrue(dst_fs.isdir("bar"))
self.assertTrue(dst_fs.isdir("empty"))
diff --git a/tests/test_doctest.py b/tests/test_doctest.py
new file mode 100644
index 00000000..ba27d82d
--- /dev/null
+++ b/tests/test_doctest.py
@@ -0,0 +1,194 @@
+# coding: utf-8
+"""Test doctest contained tests in every file of the module.
+"""
+import doctest
+import importlib
+import os
+import pkgutil
+import tempfile
+import time
+import types
+import unittest
+import warnings
+from pprint import pprint
+
+try:
+ from unittest import mock
+except ImportError:
+ import mock
+
+import six
+
+import fs
+import fs.opener.parse
+from fs.memoryfs import MemoryFS
+from fs.subfs import ClosingSubFS
+
+# --- Mocks ------------------------------------------------------------------
+
+
+def _home_fs():
+ """Create a mock filesystem that matches the XDG user-dirs spec."""
+ home_fs = MemoryFS()
+ home_fs.makedir("Desktop")
+ home_fs.makedir("Documents")
+ home_fs.makedir("Downloads")
+ home_fs.makedir("Music")
+ home_fs.makedir("Pictures")
+ home_fs.makedir("Public")
+ home_fs.makedir("Templates")
+ home_fs.makedir("Videos")
+ return home_fs
+
+
+def _open_fs(path):
+ """A mock `open_fs` that avoids side effects when running doctests."""
+ if "://" not in path:
+ path = "osfs://{}".format(path)
+ parse_result = fs.opener.parse(path)
+ if parse_result.protocol == "osfs" and parse_result.resource == "~":
+ home_fs = _home_fs()
+ if parse_result.path is not None:
+ home_fs = home_fs.opendir(parse_result.path, factory=ClosingSubFS)
+ return home_fs
+ elif parse_result.protocol in {"ftp", "ftps", "mem", "temp"}:
+ return MemoryFS()
+ else:
+ raise RuntimeError("not allowed in doctests: {}".format(path))
+
+
+def _my_fs(module):
+ """Create a mock filesystem to be used in examples."""
+ my_fs = MemoryFS()
+ if module == "fs.base":
+ my_fs.makedir("Desktop")
+ my_fs.makedir("Videos")
+ my_fs.touch("Videos/starwars.mov")
+ my_fs.touch("file.txt")
+ elif module == "fs.info":
+ my_fs.touch("foo.tar.gz")
+ my_fs.settext("foo.py", "print('Hello, world!')")
+ my_fs.makedir("bar")
+ elif module in {"fs.walk", "fs.glob"}:
+ my_fs.makedir("dir1")
+ my_fs.makedir("dir2")
+ my_fs.settext("foo.py", "print('Hello, world!')")
+ my_fs.touch("foo.pyc")
+ my_fs.settext("bar.py", "print('ok')\n\n# this is a comment\n")
+ my_fs.touch("bar.pyc")
+ return my_fs
+
+
+def _open(filename, mode="r"):
+ """A mock `open` that actually opens a temporary file."""
+ return tempfile.NamedTemporaryFile(mode="r+" if mode == "r" else mode)
+
+
+# --- Loader protocol --------------------------------------------------------
+
+
+def _load_tests_from_module(tests, module, globs, setUp=None, tearDown=None):
+ """Load tests from module, iterating through submodules."""
+ for attr in (getattr(module, x) for x in dir(module) if not x.startswith("_")):
+ if isinstance(attr, types.ModuleType):
+ suite = doctest.DocTestSuite(
+ attr,
+ globs,
+ setUp=setUp,
+ tearDown=tearDown,
+ optionflags=+doctest.ELLIPSIS,
+ )
+ tests.addTests(suite)
+ return tests
+
+
+def _load_tests(loader, tests, ignore):
+ """`load_test` function used by unittest to find the doctests."""
+
+ # NB (@althonos): we only test docstrings on Python 3 because it's
+ # extremely hard to maintain compatibility for both versions without
+ # extensively hacking `doctest` and `unittest`.
+ if six.PY2:
+ return tests
+
+ def setUp(self):
+ warnings.simplefilter("ignore")
+ self._open_fs_mock = mock.patch.object(fs, "open_fs", new=_open_fs)
+ self._open_fs_mock.__enter__()
+ self._ftpfs_mock = mock.patch.object(fs.ftpfs, "FTPFS")
+ self._ftpfs_mock.__enter__()
+
+ def tearDown(self):
+ self._open_fs_mock.__exit__(None, None, None)
+ self._ftpfs_mock.__exit__(None, None, None)
+ warnings.simplefilter(warnings.defaultaction)
+
+ # recursively traverse all library submodules and load tests from them
+ packages = [None, fs]
+ for pkg in iter(packages.pop, None):
+ for (_, subpkgname, subispkg) in pkgutil.walk_packages(pkg.__path__):
+ # import the submodule and add it to the tests
+ module = importlib.import_module(".".join([pkg.__name__, subpkgname]))
+
+ # load some useful modules / classes / mocks to the
+ # globals so that we don't need to explicitly import
+ # them in the doctests
+ globs = dict(**module.__dict__)
+ globs.update(
+ os=os,
+ fs=fs,
+ my_fs=_my_fs(module.__name__),
+ open=_open,
+ # NB (@althonos): This allows using OSFS in some examples,
+ # while not actually opening the real filesystem
+ OSFS=lambda path: MemoryFS(),
+ # NB (@althonos): This is for compatibility in `fs.registry`
+ print_list=lambda path: None,
+ pprint=pprint,
+ time=time,
+ )
+
+ # load the doctests into the unittest test suite
+ tests.addTests(
+ doctest.DocTestSuite(
+ module,
+ globs=globs,
+ setUp=setUp,
+ tearDown=tearDown,
+ optionflags=+doctest.ELLIPSIS,
+ )
+ )
+
+ # if the submodule is a package, we need to process its submodules
+ # as well, so we add it to the package queue
+ if subispkg:
+ packages.append(module)
+
+ return tests
+
+
+# --- Unit test wrapper ------------------------------------------------------
+#
+# NB (@althonos): Since pytest doesn't support the `load_tests` protocol
+# above, we manually build a `unittest.TestCase` using a dedicated test
+# method for each doctest. This should be safe to remove when pytest
+# supports it, or if we move away from pytest to run tests.
+
+
+class TestDoctest(unittest.TestCase):
+ pass
+
+
+def make_wrapper(x):
+ def _test_wrapper(self):
+ x.setUp()
+ try:
+ x.runTest()
+ finally:
+ x.tearDown()
+
+ return _test_wrapper
+
+
+for x in _load_tests(None, unittest.TestSuite(), False):
+ setattr(TestDoctest, "test_{}".format(x.id().replace(".", "_")), make_wrapper(x))
diff --git a/tests/test_encoding.py b/tests/test_encoding.py
index 0cd91d4c..6791e396 100644
--- a/tests/test_encoding.py
+++ b/tests/test_encoding.py
@@ -3,15 +3,13 @@
import os
import platform
import shutil
+import six
import tempfile
import unittest
-import six
-
import fs
from fs.osfs import OSFS
-
if platform.system() != "Windows":
@unittest.skipIf(platform.system() == "Darwin", "Bad unicode not possible on OSX")
diff --git a/tests/test_enums.py b/tests/test_enums.py
index fe496336..aa847c33 100644
--- a/tests/test_enums.py
+++ b/tests/test_enums.py
@@ -1,9 +1,8 @@
import os
+import unittest
from fs import enums
-import unittest
-
class TestEnums(unittest.TestCase):
def test_enums(self):
diff --git a/tests/test_error_tools.py b/tests/test_error_tools.py
index b9ac25c9..4f6aa324 100644
--- a/tests/test_error_tools.py
+++ b/tests/test_error_tools.py
@@ -3,13 +3,23 @@
import errno
import unittest
+import fs.errors
from fs.error_tools import convert_os_errors
-from fs import errors as fserrors
class TestErrorTools(unittest.TestCase):
- def assert_convert_os_errors(self):
+ def test_convert_enoent(self):
+ exception = OSError(errno.ENOENT, "resource not found")
+ with self.assertRaises(fs.errors.ResourceNotFound) as ctx:
+ with convert_os_errors("stat", "/tmp/test"):
+ raise exception
+ self.assertEqual(ctx.exception.exc, exception)
+ self.assertEqual(ctx.exception.path, "/tmp/test")
- with self.assertRaises(fserrors.ResourceNotFound):
- with convert_os_errors("foo", "test"):
- raise OSError(errno.ENOENT)
+ def test_convert_enametoolong(self):
+ exception = OSError(errno.ENAMETOOLONG, "File name too long: test")
+ with self.assertRaises(fs.errors.PathError) as ctx:
+ with convert_os_errors("stat", "/tmp/test"):
+ raise exception
+ self.assertEqual(ctx.exception.exc, exception)
+ self.assertEqual(ctx.exception.path, "/tmp/test")
diff --git a/tests/test_errors.py b/tests/test_errors.py
index 0b78fd15..9688e345 100644
--- a/tests/test_errors.py
+++ b/tests/test_errors.py
@@ -2,7 +2,6 @@
import multiprocessing
import unittest
-
from six import text_type
from fs import errors
@@ -30,7 +29,7 @@ def test_raise_in_multiprocessing(self):
[errors.NoURL, "some_path", "some_purpose"],
[errors.Unsupported],
[errors.IllegalBackReference, "path"],
- [errors.MissingInfoNamespace, "path"]
+ [errors.MissingInfoNamespace, "path"],
]
try:
pool = multiprocessing.Pool(1)
@@ -56,7 +55,7 @@ def test_catch_all(self):
def test(x):
raise errors[x]
- for index, exc in enumerate(errors):
+ for index, _exc in enumerate(errors):
try:
test(index)
except Exception as e:
diff --git a/tests/test_filesize.py b/tests/test_filesize.py
index dc7b5af4..8900f671 100644
--- a/tests/test_filesize.py
+++ b/tests/test_filesize.py
@@ -1,9 +1,9 @@
from __future__ import unicode_literals
-from fs import filesize
-
import unittest
+from fs import filesize
+
class TestFilesize(unittest.TestCase):
def test_traditional(self):
diff --git a/tests/test_fscompat.py b/tests/test_fscompat.py
index 6418922b..d4544eab 100644
--- a/tests/test_fscompat.py
+++ b/tests/test_fscompat.py
@@ -1,10 +1,9 @@
from __future__ import unicode_literals
-import unittest
-
import six
+import unittest
-from fs._fscompat import fsencode, fsdecode, fspath
+from fs._fscompat import fsdecode, fsencode, fspath
class PathMock(object):
diff --git a/tests/test_ftp_parse.py b/tests/test_ftp_parse.py
index 8e00a034..bd967aed 100644
--- a/tests/test_ftp_parse.py
+++ b/tests/test_ftp_parse.py
@@ -1,11 +1,16 @@
from __future__ import unicode_literals
-import mock
+import textwrap
import time
import unittest
from fs import _ftp_parse as ftp_parse
+try:
+ from unittest import mock
+except ImportError:
+ import mock
+
time2017 = time.struct_time([2017, 11, 28, 1, 1, 1, 1, 332, 0])
@@ -13,22 +18,23 @@ class TestFTPParse(unittest.TestCase):
@mock.patch("time.localtime")
def test_parse_time(self, mock_localtime):
self.assertEqual(
- ftp_parse._parse_time("JUL 05 1974", formats=["%b %d %Y"]),
- 142214400.0)
+ ftp_parse._parse_time("JUL 05 1974", formats=["%b %d %Y"]), 142214400.0
+ )
mock_localtime.return_value = time2017
self.assertEqual(
- ftp_parse._parse_time("JUL 05 02:00", formats=["%b %d %H:%M"]),
- 1499220000.0)
+ ftp_parse._parse_time("JUL 05 02:00", formats=["%b %d %H:%M"]), 1499220000.0
+ )
self.assertEqual(
ftp_parse._parse_time("05-07-17 02:00AM", formats=["%d-%m-%y %I:%M%p"]),
- 1499220000.0)
+ 1499220000.0,
+ )
self.assertEqual(ftp_parse._parse_time("notadate", formats=["%b %d %Y"]), None)
def test_parse(self):
- self.assertEqual(ftp_parse.parse([""]), [])
+ self.assertListEqual(ftp_parse.parse([""]), [])
def test_parse_line(self):
self.assertIs(ftp_parse.parse_line("not a dir"), None)
@@ -36,15 +42,19 @@ def test_parse_line(self):
@mock.patch("time.localtime")
def test_decode_linux(self, mock_localtime):
mock_localtime.return_value = time2017
- directory = """\
-lrwxrwxrwx 1 0 0 19 Jan 18 2006 debian -> ./pub/mirror/debian
-drwxr-xr-x 10 0 0 4096 Aug 03 09:21 debian-archive
-lrwxrwxrwx 1 0 0 27 Nov 30 2015 debian-backports -> pub/mirror/debian-backports
-drwxr-xr-x 12 0 0 4096 Sep 29 13:13 pub
--rw-r--r-- 1 0 0 26 Mar 04 2010 robots.txt
-drwxr-xr-x 8 foo bar 4096 Oct 4 09:05 test
-drwxr-xr-x 2 foo-user foo-group 0 Jan 5 11:59 240485
-"""
+ directory = textwrap.dedent(
+ """
+ lrwxrwxrwx 1 0 0 19 Jan 18 2006 debian -> ./pub/mirror/debian
+ drwxr-xr-x 10 0 0 4096 Aug 03 09:21 debian-archive
+ lrwxrwxrwx 1 0 0 27 Nov 30 2015 debian-backports -> pub/mirror/debian-backports
+ drwxr-xr-x 12 0 0 4096 Sep 29 13:13 pub
+ -rw-r--r-- 1 0 0 26 Mar 04 2010 robots.txt
+ drwxr-xr-x 8 foo bar 4096 Oct 4 09:05 test
+ drwxr-xr-x 8 f b 4096 Oct 4 09:05 test
+ drwxr-xr-x 2 foo-user foo-group 0 Jan 5 11:59 240485
+ drwxr-xr-x 2 foo.user$ foo@group_ 0 Jan 5 11:59 240485
+ """
+ )
expected = [
{
@@ -139,6 +149,18 @@ def test_decode_linux(self, mock_localtime):
"ls": "drwxr-xr-x 8 foo bar 4096 Oct 4 09:05 test"
},
},
+ {
+ "access": {
+ "group": "b",
+ "permissions": ["g_r", "g_x", "o_r", "o_x", "u_r", "u_w", "u_x"],
+ "user": "f",
+ },
+ "basic": {"is_dir": True, "name": "test"},
+ "details": {"modified": 1507107900.0, "size": 4096, "type": 1},
+ "ftp": {
+ "ls": "drwxr-xr-x 8 f b 4096 Oct 4 09:05 test"
+ },
+ },
{
"access": {
"group": "foo-group",
@@ -151,42 +173,182 @@ def test_decode_linux(self, mock_localtime):
"ls": "drwxr-xr-x 2 foo-user foo-group 0 Jan 5 11:59 240485"
},
},
+ {
+ "access": {
+ "group": "foo@group_",
+ "permissions": ["g_r", "g_x", "o_r", "o_x", "u_r", "u_w", "u_x"],
+ "user": "foo.user$",
+ },
+ "basic": {"is_dir": True, "name": "240485"},
+ "details": {"modified": 1483617540.0, "size": 0, "type": 1},
+ "ftp": {
+ "ls": "drwxr-xr-x 2 foo.user$ foo@group_ 0 Jan 5 11:59 240485"
+ },
+ },
]
- parsed = ftp_parse.parse(directory.splitlines())
- self.assertEqual(parsed, expected)
+ parsed = ftp_parse.parse(directory.strip().splitlines())
+ self.assertListEqual(parsed, expected)
@mock.patch("time.localtime")
def test_decode_windowsnt(self, mock_localtime):
mock_localtime.return_value = time2017
- directory = """\
-11-02-17 02:00AM docs
-11-02-17 02:12PM images
-11-02-17 03:33PM 9276 logo.gif
-"""
+ directory = textwrap.dedent(
+ """
+ unparsable line
+ 11-02-17 02:00AM docs
+ 11-02-17 02:12PM images
+ 11-02-17 02:12PM AM to PM
+ 11-02-17 03:33PM 9276 logo.gif
+ 05-11-20 22:11 src
+ 11-02-17 01:23 1 12
+ 11-02-17 4:54 0 icon.bmp
+ 11-02-17 4:54AM 0 icon.gif
+ 11-02-17 4:54PM 0 icon.png
+ 11-02-17 16:54 0 icon.jpg
+ """
+ )
expected = [
{
"basic": {"is_dir": True, "name": "docs"},
"details": {"modified": 1486778400.0, "type": 1},
- "ftp": {
- "ls": "11-02-17 02:00AM docs"
- },
+ "ftp": {"ls": "11-02-17 02:00AM docs"},
},
{
"basic": {"is_dir": True, "name": "images"},
"details": {"modified": 1486822320.0, "type": 1},
- "ftp": {
- "ls": "11-02-17 02:12PM images"
- },
+ "ftp": {"ls": "11-02-17 02:12PM images"},
+ },
+ {
+ "basic": {"is_dir": True, "name": "AM to PM"},
+ "details": {"modified": 1486822320.0, "type": 1},
+ "ftp": {"ls": "11-02-17 02:12PM AM to PM"},
},
{
"basic": {"is_dir": False, "name": "logo.gif"},
"details": {"modified": 1486827180.0, "size": 9276, "type": 2},
+ "ftp": {"ls": "11-02-17 03:33PM 9276 logo.gif"},
+ },
+ {
+ "basic": {"is_dir": True, "name": "src"},
+ "details": {"modified": 1604614260.0, "type": 1},
+ "ftp": {"ls": "05-11-20 22:11 src"},
+ },
+ {
+ "basic": {"is_dir": False, "name": "12"},
+ "details": {"modified": 1486776180.0, "size": 1, "type": 2},
+ "ftp": {"ls": "11-02-17 01:23 1 12"},
+ },
+ {
+ "basic": {"is_dir": False, "name": "icon.bmp"},
+ "details": {"modified": 1486788840.0, "size": 0, "type": 2},
+ "ftp": {"ls": "11-02-17 4:54 0 icon.bmp"},
+ },
+ {
+ "basic": {"is_dir": False, "name": "icon.gif"},
+ "details": {"modified": 1486788840.0, "size": 0, "type": 2},
+ "ftp": {"ls": "11-02-17 4:54AM 0 icon.gif"},
+ },
+ {
+ "basic": {"is_dir": False, "name": "icon.png"},
+ "details": {"modified": 1486832040.0, "size": 0, "type": 2},
+ "ftp": {"ls": "11-02-17 4:54PM 0 icon.png"},
+ },
+ {
+ "basic": {"is_dir": False, "name": "icon.jpg"},
+ "details": {"modified": 1486832040.0, "size": 0, "type": 2},
+ "ftp": {"ls": "11-02-17 16:54 0 icon.jpg"},
+ },
+ ]
+
+ parsed = ftp_parse.parse(directory.strip().splitlines())
+ self.assertEqual(parsed, expected)
+
+ @mock.patch("time.localtime")
+ def test_decode_linux_suid(self, mock_localtime):
+ # reported in #451
+ mock_localtime.return_value = time2017
+ directory = textwrap.dedent(
+ """
+ drwxr-sr-x 66 ftp ftp 8192 Mar 16 17:54 pub
+ -rw-r--r-- 1 ftp ftp 25 Mar 18 19:34 robots.txt
+ """
+ )
+ expected = [
+ {
+ "access": {
+ "group": "ftp",
+ "permissions": [
+ "g_r",
+ "g_s",
+ "o_r",
+ "o_x",
+ "u_r",
+ "u_w",
+ "u_x",
+ ],
+ "user": "ftp",
+ },
+ "basic": {"is_dir": True, "name": "pub"},
+ "details": {"modified": 1489686840.0, "size": 8192, "type": 1},
"ftp": {
- "ls": "11-02-17 03:33PM 9276 logo.gif"
+ "ls": "drwxr-sr-x 66 ftp ftp 8192 Mar 16 17:54 pub"
+ },
+ },
+ {
+ "access": {
+ "group": "ftp",
+ "permissions": [
+ "g_r",
+ "o_r",
+ "u_r",
+ "u_w",
+ ],
+ "user": "ftp",
+ },
+ "basic": {"is_dir": False, "name": "robots.txt"},
+ "details": {"modified": 1489865640.0, "size": 25, "type": 2},
+ "ftp": {
+ "ls": "-rw-r--r-- 1 ftp ftp 25 Mar 18 19:34 robots.txt"
},
},
]
- parsed = ftp_parse.parse(directory.splitlines())
- self.assertEqual(parsed, expected)
+ parsed = ftp_parse.parse(directory.strip().splitlines())
+ self.assertListEqual(parsed, expected)
+
+ @mock.patch("time.localtime")
+ def test_decode_linux_sticky(self, mock_localtime):
+ # reported in #451
+ mock_localtime.return_value = time2017
+ directory = textwrap.dedent(
+ """
+ drwxr-xr-t 66 ftp ftp 8192 Mar 16 17:54 pub
+ """
+ )
+ expected = [
+ {
+ "access": {
+ "group": "ftp",
+ "permissions": [
+ "g_r",
+ "g_x",
+ "o_r",
+ "o_t",
+ "u_r",
+ "u_w",
+ "u_x",
+ ],
+ "user": "ftp",
+ },
+ "basic": {"is_dir": True, "name": "pub"},
+ "details": {"modified": 1489686840.0, "size": 8192, "type": 1},
+ "ftp": {
+ "ls": "drwxr-xr-t 66 ftp ftp 8192 Mar 16 17:54 pub"
+ },
+ },
+ ]
+
+ self.maxDiff = None
+ parsed = ftp_parse.parse(directory.strip().splitlines())
+ self.assertListEqual(parsed, expected)
diff --git a/tests/test_ftpfs.py b/tests/test_ftpfs.py
index 19ce22ed..2bb2c73c 100644
--- a/tests/test_ftpfs.py
+++ b/tests/test_ftpfs.py
@@ -1,33 +1,37 @@
# coding: utf-8
-from __future__ import absolute_import
-from __future__ import print_function
-from __future__ import unicode_literals
+from __future__ import absolute_import, print_function, unicode_literals
-import socket
+import calendar
+import datetime
import os
import platform
import shutil
+import socket
import tempfile
import time
import unittest
import uuid
-from nose.plugins.attrib import attr
-
-from six import text_type
-
-from ftplib import error_perm
-from ftplib import error_temp
+try:
+ from unittest import mock
+except ImportError:
+ import mock
+from ftplib import error_perm, error_temp
from pyftpdlib.authorizers import DummyAuthorizer
+from six import BytesIO, text_type
from fs import errors
-from fs.opener import open_fs
from fs.ftpfs import FTPFS, ftp_errors
+from fs.opener import open_fs
from fs.path import join
from fs.subfs import SubFS
from fs.test import FSTestCases
+try:
+ from pytest import mark
+except ImportError:
+ from . import mark
# Prevent socket timeouts from slowing tests too much
socket.setdefaulttimeout(1)
@@ -86,6 +90,10 @@ def test_opener(self):
self.assertIsInstance(ftp_fs, FTPFS)
self.assertEqual(ftp_fs.host, "ftp.example.org")
+ ftps_fs = open_fs("ftps://will:wfc@ftp.example.org")
+ self.assertIsInstance(ftps_fs, FTPFS)
+ self.assertTrue(ftps_fs.tls)
+
class TestFTPErrors(unittest.TestCase):
"""Test the ftp_errors context manager."""
@@ -125,12 +133,14 @@ def test_manager_with_host(self):
with self.assertRaises(errors.RemoteConnectionError) as err_info:
with ftp_errors(mem_fs):
raise socket.error
- self.assertEqual(str(err_info.exception), "unable to connect to ftp.example.com")
+ self.assertEqual(
+ str(err_info.exception), "unable to connect to ftp.example.com"
+ )
-@attr("slow")
+@mark.slow
+@unittest.skipIf(platform.python_implementation() == "PyPy", "ftp unreliable with PyPy")
class TestFTPFS(FSTestCases, unittest.TestCase):
-
user = "user"
pasw = "1234"
@@ -148,7 +158,7 @@ def setUpClass(cls):
cls.server.shutdown_after = -1
cls.server.handler.authorizer = DummyAuthorizer()
cls.server.handler.authorizer.add_user(
- cls.user, cls.pasw, cls._temp_path, perm="elradfmw"
+ cls.user, cls.pasw, cls._temp_path, perm="elradfmwT"
)
cls.server.handler.authorizer.add_anonymous(cls._temp_path)
cls.server.start()
@@ -179,26 +189,56 @@ def tearDown(self):
super(TestFTPFS, self).tearDown()
def test_ftp_url(self):
- self.assertEqual(self.fs.ftp_url, "ftp://{}:{}@{}:{}".format(self.user, self.pasw, self.server.host, self.server.port))
+ self.assertEqual(
+ self.fs.ftp_url,
+ "ftp://{}:{}@{}:{}".format(
+ self.user, self.pasw, self.server.host, self.server.port
+ ),
+ )
def test_geturl(self):
self.fs.makedir("foo")
self.fs.create("bar")
self.fs.create("foo/bar")
self.assertEqual(
- self.fs.geturl('foo'), "ftp://{}:{}@{}:{}/foo".format(self.user, self.pasw, self.server.host, self.server.port)
+ self.fs.geturl("foo"),
+ "ftp://{}:{}@{}:{}/foo".format(
+ self.user, self.pasw, self.server.host, self.server.port
+ ),
)
self.assertEqual(
- self.fs.geturl('bar'), "ftp://{}:{}@{}:{}/bar".format(self.user, self.pasw, self.server.host, self.server.port)
+ self.fs.geturl("bar"),
+ "ftp://{}:{}@{}:{}/bar".format(
+ self.user, self.pasw, self.server.host, self.server.port
+ ),
)
self.assertEqual(
- self.fs.geturl('foo/bar'), "ftp://{}:{}@{}:{}/foo/bar".format(self.user, self.pasw, self.server.host, self.server.port)
+ self.fs.geturl("foo/bar"),
+ "ftp://{}:{}@{}:{}/foo/bar".format(
+ self.user, self.pasw, self.server.host, self.server.port
+ ),
)
+ def test_setinfo(self):
+ # TODO: temporary test, since FSTestCases.test_setinfo is broken.
+ self.fs.create("bar")
+ original_modified = self.fs.getinfo("bar", ("details",)).modified
+ new_modified = original_modified - datetime.timedelta(hours=1)
+ new_modified_stamp = calendar.timegm(new_modified.timetuple())
+ self.fs.setinfo("bar", {"details": {"modified": new_modified_stamp}})
+ new_modified_get = self.fs.getinfo("bar", ("details",)).modified
+ if original_modified.microsecond == 0 or new_modified_get.microsecond == 0:
+ original_modified = original_modified.replace(microsecond=0)
+ new_modified_get = new_modified_get.replace(microsecond=0)
+ if original_modified.second == 0 or new_modified_get.second == 0:
+ original_modified = original_modified.replace(second=0)
+ new_modified_get = new_modified_get.replace(second=0)
+ new_modified_get = new_modified_get + datetime.timedelta(hours=1)
+ self.assertEqual(original_modified, new_modified_get)
+
def test_host(self):
self.assertEqual(self.fs.host, self.server.host)
- # @attr('slow')
def test_connection_error(self):
fs = FTPFS("ftp.not.a.chance", timeout=1)
with self.assertRaises(errors.RemoteConnectionError):
@@ -216,6 +256,23 @@ def test_getmeta_unicode_path(self):
del self.fs.features["UTF8"]
self.assertFalse(self.fs.getmeta().get("unicode_paths"))
+ def test_getinfo_modified(self):
+ self.assertIn("MDTM", self.fs.features)
+ self.fs.create("bar")
+ mtime_detail = self.fs.getinfo("bar", ("basic", "details")).modified
+ mtime_modified = self.fs.getmodified("bar")
+ # Microsecond and seconds might not actually be supported by all
+ # FTP commands, so we strip them before comparing if it looks
+ # like at least one of the two values does not contain them.
+ replacement = {}
+ if mtime_detail.microsecond == 0 or mtime_modified.microsecond == 0:
+ replacement["microsecond"] = 0
+ if mtime_detail.second == 0 or mtime_modified.second == 0:
+ replacement["second"] = 0
+ self.assertEqual(
+ mtime_detail.replace(**replacement), mtime_modified.replace(**replacement)
+ )
+
def test_opener_path(self):
self.fs.makedir("foo")
self.fs.writetext("foo/bar", "baz")
@@ -253,6 +310,12 @@ def test_create(self):
with open_fs(url, create=True) as ftp_fs:
self.assertTrue(ftp_fs.isfile("foo"))
+ def test_upload_connection(self):
+ with mock.patch.object(self.fs, "_manage_ftp") as _manage_ftp:
+ self.fs.upload("foo", BytesIO(b"hello"))
+ self.assertEqual(self.fs.gettext("foo"), "hello")
+ _manage_ftp.assert_not_called()
+
class TestFTPFSNoMLSD(TestFTPFS):
def make_fs(self):
@@ -265,9 +328,9 @@ def test_features(self):
pass
-@attr("slow")
+@mark.slow
+@unittest.skipIf(platform.python_implementation() == "PyPy", "ftp unreliable with PyPy")
class TestAnonFTPFS(FSTestCases, unittest.TestCase):
-
user = "anonymous"
pasw = ""
@@ -301,11 +364,7 @@ def tearDownClass(cls):
super(TestAnonFTPFS, cls).tearDownClass()
def make_fs(self):
- return open_fs(
- "ftp://{}:{}".format(
- self.server.host, self.server.port
- )
- )
+ return open_fs("ftp://{}:{}".format(self.server.host, self.server.port))
def tearDown(self):
shutil.rmtree(self._temp_path)
@@ -313,12 +372,23 @@ def tearDown(self):
super(TestAnonFTPFS, self).tearDown()
def test_ftp_url(self):
- self.assertEqual(self.fs.ftp_url, "ftp://{}:{}".format(self.server.host, self.server.port))
+ self.assertEqual(
+ self.fs.ftp_url, "ftp://{}:{}".format(self.server.host, self.server.port)
+ )
def test_geturl(self):
self.fs.makedir("foo")
self.fs.create("bar")
self.fs.create("foo/bar")
- self.assertEqual(self.fs.geturl('foo'), "ftp://{}:{}/foo".format(self.server.host, self.server.port))
- self.assertEqual(self.fs.geturl('bar'), "ftp://{}:{}/bar".format(self.server.host, self.server.port))
- self.assertEqual(self.fs.geturl('foo/bar'), "ftp://{}:{}/foo/bar".format(self.server.host, self.server.port))
+ self.assertEqual(
+ self.fs.geturl("foo"),
+ "ftp://{}:{}/foo".format(self.server.host, self.server.port),
+ )
+ self.assertEqual(
+ self.fs.geturl("bar"),
+ "ftp://{}:{}/bar".format(self.server.host, self.server.port),
+ )
+ self.assertEqual(
+ self.fs.geturl("foo/bar"),
+ "ftp://{}:{}/foo/bar".format(self.server.host, self.server.port),
+ )
diff --git a/tests/test_glob.py b/tests/test_glob.py
index c2a2d02f..9a5d8827 100644
--- a/tests/test_glob.py
+++ b/tests/test_glob.py
@@ -1,9 +1,11 @@
from __future__ import unicode_literals
+import re
import unittest
-from fs import glob
-from fs import open_fs
+from parameterized import parameterized
+
+from fs import glob, open_fs
class TestGlob(unittest.TestCase):
@@ -18,10 +20,11 @@ def setUp(self):
fs.makedirs("a/b/c/").writetext("foo.py", "import fs")
repr(fs.glob)
- def test_match(self):
- tests = [
+ @parameterized.expand(
+ [
("*.?y", "/test.py", True),
("*.py", "/test.py", True),
+ ("*.py", "__init__.py", True),
("*.py", "/test.pc", False),
("*.py", "/foo/test.py", False),
("foo/*.py", "/foo/test.py", True),
@@ -29,21 +32,23 @@ def test_match(self):
("?oo/*.py", "/foo/test.py", True),
("*/*.py", "/foo/test.py", True),
("foo/*.py", "/bar/foo/test.py", False),
+ ("/foo/**", "/foo/test.py", True),
("**/foo/*.py", "/bar/foo/test.py", True),
("foo/**/bar/*.py", "/foo/bar/test.py", True),
("foo/**/bar/*.py", "/foo/baz/egg/bar/test.py", True),
("foo/**/bar/*.py", "/foo/baz/egg/bar/egg/test.py", False),
("**", "/test.py", True),
+ ("/**", "/test.py", True),
("**", "/test", True),
("**", "/test/", True),
("**/", "/test/", True),
("**/", "/test.py", False),
]
- for pattern, path, expected in tests:
- self.assertEqual(glob.match(pattern, path), expected)
+ )
+ def test_match(self, pattern, path, expected):
+ self.assertEqual(glob.match(pattern, path), expected, msg=(pattern, path))
# Run a second time to test cache
- for pattern, path, expected in tests:
- self.assertEqual(glob.match(pattern, path), expected)
+ self.assertEqual(glob.match(pattern, path), expected, msg=(pattern, path))
def test_count_1dir(self):
globber = glob.BoundGlobber(self.fs)
@@ -97,3 +102,49 @@ def test_remove_all(self):
globber = glob.BoundGlobber(self.fs)
globber("**").remove()
self.assertEqual(sorted(self.fs.listdir("/")), [])
+
+ translate_test_cases = [
+ ("foo.py", ["foo.py"], ["Foo.py", "foo_py", "foo", ".py"]),
+ ("foo?py", ["foo.py", "fooapy"], ["foo/py", "foopy", "fopy"]),
+ ("bar/foo.py", ["bar/foo.py"], []),
+ ("bar?foo.py", ["barafoo.py"], ["bar/foo.py"]),
+ ("???.py", ["foo.py", "bar.py", "FOO.py"], [".py", "foo.PY"]),
+ ("bar/*.py", ["bar/.py", "bar/foo.py"], ["bar/foo"]),
+ ("bar/foo*.py", ["bar/foo.py", "bar/foobaz.py"], ["bar/foo", "bar/.py"]),
+ ("*/[bar]/foo.py", ["/b/foo.py", "x/a/foo.py", "/r/foo.py"], ["b/foo.py", "/bar/foo.py"]),
+ ("[!bar]/foo.py", ["x/foo.py"], ["//foo.py"]),
+ ("[.py", ["[.py"], [".py", "."]),
+ ]
+
+ @parameterized.expand(translate_test_cases)
+ def test_translate(self, glob_pattern, expected_matches, expected_not_matches):
+ translated = glob._translate(glob_pattern)
+ for m in expected_matches:
+ self.assertTrue(re.match(translated, m))
+ for m in expected_not_matches:
+ self.assertFalse(re.match(translated, m))
+
+ @parameterized.expand(translate_test_cases)
+ def test_translate_glob_simple(self, glob_pattern, expected_matches, expected_not_matches):
+ levels, translated = glob._translate_glob(glob_pattern)
+ self.assertEqual(levels, glob_pattern.count("/") + 1)
+ for m in expected_matches:
+ self.assertTrue(re.match(translated, "/" + m))
+ for m in expected_not_matches:
+ self.assertFalse(re.match(translated, m))
+ self.assertFalse(re.match(translated, "/" + m))
+
+ @parameterized.expand(
+ [
+ ("foo/**/bar", ["/foo/bar", "/foo/baz/bar", "/foo/baz/qux/bar"], ["/foo"]),
+ ("**/*/bar", ["/foo/bar", "/foo/bar"], ["/bar", "/bar"]),
+ ("/**/foo/**/bar", ["/baz/foo/qux/bar", "/foo/bar"], ["/bar"]),
+ ]
+ )
+ def test_translate_glob(self, glob_pattern, expected_matches, expected_not_matches):
+ levels, translated = glob._translate_glob(glob_pattern)
+ self.assertIsNone(levels)
+ for m in expected_matches:
+ self.assertTrue(re.match(translated, m))
+ for m in expected_not_matches:
+ self.assertFalse(re.match(translated, m))
diff --git a/tests/test_imports.py b/tests/test_imports.py
index 72fa6fba..e18cffa7 100644
--- a/tests/test_imports.py
+++ b/tests/test_imports.py
@@ -1,4 +1,5 @@
import sys
+
import unittest
@@ -6,6 +7,7 @@ class TestImports(unittest.TestCase):
def test_import_path(self):
"""Test import fs also imports other symbols."""
restore_fs = sys.modules.pop("fs")
+ sys.modules.pop("fs.path")
try:
import fs
diff --git a/tests/test_info.py b/tests/test_info.py
index 8c5a1d30..7c50ec7b 100644
--- a/tests/test_info.py
+++ b/tests/test_info.py
@@ -1,16 +1,18 @@
-
from __future__ import unicode_literals
-import datetime
import unittest
-
-import pytz
+from datetime import datetime
from fs.enums import ResourceType
from fs.info import Info
from fs.permissions import Permissions
from fs.time import datetime_to_epoch
+try:
+ from datetime import timezone
+except ImportError:
+ from fs._tzcompat import timezone # type: ignore
+
class TestInfo(unittest.TestCase):
def test_empty(self):
@@ -72,10 +74,10 @@ def test_basic(self):
def test_details(self):
dates = [
- datetime.datetime(2016, 7, 5, tzinfo=pytz.UTC),
- datetime.datetime(2016, 7, 6, tzinfo=pytz.UTC),
- datetime.datetime(2016, 7, 7, tzinfo=pytz.UTC),
- datetime.datetime(2016, 7, 8, tzinfo=pytz.UTC),
+ datetime(2016, 7, 5, tzinfo=timezone.utc),
+ datetime(2016, 7, 6, tzinfo=timezone.utc),
+ datetime(2016, 7, 7, tzinfo=timezone.utc),
+ datetime(2016, 7, 8, tzinfo=timezone.utc),
]
epochs = [datetime_to_epoch(d) for d in dates]
diff --git a/tests/test_iotools.py b/tests/test_iotools.py
index ffcc2949..56af6e73 100644
--- a/tests/test_iotools.py
+++ b/tests/test_iotools.py
@@ -1,13 +1,10 @@
from __future__ import unicode_literals
import io
-import unittest
-
import six
+import unittest
-from fs import iotools
-from fs import tempfs
-
+from fs import iotools, tempfs
from fs.test import UNICODE_TEXT
@@ -26,7 +23,7 @@ def test_make_stream(self):
with self.fs.openbin("foo.bin") as f:
data = f.read()
- self.assert_(isinstance(data, bytes))
+ self.assertTrue(isinstance(data, bytes))
with self.fs.openbin("text.txt", "wb") as f:
f.write(UNICODE_TEXT.encode("utf-8"))
diff --git a/tests/test_memoryfs.py b/tests/test_memoryfs.py
index aaf6e779..6537fac2 100644
--- a/tests/test_memoryfs.py
+++ b/tests/test_memoryfs.py
@@ -4,8 +4,7 @@
import unittest
from fs import memoryfs
-from fs.test import FSTestCases
-from fs.test import UNICODE_TEXT
+from fs.test import UNICODE_TEXT, FSTestCases
try:
# Only supported on Python 3.4+
@@ -28,8 +27,8 @@ def _create_many_files(self):
posixpath.join(parent_dir, str(file_id)), UNICODE_TEXT
)
- @unittest.skipIf(
- not tracemalloc, "`tracemalloc` isn't supported on this Python version."
+ @unittest.skipUnless(
+ tracemalloc, reason="`tracemalloc` isn't supported on this Python version."
)
def test_close_mem_free(self):
"""Ensure all file memory is freed when calling close().
@@ -66,3 +65,32 @@ def test_close_mem_free(self):
"Memory usage increased after closing the file system; diff is %0.2f KiB."
% (diff_close.size_diff / 1024.0),
)
+
+ def test_copy_preserve_time(self):
+ self.fs.makedir("foo")
+ self.fs.makedir("bar")
+ self.fs.touch("foo/file.txt")
+
+ src_datetime = self.fs.getmodified("foo/file.txt")
+
+ self.fs.copy("foo/file.txt", "bar/file.txt", preserve_time=True)
+ self.assertTrue(self.fs.exists("bar/file.txt"))
+
+ dst_datetime = self.fs.getmodified("bar/file.txt")
+ self.assertEqual(dst_datetime, src_datetime)
+
+
+class TestMemoryFile(unittest.TestCase):
+ def setUp(self):
+ self.fs = memoryfs.MemoryFS()
+
+ def tearDown(self):
+ self.fs.close()
+
+ def test_readline_writing(self):
+ with self.fs.openbin("test.txt", "w") as f:
+ self.assertRaises(IOError, f.readline)
+
+ def test_readinto_writing(self):
+ with self.fs.openbin("test.txt", "w") as f:
+ self.assertRaises(IOError, f.readinto, bytearray(10))
diff --git a/tests/test_mirror.py b/tests/test_mirror.py
index a0e8ac53..8aaa0953 100644
--- a/tests/test_mirror.py
+++ b/tests/test_mirror.py
@@ -1,16 +1,17 @@
from __future__ import unicode_literals
import unittest
+from parameterized import parameterized_class
-from fs.mirror import mirror
from fs import open_fs
+from fs.mirror import mirror
+@parameterized_class(("WORKERS",), [(0,), (1,), (2,), (4,)])
class TestMirror(unittest.TestCase):
- WORKERS = 0 # Single threaded
-
def _contents(self, fs):
"""Extract an FS in to a simple data structure."""
+ namespaces = ("details", "metadata_changed", "modified")
contents = []
for path, dirs, files in fs.walk():
for info in dirs:
@@ -18,7 +19,17 @@ def _contents(self, fs):
contents.append((_path, "dir", b""))
for info in files:
_path = info.make_path(path)
- contents.append((_path, "file", fs.readbytes(_path)))
+ _bytes = fs.readbytes(_path)
+ _info = fs.getinfo(_path, namespaces)
+ contents.append(
+ (
+ _path,
+ "file",
+ _bytes,
+ _info.modified,
+ _info.metadata_changed,
+ )
+ )
return sorted(contents)
def assert_compare_fs(self, fs1, fs2):
@@ -28,14 +39,14 @@ def assert_compare_fs(self, fs1, fs2):
def test_empty_mirror(self):
m1 = open_fs("mem://")
m2 = open_fs("mem://")
- mirror(m1, m2, workers=self.WORKERS)
+ mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
self.assert_compare_fs(m1, m2)
def test_mirror_one_file(self):
m1 = open_fs("mem://")
m1.writetext("foo", "hello")
m2 = open_fs("mem://")
- mirror(m1, m2, workers=self.WORKERS)
+ mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
self.assert_compare_fs(m1, m2)
def test_mirror_one_file_one_dir(self):
@@ -43,7 +54,7 @@ def test_mirror_one_file_one_dir(self):
m1.writetext("foo", "hello")
m1.makedir("bar")
m2 = open_fs("mem://")
- mirror(m1, m2, workers=self.WORKERS)
+ mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
self.assert_compare_fs(m1, m2)
def test_mirror_delete_replace(self):
@@ -51,13 +62,13 @@ def test_mirror_delete_replace(self):
m1.writetext("foo", "hello")
m1.makedir("bar")
m2 = open_fs("mem://")
- mirror(m1, m2, workers=self.WORKERS)
+ mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
self.assert_compare_fs(m1, m2)
m2.remove("foo")
- mirror(m1, m2, workers=self.WORKERS)
+ mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
self.assert_compare_fs(m1, m2)
m2.removedir("bar")
- mirror(m1, m2, workers=self.WORKERS)
+ mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
self.assert_compare_fs(m1, m2)
def test_mirror_extra_dir(self):
@@ -66,7 +77,7 @@ def test_mirror_extra_dir(self):
m1.makedir("bar")
m2 = open_fs("mem://")
m2.makedir("baz")
- mirror(m1, m2, workers=self.WORKERS)
+ mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
self.assert_compare_fs(m1, m2)
def test_mirror_extra_file(self):
@@ -76,7 +87,7 @@ def test_mirror_extra_file(self):
m2 = open_fs("mem://")
m2.makedir("baz")
m2.touch("egg")
- mirror(m1, m2, workers=self.WORKERS)
+ mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
self.assert_compare_fs(m1, m2)
def test_mirror_wrong_type(self):
@@ -86,7 +97,7 @@ def test_mirror_wrong_type(self):
m2 = open_fs("mem://")
m2.makedir("foo")
m2.touch("bar")
- mirror(m1, m2, workers=self.WORKERS)
+ mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
self.assert_compare_fs(m1, m2)
def test_mirror_update(self):
@@ -94,20 +105,8 @@ def test_mirror_update(self):
m1.writetext("foo", "hello")
m1.makedir("bar")
m2 = open_fs("mem://")
- mirror(m1, m2, workers=self.WORKERS)
+ mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
self.assert_compare_fs(m1, m2)
m2.appendtext("foo", " world!")
- mirror(m1, m2, workers=self.WORKERS)
+ mirror(m1, m2, workers=self.WORKERS, preserve_time=True)
self.assert_compare_fs(m1, m2)
-
-
-class TestMirrorWorkers1(TestMirror):
- WORKERS = 1
-
-
-class TestMirrorWorkers2(TestMirror):
- WORKERS = 2
-
-
-class TestMirrorWorkers4(TestMirror):
- WORKERS = 4
diff --git a/tests/test_mode.py b/tests/test_mode.py
index 86634f40..8fce62f2 100644
--- a/tests/test_mode.py
+++ b/tests/test_mode.py
@@ -1,10 +1,9 @@
from __future__ import unicode_literals
import unittest
-
from six import text_type
-from fs.mode import check_readable, check_writable, Mode
+from fs.mode import Mode, check_readable, check_writable
class TestMode(unittest.TestCase):
diff --git a/tests/test_mountfs.py b/tests/test_mountfs.py
index 1ffa82d2..f8403626 100644
--- a/tests/test_mountfs.py
+++ b/tests/test_mountfs.py
@@ -2,8 +2,8 @@
import unittest
-from fs.mountfs import MountError, MountFS
from fs.memoryfs import MemoryFS
+from fs.mountfs import MountError, MountFS
from fs.tempfs import TempFS
from fs.test import FSTestCases
diff --git a/tests/test_move.py b/tests/test_move.py
index bec4d776..8eb1af75 100644
--- a/tests/test_move.py
+++ b/tests/test_move.py
@@ -1,35 +1,208 @@
-
from __future__ import unicode_literals
import unittest
+try:
+ from unittest import mock
+except ImportError:
+ import mock
+
+from parameterized import parameterized, parameterized_class
+
import fs.move
from fs import open_fs
+from fs.errors import FSError, ResourceReadOnly
+from fs.path import join
+from fs.wrap import read_only
-class TestMove(unittest.TestCase):
+@parameterized_class(("preserve_time",), [(True,), (False,)])
+class TestMoveCheckTime(unittest.TestCase):
def test_move_fs(self):
+ namespaces = ("details", "modified")
+
src_fs = open_fs("mem://")
src_fs.makedirs("foo/bar")
src_fs.touch("test.txt")
src_fs.touch("foo/bar/baz.txt")
+ src_file1_info = src_fs.getinfo("test.txt", namespaces)
+ src_file2_info = src_fs.getinfo("foo/bar/baz.txt", namespaces)
dst_fs = open_fs("mem://")
- fs.move.move_fs(src_fs, dst_fs)
+ dst_fs.create("test.txt")
+ dst_fs.setinfo("test.txt", {"details": {"modified": 1000000}})
+
+ fs.move.move_fs(src_fs, dst_fs, preserve_time=self.preserve_time)
+ self.assertTrue(src_fs.isempty("/"))
self.assertTrue(dst_fs.isdir("foo/bar"))
self.assertTrue(dst_fs.isfile("test.txt"))
- self.assertTrue(src_fs.isempty("/"))
+ self.assertTrue(dst_fs.isfile("foo/bar/baz.txt"))
+
+ if self.preserve_time:
+ dst_file1_info = dst_fs.getinfo("test.txt", namespaces)
+ dst_file2_info = dst_fs.getinfo("foo/bar/baz.txt", namespaces)
+ self.assertEqual(dst_file1_info.modified, src_file1_info.modified)
+ self.assertEqual(dst_file2_info.modified, src_file2_info.modified)
+
+ def test_move_file(self):
+ namespaces = ("details", "modified")
+ with open_fs("mem://") as src_fs, open_fs("mem://") as dst_fs:
+ src_fs.writetext("source.txt", "Source")
+ src_fs_file_info = src_fs.getinfo("source.txt", namespaces)
+ fs.move.move_file(
+ src_fs,
+ "source.txt",
+ dst_fs,
+ "dest.txt",
+ preserve_time=self.preserve_time,
+ )
+ self.assertFalse(src_fs.exists("source.txt"))
+ self.assertEqual(dst_fs.readtext("dest.txt"), "Source")
+
+ if self.preserve_time:
+ dst_fs_file_info = dst_fs.getinfo("dest.txt", namespaces)
+ self.assertEqual(src_fs_file_info.modified, dst_fs_file_info.modified)
+
+ def test_move_dir(self):
+ namespaces = ("details", "modified")
- def test_copy_dir(self):
src_fs = open_fs("mem://")
src_fs.makedirs("foo/bar")
src_fs.touch("test.txt")
src_fs.touch("foo/bar/baz.txt")
+ src_file2_info = src_fs.getinfo("foo/bar/baz.txt", namespaces)
dst_fs = open_fs("mem://")
- fs.move.move_dir(src_fs, "/foo", dst_fs, "/")
+ dst_fs.create("test.txt")
+ dst_fs.setinfo("test.txt", {"details": {"modified": 1000000}})
+
+ fs.move.move_dir(src_fs, "/foo", dst_fs, "/", preserve_time=self.preserve_time)
+ self.assertFalse(src_fs.exists("foo"))
+ self.assertTrue(src_fs.isfile("test.txt"))
self.assertTrue(dst_fs.isdir("bar"))
self.assertTrue(dst_fs.isfile("bar/baz.txt"))
- self.assertFalse(src_fs.exists("foo"))
+
+ if self.preserve_time:
+ dst_file2_info = dst_fs.getinfo("bar/baz.txt", namespaces)
+ self.assertEqual(dst_file2_info.modified, src_file2_info.modified)
+
+
+class TestMove(unittest.TestCase):
+ def test_move_file_tempfs(self):
+ with open_fs("temp://") as src, open_fs("temp://") as dst:
+ src_dir = src.makedir("Some subfolder")
+ src_dir.writetext("file.txt", "Content")
+ dst_dir = dst.makedir("dest dir")
+ fs.move.move_file(src_dir, "file.txt", dst_dir, "target.txt")
+ self.assertFalse(src.exists("Some subfolder/file.txt"))
+ self.assertEqual(dst.readtext("dest dir/target.txt"), "Content")
+
+ def test_move_file_fs_urls(self):
+ # create a temp dir to work on
+ with open_fs("temp://") as tmp:
+ path = tmp.getsyspath("/")
+ tmp.makedir("subdir_src")
+ tmp.writetext("subdir_src/file.txt", "Content")
+ tmp.makedir("subdir_dst")
+ fs.move.move_file(
+ "osfs://" + join(path, "subdir_src"),
+ "file.txt",
+ "osfs://" + join(path, "subdir_dst"),
+ "target.txt",
+ )
+ self.assertFalse(tmp.exists("subdir_src/file.txt"))
+ self.assertEqual(tmp.readtext("subdir_dst/target.txt"), "Content")
+
+ def test_move_file_same_fs_read_only_source(self):
+ with open_fs("temp://") as tmp:
+ path = tmp.getsyspath("/")
+ tmp.writetext("file.txt", "Content")
+ src = read_only(open_fs(path))
+ dst = tmp.makedir("sub")
+ with self.assertRaises(ResourceReadOnly):
+ fs.move.move_file(src, "file.txt", dst, "target_file.txt")
+ self.assertTrue(src.exists("file.txt"))
+ self.assertFalse(
+ dst.exists("target_file.txt"), "file should not have been copied over"
+ )
+
+ def test_move_file_read_only_mem_source(self):
+ with open_fs("mem://") as src, open_fs("mem://") as dst:
+ src.writetext("file.txt", "Content")
+ dst_sub = dst.makedir("sub")
+ src_ro = read_only(src)
+ with self.assertRaises(ResourceReadOnly):
+ fs.move.move_file(src_ro, "file.txt", dst_sub, "target.txt")
+ self.assertTrue(src.exists("file.txt"))
+ self.assertFalse(
+ dst_sub.exists("target.txt"), "file should not have been copied over"
+ )
+
+ def test_move_file_read_only_mem_dest(self):
+ with open_fs("mem://") as src, open_fs("mem://") as dst:
+ src.writetext("file.txt", "Content")
+ dst_ro = read_only(dst)
+ with self.assertRaises(ResourceReadOnly):
+ fs.move.move_file(src, "file.txt", dst_ro, "target.txt")
+ self.assertTrue(src.exists("file.txt"))
+ self.assertFalse(
+ dst_ro.exists("target.txt"), "file should not have been copied over"
+ )
+
+ @parameterized.expand([("temp", "temp://"), ("mem", "mem://")])
+ def test_move_file_overwrite(self, _, fs_url):
+ # we use TempFS and MemoryFS in order to make sure the optimized code path
+ # behaves like the regular one (TempFS tests the optimized code path).
+ with open_fs(fs_url) as src, open_fs(fs_url) as dst:
+ src.writetext("file.txt", "source content")
+ dst.writetext("target.txt", "target content")
+ self.assertTrue(src.exists("file.txt"))
+ self.assertFalse(src.exists("target.txt"))
+ self.assertFalse(dst.exists("file.txt"))
+ self.assertTrue(dst.exists("target.txt"))
+ fs.move.move_file(src, "file.txt", dst, "target.txt")
+ self.assertFalse(src.exists("file.txt"))
+ self.assertFalse(src.exists("target.txt"))
+ self.assertFalse(dst.exists("file.txt"))
+ self.assertTrue(dst.exists("target.txt"))
+ self.assertEquals(dst.readtext("target.txt"), "source content")
+
+ @parameterized.expand([("temp", "temp://"), ("mem", "mem://")])
+ def test_move_file_overwrite_itself(self, _, fs_url):
+ # we use TempFS and MemoryFS in order to make sure the optimized code path
+ # behaves like the regular one (TempFS tests the optimized code path).
+ with open_fs(fs_url) as tmp:
+ tmp.writetext("file.txt", "content")
+ fs.move.move_file(tmp, "file.txt", tmp, "file.txt")
+ self.assertTrue(tmp.exists("file.txt"))
+ self.assertEquals(tmp.readtext("file.txt"), "content")
+
+ @parameterized.expand([("temp", "temp://"), ("mem", "mem://")])
+ def test_move_file_overwrite_itself_relpath(self, _, fs_url):
+ # we use TempFS and MemoryFS in order to make sure the optimized code path
+ # behaves like the regular one (TempFS tests the optimized code path).
+ with open_fs(fs_url) as tmp:
+ new_dir = tmp.makedir("dir")
+ new_dir.writetext("file.txt", "content")
+ fs.move.move_file(tmp, "dir/../dir/file.txt", tmp, "dir/file.txt")
+ self.assertTrue(tmp.exists("dir/file.txt"))
+ self.assertEquals(tmp.readtext("dir/file.txt"), "content")
+
+ @parameterized.expand([(True,), (False,)])
+ def test_move_file_cleanup_on_error(self, cleanup):
+ with open_fs("mem://") as src, open_fs("mem://") as dst:
+ src.writetext("file.txt", "Content")
+ with mock.patch.object(src, "remove") as mck:
+ mck.side_effect = FSError
+ with self.assertRaises(FSError):
+ fs.move.move_file(
+ src,
+ "file.txt",
+ dst,
+ "target.txt",
+ cleanup_dst_on_error=cleanup,
+ )
+ self.assertTrue(src.exists("file.txt"))
+ self.assertEqual(not dst.exists("target.txt"), cleanup)
diff --git a/tests/test_multifs.py b/tests/test_multifs.py
index 7f0fe88b..623f5881 100644
--- a/tests/test_multifs.py
+++ b/tests/test_multifs.py
@@ -2,10 +2,9 @@
import unittest
-from fs.multifs import MultiFS
-from fs.memoryfs import MemoryFS
from fs import errors
-
+from fs.memoryfs import MemoryFS
+from fs.multifs import MultiFS
from fs.test import FSTestCases
diff --git a/tests/test_new_name.py b/tests/test_new_name.py
index 647a96e8..5f571df9 100644
--- a/tests/test_new_name.py
+++ b/tests/test_new_name.py
@@ -3,7 +3,6 @@
import unittest
import warnings
-
from fs.base import _new_name
diff --git a/tests/test_opener.py b/tests/test_opener.py
index 398393f4..43d56903 100644
--- a/tests/test_opener.py
+++ b/tests/test_opener.py
@@ -1,21 +1,25 @@
from __future__ import unicode_literals
-import os
-import mock
import sys
+
+import os
+import pkg_resources
+import shutil
import tempfile
import unittest
-import pkg_resources
-
-import six
from fs import open_fs, opener
-from fs.osfs import OSFS
-from fs.opener import registry, errors
-from fs.memoryfs import MemoryFS
from fs.appfs import UserDataFS
+from fs.memoryfs import MemoryFS
+from fs.opener import errors, registry
from fs.opener.parse import ParseResult
from fs.opener.registry import Registry
+from fs.osfs import OSFS
+
+try:
+ from unittest import mock
+except ImportError:
+ import mock
class TestParse(unittest.TestCase):
@@ -25,7 +29,7 @@ def test_registry_repr(self):
def test_parse_not_url(self):
with self.assertRaises(errors.ParseError):
- parsed = opener.parse("foo/bar")
+ opener.parse("foo/bar")
def test_parse_simple(self):
parsed = opener.parse("osfs://foo/bar")
@@ -205,6 +209,12 @@ def test_manage_fs_error(self):
class TestOpeners(unittest.TestCase):
+ def setUp(self):
+ self.tmpdir = tempfile.mkdtemp()
+
+ def tearDown(self):
+ shutil.rmtree(self.tmpdir)
+
def test_repr(self):
# Check __repr__ works
for entry_point in pkg_resources.iter_entry_points("fs.opener"):
@@ -255,7 +265,10 @@ def test_open_fs(self):
mem_fs_2 = opener.open_fs(mem_fs)
self.assertEqual(mem_fs, mem_fs_2)
- def test_open_userdata(self):
+ @mock.patch("appdirs.{}".format(UserDataFS.app_dir), autospec=True, spec_set=True)
+ def test_open_userdata(self, app_dir):
+ app_dir.return_value = self.tmpdir
+
with self.assertRaises(errors.OpenerError):
opener.open_fs("userdata://foo:bar:baz:egg")
@@ -264,14 +277,20 @@ def test_open_userdata(self):
self.assertEqual(app_fs.app_dirs.appauthor, "willmcgugan")
self.assertEqual(app_fs.app_dirs.version, "1.0")
- def test_open_userdata_no_version(self):
+ @mock.patch("appdirs.{}".format(UserDataFS.app_dir), autospec=True, spec_set=True)
+ def test_open_userdata_no_version(self, app_dir):
+ app_dir.return_value = self.tmpdir
+
app_fs = opener.open_fs("userdata://fstest:willmcgugan", create=True)
self.assertEqual(app_fs.app_dirs.appname, "fstest")
self.assertEqual(app_fs.app_dirs.appauthor, "willmcgugan")
self.assertEqual(app_fs.app_dirs.version, None)
- def test_user_data_opener(self):
- user_data_fs = open_fs("userdata://fstest:willmcgugan:1.0")
+ @mock.patch("appdirs.{}".format(UserDataFS.app_dir), autospec=True, spec_set=True)
+ def test_user_data_opener(self, app_dir):
+ app_dir.return_value = self.tmpdir
+
+ user_data_fs = open_fs("userdata://fstest:willmcgugan:1.0", create=True)
self.assertIsInstance(user_data_fs, UserDataFS)
user_data_fs.makedir("foo", recreate=True)
user_data_fs.writetext("foo/bar.txt", "baz")
@@ -282,7 +301,26 @@ def test_user_data_opener(self):
def test_open_ftp(self, mock_FTPFS):
open_fs("ftp://foo:bar@ftp.example.org")
mock_FTPFS.assert_called_once_with(
- "ftp.example.org", passwd="bar", port=21, user="foo", proxy=None, timeout=10
+ "ftp.example.org",
+ passwd="bar",
+ port=21,
+ user="foo",
+ proxy=None,
+ timeout=10,
+ tls=False,
+ )
+
+ @mock.patch("fs.ftpfs.FTPFS")
+ def test_open_ftps(self, mock_FTPFS):
+ open_fs("ftps://foo:bar@ftp.example.org")
+ mock_FTPFS.assert_called_once_with(
+ "ftp.example.org",
+ passwd="bar",
+ port=21,
+ user="foo",
+ proxy=None,
+ timeout=10,
+ tls=True,
)
@mock.patch("fs.ftpfs.FTPFS")
@@ -295,4 +333,5 @@ def test_open_ftp_proxy(self, mock_FTPFS):
user="foo",
proxy="ftp.proxy.org",
timeout=10,
+ tls=False,
)
diff --git a/tests/test_osfs.py b/tests/test_osfs.py
index 601e0b45..c77016a9 100644
--- a/tests/test_osfs.py
+++ b/tests/test_osfs.py
@@ -1,33 +1,50 @@
# coding: utf-8
from __future__ import unicode_literals
+import sys
+
import errno
import io
import os
-import mock
import shutil
import tempfile
+import time
import unittest
+import warnings
+from six import text_type
-from fs import osfs
-from fs import fsencode, fsdecode
-from fs.path import relpath
-from fs import errors
-
+from fs import errors, open_fs, osfs
+from fs.path import dirname, relpath
from fs.test import FSTestCases
-from six import text_type
+try:
+ from unittest import mock
+except ImportError:
+ import mock
class TestOSFS(FSTestCases, unittest.TestCase):
"""Test OSFS implementation."""
+ @classmethod
+ def setUpClass(cls):
+ warnings.simplefilter("error")
+
+ @classmethod
+ def tearDownClass(cls):
+ warnings.simplefilter(warnings.defaultaction)
+
def make_fs(self):
temp_dir = tempfile.mkdtemp("fstestosfs")
return osfs.OSFS(temp_dir)
def destroy_fs(self, fs):
self.fs.close()
+ try:
+ shutil.rmtree(fs.getsyspath("/"))
+ except OSError:
+ # Already deleted
+ pass
def _get_real_path(self, path):
_path = os.path.join(self.fs.root_path, relpath(path))
@@ -67,7 +84,7 @@ def assert_text(self, path, contents):
def test_not_exists(self):
with self.assertRaises(errors.CreateFailed):
- fs = osfs.OSFS("/does/not/exists/")
+ osfs.OSFS("/does/not/exists/")
def test_expand_vars(self):
self.fs.makedir("TYRIONLANISTER")
@@ -79,11 +96,33 @@ def test_expand_vars(self):
self.assertIn("TYRIONLANISTER", fs1.getsyspath("/"))
self.assertNotIn("TYRIONLANISTER", fs2.getsyspath("/"))
- @unittest.skipIf(osfs.sendfile is None, "sendfile not supported")
+ def test_copy_preserve_time(self):
+ self.fs.makedir("foo")
+ self.fs.makedir("bar")
+ self.fs.create("foo/file.txt")
+ raw_info = {"details": {"modified": time.time() - 10000}}
+ self.fs.setinfo("foo/file.txt", raw_info)
+
+ namespaces = ("details", "modified")
+ src_info = self.fs.getinfo("foo/file.txt", namespaces)
+
+ self.fs.copy("foo/file.txt", "bar/file.txt", preserve_time=True)
+ self.assertTrue(self.fs.exists("bar/file.txt"))
+
+ dst_info = self.fs.getinfo("bar/file.txt", namespaces)
+ delta = dst_info.modified - src_info.modified
+ self.assertAlmostEqual(delta.total_seconds(), 0, places=2)
+
+ @unittest.skipUnless(osfs.sendfile, "sendfile not supported")
+ @unittest.skipIf(
+ sys.version_info >= (3, 8),
+ "the copy function uses sendfile in Python 3.8+, "
+ "making the patched implementation irrelevant",
+ )
def test_copy_sendfile(self):
# try copying using sendfile
with mock.patch.object(osfs, "sendfile") as sendfile:
- sendfile.side_effect = OSError(errno.ENOTSUP, "sendfile not supported")
+ sendfile.side_effect = OSError(errno.ENOSYS, "sendfile not supported")
self.test_copy()
# check other errors are transmitted
self.fs.touch("foo")
@@ -125,7 +164,7 @@ def test_unicode_paths(self):
finally:
shutil.rmtree(dir_path)
- @unittest.skipIf(not hasattr(os, "symlink"), "No symlink support")
+ @unittest.skipUnless(hasattr(os, "symlink"), "No symlink support")
def test_symlinks(self):
with open(self._get_real_path("foo"), "wb") as f:
f.write(b"foobar")
@@ -152,3 +191,40 @@ def test_validatepath(self):
with self.assertRaises(errors.InvalidCharsInPath):
with self.fs.open("13 – Marked Register.pdf", "wb") as fh:
fh.write(b"foo")
+
+ def test_consume_geturl(self):
+ self.fs.create("foo")
+ try:
+ url = self.fs.geturl("foo", purpose="fs")
+ except errors.NoURL:
+ self.assertFalse(self.fs.hasurl("foo"))
+ else:
+ self.assertTrue(self.fs.hasurl("foo"))
+
+ # Should not throw an error
+ base_dir = dirname(url)
+ open_fs(base_dir)
+
+ def test_complex_geturl(self):
+ self.fs.makedirs("foo/bar ha")
+ test_fixtures = [
+ # test file, expected url path
+ ["foo", "foo"],
+ ["foo-bar", "foo-bar"],
+ ["foo_bar", "foo_bar"],
+ ["foo/bar ha/barz", "foo/bar%20ha/barz"],
+ ["example b.txt", "example%20b.txt"],
+ ["exampleㄓ.txt", "example%E3%84%93.txt"],
+ ]
+ file_uri_prefix = "osfs://"
+ for test_file, relative_url_path in test_fixtures:
+ self.fs.create(test_file)
+ expected = file_uri_prefix + self.fs.getsyspath(relative_url_path).replace(
+ "\\", "/"
+ )
+ actual = self.fs.geturl(test_file, purpose="fs")
+
+ self.assertEqual(actual, expected)
+
+ def test_geturl_return_no_url(self):
+ self.assertRaises(errors.NoURL, self.fs.geturl, "test/path", "upload")
diff --git a/tests/test_path.py b/tests/test_path.py
index d57c278a..3ab778ed 100644
--- a/tests/test_path.py
+++ b/tests/test_path.py
@@ -1,4 +1,4 @@
-from __future__ import unicode_literals, print_function
+from __future__ import absolute_import, print_function, unicode_literals
"""
fstests.test_path: testcases for the fs path functions
@@ -8,7 +8,29 @@
import unittest
-from fs.path import *
+from fs.path import (
+ abspath,
+ basename,
+ combine,
+ dirname,
+ forcedir,
+ frombase,
+ isabs,
+ isbase,
+ isdotfile,
+ isparent,
+ issamedir,
+ iswildcard,
+ iteratepath,
+ join,
+ normpath,
+ parts,
+ recursepath,
+ relativefrom,
+ relpath,
+ split,
+ splitext,
+)
class TestPathFunctions(unittest.TestCase):
@@ -130,14 +152,14 @@ def test_splitext(self):
self.assertEqual(splitext(".foo"), (".foo", ""))
def test_recursepath(self):
- self.assertEquals(recursepath("/"), ["/"])
- self.assertEquals(recursepath("hello"), ["/", "/hello"])
- self.assertEquals(recursepath("/hello/world/"), ["/", "/hello", "/hello/world"])
- self.assertEquals(
+ self.assertEqual(recursepath("/"), ["/"])
+ self.assertEqual(recursepath("hello"), ["/", "/hello"])
+ self.assertEqual(recursepath("/hello/world/"), ["/", "/hello", "/hello/world"])
+ self.assertEqual(
recursepath("/hello/world/", reverse=True), ["/hello/world", "/hello", "/"]
)
- self.assertEquals(recursepath("hello", reverse=True), ["/hello", "/"])
- self.assertEquals(recursepath("", reverse=True), ["/"])
+ self.assertEqual(recursepath("hello", reverse=True), ["/hello", "/"])
+ self.assertEqual(recursepath("", reverse=True), ["/"])
def test_isbase(self):
self.assertTrue(isbase("foo", "foo/bar"))
@@ -156,7 +178,7 @@ def test_issamedir(self):
def test_isdotfile(self):
for path in [".foo", ".svn", "foo/.svn", "foo/bar/.svn", "/foo/.bar"]:
- self.assert_(isdotfile(path))
+ self.assertTrue(isdotfile(path))
for path in ["asfoo", "df.svn", "foo/er.svn", "foo/bar/test.txt", "/foo/bar"]:
self.assertFalse(isdotfile(path))
@@ -179,10 +201,10 @@ def test_basename(self):
self.assertEqual(basename(path), test_basename)
def test_iswildcard(self):
- self.assert_(iswildcard("*"))
- self.assert_(iswildcard("*.jpg"))
- self.assert_(iswildcard("foo/*"))
- self.assert_(iswildcard("foo/{}"))
+ self.assertTrue(iswildcard("*"))
+ self.assertTrue(iswildcard("*.jpg"))
+ self.assertTrue(iswildcard("foo/*"))
+ self.assertTrue(iswildcard("foo/{}"))
self.assertFalse(iswildcard("foo"))
self.assertFalse(iswildcard("img.jpg"))
self.assertFalse(iswildcard("foo/bar"))
diff --git a/tests/test_permissions.py b/tests/test_permissions.py
index 572ef8f3..72e5f197 100644
--- a/tests/test_permissions.py
+++ b/tests/test_permissions.py
@@ -1,11 +1,9 @@
-from __future__ import unicode_literals
-from __future__ import print_function
+from __future__ import print_function, unicode_literals
import unittest
-
from six import text_type
-from fs.permissions import make_mode, Permissions
+from fs.permissions import Permissions, make_mode
class TestPermissions(unittest.TestCase):
diff --git a/tests/test_subfs.py b/tests/test_subfs.py
index c3604312..494108cf 100644
--- a/tests/test_subfs.py
+++ b/tests/test_subfs.py
@@ -6,9 +6,10 @@
import unittest
from fs import osfs
-from fs.subfs import SubFS
from fs.memoryfs import MemoryFS
from fs.path import relpath
+from fs.subfs import SubFS
+
from .test_osfs import TestOSFS
diff --git a/tests/test_tarfs.py b/tests/test_tarfs.py
index 610c7a31..29d23877 100644
--- a/tests/test_tarfs.py
+++ b/tests/test_tarfs.py
@@ -4,24 +4,25 @@
import io
import os
import six
-import gzip
-import tarfile
-import getpass
import tarfile
import tempfile
import unittest
-import uuid
from fs import tarfs
-from fs import errors
-from fs.enums import ResourceType
from fs.compress import write_tar
+from fs.enums import ResourceType
+from fs.errors import NoURL
from fs.opener import open_fs
from fs.opener.errors import NotWriteable
from fs.test import FSTestCases
from .test_archives import ArchiveTestCases
+try:
+ from pytest import mark
+except ImportError:
+ from . import mark
+
class TestWriteReadTarFS(unittest.TestCase):
def setUp(self):
@@ -60,6 +61,7 @@ def make_fs(self):
def destroy_fs(self, fs):
fs.close()
+ os.remove(fs._tar_file)
del fs._tar_file
@@ -92,18 +94,11 @@ def make_fs(self):
def destroy_fs(self, fs):
fs.close()
+ os.remove(fs._tar_file)
del fs._tar_file
- def assert_is_bzip(self):
- try:
- tarfile.open(fs._tar_file, "r:gz")
- except tarfile.ReadError:
- self.fail("{} is not a valid gz archive".format(fs._tar_file))
- for other_comps in ["xz", "bz2", ""]:
- with self.assertRaises(tarfile.ReadError):
- tarfile.open(fs._tar_file, "r:{}".format(other_comps))
-
+@mark.slow
@unittest.skipIf(six.PY2, "Python2 does not support LZMA")
class TestWriteXZippedTarFS(FSTestCases, unittest.TestCase):
def make_fs(self):
@@ -129,6 +124,7 @@ def assert_is_xz(self, fs):
tarfile.open(fs._tar_file, "r:{}".format(other_comps))
+@mark.slow
class TestWriteBZippedTarFS(FSTestCases, unittest.TestCase):
def make_fs(self):
fh, _tar_file = tempfile.mkstemp()
@@ -173,20 +169,53 @@ def remove_archive(self):
def test_read_from_fileobject(self):
try:
tarfs.TarFS(open(self._temp_path, "rb"))
- except:
+ except Exception:
self.fail("Couldn't open tarfs from fileobject")
def test_read_from_filename(self):
try:
tarfs.TarFS(self._temp_path)
- except:
+ except Exception:
self.fail("Couldn't open tarfs from filename")
+ def test_read_non_existent_file(self):
+ fs = tarfs.TarFS(open(self._temp_path, "rb"))
+ # it has been very difficult to catch exception in __del__()
+ del fs._tar
+ try:
+ fs.close()
+ except AttributeError:
+ self.fail("Could not close tar fs properly")
+ except Exception:
+ self.fail("Strange exception in closing fs")
+
def test_getinfo(self):
super(TestReadTarFS, self).test_getinfo()
top = self.fs.getinfo("top.txt", ["tar"])
self.assertTrue(top.get("tar", "is_file"))
+ def test_geturl_for_fs(self):
+ test_fixtures = [
+ # test_file, expected
+ ["foo/bar/egg/foofoo", "foo/bar/egg/foofoo"],
+ ["foo/bar egg/foo foo", "foo/bar%20egg/foo%20foo"],
+ ]
+ tar_file_path = self._temp_path.replace("\\", "/")
+ for test_file, expected_file in test_fixtures:
+ expected = "tar://{tar_file_path}!/{file_inside_tar}".format(
+ tar_file_path=tar_file_path, file_inside_tar=expected_file
+ )
+ self.assertEqual(self.fs.geturl(test_file, purpose="fs"), expected)
+
+ def test_geturl_for_fs_but_file_is_binaryio(self):
+ self.fs._file = six.BytesIO()
+ self.assertRaises(NoURL, self.fs.geturl, "test", "fs")
+
+ def test_geturl_for_download(self):
+ test_file = "foo/bar/egg/foofoo"
+ with self.assertRaises(NoURL):
+ self.fs.geturl(test_file)
+
class TestBrokenPaths(unittest.TestCase):
@classmethod
@@ -214,8 +243,7 @@ def test_listdir(self):
class TestImplicitDirectories(unittest.TestCase):
- """Regression tests for #160.
- """
+ """Regression tests for #160."""
@classmethod
def setUpClass(cls):
diff --git a/tests/test_tempfs.py b/tests/test_tempfs.py
index 63ce4103..eef46f76 100644
--- a/tests/test_tempfs.py
+++ b/tests/test_tempfs.py
@@ -2,12 +2,16 @@
import os
-from fs.tempfs import TempFS
from fs import errors
-import mock
+from fs.tempfs import TempFS
from .test_osfs import TestOSFS
+try:
+ from unittest import mock
+except ImportError:
+ import mock
+
class TestTempFS(TestOSFS):
"""Test OSFS implementation."""
diff --git a/tests/test_time.py b/tests/test_time.py
index a6bf5587..86a5972c 100644
--- a/tests/test_time.py
+++ b/tests/test_time.py
@@ -1,20 +1,23 @@
-from __future__ import unicode_literals, print_function
+from __future__ import print_function, unicode_literals
-from datetime import datetime
import unittest
-
-import pytz
+from datetime import datetime
from fs.time import datetime_to_epoch, epoch_to_datetime
+try:
+ from datetime import timezone
+except ImportError:
+ from fs._tzcompat import timezone # type: ignore
+
class TestEpoch(unittest.TestCase):
def test_epoch_to_datetime(self):
self.assertEqual(
- epoch_to_datetime(142214400), datetime(1974, 7, 5, tzinfo=pytz.UTC)
+ epoch_to_datetime(142214400), datetime(1974, 7, 5, tzinfo=timezone.utc)
)
def test_datetime_to_epoch(self):
self.assertEqual(
- datetime_to_epoch(datetime(1974, 7, 5, tzinfo=pytz.UTC)), 142214400
+ datetime_to_epoch(datetime(1974, 7, 5, tzinfo=timezone.utc)), 142214400
)
diff --git a/tests/test_tools.py b/tests/test_tools.py
index a151aac1..51d32963 100644
--- a/tests/test_tools.py
+++ b/tests/test_tools.py
@@ -2,9 +2,8 @@
import unittest
-from fs.mode import validate_open_mode
-from fs.mode import validate_openbin_mode
from fs import tools
+from fs.mode import validate_open_mode, validate_openbin_mode
from fs.opener import open_fs
diff --git a/tests/test_tree.py b/tests/test_tree.py
index 805ae708..28f20577 100644
--- a/tests/test_tree.py
+++ b/tests/test_tree.py
@@ -1,4 +1,4 @@
-from __future__ import unicode_literals
+from __future__ import print_function, unicode_literals
import io
import unittest
diff --git a/tests/test_url_tools.py b/tests/test_url_tools.py
new file mode 100644
index 00000000..5b5d4a1d
--- /dev/null
+++ b/tests/test_url_tools.py
@@ -0,0 +1,39 @@
+# coding: utf-8
+"""Test url tools. """
+from __future__ import unicode_literals
+
+import platform
+import unittest
+
+from fs._url_tools import url_quote
+
+
+class TestBase(unittest.TestCase):
+ def test_quote(self):
+ test_fixtures = [
+ # test_snippet, expected
+ ["foo/bar/egg/foofoo", "foo/bar/egg/foofoo"],
+ ["foo/bar ha/barz", "foo/bar%20ha/barz"],
+ ["example b.txt", "example%20b.txt"],
+ ["exampleㄓ.txt", "example%E3%84%93.txt"],
+ ]
+ if platform.system() == "Windows":
+ test_fixtures.extend(
+ [
+ ["C:\\My Documents\\test.txt", "C:/My%20Documents/test.txt"],
+ ["C:/My Documents/test.txt", "C:/My%20Documents/test.txt"],
+ # on Windows '\' is regarded as path separator
+ ["test/forward\\slash", "test/forward/slash"],
+ ]
+ )
+ else:
+ test_fixtures.extend(
+ [
+ # colon:tmp is bad path under Windows
+ ["test/colon:tmp", "test/colon%3Atmp"],
+ # Unix treat \ as %5C
+ ["test/forward\\slash", "test/forward%5Cslash"],
+ ]
+ )
+ for test_snippet, expected in test_fixtures:
+ self.assertEqual(url_quote(test_snippet), expected)
diff --git a/tests/test_walk.py b/tests/test_walk.py
index 8bf57dcf..e0146f2d 100644
--- a/tests/test_walk.py
+++ b/tests/test_walk.py
@@ -1,12 +1,12 @@
from __future__ import unicode_literals
+import six
import unittest
+from fs import walk
from fs.errors import FSError
from fs.memoryfs import MemoryFS
-from fs import walk
from fs.wrap import read_only
-import six
class TestWalker(unittest.TestCase):
@@ -21,9 +21,28 @@ def test_create(self):
walk.Walker(ignore_errors=True, on_error=lambda path, error: True)
walk.Walker(ignore_errors=True)
+ def test_on_error_invalid(self):
+ with self.assertRaises(TypeError):
+ walk.Walker(on_error="nope")
+
-class TestWalk(unittest.TestCase):
+class TestBoundWalkerBase(unittest.TestCase):
def setUp(self):
+ """
+ Sets up the following file system with empty files:
+
+ /
+ -foo1/
+ - -top1.txt
+ - -top2.txt
+ -foo2/
+ - -bar1/
+ - -bar2/
+ - - -bar3/
+ - - - -test.txt
+ - -top3.bin
+ -foo3/
+ """
self.fs = MemoryFS()
self.fs.makedir("foo1")
@@ -37,21 +56,50 @@ def setUp(self):
self.fs.create("foo2/bar2/bar3/test.txt")
self.fs.create("foo2/top3.bin")
- def test_invalid(self):
- with self.assertRaises(ValueError):
- self.fs.walk(search="random")
+class TestBoundWalker(TestBoundWalkerBase):
def test_repr(self):
repr(self.fs.walk)
- def test_walk(self):
+ def test_readonly_wrapper_uses_same_walker(self):
+ class CustomWalker(walk.Walker):
+ @classmethod
+ def bind(cls, fs):
+ return walk.BoundWalker(fs, walker_class=CustomWalker)
+
+ class CustomizedMemoryFS(MemoryFS):
+ walker_class = CustomWalker
+
+ base_fs = CustomizedMemoryFS()
+ base_walker = base_fs.walk
+ self.assertEqual(base_walker.walker_class, CustomWalker)
+
+ readonly_fs = read_only(CustomizedMemoryFS())
+ readonly_walker = readonly_fs.walk
+ self.assertEqual(readonly_walker.walker_class, CustomWalker)
+
+
+class TestWalk(TestBoundWalkerBase):
+ def _walk_step_names(self, *args, **kwargs):
+ """Performs a walk with the given arguments and returns a list of steps.
+
+ Each step is a triple of the path, list of directory names, and list of file names.
+ """
_walk = []
- for step in self.fs.walk():
+ for step in self.fs.walk(*args, **kwargs):
self.assertIsInstance(step, walk.Step)
path, dirs, files = step
_walk.append(
(path, [info.name for info in dirs], [info.name for info in files])
)
+ return _walk
+
+ def test_invalid_search(self):
+ with self.assertRaises(ValueError):
+ self.fs.walk(search="random")
+
+ def test_walk_simple(self):
+ _walk = self._walk_step_names()
expected = [
("/", ["foo1", "foo2", "foo3"], []),
("/foo1", ["bar1"], ["top1.txt", "top2.txt"]),
@@ -63,14 +111,34 @@ def test_walk(self):
]
self.assertEqual(_walk, expected)
+ def test_walk_filter(self):
+ _walk = self._walk_step_names(filter=["top*.txt"])
+ expected = [
+ ("/", ["foo1", "foo2", "foo3"], []),
+ ("/foo1", ["bar1"], ["top1.txt", "top2.txt"]),
+ ("/foo2", ["bar2"], []),
+ ("/foo3", [], []),
+ ("/foo1/bar1", [], []),
+ ("/foo2/bar2", ["bar3"], []),
+ ("/foo2/bar2/bar3", [], []),
+ ]
+ self.assertEqual(_walk, expected)
+
+ def test_walk_exclude(self):
+ _walk = self._walk_step_names(exclude=["top*"])
+ expected = [
+ ("/", ["foo1", "foo2", "foo3"], []),
+ ("/foo1", ["bar1"], []),
+ ("/foo2", ["bar2"], []),
+ ("/foo3", [], []),
+ ("/foo1/bar1", [], []),
+ ("/foo2/bar2", ["bar3"], []),
+ ("/foo2/bar2/bar3", [], ["test.txt"]),
+ ]
+ self.assertEqual(_walk, expected)
+
def test_walk_filter_dirs(self):
- _walk = []
- for step in self.fs.walk(filter_dirs=["foo*"]):
- self.assertIsInstance(step, walk.Step)
- path, dirs, files = step
- _walk.append(
- (path, [info.name for info in dirs], [info.name for info in files])
- )
+ _walk = self._walk_step_names(filter_dirs=["foo*"])
expected = [
("/", ["foo1", "foo2", "foo3"], []),
("/foo1", [], ["top1.txt", "top2.txt"]),
@@ -79,14 +147,65 @@ def test_walk_filter_dirs(self):
]
self.assertEqual(_walk, expected)
+ def test_walk_filter_glob_1(self):
+ _walk = self._walk_step_names(filter_glob=["/foo*/bar*/"])
+ expected = [
+ ("/", ["foo1", "foo2", "foo3"], []),
+ ("/foo1", ["bar1"], []),
+ ("/foo2", ["bar2"], []),
+ ("/foo3", [], []),
+ ("/foo1/bar1", [], []),
+ ("/foo2/bar2", [], []),
+ ]
+ self.assertEqual(_walk, expected)
+
+ def test_walk_filter_glob_2(self):
+ _walk = self._walk_step_names(filter_glob=["/foo*/bar**"])
+ expected = [
+ ("/", ["foo1", "foo2", "foo3"], []),
+ ("/foo1", ["bar1"], []),
+ ("/foo2", ["bar2"], []),
+ ("/foo3", [], []),
+ ("/foo1/bar1", [], []),
+ ("/foo2/bar2", ["bar3"], []),
+ ("/foo2/bar2/bar3", [], ["test.txt"]),
+ ]
+ self.assertEqual(_walk, expected)
+
+ def test_walk_filter_mix(self):
+ _walk = self._walk_step_names(filter_glob=["/foo2/bar**"], filter=["top1.txt"])
+ expected = [
+ ("/", ["foo2"], []),
+ ("/foo2", ["bar2"], []),
+ ("/foo2/bar2", ["bar3"], []),
+ ("/foo2/bar2/bar3", [], []),
+ ]
+ self.assertEqual(_walk, expected)
+
+ def test_walk_exclude_dirs(self):
+ _walk = self._walk_step_names(exclude_dirs=["bar*", "foo2"])
+ expected = [
+ ("/", ["foo1", "foo3"], []),
+ ("/foo1", [], ["top1.txt", "top2.txt"]),
+ ("/foo3", [], []),
+ ]
+ self.assertEqual(_walk, expected)
+
+ def test_walk_exclude_glob(self):
+ _walk = self._walk_step_names(exclude_glob=["**/top*", "test.txt"])
+ expected = [
+ ("/", ["foo1", "foo2", "foo3"], []),
+ ("/foo1", ["bar1"], []),
+ ("/foo2", ["bar2"], []),
+ ("/foo3", [], []),
+ ("/foo1/bar1", [], []),
+ ("/foo2/bar2", ["bar3"], []),
+ ("/foo2/bar2/bar3", [], ["test.txt"]),
+ ]
+ self.assertEqual(_walk, expected)
+
def test_walk_depth(self):
- _walk = []
- for step in self.fs.walk(search="depth"):
- self.assertIsInstance(step, walk.Step)
- path, dirs, files = step
- _walk.append(
- (path, [info.name for info in dirs], [info.name for info in files])
- )
+ _walk = self._walk_step_names(search="depth")
expected = [
("/foo1/bar1", [], []),
("/foo1", ["bar1"], ["top1.txt", "top2.txt"]),
@@ -98,14 +217,8 @@ def test_walk_depth(self):
]
self.assertEqual(_walk, expected)
- def test_walk_directory(self):
- _walk = []
- for step in self.fs.walk("foo2"):
- self.assertIsInstance(step, walk.Step)
- path, dirs, files = step
- _walk.append(
- (path, [info.name for info in dirs], [info.name for info in files])
- )
+ def test_walk_path(self):
+ _walk = self._walk_step_names("foo2")
expected = [
("/foo2", ["bar2"], ["top3.bin"]),
("/foo2/bar2", ["bar3"], []),
@@ -113,34 +226,22 @@ def test_walk_directory(self):
]
self.assertEqual(_walk, expected)
- def test_walk_levels_1(self):
- results = list(self.fs.walk(max_depth=1))
- self.assertEqual(len(results), 1)
- dirs = sorted(info.name for info in results[0].dirs)
- self.assertEqual(dirs, ["foo1", "foo2", "foo3"])
- files = sorted(info.name for info in results[0].files)
- self.assertEqual(files, [])
+ def test_walk_max_depth_1_breadth(self):
+ _walk = self._walk_step_names(max_depth=1, search="breadth")
+ expected = [
+ ("/", ["foo1", "foo2", "foo3"], []),
+ ]
+ self.assertEqual(_walk, expected)
- def test_walk_levels_1_depth(self):
- results = list(self.fs.walk(max_depth=1, search="depth"))
- self.assertEqual(len(results), 1)
- dirs = sorted(info.name for info in results[0].dirs)
- self.assertEqual(dirs, ["foo1", "foo2", "foo3"])
- files = sorted(info.name for info in results[0].files)
- self.assertEqual(files, [])
+ def test_walk_max_depth_1_depth(self):
+ _walk = self._walk_step_names(max_depth=1, search="depth")
+ expected = [
+ ("/", ["foo1", "foo2", "foo3"], []),
+ ]
+ self.assertEqual(_walk, expected)
- def test_walk_levels_2(self):
- _walk = []
- for step in self.fs.walk(max_depth=2):
- self.assertIsInstance(step, walk.Step)
- path, dirs, files = step
- _walk.append(
- (
- path,
- sorted(info.name for info in dirs),
- sorted(info.name for info in files),
- )
- )
+ def test_walk_max_depth_2(self):
+ _walk = self._walk_step_names(max_depth=2)
expected = [
("/", ["foo1", "foo2", "foo3"], []),
("/foo1", ["bar1"], ["top1.txt", "top2.txt"]),
@@ -149,6 +250,30 @@ def test_walk_levels_2(self):
]
self.assertEqual(_walk, expected)
+
+class TestDirs(TestBoundWalkerBase):
+ def test_walk_dirs(self):
+ dirs = list(self.fs.walk.dirs())
+ self.assertEqual(
+ dirs,
+ ["/foo1", "/foo2", "/foo3", "/foo1/bar1", "/foo2/bar2", "/foo2/bar2/bar3"],
+ )
+
+ dirs = list(self.fs.walk.dirs(search="depth"))
+ self.assertEqual(
+ dirs,
+ ["/foo1/bar1", "/foo1", "/foo2/bar2/bar3", "/foo2/bar2", "/foo2", "/foo3"],
+ )
+
+ dirs = list(self.fs.walk.dirs(search="depth", exclude_dirs=["foo2"]))
+ self.assertEqual(dirs, ["/foo1/bar1", "/foo1", "/foo3"])
+
+ def test_foo(self):
+ dirs = list(self.fs.walk.dirs(search="depth", exclude_dirs=["foo2"]))
+ self.assertEqual(dirs, ["/foo1/bar1", "/foo1", "/foo3"])
+
+
+class TestFiles(TestBoundWalkerBase):
def test_walk_files(self):
files = list(self.fs.walk.files())
@@ -173,22 +298,6 @@ def test_walk_files(self):
],
)
- def test_walk_dirs(self):
- dirs = list(self.fs.walk.dirs())
- self.assertEqual(
- dirs,
- ["/foo1", "/foo2", "/foo3", "/foo1/bar1", "/foo2/bar2", "/foo2/bar2/bar3"],
- )
-
- dirs = list(self.fs.walk.dirs(search="depth"))
- self.assertEqual(
- dirs,
- ["/foo1/bar1", "/foo1", "/foo2/bar2/bar3", "/foo2/bar2", "/foo2", "/foo3"],
- )
-
- dirs = list(self.fs.walk.dirs(search="depth", exclude_dirs=["foo2"]))
- self.assertEqual(dirs, ["/foo1/bar1", "/foo1", "/foo3"])
-
def test_walk_files_filter(self):
files = list(self.fs.walk.files(filter=["*.txt"]))
@@ -209,6 +318,16 @@ def test_walk_files_filter(self):
self.assertEqual(files, [])
+ def test_walk_files_filter_glob(self):
+ files = list(self.fs.walk.files(filter_glob=["/foo2/**"]))
+ self.assertEqual(
+ files,
+ [
+ "/foo2/top3.bin",
+ "/foo2/bar2/bar3/test.txt",
+ ],
+ )
+
def test_walk_files_exclude(self):
# Test exclude argument works
files = list(self.fs.walk.files(exclude=["*.txt"]))
@@ -222,25 +341,6 @@ def test_walk_files_exclude(self):
files = list(self.fs.walk.files(exclude=["*"]))
self.assertEqual(files, [])
- def test_walk_info(self):
- walk = []
- for path, info in self.fs.walk.info():
- walk.append((path, info.is_dir, info.name))
-
- expected = [
- ("/foo1", True, "foo1"),
- ("/foo2", True, "foo2"),
- ("/foo3", True, "foo3"),
- ("/foo1/top1.txt", False, "top1.txt"),
- ("/foo1/top2.txt", False, "top2.txt"),
- ("/foo1/bar1", True, "bar1"),
- ("/foo2/bar2", True, "bar2"),
- ("/foo2/top3.bin", False, "top3.bin"),
- ("/foo2/bar2/bar3", True, "bar3"),
- ("/foo2/bar2/bar3/test.txt", False, "test.txt"),
- ]
- self.assertEqual(walk, expected)
-
def test_broken(self):
original_scandir = self.fs.scandir
@@ -257,10 +357,6 @@ def broken_scandir(path, namespaces=None):
with self.assertRaises(FSError):
list(self.fs.walk.files(on_error=lambda path, error: False))
- def test_on_error_invalid(self):
- with self.assertRaises(TypeError):
- walk.Walker(on_error="nope")
-
def test_subdir_uses_same_walker(self):
class CustomWalker(walk.Walker):
@classmethod
@@ -284,19 +380,32 @@ class CustomizedMemoryFS(MemoryFS):
self.assertEqual(sub_walker.walker_class, CustomWalker)
six.assertCountEqual(self, ["/c", "/d"], sub_walker.files())
- def test_readonly_wrapper_uses_same_walker(self):
+ def test_check_file_overwrite(self):
class CustomWalker(walk.Walker):
- @classmethod
- def bind(cls, fs):
- return walk.BoundWalker(fs, walker_class=CustomWalker)
+ def check_file(self, fs, info):
+ return False
- class CustomizedMemoryFS(MemoryFS):
- walker_class = CustomWalker
+ walker = CustomWalker()
+ files = list(walker.files(self.fs))
+ self.assertEqual(files, [])
- base_fs = CustomizedMemoryFS()
- base_walker = base_fs.walk
- self.assertEqual(base_walker.walker_class, CustomWalker)
- readonly_fs = read_only(CustomizedMemoryFS())
- readonly_walker = readonly_fs.walk
- self.assertEqual(readonly_walker.walker_class, CustomWalker)
+class TestInfo(TestBoundWalkerBase):
+ def test_walk_info(self):
+ walk = []
+ for path, info in self.fs.walk.info():
+ walk.append((path, info.is_dir, info.name))
+
+ expected = [
+ ("/foo1", True, "foo1"),
+ ("/foo2", True, "foo2"),
+ ("/foo3", True, "foo3"),
+ ("/foo1/top1.txt", False, "top1.txt"),
+ ("/foo1/top2.txt", False, "top2.txt"),
+ ("/foo1/bar1", True, "bar1"),
+ ("/foo2/bar2", True, "bar2"),
+ ("/foo2/top3.bin", False, "top3.bin"),
+ ("/foo2/bar2/bar3", True, "bar3"),
+ ("/foo2/bar2/bar3/test.txt", False, "test.txt"),
+ ]
+ self.assertEqual(walk, expected)
diff --git a/tests/test_wrap.py b/tests/test_wrap.py
index 4d5cc8c4..2438f2e0 100644
--- a/tests/test_wrap.py
+++ b/tests/test_wrap.py
@@ -1,97 +1,219 @@
from __future__ import unicode_literals
+import operator
import unittest
-from fs import errors
+try:
+ from unittest import mock
+except ImportError:
+ import mock
+
+import six
+
+import fs.copy
+import fs.errors
+import fs.mirror
+import fs.move
+import fs.wrap
from fs import open_fs
-from fs import wrap
+from fs.info import Info
-class TestWrap(unittest.TestCase):
- def test_readonly(self):
- mem_fs = open_fs("mem://")
- fs = wrap.read_only(mem_fs)
+class TestWrapReadOnly(unittest.TestCase):
+ def setUp(self):
+ self.fs = open_fs("mem://")
+ self.ro = fs.wrap.read_only(self.fs)
- with self.assertRaises(errors.ResourceReadOnly):
- fs.open("foo", "w")
+ def tearDown(self):
+ self.fs.close()
- with self.assertRaises(errors.ResourceReadOnly):
- fs.appendtext("foo", "bar")
+ def assertReadOnly(self, func, *args, **kwargs):
+ self.assertRaises(fs.errors.ResourceReadOnly, func, *args, **kwargs)
- with self.assertRaises(errors.ResourceReadOnly):
- fs.appendbytes("foo", b"bar")
+ def test_open_w(self):
+ self.assertReadOnly(self.ro.open, "foo", "w")
- with self.assertRaises(errors.ResourceReadOnly):
- fs.makedir("foo")
+ def test_appendtext(self):
+ self.assertReadOnly(self.ro.appendtext, "foo", "bar")
- with self.assertRaises(errors.ResourceReadOnly):
- fs.move("foo", "bar")
+ def test_appendbytes(self):
+ self.assertReadOnly(self.ro.appendbytes, "foo", b"bar")
- with self.assertRaises(errors.ResourceReadOnly):
- fs.openbin("foo", "w")
+ def test_makedir(self):
+ self.assertReadOnly(self.ro.makedir, "foo")
- with self.assertRaises(errors.ResourceReadOnly):
- fs.remove("foo")
+ def test_move(self):
+ self.assertReadOnly(self.ro.move, "foo", "bar")
- with self.assertRaises(errors.ResourceReadOnly):
- fs.removedir("foo")
+ def test_openbin_w(self):
+ self.assertReadOnly(self.ro.openbin, "foo", "w")
- with self.assertRaises(errors.ResourceReadOnly):
- fs.setinfo("foo", {})
+ def test_remove(self):
+ self.assertReadOnly(self.ro.remove, "foo")
- with self.assertRaises(errors.ResourceReadOnly):
- fs.settimes("foo", {})
+ def test_removedir(self):
+ self.assertReadOnly(self.ro.removedir, "foo")
- with self.assertRaises(errors.ResourceReadOnly):
- fs.copy("foo", "bar")
+ def test_removetree(self):
+ self.assertReadOnly(self.ro.removetree, "foo")
- with self.assertRaises(errors.ResourceReadOnly):
- fs.create("foo")
+ def test_setinfo(self):
+ self.assertReadOnly(self.ro.setinfo, "foo", {})
- with self.assertRaises(errors.ResourceReadOnly):
- fs.writetext("foo", "bar")
+ def test_settimes(self):
+ self.assertReadOnly(self.ro.settimes, "foo", {})
- with self.assertRaises(errors.ResourceReadOnly):
- fs.writebytes("foo", b"bar")
+ def test_copy(self):
+ self.assertReadOnly(self.ro.copy, "foo", "bar")
- with self.assertRaises(errors.ResourceReadOnly):
- fs.makedirs("foo/bar")
+ def test_create(self):
+ self.assertReadOnly(self.ro.create, "foo")
- with self.assertRaises(errors.ResourceReadOnly):
- fs.touch("foo")
+ def test_writetext(self):
+ self.assertReadOnly(self.ro.writetext, "foo", "bar")
- with self.assertRaises(errors.ResourceReadOnly):
- fs.upload("foo", None)
+ def test_writebytes(self):
+ self.assertReadOnly(self.ro.writebytes, "foo", b"bar")
- with self.assertRaises(errors.ResourceReadOnly):
- fs.writefile("foo", None)
+ def test_makedirs(self):
+ self.assertReadOnly(self.ro.makedirs, "foo/bar")
- self.assertTrue(mem_fs.isempty("/"))
- mem_fs.writebytes("file", b"read me")
- with fs.openbin("file") as read_file:
- self.assertEqual(read_file.read(), b"read me")
+ def test_touch(self):
+ self.assertReadOnly(self.ro.touch, "foo")
- with fs.open("file", "rb") as read_file:
- self.assertEqual(read_file.read(), b"read me")
+ def test_upload(self):
+ self.assertReadOnly(self.ro.upload, "foo", six.BytesIO())
- def test_cachedir(self):
- mem_fs = open_fs("mem://")
- mem_fs.makedirs("foo/bar/baz")
- mem_fs.touch("egg")
+ def test_writefile(self):
+ self.assertReadOnly(self.ro.writefile, "foo", six.StringIO())
- fs = wrap.cache_directory(mem_fs)
- self.assertEqual(sorted(fs.listdir("/")), ["egg", "foo"])
- self.assertEqual(sorted(fs.listdir("/")), ["egg", "foo"])
- self.assertTrue(fs.isdir("foo"))
- self.assertTrue(fs.isdir("foo"))
- self.assertTrue(fs.isfile("egg"))
- self.assertTrue(fs.isfile("egg"))
+ def test_openbin_r(self):
+ self.fs.writebytes("file", b"read me")
+ with self.ro.openbin("file") as read_file:
+ self.assertEqual(read_file.read(), b"read me")
- self.assertEqual(fs.getinfo("foo"), mem_fs.getinfo("foo"))
- self.assertEqual(fs.getinfo("foo"), mem_fs.getinfo("foo"))
+ def test_open_r(self):
+ self.fs.writebytes("file", b"read me")
+ with self.ro.open("file", "rb") as read_file:
+ self.assertEqual(read_file.read(), b"read me")
- self.assertEqual(fs.getinfo("/"), mem_fs.getinfo("/"))
- self.assertEqual(fs.getinfo("/"), mem_fs.getinfo("/"))
- with self.assertRaises(errors.ResourceNotFound):
- fs.getinfo("/foofoo")
+class TestWrapReadOnlySyspath(unittest.TestCase):
+ # If the wrapped fs has a syspath, there is a chance that somewhere
+ # in fs.copy or fs.mirror we try to use it to our advantage, but
+ # we want to make sure these implementations don't circumvent the
+ # wrapper.
+
+ def setUp(self):
+ self.fs = open_fs("temp://")
+ self.ro = fs.wrap.read_only(self.fs)
+ self.src = open_fs("temp://")
+ self.src.touch("foo")
+ self.src.makedir("bar")
+
+ def tearDown(self):
+ self.fs.close()
+ self.src.close()
+
+ def assertReadOnly(self, func, *args, **kwargs):
+ self.assertRaises(fs.errors.ResourceReadOnly, func, *args, **kwargs)
+
+ def test_copy_fs(self):
+ self.assertReadOnly(fs.copy.copy_fs, self.src, self.ro)
+
+ def test_copy_fs_if_newer(self):
+ self.assertReadOnly(fs.copy.copy_fs_if_newer, self.src, self.ro)
+
+ def test_copy_file(self):
+ self.assertReadOnly(fs.copy.copy_file, self.src, "foo", self.ro, "foo")
+
+ def test_copy_file_if_newer(self):
+ self.assertReadOnly(fs.copy.copy_file_if_newer, self.src, "foo", self.ro, "foo")
+
+ def test_copy_structure(self):
+ self.assertReadOnly(fs.copy.copy_structure, self.src, self.ro)
+
+ def test_mirror(self):
+ self.assertReadOnly(fs.mirror.mirror, self.src, self.ro)
+ fs.mirror.mirror(self.src, self.fs)
+ self.fs.touch("baz")
+ self.assertReadOnly(fs.mirror.mirror, self.src, self.ro)
+
+ def test_move_fs(self):
+ self.assertReadOnly(fs.move.move_fs, self.src, self.ro)
+ self.src.removetree("/")
+ self.fs.touch("foo")
+ self.assertReadOnly(fs.move.move_fs, self.ro, self.src)
+
+ def test_move_file(self):
+ self.assertReadOnly(fs.move.move_file, self.src, "foo", self.ro, "foo")
+ self.fs.touch("baz")
+ self.assertReadOnly(fs.move.move_file, self.ro, "baz", self.src, "foo")
+
+ def test_move_dir(self):
+ self.assertReadOnly(fs.move.move_dir, self.src, "bar", self.ro, "bar")
+ self.fs.makedir("baz")
+ self.assertReadOnly(fs.move.move_dir, self.ro, "baz", self.src, "baz")
+
+
+class TestWrapCachedDir(unittest.TestCase):
+ def setUp(self):
+ self.fs = open_fs("mem://")
+ self.fs.makedirs("foo/bar/baz")
+ self.fs.touch("egg")
+ self.cached = fs.wrap.cache_directory(self.fs)
+
+ def tearDown(self):
+ self.fs.close()
+
+ def assertNotFound(self, func, *args, **kwargs):
+ self.assertRaises(fs.errors.ResourceNotFound, func, *args, **kwargs)
+
+ def test_scandir(self):
+ key = operator.attrgetter("name")
+ expected = [
+ Info({"basic": {"name": "egg", "is_dir": False}}),
+ Info({"basic": {"name": "foo", "is_dir": True}}),
+ ]
+ with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir:
+ self.assertEqual(sorted(self.cached.scandir("/"), key=key), expected)
+ scandir.assert_has_calls([mock.call("/", namespaces=None, page=None)])
+ with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir:
+ self.assertEqual(sorted(self.cached.scandir("/"), key=key), expected)
+ scandir.assert_not_called()
+
+ def test_isdir(self):
+ with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir:
+ self.assertTrue(self.cached.isdir("foo"))
+ self.assertFalse(self.cached.isdir("egg")) # is file
+ self.assertFalse(self.cached.isdir("spam")) # doesn't exist
+ scandir.assert_has_calls([mock.call("/", namespaces=None, page=None)])
+ with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir:
+ self.assertTrue(self.cached.isdir("foo"))
+ self.assertFalse(self.cached.isdir("egg"))
+ self.assertFalse(self.cached.isdir("spam"))
+ scandir.assert_not_called()
+
+ def test_isfile(self):
+ with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir:
+ self.assertTrue(self.cached.isfile("egg"))
+ self.assertFalse(self.cached.isfile("foo")) # is dir
+ self.assertFalse(self.cached.isfile("spam")) # doesn't exist
+ scandir.assert_has_calls([mock.call("/", namespaces=None, page=None)])
+ with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir:
+ self.assertTrue(self.cached.isfile("egg"))
+ self.assertFalse(self.cached.isfile("foo"))
+ self.assertFalse(self.cached.isfile("spam"))
+ scandir.assert_not_called()
+
+ def test_getinfo(self):
+ with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir:
+ self.assertEqual(self.cached.getinfo("foo"), self.fs.getinfo("foo"))
+ self.assertEqual(self.cached.getinfo("/"), self.fs.getinfo("/"))
+ self.assertNotFound(self.cached.getinfo, "spam")
+ scandir.assert_has_calls([mock.call("/", namespaces=None, page=None)])
+ with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir:
+ self.assertEqual(self.cached.getinfo("foo"), self.fs.getinfo("foo"))
+ self.assertEqual(self.cached.getinfo("/"), self.fs.getinfo("/"))
+ self.assertNotFound(self.cached.getinfo, "spam")
+ scandir.assert_not_called()
diff --git a/tests/test_wrapfs.py b/tests/test_wrapfs.py
index d8d6a6b4..b4842de7 100644
--- a/tests/test_wrapfs.py
+++ b/tests/test_wrapfs.py
@@ -1,7 +1,6 @@
from __future__ import unicode_literals
import unittest
-
from six import text_type
from fs import wrapfs
diff --git a/tests/test_zipfs.py b/tests/test_zipfs.py
index 421d80d8..7390649c 100644
--- a/tests/test_zipfs.py
+++ b/tests/test_zipfs.py
@@ -1,20 +1,21 @@
# -*- encoding: UTF-8
from __future__ import unicode_literals
-import os
import sys
+
+import os
+import six
import tempfile
import unittest
import zipfile
-import six
-
from fs import zipfs
from fs.compress import write_zip
+from fs.enums import Seek
+from fs.errors import NoURL
from fs.opener import open_fs
from fs.opener.errors import NotWriteable
from fs.test import FSTestCases
-from fs.enums import Seek, ResourceType
from .test_archives import ArchiveTestCases
@@ -168,6 +169,33 @@ def test_seek_end(self):
self.assertEqual(f.seek(-5, Seek.end), 7)
self.assertEqual(f.read(), b"World")
+ def test_geturl_for_fs(self):
+ test_file = "foo/bar/egg/foofoo"
+ expected = "zip://{zip_file_path}!/{file_inside_zip}".format(
+ zip_file_path=self._temp_path.replace("\\", "/"), file_inside_zip=test_file
+ )
+ self.assertEqual(self.fs.geturl(test_file, purpose="fs"), expected)
+
+ def test_geturl_for_fs_but_file_is_binaryio(self):
+ self.fs._file = six.BytesIO()
+ self.assertRaises(NoURL, self.fs.geturl, "test", "fs")
+
+ def test_geturl_for_download(self):
+ test_file = "foo/bar/egg/foofoo"
+ with self.assertRaises(NoURL):
+ self.fs.geturl(test_file)
+
+ def test_read_non_existent_file(self):
+ fs = zipfs.ZipFS(open(self._temp_path, "rb"))
+ # it has proven very difficult to catch an exception raised inside __del__()
+ del fs._zip
+ try:
+ fs.close()
+ except AttributeError:
+ self.fail("Could not close zip fs properly")
+ except Exception:
+ self.fail("Strange exception in closing fs")
+
class TestReadZipFSMem(TestReadZipFS):
def make_source_fs(self):
@@ -184,8 +212,8 @@ def test_implied(self):
z.writestr("foo/bar/baz/egg", b"hello")
with zipfs.ReadZipFS(path) as zip_fs:
foo = zip_fs.getinfo("foo", ["details"])
- bar = zip_fs.getinfo("foo/bar")
- baz = zip_fs.getinfo("foo/bar/baz")
+ self.assertEqual(zip_fs.getinfo("foo/bar").name, "bar")
+ self.assertEqual(zip_fs.getinfo("foo/bar/baz").name, "baz")
self.assertTrue(foo.is_dir)
self.assertTrue(zip_fs.isfile("foo/bar/baz/egg"))
finally:
diff --git a/tox.ini b/tox.ini
deleted file mode 100644
index 2383abbb..00000000
--- a/tox.ini
+++ /dev/null
@@ -1,19 +0,0 @@
-[tox]
-envlist = {py27,py34,py35,py36,py37}{,-scandir},pypy
-sitepackages = False
-skip_missing_interpreters=True
-
-[testenv]
-deps = appdirs
- backports.os
- coverage
- enum34
- nose
- mock
- pyftpdlib
- pytz
- psutil
- scandir: scandir
- pysendfile
-commands = nosetests --with-coverage --cover-package=fs --cover-package=fs.opener tests \
- []