
Compare commits


No commits in common. "master" and "v1.3.2" have entirely different histories.

55 changed files with 480 additions and 3867 deletions


@ -1,17 +0,0 @@
root = true
[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true
indent_style = space
trim_trailing_whitespace = true
[*.py]
indent_size = 4
[*.bash]
indent_size = 4
[*.yml]
indent_size = 2


@ -1,57 +0,0 @@
name: CI
on:
push:
pull_request:
schedule:
- cron: '0 8 * * 6'
jobs:
test:
env:
PIP_DISABLE_PIP_VERSION_CHECK: 1
strategy:
fail-fast: false
matrix:
os: ["ubuntu-20.04", "macos-latest"]
python: ["3.6", "3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "pypy-3.9"]
include:
- os: "windows-latest"
python: "3.8"
- os: "windows-latest"
python: "3.9"
- os: "windows-latest"
python: "3.10"
- os: "windows-latest"
python: "3.11"
- os: "windows-latest"
python: "3.12"
exclude:
- os: "macos-latest"
python: "3.6"
- os: "macos-latest"
python: "3.7"
runs-on: ${{ matrix.os }}
name: "Test: Python ${{ matrix.python }} on ${{ matrix.os }}"
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python }}
allow-prereleases: true
- name: "Install dependencies"
run: |
python -m pip install --upgrade pip setuptools
python -m pip install tox tox-gh-actions
- name: "Run tests"
run: |
python -m tox
python -m tox -e coverage_report
- uses: codecov/codecov-action@v3
fmt:
name: Format
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v3
- uses: psf/black@stable
- uses: isort/isort-action@v1

.gitignore

@ -1,11 +1 @@
*.egg-info
*.pyc
.coverage*
.eggs/
.idea/
.tox/
.venv/
build/
coverage.xml
dist/
htmlcov/

.gitmodules

@ -1,4 +1,4 @@
[submodule "lib/pyyaml"]
path = lib/pyyaml
url = https://github.com/yaml/pyyaml
url = https://github.com/anishathalye/pyyaml
ignore = dirty


@ -1,52 +0,0 @@
Note: this changelog only lists feature additions, not bugfixes. For details on
those, see the Git history.
- v1.20
- Drop support for Python 2 and old versions of Python 3: the minimum
version supported is now Python 3.6
- v1.19
- Add `mode:` option for `create`
- Add `exclude:` option for `link`
- v1.18
- Add `--only` and `--except` flags
- Add support to run with `python -m dotbot`
- Add `--force-color` option
- v1.17
- Add `canonicalize-path:` option for `link`
- v1.16
- Add `create` plugin
- v1.15
- Add `quiet:` option for `shell`
- v1.14
- Add `if:` option for `link`
- v1.13
- Add `--no-color` flag
- v1.12
- Add globbing support to `link`
- v1.11
- Add force option to `clean` to remove all broken symlinks
- v1.10
- Update `link` to support shorthand syntax for links
- v1.9
- Add support for default options for commands
- v1.8
- Update `link` to be able to create relative links
- v1.7
- Add support for plugins
- v1.6
- Update `link` to expand environment variables in paths
- v1.5
- Update `link` to be able to automatically overwrite broken symlinks
- v1.4
- Update `shell` to allow for selectively enabling/disabling stdin, stdout,
and stderr
- v1.3
- Add support for YAML format configs
- v1.2
- Update `link` to be able to force create links (deleting things that were
previously there)
- Update `link` to be able to create parent directories
- v1.1
- Update `clean` to remove old broken symlinks
- v1.0
- Initial commit


@ -1,96 +0,0 @@
Contributing
============
All kinds of contributions to Dotbot are greatly appreciated. For someone
unfamiliar with the code base, the most efficient way to contribute is usually
to submit a [feature request](#feature-requests) or [bug report](#bug-reports).
If you want to dive into the source code, you can submit a [patch](#patches) as
well, either working on your own ideas or [existing issues][issues].
Feature Requests
----------------
Do you have an idea for an awesome new feature for Dotbot? Please [submit a
feature request][issue]. It's great to hear about new ideas.
If you are inclined to do so, you're welcome to [fork][fork] Dotbot, work on
implementing the feature yourself, and submit a patch. In this case, it's
*highly recommended* that you first [open an issue][issue] describing your
enhancement to get early feedback on the new feature that you are implementing.
This will help avoid wasted efforts and ensure that your work is incorporated
into the code base.
Bug Reports
-----------
Did something go wrong with Dotbot? Sorry about that! Bug reports are greatly
appreciated!
When you [submit a bug report][issue], please include relevant information such
as Dotbot version, operating system, configuration file, error messages, and
steps to reproduce the bug. The more details you can include, the easier it is
to find and fix the bug.
Patches
-------
Want to hack on Dotbot? Awesome!
If there are [open issues][issues], you're more than welcome to work on those -
this is probably the best way to contribute to Dotbot. If you have your own
ideas, that's great too! In that case, before working on substantial changes to
the code base, it is *highly recommended* that you first [open an issue][issue]
describing what you intend to work on.
**Patches are generally submitted as pull requests.** Patches are also
[accepted over email][email].
Any changes to the code base should follow the style and coding conventions
used in the rest of the project. The version history should be clean, and
commit messages should be descriptive and [properly
formatted][commit-messages].
When preparing a patch, it's recommended that you add unit tests
that demonstrate the bug is fixed (or that the feature works).
You can run the tests on your local machine by installing the `dev` extras.
The steps below do this using a virtual environment:
```shell
# Create a local virtual environment
$ python -m venv .venv
# Activate the virtual environment
# Cygwin, Linux, and macOS:
$ . .venv/bin/activate
# Windows PowerShell:
$ & .venv\Scripts\Activate.ps1
# Update pip and setuptools
(.venv) $ python -m pip install -U pip setuptools
# Install dotbot and its development dependencies
(.venv) $ python -m pip install -e .[dev]
# Run the unit tests
(.venv) $ tox
```
If you prefer to run the tests in an isolated container using Docker, you can
do so with the following:
```
docker run -it --rm -v "${PWD}:/dotbot" -w /dotbot python:3.10-alpine /bin/sh
```
After spawning the container, follow the same instructions as above (create a
virtualenv, ..., run the tests).
---
If you have any questions about anything, feel free to [ask][email]!
[issue]: https://github.com/anishathalye/dotbot/issues/new
[issues]: https://github.com/anishathalye/dotbot/issues
[fork]: https://github.com/anishathalye/dotbot/fork
[email]: mailto:me@anishathalye.com
[commit-messages]: http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html


@ -1,7 +1,7 @@
The MIT License (MIT)
=====================
**Copyright (c) Anish Athalye (me@anishathalye.com)**
**Copyright (c) 2014 Anish Athalye (me@anishathalye.com)**
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in

README.md

@ -1,315 +1,145 @@
# Dotbot [![Build Status](https://github.com/anishathalye/dotbot/workflows/CI/badge.svg)](https://github.com/anishathalye/dotbot/actions?query=workflow%3ACI) [![Coverage](https://codecov.io/gh/anishathalye/dotbot/branch/master/graph/badge.svg)](https://app.codecov.io/gh/anishathalye/dotbot) [![PyPI](https://img.shields.io/pypi/v/dotbot.svg)](https://pypi.org/pypi/dotbot/) [![Python 3.6+](https://img.shields.io/badge/python-3.6%2B-blue)](https://pypi.org/pypi/dotbot/)
Dotbot makes installing your dotfiles as easy as `git clone $url && cd dotfiles
&& ./install`, even on a freshly installed system!
- [Rationale](#rationale)
- [Getting Started](#getting-started)
- [Configuration](#configuration)
- [Directives](#directives) ([Link](#link), [Create](#create), [Shell](#shell), [Clean](#clean), [Defaults](#defaults))
- [Plugins](#plugins)
- [Command-line Arguments](#command-line-arguments)
- [Wiki][wiki]
---
## Rationale
Dotbot
======
Dotbot is a tool that bootstraps your dotfiles (it's a [Dot]files
[bo]o[t]strapper, get it?). It does *less* than you think, because version
control systems do more than you think.
Dotbot is designed to be lightweight and self-contained, with no external
dependencies and no installation required. Dotbot can also be a drop-in
replacement for any other tool you were using to manage your dotfiles, and
Dotbot is VCS-agnostic -- it doesn't make any attempt to manage your dotfiles.
dependencies and no installation required. Dotbot is easy to set up, and it's
easy to configure.
See [this blog
post](https://www.anishathalye.com/2014/08/03/managing-your-dotfiles/) or more
resources on the [tutorials
page](https://github.com/anishathalye/dotbot/wiki/Tutorials) for more detailed
explanations of how to organize your dotfiles.
Dotbot is VCS-agnostic, and it doesn't make any attempt to manage your
dotfiles. Existing version control systems like git are pretty awesome at doing
this.
## Getting Started
Dotbot can be a drop-in replacement for any other tool you were using to manage
your dotfiles.
### Starting Fresh?
Dotfiles Organization
---------------------
Great! You can automate the creation of your dotfiles by using the
user-contributed [init-dotfiles][init-dotfiles] script. If you'd rather use a
template repository, check out [dotfiles_template][dotfiles-template]. Or, if
you're just looking for [some inspiration][inspiration], we've got you covered.
If you want an in-depth tutorial about organizing your dotfiles, see this [blog
post][managing-dotfiles-post].
### Integrate with Existing Dotfiles
A great way to organize your dotfiles is having all of them in a single
(isolated) git repository and symlinking files into place. You can add plugins
and stuff using git submodules. This whole symlinking business can be a bit of
work, but it's much better than just having your entire home directory under
source control, and Dotbot can automate all of this for you and let you have a
one-click install process, so you can have all the benefits of isolation
without the annoyance of having to manually copy or link files.
The following will help you get set up using Dotbot in just a few steps.
Dotbot itself is entirely self contained and requires no installation (it's
self-bootstrapping), so it's not necessary to install any software before you
provision a new machine! All you have to do is download your dotfiles and then
run `./install`.
If you're using **Git**, you can add Dotbot as a submodule:
Template
--------
If you are starting fresh with your dotfiles, you can fork the [template
repository][template]. If you want, you can rename it afterwards (to something
like just "dotfiles"). If you're looking for inspiration, the template
repository contains links to dotfiles repositories that use Dotbot.
Setup
-----
Dotbot is super easy to set up. This description is given in terms of git and
git submodules, but the procedure is similar for other VCSs.
You can add Dotbot to your dotfiles by running the following command from
within your git repository:
```bash
cd ~/.dotfiles # replace with the path to your dotfiles
git init # initialize repository if needed
git submodule add https://github.com/anishathalye/dotbot
git config -f .gitmodules submodule.dotbot.ignore dirty # ignore dirty commits in the submodule
cp dotbot/tools/git-submodule/install .
touch install.conf.yaml
```
If you're using **Mercurial**, you can add Dotbot as a subrepo:
To have a one-click (one-command) install, you can place a bootstrap install
shell script that calls Dotbot with the appropriate parameters. This script
simply passes its arguments to Dotbot, so the script itself will not have to be
updated once it's placed in the proper location (the Dotbot repository can be
updated independently).
```bash
cd ~/.dotfiles # replace with the path to your dotfiles
hg init # initialize repository if needed
echo "dotbot = [git]https://github.com/anishathalye/dotbot" > .hgsub
hg add .hgsub
git clone https://github.com/anishathalye/dotbot
cp dotbot/tools/hg-subrepo/install .
touch install.conf.yaml
```
A bootstrap install shell script for git is given in
[tools/git-submodule/install][git-install]. By default, the script assumes that
the configuration is located in `install.conf.yaml` and Dotbot is located in
`dotbot`. The script automatically makes sure that the correct version of
Dotbot is checked out in the submodule.
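For reference, here is a minimal sketch of what such a bootstrap script can look like, assuming the conventional layout (configuration in `install.conf.yaml`, Dotbot submodule in `dotbot`); the script shipped in the repository may differ in details:

```bash
#!/usr/bin/env bash
set -e

# Assumed conventional layout: the config file and the Dotbot submodule
# live next to this script.
CONFIG="install.conf.yaml"
DOTBOT_DIR="dotbot"
DOTBOT_BIN="bin/dotbot"
BASEDIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

cd "${BASEDIR}"

# Make sure the pinned version of Dotbot is checked out in the submodule.
git -C "${DOTBOT_DIR}" submodule sync --quiet --recursive
git submodule update --init --recursive "${DOTBOT_DIR}"

# Pass the base directory, config file, and any extra arguments through to Dotbot.
"${BASEDIR}/${DOTBOT_DIR}/${DOTBOT_BIN}" -d "${BASEDIR}" -c "${CONFIG}" "${@}"
```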
If you are using PowerShell instead of a POSIX shell, you can use the provided
`install.ps1` script instead of `install`. On Windows, Dotbot only supports
Python 3.8+, and it requires that your account is [allowed to create symbolic
links][windows-symlinks].
Adapting the bootstrap script for different situations (such as using a
different VCS) is fairly straightforward.
To get started, you just need to fill in the `install.conf.yaml` and Dotbot
will take care of the rest. To help you get started we have [an
example](#full-example) config file as well as [configuration
documentation](#configuration) for the accepted parameters.
Configuration
-------------
Note: The `install` script is merely a shim that checks out the appropriate
version of Dotbot and calls the full Dotbot installer. By default, the script
assumes that the configuration is located in `install.conf.yaml` and that the Dotbot
submodule is located in `dotbot`. You can change either of these parameters by
editing the variables in the `install` script appropriately.
Setting up Dotbot as a submodule or subrepo locks it on the current version.
You can upgrade Dotbot at any point. If using a submodule, run `git submodule
update --remote dotbot`, substituting `dotbot` with the path to the Dotbot
submodule; be sure to commit your changes before running `./install`, otherwise
the old version of Dotbot will be checked out by the install script. If using a
subrepo, run `git fetch && git checkout origin/master` in the Dotbot directory.
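For example, the submodule upgrade flow looks like this (assuming the submodule is at `dotbot`):

```bash
git submodule update --remote dotbot   # fetch the latest Dotbot
git add dotbot
git commit -m "Upgrade Dotbot"         # commit before installing
./install
```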
If you prefer, you can install Dotbot from [PyPI] and call it as a command-line
program:
```bash
pip install dotbot
touch install.conf.yaml
```
In this case, rather than running `./install`, you can invoke Dotbot with
`dotbot -c <path to configuration file>`.
### Full Example
Here's an example of a complete configuration.
The conventional name for the configuration file is `install.conf.yaml`.
```yaml
- defaults:
link:
relink: true
- clean: ['~']
- link:
~/.tmux.conf: tmux.conf
~/.vim: vim
~/.vimrc: vimrc
- create:
- ~/downloads
- ~/.vim/undo-history
- shell:
- [git submodule update --init --recursive, Installing submodules]
```
The configuration file is typically written in YAML, but it can also be written
in JSON (which is a [subset of YAML][json2yaml]). JSON configuration files are
conventionally named `install.conf.json`.
## Configuration
Dotbot uses YAML or JSON-formatted configuration files to let you specify how
to set up your dotfiles. Currently, Dotbot knows how to [link](#link) files and
folders, [create](#create) folders, execute [shell](#shell) commands, and
[clean](#clean) directories of broken symbolic links. Dotbot also supports user
[plugins](#plugins) for custom commands.
Dotbot uses YAML-formatted (or JSON-formatted) configuration files to let you
specify how to set up your dotfiles. Currently, Dotbot knows how to `link`
files and folders, execute `shell` commands, and `clean` directories of broken
symbolic links.
**Ideally, bootstrap configurations should be idempotent. That is, the
installer should be able to be run multiple times without causing any
problems.** This makes a lot of things easier to do (in particular, syncing
updates between machines becomes really easy).
Dotbot configuration files are arrays of tasks, where each task
Dotbot configuration files are YAML (or JSON) arrays of tasks, where each task
is a dictionary that contains a command name mapping to data for that command.
Tasks are run in the order in which they are specified. Commands within a task
do not have a defined ordering.
When writing nested constructs, keep in mind that YAML is whitespace-sensitive.
Following the formatting used in the examples is a good idea. If a YAML
configuration file is not behaving as you expect, try inspecting the
[equivalent JSON][json2yaml] and check that it is correct.
## Directives
Most Dotbot commands support both a simplified and extended syntax, and they
can also be configured via setting [defaults](#defaults).
### Link
Link commands specify how files and directories should be symbolically linked.
If desired, items can be specified to be forcibly linked, overwriting existing
files if necessary. Environment variables in paths are automatically expanded.
files if necessary.
#### Format
Link commands are specified as a dictionary mapping targets to source
locations. Source locations are specified relative to the base directory (that
is specified when running the installer). If linking directories, *do not*
include a trailing slash.
is specified when running the installer). Source directory names should contain
a trailing "/" character.
Link commands support an optional extended configuration. In this type of
Link commands support an (optional) extended configuration. In this type of
configuration, instead of specifying source locations directly, targets are
mapped to extended configuration dictionaries.
mapped to extended configuration dictionaries. These dictionaries map "path" to
the source path, specify "create" as true if the parent directory should be
created if necessary, and specify "force" as true if the file or directory
should be forcibly linked.
| Parameter | Explanation |
| --- | --- |
| `path` | The source for the symlink, the same as in the shortcut syntax (default: null, automatic (see below)) |
| `create` | When true, create parent directories to the link as needed. (default: false) |
| `relink` | Removes the old target if it's a symlink (default: false) |
| `force` | Forcibly removes the old target (file or folder) and creates a new link (default: false) |
| `relative` | Use a relative path to the source when creating the symlink (default: false, absolute links) |
| `canonicalize` | Resolve any symbolic links encountered in the source to symlink to the canonical path (default: true, real paths) |
| `if` | Execute this in your `$SHELL` and only link if it is successful. |
| `ignore-missing` | Do not fail if the source is missing and create the link anyway (default: false) |
| `glob` | Treat `path` as a glob pattern, expanding patterns referenced below, linking all *files* matched. (default: false) |
| `exclude` | Array of glob patterns to remove from glob matches. Uses same syntax as `path`. Ignored if `glob` is `false`. (default: empty, keep all matches) |
| `prefix` | Prepend this prefix to the basename of each linked file when `glob` is `true` (default: '') |
When `glob: true`, Dotbot uses [glob.glob](https://docs.python.org/3/library/glob.html#glob.glob) to resolve glob paths, expanding Unix shell-style wildcards, which are **not** the same as regular expressions; only the following are expanded:
| Pattern | Meaning |
|:---------|:-----------------------------------|
| `*` | matches anything |
| `**` | matches any **file**, recursively |
| `?` | matches any single character |
| `[seq]` | matches any character in `seq` |
| `[!seq]` | matches any character not in `seq` |
However, due to the design of `glob.glob`, using a glob pattern such as `config/*` will **not** match items that begin with `.`. To specifically capture items that begin with `.`, you will need to include the `.` in the pattern, like this: `config/.*`.
When using glob with the `exclude:` option, the paths in the exclude paths should be relative to the base directory, same as the glob pattern itself. For example, if a glob pattern `vim/*` matches directories `vim/autoload`, `vim/ftdetect`, `vim/ftplugin`, and `vim/spell`, and you want to ignore the spell directory, then you should use `exclude: ["vim/spell"]` (not just `"spell"`).
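Putting that together, a minimal sketch of the `vim/*` case described above:

```yaml
- link:
    ~/.vim/:
      glob: true
      path: vim/*
      exclude: [vim/spell]   # relative to the base directory, not just "spell"
```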
#### Example
##### Example (YAML)
```yaml
- link:
~/.config/terminator:
create: true
path: config/terminator
~/.vim: vim
~/.vimrc:
relink: true
path: vimrc
path: config/terminator/
~/.vim: vim/
~/.vimrc: vimrc
~/.zshrc:
force: true
path: zshrc
~/.hammerspoon:
if: '[ `uname` = Darwin ]'
path: hammerspoon
~/.config/:
path: dotconf/config/**
~/:
glob: true
path: dotconf/*
prefix: '.'
```
If the source location is omitted or set to `null`, Dotbot will use the
basename of the destination, with a leading `.` stripped if present. This makes
the following two config files equivalent.
##### Example (JSON)
Explicit sources:
```yaml
- link:
~/bin/ack: ack
~/.vim: vim
~/.vimrc:
relink: true
path: vimrc
~/.zshrc:
force: true
path: zshrc
~/.config/:
glob: true
path: config/*
relink: true
exclude: [ config/Code ]
~/.config/Code/User/:
create: true
glob: true
path: config/Code/User/*
relink: true
```
Implicit sources:
```yaml
- link:
~/bin/ack:
~/.vim:
~/.vimrc:
relink: true
~/.zshrc:
force: true
~/.config/:
glob: true
path: config/*
relink: true
exclude: [ config/Code ]
~/.config/Code/User/:
create: true
glob: true
path: config/Code/User/*
relink: true
```
### Create
Create commands specify empty directories to be created. This can be useful
for scaffolding out folders or parent folder structure required for various
apps, plugins, shell commands, etc.
#### Format
Create commands are specified as an array of directories to be created. If you
want to use the optional extended configuration, create commands are specified
as dictionaries. For convenience, it's permissible to leave the options blank
(null) in the dictionary syntax.
| Parameter | Explanation |
| --- | --- |
| `mode` | The file mode to use for creating the leaf directory (default: 0777) |
The `mode` parameter is treated in the same way as in Python's
[os.mkdir](https://docs.python.org/3/library/os.html#mkdir-modebits). Its
behavior is platform-dependent. On Unix systems, the current umask value is
first masked out.
#### Example
```yaml
- create:
- ~/downloads
- ~/.vim/undo-history
- create:
~/.ssh:
mode: 0700
~/projects:
```json
[{
"link": {
"~/.config/terminator": {
"create": true,
"path": "config/terminator/"
},
"~/.vim": "vim/",
"~/.vimrc": "vimrc",
"~/.zshrc": {
"force": true,
"path": "zshrc"
}
}
}]
```
### Shell
@ -319,160 +149,102 @@ base directory (that is specified when running the installer).
#### Format
Shell commands can be specified in several different ways. The simplest way is
just to specify a command as a string containing the command to be run.
Shell commands are specified as an array of commands, where each command is a
two element array containing the actual shell command as the first element and
a human-readable description as the second element.
Another way is to specify a two element array where the first element is the
shell command and the second is an optional human-readable description.
Shell commands support an extended syntax as well, which provides more
fine-grained control.
| Parameter | Explanation |
| --- | --- |
| `command` | The command to be run |
| `description` | A human-readable message describing the command (default: null) |
| `quiet` | Show only the description but not the command in log output (default: false) |
| `stdin` | Allow a command to read from standard input (default: false) |
| `stdout` | Show a command's output from stdout (default: false) |
| `stderr` | Show a command's error output from stderr (default: false) |
Note that `quiet` controls whether the command (a string) is printed in log
output; it does not control whether the output from running the command is
printed (that is controlled by `stdout` / `stderr`). When a command's `stdin` /
`stdout` / `stderr` is not enabled (which is the default), it's connected to
`/dev/null`, disabling input and hiding output.
#### Example
##### Example (YAML)
```yaml
- shell:
- chsh -s $(which zsh)
- [chsh -s $(which zsh), Making zsh the default shell]
-
command: read var && echo Your variable is $var
stdin: true
stdout: true
description: Reading and printing variable
quiet: true
-
command: read fail
stderr: true
- [mkdir -p ~/downloads, Creating downloads directory]
```
##### Example (JSON)
```json
[{
"shell": [
["mkdir -p ~/downloads", "Creating downloads directory"]
]
}]
```
### Clean
Clean commands specify directories that should be checked for dead symbolic
links. These dead links are removed automatically. Only dead links that point
to somewhere within the dotfiles directory are removed unless the `force`
option is set to `true`.
to the dotfiles directory are removed.
#### Format
Clean commands are specified as an array of directories to be cleaned.
Clean commands also support an extended configuration syntax.
##### Example (YAML)
| Parameter | Explanation |
| --- | --- |
| `force` | Remove dead links even if they don't point to a file inside the dotfiles directory (default: false) |
| `recursive` | Traverse the directory recursively looking for dead links (default: false) |
```yaml
- clean: ['~']
```
Note: using the `recursive` option for `~` is not recommended because it will
be slow.
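For reference, a sketch of the extended clean syntax using these options (this mirrors the full example later in this document):

```yaml
- clean:
    ~/:
      force: true
    ~/.config:
      recursive: true
```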
##### Example (JSON)
#### Example
```json
[{
"clean": ["~"]
}]
```
### Full Example
The configuration file format is pretty simple. Here's an example of a complete
configuration. The conventional name for the configuration file is
`install.conf.yaml`.
```yaml
- clean: ['~']
- clean:
~/:
force: true
~/.config:
recursive: true
- link:
~/.dotfiles: ''
~/.tmux.conf: tmux.conf
~/.vim: vim/
~/.vimrc: vimrc
- shell:
- [git update-submodules, Installing/updating submodules]
```
### Defaults
The configuration file can also be written in JSON. Here is the JSON equivalent
of the YAML configuration given above. The conventional name for this file is
`install.conf.json`.
Default options for plugins can be specified so that options don't have to be
repeated many times. This can be very useful with the link command, for
example.
Defaults apply to all commands that come after setting the defaults. Defaults
can be set multiple times; each change replaces the defaults with a new set of
options.
#### Format
Defaults are specified as a dictionary mapping action names to settings, which
are dictionaries from option names to values.
#### Example
```yaml
- defaults:
link:
create: true
relink: true
```json
[
{
"clean": ["~"]
},
{
"link": {
"~/.dotfiles": "",
"~/.tmux.conf": "tmux.conf",
"~/.vim": "vim/",
"~/.vimrc": "vimrc"
}
},
{
"shell": [
["git submodule update --init --recursive", "Installing submodules"]
]
}
]
```
### Plugins
License
-------
Dotbot also supports custom directives implemented by plugins. Plugins are
implemented as subclasses of `dotbot.Plugin`, so they must implement
`can_handle()` and `handle()`. The `can_handle()` method should return `True`
if the plugin can handle an action with the given name. The `handle()` method
should do something and return whether or not it completed successfully.
All built-in Dotbot directives are written as plugins that are loaded by
default, so those can be used as a reference when writing custom plugins.
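As an illustration, here is a minimal sketch of a custom plugin; the `touch` directive and its behavior are invented for this example, while the `Plugin` base class, `can_handle()`, `handle()`, and `self._log` are as described above:

```python
import os

import dotbot


class Touch(dotbot.Plugin):
    # Hypothetical directive handled by this plugin.
    _directive = "touch"

    def can_handle(self, directive):
        return directive == self._directive

    def handle(self, directive, data):
        if directive != self._directive:
            raise ValueError("Touch cannot handle directive %s" % directive)
        success = True
        for target in data:
            path = os.path.expanduser(target)
            try:
                # Create the file if it doesn't already exist.
                open(path, "a").close()
                self._log.lowinfo("Touched %s" % path)
            except OSError:
                self._log.warning("Failed to touch %s" % path)
                success = False
        return success
```

Such a plugin would be driven by a task like `- touch: [~/.hushlogin]` in the config file.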
Plugins are loaded using the `--plugin` and `--plugin-dir` options, using
either absolute paths or paths relative to the base directory. It is
recommended that these options are added directly to the `install` script.
See [here][plugins] for a current list of plugins.
## Command-line Arguments
Dotbot takes a number of command-line arguments; you can run Dotbot with
`--help`, e.g. by running `./install --help`, to see the full list of options.
Here, we highlight a couple that are particularly interesting.
### `--only`
You can call `./install --only [list of directives]`, such as `./install --only
link`, and Dotbot will only run those sections of the config file.
### `--except`
You can call `./install --except [list of directives]`, such as `./install
--except shell`, and Dotbot will run all the sections of the config file except
the ones listed.
## Wiki
Check out the [Dotbot wiki][wiki] for more information, tips and tricks,
user-contributed plugins, and more.
## Contributing
Do you have a feature request, bug report, or patch? Great! See
[CONTRIBUTING.md][contributing] for information on what you can do about that.
## License
Copyright (c) Anish Athalye. Released under the MIT License. See
Copyright (c) 2014 Anish Athalye. Released under the MIT License. See
[LICENSE.md][license] for details.
[PyPI]: https://pypi.org/project/dotbot/
[init-dotfiles]: https://github.com/Vaelatern/init-dotfiles
[dotfiles-template]: https://github.com/anishathalye/dotfiles_template
[inspiration]: https://github.com/anishathalye/dotbot/wiki/Users
[windows-symlinks]: https://learn.microsoft.com/en-us/windows/security/threat-protection/security-policy-settings/create-symbolic-links
[json2yaml]: https://www.json2yaml.com/
[plugins]: https://github.com/anishathalye/dotbot/wiki/Plugins
[wiki]: https://github.com/anishathalye/dotbot/wiki
[contributing]: CONTRIBUTING.md
[template]: https://github.com/anishathalye/dotfiles_template
[git-install]: tools/git-submodule/install
[license]: LICENSE.md
[managing-dotfiles-post]: http://www.anishathalye.com/2014/08/03/managing-your-dotfiles/


@ -1,30 +1,7 @@
#!/usr/bin/env sh
# This is a valid shell script and also a valid Python script. When this file
# is executed as a shell script, it finds a python binary and executes this
# file as a Python script, passing along all of the command line arguments.
# When this file is executed as a Python script, it loads and runs Dotbot. This
# is useful because we don't know the name of the python binary.
''':' # begin python string; this line is interpreted by the shell as `:`
command -v python3 >/dev/null 2>&1 && exec python3 "$0" "$@"
command -v python >/dev/null 2>&1 && exec python "$0" "$@"
>&2 echo "error: cannot find python"
exit 1
'''
# python code
#!/usr/bin/env python
import sys, os
# this file is syntactically valid Python 2; bail out if the interpreter is Python 2
if sys.version_info[0] < 3:
print('error: this version of Dotbot is not compatible with Python 2:\nhttps://github.com/anishathalye/dotbot/wiki/Troubleshooting#python-2')
exit(1)
if sys.version_info < (3, 6):
print('error: this version of Dotbot requires Python 3.6+')
exit(1)
PROJECT_ROOT_DIRECTORY = os.path.dirname(
os.path.dirname(os.path.realpath(__file__)))
@ -32,7 +9,11 @@ def inject(lib_path):
path = os.path.join(PROJECT_ROOT_DIRECTORY, 'lib', lib_path)
sys.path.insert(0, path)
inject('pyyaml/lib')
# version dependent libraries
if sys.version_info.major >= 3:
inject('pyyaml/lib3')
else:
inject('pyyaml/lib')
if os.path.exists(os.path.join(PROJECT_ROOT_DIRECTORY, 'dotbot')):
if PROJECT_ROOT_DIRECTORY not in sys.path:


@ -1,4 +1 @@
from .cli import main
from .plugin import Plugin
__version__ = "1.20.1"


@ -1,4 +0,0 @@
from .cli import main
if __name__ == "__main__":
main()


@ -1,167 +1,49 @@
import glob
import os
import subprocess
import sys
from argparse import ArgumentParser, RawTextHelpFormatter
import dotbot
from argparse import ArgumentParser
from .config import ConfigReader, ReadingError
from .dispatcher import Dispatcher, DispatchError
from .messenger import Level, Messenger
from .plugins import Clean, Create, Link, Shell
from .util import module
from .messenger import Messenger
from .messenger import Level
def add_options(parser):
parser.add_argument(
"-Q", "--super-quiet", action="store_true", help="suppress almost all output"
)
parser.add_argument("-q", "--quiet", action="store_true", help="suppress most output")
parser.add_argument(
"-v",
"--verbose",
action="count",
default=0,
help="enable verbose output\n"
"-v: typical verbose\n"
"-vv: also, set shell commands stderr/stdout to true",
)
parser.add_argument(
"-d", "--base-directory", help="execute commands from within BASEDIR", metavar="BASEDIR"
)
parser.add_argument(
"-c", "--config-file", help="run commands given in CONFIGFILE", metavar="CONFIGFILE"
)
parser.add_argument(
"-p",
"--plugin",
action="append",
dest="plugins",
default=[],
help="load PLUGIN as a plugin",
metavar="PLUGIN",
)
parser.add_argument(
"--disable-built-in-plugins", action="store_true", help="disable built-in plugins"
)
parser.add_argument(
"--plugin-dir",
action="append",
dest="plugin_dirs",
default=[],
metavar="PLUGIN_DIR",
help="load all plugins in PLUGIN_DIR",
)
parser.add_argument(
"--only", nargs="+", help="only run specified directives", metavar="DIRECTIVE"
)
parser.add_argument(
"--except", nargs="+", dest="skip", help="skip specified directives", metavar="DIRECTIVE"
)
parser.add_argument(
"--force-color", dest="force_color", action="store_true", help="force color output"
)
parser.add_argument(
"--no-color", dest="no_color", action="store_true", help="disable color output"
)
parser.add_argument(
"--version", action="store_true", help="show program's version number and exit"
)
parser.add_argument(
"-x",
"--exit-on-failure",
dest="exit_on_failure",
action="store_true",
help="exit after first failed directive",
)
parser.add_argument('-Q', '--super-quiet', dest = 'super_quiet', action = 'store_true',
help = 'suppress almost all output')
parser.add_argument('-q', '--quiet', dest = 'quiet', action = 'store_true',
help = 'suppress most output')
parser.add_argument('-v', '--verbose', dest = 'verbose', action = 'store_true',
help = 'enable verbose output')
parser.add_argument('-d', '--base-directory', nargs = 1,
dest = 'base_directory', help = 'execute commands from within BASEDIR',
metavar = 'BASEDIR', required = True)
parser.add_argument('-c', '--config-file', nargs = 1, dest = 'config_file',
help = 'run commands given in CONFIGFILE', metavar = 'CONFIGFILE',
required = True)
def read_config(config_file):
reader = ConfigReader(config_file)
return reader.get_config()
def main():
log = Messenger()
try:
parser = ArgumentParser(formatter_class=RawTextHelpFormatter)
parser = ArgumentParser()
add_options(parser)
options = parser.parse_args()
if options.version:
try:
with open(os.devnull) as devnull:
git_hash = subprocess.check_output(
["git", "rev-parse", "HEAD"],
cwd=os.path.dirname(os.path.abspath(__file__)),
stderr=devnull,
).decode("ascii")
hash_msg = " (git %s)" % git_hash[:10]
except (OSError, subprocess.CalledProcessError):
hash_msg = ""
print("Dotbot version %s%s" % (dotbot.__version__, hash_msg))
exit(0)
if options.super_quiet:
if (options.super_quiet):
log.set_level(Level.WARNING)
if options.quiet:
if (options.quiet):
log.set_level(Level.INFO)
if options.verbose > 0:
if (options.verbose):
log.set_level(Level.DEBUG)
if options.force_color and options.no_color:
log.error("`--force-color` and `--no-color` cannot both be provided")
exit(1)
elif options.force_color:
log.use_color(True)
elif options.no_color:
log.use_color(False)
else:
log.use_color(sys.stdout.isatty())
plugins = []
plugin_directories = list(options.plugin_dirs)
if not options.disable_built_in_plugins:
plugins.extend([Clean, Create, Link, Shell])
plugin_paths = []
for directory in plugin_directories:
for plugin_path in glob.glob(os.path.join(directory, "*.py")):
plugin_paths.append(plugin_path)
for plugin_path in options.plugins:
plugin_paths.append(plugin_path)
for plugin_path in plugin_paths:
abspath = os.path.abspath(plugin_path)
plugins.extend(module.load(abspath))
if not options.config_file:
log.error("No configuration file specified")
exit(1)
tasks = read_config(options.config_file)
if tasks is None:
log.warning("Configuration file is empty, no work to do")
tasks = []
if not isinstance(tasks, list):
raise ReadingError("Configuration file must be a list of tasks")
if options.base_directory:
base_directory = os.path.abspath(options.base_directory)
else:
# default to directory of config file
base_directory = os.path.dirname(os.path.abspath(options.config_file))
os.chdir(base_directory)
dispatcher = Dispatcher(
base_directory,
only=options.only,
skip=options.skip,
exit_on_failure=options.exit_on_failure,
options=options,
plugins=plugins,
)
tasks = read_config(options.config_file[0])
dispatcher = Dispatcher(options.base_directory[0])
success = dispatcher.dispatch(tasks)
if success:
log.info("\n==> All tasks executed successfully")
log.info('\n==> All tasks executed successfully')
else:
raise DispatchError("\n==> Some tasks were not executed successfully")
raise DispatchError('\n==> Some tasks were not executed successfully')
except (ReadingError, DispatchError) as e:
log.error("%s" % e)
log.error('%s' % e)
exit(1)
except KeyboardInterrupt:
log.error("\n==> Operation aborted")
log.error('\n==> Operation aborted')
exit(1)


@ -1,31 +1,19 @@
import json
import os.path
import yaml
from .util import string
class ConfigReader:
class ConfigReader(object):
def __init__(self, config_file_path):
self._config = self._read(config_file_path)
def _read(self, config_file_path):
try:
_, ext = os.path.splitext(config_file_path)
with open(config_file_path) as fin:
if ext == ".json":
data = json.load(fin)
else:
data = yaml.safe_load(fin)
data = yaml.load(fin)
return data
except Exception as e:
msg = string.indent_lines(str(e))
raise ReadingError("Could not read config file:\n%s" % msg)
except Exception:
raise ReadingError('Could not read config file')
def get_config(self):
return self._config
class ReadingError(Exception):
pass


@ -1,33 +0,0 @@
import copy
import os
from argparse import Namespace
class Context:
"""
Contextual data and information for plugins.
"""
def __init__(self, base_directory, options=Namespace()):
self._base_directory = base_directory
self._defaults = {}
self._options = options
pass
def set_base_directory(self, base_directory):
self._base_directory = base_directory
def base_directory(self, canonical_path=True):
base_directory = self._base_directory
if canonical_path:
base_directory = os.path.realpath(base_directory)
return base_directory
def set_defaults(self, defaults):
self._defaults = defaults
def defaults(self):
return copy.deepcopy(self._defaults)
def options(self):
return copy.deepcopy(self._options)


@ -1,78 +1,46 @@
import os
from argparse import Namespace
from .context import Context
from .executor import Executor
from .messenger import Messenger
from .plugin import Plugin
class Dispatcher(object):
PLUGIN_CLASS = Executor
PLUGIN_DIR = 'dotbot/executor'
class Dispatcher:
def __init__(
self,
base_directory,
only=None,
skip=None,
exit_on_failure=False,
options=Namespace(),
plugins=None,
):
def __init__(self, base_directory):
self._log = Messenger()
self._setup_context(base_directory, options)
plugins = plugins or []
self._plugins = [plugin(self._context) for plugin in plugins]
self._only = only
self._skip = skip
self._exit = exit_on_failure
self._set_base_directory(base_directory)
self._load_plugins()
def _setup_context(self, base_directory, options):
path = os.path.abspath(os.path.expanduser(base_directory))
if not os.path.exists(path):
raise DispatchError("Nonexistent base directory")
self._context = Context(path, options)
def _set_base_directory(self, base_directory):
path = os.path.abspath(os.path.realpath(
os.path.expanduser(base_directory)))
if os.path.exists(path):
self._base_directory = path
else:
raise DispatchError('Nonexistant base directory')
def dispatch(self, tasks):
success = True
for task in tasks:
for action in task:
if (
self._only is not None
and action not in self._only
or self._skip is not None
and action in self._skip
) and action != "defaults":
self._log.info("Skipping action %s" % action)
continue
handled = False
if action == "defaults":
self._context.set_defaults(task[action]) # replace, not update
handled = True
# keep going, let other plugins handle this if they want
for plugin in self._plugins:
if plugin.can_handle(action):
try:
local_success = plugin.handle(action, task[action])
if not local_success and self._exit:
# The action has failed exit
self._log.error("Action %s failed" % action)
return False
success &= local_success
success &= plugin.handle(action, task[action])
handled = True
except Exception as err:
except Exception:
self._log.error(
"An error was encountered while executing action %s" % action
)
self._log.debug(err)
if self._exit:
# There was an execption exit
return False
'An error was encountered while executing action %s' %
action)
if not handled:
success = False
self._log.error("Action %s not handled" % action)
if self._exit:
# Invalid action exit
return False
self._log.error('Action %s not handled' % action)
return success
def _load_plugins(self):
self._plugins = [plugin(self._base_directory)
for plugin in Executor.__subclasses__()]
class DispatchError(Exception):
pass


@ -0,0 +1,4 @@
from .executor import Executor
from .linker import Linker
from .cleaner import Cleaner
from .commandrunner import CommandRunner


@ -0,0 +1,49 @@
import os
from . import Executor
class Cleaner(Executor):
'''
Cleans broken symbolic links.
'''
_directive = 'clean'
def can_handle(self, directive):
return directive == self._directive
def handle(self, directive, data):
if directive != self._directive:
raise ValueError('Cleaner cannot handle directive %s' % directive)
return self._process_clean(data)
def _process_clean(self, targets):
success = True
for target in targets:
success &= self._clean(target)
if success:
self._log.info('All targets have been cleaned')
else:
self._log.error('Some targets were not succesfully cleaned')
return success
def _clean(self, target):
'''
Cleans all the broken symbolic links in target that point to
a subdirectory of the base directory.
'''
for item in os.listdir(os.path.expanduser(target)):
path = os.path.join(os.path.expanduser(target), item)
if not os.path.exists(path) and os.path.islink(path):
if self._in_directory(path, self._base_directory):
self._log.lowinfo('Removing invalid link %s -> %s' %
(path, os.path.join(os.path.dirname(path), os.readlink(path))))
os.remove(path)
return True
def _in_directory(self, path, directory):
'''
Returns true if the path is in the directory.
'''
directory = os.path.join(os.path.realpath(directory), '')
path = os.path.realpath(path)
return os.path.commonprefix([path, directory]) == directory


@ -0,0 +1,34 @@
import os, subprocess
from . import Executor
class CommandRunner(Executor):
'''
Run arbitrary shell commands.
'''
_directive = 'shell'
def can_handle(self, directive):
return directive == self._directive
def handle(self, directive, data):
if directive != self._directive:
raise ValueError('CommandRunner cannot handle directive %s' %
directive)
return self._process_commands(data)
def _process_commands(self, data):
success = True
with open(os.devnull, 'w') as devnull:
for cmd, msg in data:
self._log.lowinfo('%s [%s]' % (msg, cmd))
ret = subprocess.call(cmd, shell = True, stdout = devnull,
stderr = devnull, cwd = self._base_directory)
if ret != 0:
success = False
self._log.warning('Command [%s] failed' % cmd)
if success:
self._log.info('All commands have been executed')
else:
self._log.error('Some commands were not sucessfully executed')
return success


@ -0,0 +1,24 @@
from ..messenger import Messenger
class Executor(object):
'''
Abstract base class for commands that process directives.
'''
def __init__(self, base_directory):
self._base_directory = base_directory
self._log = Messenger()
def can_handle(self, directive):
'''
Returns true if the Executor can handle the directive.
'''
raise NotImplementedError
def handle(self, directive, data):
'''
Executes the directive.
Returns true if the Executor successfully handled the directive.
'''
raise NotImplementedError

dotbot/executor/linker.py

@ -0,0 +1,129 @@
import os, shutil
from . import Executor
class Linker(Executor):
'''
Symbolically links dotfiles.
'''
_directive = 'link'
def can_handle(self, directive):
return directive == self._directive
def handle(self, directive, data):
if directive != self._directive:
raise ValueError('Linker cannot handle directive %s' % directive)
return self._process_links(data)
def _process_links(self, links):
success = True
for destination, source in links.items():
if isinstance(source, dict):
# extended config
path = source['path']
force = source.get('force', False)
create = source.get('create', False)
if create:
success &= self._create(destination)
if force:
success &= self._delete(path, destination)
else:
path = source
success &= self._link(path, destination)
if success:
self._log.info('All links have been set up')
else:
self._log.error('Some links were not successfully set up')
return success
def _is_link(self, path):
'''
Returns true if the path is a symbolic link.
'''
return os.path.islink(os.path.expanduser(path))
def _link_destination(self, path):
'''
Returns the absolute path to the destination of the symbolic link.
'''
path = os.path.expanduser(path)
rel_dest = os.readlink(path)
return os.path.join(os.path.dirname(path), rel_dest)
def _exists(self, path):
'''
Returns true if the path exists.
'''
path = os.path.expanduser(path)
return os.path.exists(path)
def _create(self, path):
success = True
parent = os.path.abspath(os.path.join(os.path.expanduser(path), os.pardir))
if not self._exists(parent):
try:
os.makedirs(parent)
except OSError:
self._log.warning('Failed to create directory %s' % parent)
success = False
else:
self._log.lowinfo('Creating directory %s' % parent)
return success
def _delete(self, source, path):
success = True
source = os.path.join(self._base_directory, source)
if ((self._is_link(path) and self._link_destination(path) != source) or
(self._exists(path) and not self._is_link(path))):
fullpath = os.path.expanduser(path)
try:
if os.path.isdir(fullpath):
shutil.rmtree(fullpath)
else:
os.remove(fullpath)
except OSError:
self._log.warning('Failed to remove %s' % path)
success = False
else:
self._log.lowinfo('Removing %s' % path)
return success
def _link(self, source, link_name):
'''
Links link_name to source.
Returns true if successfully linked files.
'''
success = False
source = os.path.join(self._base_directory, source)
if (not self._exists(link_name) and self._is_link(link_name) and
self._link_destination(link_name) != source):
self._log.warning('Invalid link %s -> %s' %
(link_name, self._link_destination(link_name)))
elif not self._exists(link_name) and self._exists(source):
try:
os.symlink(source, os.path.expanduser(link_name))
except OSError:
self._log.warning('Linking failed %s -> %s' % (link_name, source))
else:
self._log.lowinfo('Creating link %s -> %s' % (link_name, source))
success = True
elif self._exists(link_name) and not self._is_link(link_name):
self._log.warning(
'%s already exists but is a regular file or directory' %
link_name)
elif self._is_link(link_name) and self._link_destination(link_name) != source:
self._log.warning('Incorrect link %s -> %s' %
(link_name, self._link_destination(link_name)))
elif not self._exists(source):
if self._is_link(link_name):
self._log.warning('Nonexistant target %s -> %s' %
(link_name, source))
else:
self._log.warning('Nonexistant target for %s : %s' %
(link_name, source))
else:
self._log.lowinfo('Link exists %s -> %s' % (link_name, source))
success = True
return success


@ -1,2 +1,2 @@
from .level import Level
from .messenger import Messenger
from .level import Level


@ -1,8 +1,8 @@
class Color:
NONE = ""
RESET = "\033[0m"
RED = "\033[91m"
GREEN = "\033[92m"
YELLOW = "\033[93m"
BLUE = "\033[94m"
MAGENTA = "\033[95m"
class Color(object):
NONE = ''
RESET = '\033[0m'
RED = '\033[91m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
BLUE = '\033[94m'
MAGENTA = '\033[95m'


@ -1,4 +1,4 @@
class Level:
class Level(object):
NOTSET = 0
DEBUG = 10
LOWINFO = 15


@ -1,22 +1,20 @@
import sys
from ..util.singleton import Singleton
from .color import Color
from .level import Level
class Messenger(object):
__metaclass__ = Singleton
class Messenger(metaclass=Singleton):
def __init__(self, level=Level.LOWINFO):
def __init__(self, level = Level.LOWINFO):
self.set_level(level)
self.use_color(True)
def set_level(self, level):
self._level = level
def use_color(self, yesno):
self._use_color = yesno
def log(self, level, message):
if level >= self._level:
print("%s%s%s" % (self._color(level), message, self._reset()))
if (level >= self._level):
print('%s%s%s' % (self._color(level), message, self._reset()))
def debug(self, message):
self.log(Level.DEBUG, message)
@ -34,13 +32,13 @@ class Messenger(metaclass=Singleton):
self.log(Level.ERROR, message)
def _color(self, level):
"""
'''
Get a color (terminal escape sequence) according to a level.
"""
if not self._use_color:
return ""
'''
if not sys.stdout.isatty():
return ''
elif level < Level.DEBUG:
return ""
return ''
elif Level.DEBUG <= level < Level.LOWINFO:
return Color.YELLOW
elif Level.LOWINFO <= level < Level.INFO:
@ -53,10 +51,10 @@ class Messenger(metaclass=Singleton):
return Color.RED
def _reset(self):
"""
'''
Get a reset color (terminal escape sequence).
"""
if not self._use_color:
return ""
'''
if not sys.stdout.isatty():
return ''
else:
return Color.RESET


@ -1,26 +0,0 @@
from .context import Context
from .messenger import Messenger
class Plugin:
"""
Abstract base class for commands that process directives.
"""
def __init__(self, context):
self._context = context
self._log = Messenger()
def can_handle(self, directive):
"""
Returns true if the Plugin can handle the directive.
"""
raise NotImplementedError
def handle(self, directive, data):
"""
Executes the directive.
Returns true if the Plugin successfully handled the directive.
"""
raise NotImplementedError


@ -1,4 +0,0 @@
from .clean import Clean
from .create import Create
from .link import Link
from .shell import Shell


@ -1,72 +0,0 @@
import os
import sys
from ..plugin import Plugin
class Clean(Plugin):
"""
Cleans broken symbolic links.
"""
_directive = "clean"
def can_handle(self, directive):
return directive == self._directive
def handle(self, directive, data):
if directive != self._directive:
raise ValueError("Clean cannot handle directive %s" % directive)
return self._process_clean(data)
def _process_clean(self, targets):
success = True
defaults = self._context.defaults().get(self._directive, {})
for target in targets:
force = defaults.get("force", False)
recursive = defaults.get("recursive", False)
if isinstance(targets, dict) and isinstance(targets[target], dict):
force = targets[target].get("force", force)
recursive = targets[target].get("recursive", recursive)
success &= self._clean(target, force, recursive)
if success:
self._log.info("All targets have been cleaned")
else:
self._log.error("Some targets were not successfully cleaned")
return success
def _clean(self, target, force, recursive):
"""
Cleans all the broken symbolic links in target if they point to
a subdirectory of the base directory or if forced to clean.
"""
if not os.path.isdir(os.path.expandvars(os.path.expanduser(target))):
self._log.debug("Ignoring nonexistent directory %s" % target)
return True
for item in os.listdir(os.path.expandvars(os.path.expanduser(target))):
path = os.path.abspath(
os.path.join(os.path.expandvars(os.path.expanduser(target)), item)
)
if recursive and os.path.isdir(path):
# isdir implies not islink -- we don't want to descend into
# symlinked directories. okay to do a recursive call here
# because depth should be fairly limited
self._clean(path, force, recursive)
if not os.path.exists(path) and os.path.islink(path):
points_at = os.path.join(os.path.dirname(path), os.readlink(path))
if sys.platform[:5] == "win32" and points_at.startswith("\\\\?\\"):
points_at = points_at[4:]
if self._in_directory(path, self._context.base_directory()) or force:
self._log.lowinfo("Removing invalid link %s -> %s" % (path, points_at))
os.remove(path)
else:
self._log.lowinfo("Link %s -> %s not removed." % (path, points_at))
return True
def _in_directory(self, path, directory):
"""
Returns true if the path is in the directory.
"""
directory = os.path.join(os.path.realpath(directory), "")
path = os.path.realpath(path)
return os.path.commonprefix([path, directory]) == directory


@ -1,60 +0,0 @@
import os
from ..plugin import Plugin
class Create(Plugin):
"""
Create empty paths.
"""
_directive = "create"
def can_handle(self, directive):
return directive == self._directive
def handle(self, directive, data):
if directive != self._directive:
raise ValueError("Create cannot handle directive %s" % directive)
return self._process_paths(data)
def _process_paths(self, paths):
success = True
defaults = self._context.defaults().get("create", {})
for key in paths:
path = os.path.abspath(os.path.expandvars(os.path.expanduser(key)))
mode = defaults.get("mode", 0o777) # same as the default for os.makedirs
if isinstance(paths, dict):
options = paths[key]
if options:
mode = options.get("mode", mode)
success &= self._create(path, mode)
if success:
self._log.info("All paths have been set up")
else:
self._log.error("Some paths were not successfully set up")
return success
def _exists(self, path):
"""
Returns true if the path exists.
"""
path = os.path.expanduser(path)
return os.path.exists(path)
def _create(self, path, mode):
success = True
if not self._exists(path):
self._log.debug("Trying to create path %s with mode %o" % (path, mode))
try:
self._log.lowinfo("Creating path %s" % path)
os.makedirs(path, mode)
# On Windows, the *mode* argument to `os.makedirs()` is ignored.
# The mode must be set explicitly in a follow-up call.
os.chmod(path, mode)
except OSError:
self._log.warning("Failed to create path %s" % path)
success = False
else:
self._log.lowinfo("Path exists %s" % path)
return success


@ -1,286 +0,0 @@
import glob
import os
import shutil
import sys
from ..plugin import Plugin
from ..util import shell_command
class Link(Plugin):
"""
Symbolically links dotfiles.
"""
_directive = "link"
def can_handle(self, directive):
return directive == self._directive
def handle(self, directive, data):
if directive != self._directive:
raise ValueError("Link cannot handle directive %s" % directive)
return self._process_links(data)
def _process_links(self, links):
success = True
defaults = self._context.defaults().get("link", {})
for destination, source in links.items():
destination = os.path.expandvars(destination)
relative = defaults.get("relative", False)
# support old "canonicalize-path" key for compatibility
canonical_path = defaults.get("canonicalize", defaults.get("canonicalize-path", True))
force = defaults.get("force", False)
relink = defaults.get("relink", False)
create = defaults.get("create", False)
use_glob = defaults.get("glob", False)
base_prefix = defaults.get("prefix", "")
test = defaults.get("if", None)
ignore_missing = defaults.get("ignore-missing", False)
exclude_paths = defaults.get("exclude", [])
if isinstance(source, dict):
# extended config
test = source.get("if", test)
relative = source.get("relative", relative)
canonical_path = source.get(
"canonicalize", source.get("canonicalize-path", canonical_path)
)
force = source.get("force", force)
relink = source.get("relink", relink)
create = source.get("create", create)
use_glob = source.get("glob", use_glob)
base_prefix = source.get("prefix", base_prefix)
ignore_missing = source.get("ignore-missing", ignore_missing)
exclude_paths = source.get("exclude", exclude_paths)
path = self._default_source(destination, source.get("path"))
else:
path = self._default_source(destination, source)
if test is not None and not self._test_success(test):
self._log.lowinfo("Skipping %s" % destination)
continue
path = os.path.normpath(os.path.expandvars(os.path.expanduser(path)))
if use_glob and self._has_glob_chars(path):
glob_results = self._create_glob_results(path, exclude_paths)
self._log.lowinfo("Globs from '" + path + "': " + str(glob_results))
for glob_full_item in glob_results:
# Find common dirname between pattern and the item:
glob_dirname = os.path.dirname(os.path.commonprefix([path, glob_full_item]))
glob_item = (
glob_full_item
if len(glob_dirname) == 0
else glob_full_item[len(glob_dirname) + 1 :]
)
# Add prefix to basepath, if provided
if base_prefix:
glob_item = base_prefix + glob_item
# where is it going
glob_link_destination = os.path.join(destination, glob_item)
if create:
success &= self._create(glob_link_destination)
if force or relink:
success &= self._delete(
glob_full_item,
glob_link_destination,
relative,
canonical_path,
force,
)
success &= self._link(
glob_full_item,
glob_link_destination,
relative,
canonical_path,
ignore_missing,
)
else:
if create:
success &= self._create(destination)
if not ignore_missing and not self._exists(
os.path.join(self._context.base_directory(), path)
):
# we seemingly check this twice (here and in _link) because
# if the file doesn't exist and force is True, we don't
# want to remove the original (this is tested by
# link-force-leaves-when-nonexistent.bash)
success = False
self._log.warning("Nonexistent source %s -> %s" % (destination, path))
continue
if force or relink:
success &= self._delete(path, destination, relative, canonical_path, force)
success &= self._link(path, destination, relative, canonical_path, ignore_missing)
if success:
self._log.info("All links have been set up")
else:
self._log.error("Some links were not successfully set up")
return success
def _test_success(self, command):
ret = shell_command(command, cwd=self._context.base_directory())
if ret != 0:
self._log.debug("Test '%s' returned false" % command)
return ret == 0
def _default_source(self, destination, source):
if source is None:
basename = os.path.basename(destination)
if basename.startswith("."):
return basename[1:]
else:
return basename
else:
return source
def _has_glob_chars(self, path):
return any(i in path for i in "?*[")
def _glob(self, path):
"""
Wrap `glob.glob` in a python agnostic way, catching errors in usage.
"""
found = glob.glob(path, recursive=True)
# normalize paths to ensure cross-platform compatibility
found = [os.path.normpath(p) for p in found]
# if using recursive glob (`**`), filter results to return only files:
if "**" in path and not path.endswith(str(os.sep)):
self._log.debug("Excluding directories from recursive glob: " + str(path))
found = [f for f in found if os.path.isfile(f)]
# return matched results
return found
def _create_glob_results(self, path, exclude_paths):
self._log.debug("Globbing with pattern: " + str(path))
include = self._glob(path)
self._log.debug("Glob found : " + str(include))
# filter out any paths matching the exclude globs:
exclude = []
for expat in exclude_paths:
self._log.debug("Excluding globs with pattern: " + str(expat))
exclude.extend(self._glob(expat))
self._log.debug("Excluded globs from '" + path + "': " + str(exclude))
ret = set(include) - set(exclude)
return list(ret)
def _is_link(self, path):
"""
Returns true if the path is a symbolic link.
"""
return os.path.islink(os.path.expanduser(path))
def _link_destination(self, path):
"""
Returns the destination of the symbolic link.
"""
path = os.path.expanduser(path)
path = os.readlink(path)
if sys.platform[:5] == "win32" and path.startswith("\\\\?\\"):
path = path[4:]
return path
def _exists(self, path):
"""
Returns true if the path exists.
"""
path = os.path.expanduser(path)
return os.path.exists(path)
def _create(self, path):
success = True
parent = os.path.abspath(os.path.join(os.path.expanduser(path), os.pardir))
if not self._exists(parent):
self._log.debug("Try to create parent: " + str(parent))
try:
os.makedirs(parent)
except OSError:
self._log.warning("Failed to create directory %s" % parent)
success = False
else:
self._log.lowinfo("Creating directory %s" % parent)
return success
def _delete(self, source, path, relative, canonical_path, force):
success = True
source = os.path.join(self._context.base_directory(canonical_path=canonical_path), source)
fullpath = os.path.abspath(os.path.expanduser(path))
if relative:
source = self._relative_path(source, fullpath)
if (self._is_link(path) and self._link_destination(path) != source) or (
self._exists(path) and not self._is_link(path)
):
removed = False
try:
if os.path.islink(fullpath):
os.unlink(fullpath)
removed = True
elif force:
if os.path.isdir(fullpath):
shutil.rmtree(fullpath)
removed = True
else:
os.remove(fullpath)
removed = True
except OSError:
self._log.warning("Failed to remove %s" % path)
success = False
else:
if removed:
self._log.lowinfo("Removing %s" % path)
return success
def _relative_path(self, source, destination):
"""
Returns the relative path to get to the source file from the
destination file.
"""
destination_dir = os.path.dirname(destination)
return os.path.relpath(source, destination_dir)
def _link(self, source, link_name, relative, canonical_path, ignore_missing):
"""
Links link_name to source.
Returns true if successfully linked files.
"""
success = False
destination = os.path.abspath(os.path.expanduser(link_name))
base_directory = self._context.base_directory(canonical_path=canonical_path)
absolute_source = os.path.join(base_directory, source)
link_name = os.path.normpath(link_name)
if relative:
source = self._relative_path(absolute_source, destination)
else:
source = absolute_source
if (
not self._exists(link_name)
and self._is_link(link_name)
and self._link_destination(link_name) != source
):
self._log.warning(
"Invalid link %s -> %s" % (link_name, self._link_destination(link_name))
)
# we need to use absolute_source below because our cwd is the dotfiles
# directory, and if source is relative, it will be relative to the
# destination directory
elif not self._exists(link_name) and (ignore_missing or self._exists(absolute_source)):
try:
os.symlink(source, destination)
except OSError:
self._log.warning("Linking failed %s -> %s" % (link_name, source))
else:
self._log.lowinfo("Creating link %s -> %s" % (link_name, source))
success = True
elif self._exists(link_name) and not self._is_link(link_name):
self._log.warning("%s already exists but is a regular file or directory" % link_name)
elif self._is_link(link_name) and self._link_destination(link_name) != source:
self._log.warning(
"Incorrect link %s -> %s" % (link_name, self._link_destination(link_name))
)
# again, we use absolute_source to check for existence
elif not self._exists(absolute_source):
if self._is_link(link_name):
self._log.warning("Nonexistent source %s -> %s" % (link_name, source))
else:
self._log.warning("Nonexistent source for %s : %s" % (link_name, source))
else:
self._log.lowinfo("Link exists %s -> %s" % (link_name, source))
success = True
return success
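As a companion to the plugin above, here is a hedged sketch of the link shapes it accepts, assembled from the keys read in _process_links and from the link tests later in this diff; the file names and the "which nvim" test command are illustrative only:

# Hypothetical link configuration (Python form, as the tests below write it).
config = [
    {"defaults": {"link": {"relink": True, "create": True}}},
    {
        "link": {
            "~/.vimrc": "vimrc",  # shorthand: target -> source
            "~/.config/nvim": {  # extended form with per-link options
                "path": "nvim",
                "relative": True,
                "if": "which nvim",  # only link when this test command succeeds
            },
            "~/bin/": {
                "path": "bin/*",  # glob the source directory
                "glob": True,
                "exclude": ["bin/*.bak"],
            },
        }
    },
]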

View file

@ -1,77 +0,0 @@
from ..plugin import Plugin
from ..util import shell_command
class Shell(Plugin):
"""
Run arbitrary shell commands.
"""
_directive = "shell"
_has_shown_override_message = False
def can_handle(self, directive):
return directive == self._directive
def handle(self, directive, data):
if directive != self._directive:
raise ValueError("Shell cannot handle directive %s" % directive)
return self._process_commands(data)
def _process_commands(self, data):
success = True
defaults = self._context.defaults().get("shell", {})
options = self._get_option_overrides()
for item in data:
stdin = defaults.get("stdin", False)
stdout = defaults.get("stdout", False)
stderr = defaults.get("stderr", False)
quiet = defaults.get("quiet", False)
if isinstance(item, dict):
cmd = item["command"]
msg = item.get("description", None)
stdin = item.get("stdin", stdin)
stdout = item.get("stdout", stdout)
stderr = item.get("stderr", stderr)
quiet = item.get("quiet", quiet)
elif isinstance(item, list):
cmd = item[0]
msg = item[1] if len(item) > 1 else None
else:
cmd = item
msg = None
if quiet:
if msg is not None:
self._log.lowinfo("%s" % msg)
elif msg is None:
self._log.lowinfo(cmd)
else:
self._log.lowinfo("%s [%s]" % (msg, cmd))
stdout = options.get("stdout", stdout)
stderr = options.get("stderr", stderr)
ret = shell_command(
cmd,
cwd=self._context.base_directory(),
enable_stdin=stdin,
enable_stdout=stdout,
enable_stderr=stderr,
)
if ret != 0:
success = False
self._log.warning("Command [%s] failed" % cmd)
if success:
self._log.info("All commands have been executed")
else:
self._log.error("Some commands were not successfully executed")
return success
def _get_option_overrides(self):
ret = {}
options = self._context.options()
if options.verbose > 1:
ret["stderr"] = True
ret["stdout"] = True
if not self._has_shown_override_message:
self._log.debug("Shell: Found cli option to force show stderr and stdout.")
self._has_shown_override_message = True
return ret
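The three item shapes handled by _process_commands above (a bare command string, a [command, description] pair, and a mapping) would look like the following in a config; the commands themselves are illustrative:

# Hypothetical shell configuration (Python form, as the tests below write it).
config = [
    {"defaults": {"shell": {"quiet": True}}},
    {
        "shell": [
            "mkdir -p ~/projects",  # bare command string
            ["git submodule update --init --recursive", "Syncing submodules"],  # [command, description]
            {
                "command": "echo done",
                "description": "Reporting status",
                "stdout": True,  # surface this command's output
            },
        ]
    },
]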

View file

@ -1 +0,0 @@
from .common import shell_command

View file

@ -1,34 +0,0 @@
import os
import platform
import subprocess
def shell_command(command, cwd=None, enable_stdin=False, enable_stdout=False, enable_stderr=False):
with open(os.devnull, "w") as devnull_w, open(os.devnull, "r") as devnull_r:
stdin = None if enable_stdin else devnull_r
stdout = None if enable_stdout else devnull_w
stderr = None if enable_stderr else devnull_w
executable = os.environ.get("SHELL")
if platform.system() == "Windows":
# We avoid setting the executable kwarg on Windows because it does
# not have the desired effect when combined with shell=True. It
# will result in the correct program being run (e.g. bash), but it
# will be invoked with a '/c' argument instead of a '-c' argument,
# which it won't understand.
#
# See https://github.com/anishathalye/dotbot/issues/219 and
# https://bugs.python.org/issue40467.
#
# This means that complex commands that require Bash's parsing
# won't work; a workaround for this is to write the command as
# `bash -c "..."`.
executable = None
return subprocess.call(
command,
shell=True,
executable=executable,
stdin=stdin,
stdout=stdout,
stderr=stderr,
cwd=cwd,
)
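A minimal usage sketch of the helper above; the command is illustrative, and as the comment in the code notes, a command that needs Bash's parsing on Windows would have to be written as bash -c "...":

# Hypothetical call: run a command in the current working directory and show its output.
status = shell_command("echo hello", enable_stdout=True)
if status != 0:
    raise RuntimeError("command failed with exit status %d" % status)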

View file

@ -1,33 +0,0 @@
import os
import sys
from dotbot.plugin import Plugin
# We keep references to loaded modules so they don't get garbage collected.
loaded_modules = []
def load(path):
basename = os.path.basename(path)
module_name, extension = os.path.splitext(basename)
loaded_module = load_module(module_name, path)
plugins = []
for name in dir(loaded_module):
possible_plugin = getattr(loaded_module, name)
try:
if issubclass(possible_plugin, Plugin) and possible_plugin is not Plugin:
plugins.append(possible_plugin)
except TypeError:
pass
loaded_modules.append(loaded_module)
return plugins
import importlib.util
def load_module(module_name, path):
spec = importlib.util.spec_from_file_location(module_name, path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
return module
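A hedged usage sketch of the loader above; the plugin path is illustrative:

# Collect every dotbot Plugin subclass defined in a plugin file on disk.
plugins = load(os.path.expanduser("~/dotfiles/plugins/myplugin.py"))  # hypothetical path
for plugin_class in plugins:
    print("discovered plugin:", plugin_class.__name__)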

View file

@ -1,6 +1,5 @@
class Singleton(type):
_instances = {}
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
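A minimal sketch of the metaclass in use, assuming __call__ also returns cls._instances[cls] (the standard singleton pattern; that line falls outside the hunk shown above):

class Settings(metaclass=Singleton):  # hypothetical consumer of the metaclass
    def __init__(self):
        self.values = {}

first = Settings()
second = Settings()
# With the full pattern, both names refer to the same cached instance,
# and __init__ runs only for the first construction.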

View file

@ -1,4 +0,0 @@
def indent_lines(string, amount=2, delimiter="\n"):
whitespace = " " * amount
sep = "%s%s" % (delimiter, whitespace)
return "%s%s" % (whitespace, sep.join(string.split(delimiter)))

@ -1 +1 @@
Subproject commit c42fa3bff1eabdb64763bb1526d9ea1ccb708479
Subproject commit f30c956c11aa6b5e7827fe5840cc9ed40b938d17

View file

@ -1,17 +0,0 @@
[tool.black]
line-length = 100
exclude = '''
/(
\.git
| \.github
| .*\.egg-info
| build
| dist
| lib
)/
'''
[tool.pytest.ini_options]
filterwarnings = [
"error",
]

View file

@ -1,73 +0,0 @@
import re
from os import path
from setuptools import find_packages, setup
here = path.dirname(__file__)
with open(path.join(here, "README.md"), encoding="utf-8") as f:
long_description = f.read()
def read(*names, **kwargs):
with open(path.join(here, *names), encoding=kwargs.get("encoding", "utf8")) as fp:
return fp.read()
def find_version(*file_paths):
version_file = read(*file_paths)
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
setup(
name="dotbot",
version=find_version("dotbot", "__init__.py"),
description="A tool that bootstraps your dotfiles",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/anishathalye/dotbot",
author="Anish Athalye",
author_email="me@anishathalye.com",
license="MIT",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Topic :: Utilities",
],
keywords="dotfiles",
packages=find_packages(),
setup_requires=[
"setuptools>=38.6.0",
"wheel>=0.31.0",
],
install_requires=[
"PyYAML>=6.0.1,<7",
],
extras_require={
"dev": {
"pytest",
"tox",
}
},
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
entry_points={
"console_scripts": [
"dotbot=dotbot:main",
],
},
)

View file

@ -1,317 +0,0 @@
import builtins
import ctypes
import json
import os
import shutil
import sys
import tempfile
import unittest.mock as mock
from shutil import rmtree
import pytest
import yaml
import dotbot.cli
def get_long_path(path):
"""Get the long path for a given path."""
# Do nothing for non-Windows platforms.
if sys.platform[:5] != "win32":
return path
buffer_size = 1000
buffer = ctypes.create_unicode_buffer(buffer_size)
get_long_path_name = ctypes.windll.kernel32.GetLongPathNameW
get_long_path_name(path, buffer, buffer_size)
return buffer.value
# On Linux, tempfile.TemporaryFile() requires unlink access.
# This list is updated by a tempfile._mkstemp_inner() wrapper,
# and its contents are checked by wrapped functions.
allowed_tempfile_internal_unlink_calls = []
def wrap_function(function, function_path, arg_index, kwarg_key, root):
def wrapper(*args, **kwargs):
if kwarg_key in kwargs:
value = kwargs[kwarg_key]
else:
value = args[arg_index]
# Allow tempfile.TemporaryFile's internal unlink calls to work.
if value in allowed_tempfile_internal_unlink_calls:
return function(*args, **kwargs)
msg = "The '{0}' argument to {1}() must be an absolute path"
msg = msg.format(kwarg_key, function_path)
assert value == os.path.abspath(value), msg
msg = "The '{0}' argument to {1}() must be rooted in {2}"
msg = msg.format(kwarg_key, function_path, root)
assert value[: len(str(root))] == str(root), msg
return function(*args, **kwargs)
return wrapper
def wrap_open(root):
wrapped = getattr(builtins, "open")
def wrapper(*args, **kwargs):
if "file" in kwargs:
value = kwargs["file"]
else:
value = args[0]
mode = "r"
if "mode" in kwargs:
mode = kwargs["mode"]
elif len(args) >= 2:
mode = args[1]
msg = "The 'file' argument to open() must be an absolute path"
if value != os.devnull and "w" in mode:
assert value == os.path.abspath(value), msg
msg = "The 'file' argument to open() must be rooted in {0}"
msg = msg.format(root)
if value != os.devnull and "w" in mode:
assert value[: len(str(root))] == str(root), msg
return wrapped(*args, **kwargs)
return wrapper
def rmtree_error_handler(_, path, __):
# Handle read-only files and directories.
os.chmod(path, 0o777)
if os.path.isdir(path):
rmtree(path)
else:
os.unlink(path)
@pytest.fixture(autouse=True, scope="session")
def standardize_tmp():
r"""Standardize the temporary directory path.
On MacOS, `/var` is a symlink to `/private/var`.
This creates issues with link canonicalization and relative link tests,
so this fixture rewrites environment variables and Python variables
to ensure the tests work the same as on Linux and Windows.
On Windows in GitHub CI, the temporary directory may be a short path.
For example, `C:\Users\RUNNER~1\...` instead of `C:\Users\runneradmin\...`.
This causes string-based path comparisons to fail.
"""
tmp = tempfile.gettempdir()
# MacOS: `/var` is a symlink.
tmp = os.path.abspath(os.path.realpath(tmp))
# Windows: The temporary directory may be a short path.
if sys.platform[:5] == "win32":
tmp = get_long_path(tmp)
os.environ["TMP"] = tmp
os.environ["TEMP"] = tmp
os.environ["TMPDIR"] = tmp
tempfile.tempdir = tmp
yield
@pytest.fixture(autouse=True)
def root(standardize_tmp):
"""Create a temporary directory for the duration of each test."""
# Reset allowed_tempfile_internal_unlink_calls.
global allowed_tempfile_internal_unlink_calls
allowed_tempfile_internal_unlink_calls = []
# Dotbot changes the current working directory,
# so this must be reset at the end of each test.
current_working_directory = os.getcwd()
# Create an isolated temporary directory from which to operate.
current_root = tempfile.mkdtemp()
functions_to_wrap = [
(os, "chflags", 0, "path"),
(os, "chmod", 0, "path"),
(os, "chown", 0, "path"),
(os, "copy_file_range", 1, "dst"),
(os, "lchflags", 0, "path"),
(os, "lchmod", 0, "path"),
(os, "link", 1, "dst"),
(os, "makedirs", 0, "name"),
(os, "mkdir", 0, "path"),
(os, "mkfifo", 0, "path"),
(os, "mknod", 0, "path"),
(os, "remove", 0, "path"),
(os, "removedirs", 0, "name"),
(os, "removexattr", 0, "path"),
(os, "rename", 0, "src"), # Check both
(os, "rename", 1, "dst"),
(os, "renames", 0, "old"), # Check both
(os, "renames", 1, "new"),
(os, "replace", 0, "src"), # Check both
(os, "replace", 1, "dst"),
(os, "rmdir", 0, "path"),
(os, "setxattr", 0, "path"),
(os, "splice", 1, "dst"),
(os, "symlink", 1, "dst"),
(os, "truncate", 0, "path"),
(os, "unlink", 0, "path"),
(os, "utime", 0, "path"),
(shutil, "chown", 0, "path"),
(shutil, "copy", 1, "dst"),
(shutil, "copy2", 1, "dst"),
(shutil, "copyfile", 1, "dst"),
(shutil, "copymode", 1, "dst"),
(shutil, "copystat", 1, "dst"),
(shutil, "copytree", 1, "dst"),
(shutil, "make_archive", 0, "base_name"),
(shutil, "move", 0, "src"), # Check both
(shutil, "move", 1, "dst"),
(shutil, "rmtree", 0, "path"),
(shutil, "unpack_archive", 1, "extract_dir"),
]
patches = []
for module, function_name, arg_index, kwarg_key in functions_to_wrap:
# Skip anything that doesn't exist in this version of Python.
if not hasattr(module, function_name):
continue
# These values must be passed to a separate function
# to ensure the variable closures work correctly.
function_path = "{0}.{1}".format(module.__name__, function_name)
function = getattr(module, function_name)
wrapped = wrap_function(function, function_path, arg_index, kwarg_key, current_root)
patches.append(mock.patch(function_path, wrapped))
# open() must be separately wrapped.
function_path = "builtins.open"
wrapped = wrap_open(current_root)
patches.append(mock.patch(function_path, wrapped))
# Block all access to bad functions.
if hasattr(os, "chroot"):
patches.append(mock.patch("os.chroot", lambda *_, **__: None))
# Patch tempfile._mkstemp_inner() so tempfile.TemporaryFile()
# can unlink files immediately.
mkstemp_inner = tempfile._mkstemp_inner
def wrap_mkstemp_inner(*args, **kwargs):
(fd, name) = mkstemp_inner(*args, **kwargs)
allowed_tempfile_internal_unlink_calls.append(name)
return fd, name
patches.append(mock.patch("tempfile._mkstemp_inner", wrap_mkstemp_inner))
[patch.start() for patch in patches]
try:
yield current_root
finally:
[patch.stop() for patch in patches]
os.chdir(current_working_directory)
if sys.version_info >= (3, 12):
rmtree(current_root, onexc=rmtree_error_handler)
else:
rmtree(current_root, onerror=rmtree_error_handler)
@pytest.fixture
def home(monkeypatch, root):
"""Create a home directory for the duration of the test.
On *nix, the environment variable "HOME" will be mocked.
On Windows, the environment variable "USERPROFILE" will be mocked.
"""
home = os.path.abspath(os.path.join(root, "home/user"))
os.makedirs(home)
if sys.platform[:5] == "win32":
monkeypatch.setenv("USERPROFILE", home)
else:
monkeypatch.setenv("HOME", home)
yield home
class Dotfiles:
"""Create and manage a dotfiles directory for a test."""
def __init__(self, root):
self.root = root
self.config = None
self.config_filename = None
self.directory = os.path.join(root, "dotfiles")
os.mkdir(self.directory)
def makedirs(self, path):
os.makedirs(os.path.abspath(os.path.join(self.directory, path)))
def write(self, path, content=""):
path = os.path.abspath(os.path.join(self.directory, path))
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
with open(path, "w") as file:
file.write(content)
def write_config(self, config, serializer="yaml", path=None):
"""Write a dotbot config and return the filename."""
assert serializer in {"json", "yaml"}, "Only json and yaml are supported"
if serializer == "yaml":
serialize = yaml.dump
else: # serializer == "json"
serialize = json.dumps
if path:
msg = "The config file path must be an absolute path"
assert path == os.path.abspath(path), msg
msg = "The config file path must be rooted in {0}"
msg = msg.format(self.root)
assert path[: len(str(self.root))] == str(self.root), msg
self.config_filename = path
else:
self.config_filename = os.path.join(self.directory, "install.conf.yaml")
self.config = config
with open(self.config_filename, "w") as file:
file.write(serialize(config))
return self.config_filename
@pytest.fixture
def dotfiles(root):
"""Create a dotfiles directory."""
yield Dotfiles(root)
@pytest.fixture
def run_dotbot(dotfiles):
"""Run dotbot.
When calling `runner()`, only CLI arguments need to be specified.
If the keyword-only argument *custom* is True
then the CLI arguments will not be modified,
and the caller will be responsible for all CLI arguments.
"""
def runner(*argv, **kwargs):
argv = ["dotbot"] + list(argv)
if kwargs.get("custom", False) is not True:
argv.extend(["-c", dotfiles.config_filename])
with mock.patch("sys.argv", argv):
dotbot.cli.main()
yield runner
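A minimal sketch of a test built on the fixtures above, shown here only for illustration; the real tests live in the modules that follow and use exactly this pattern:

def test_sketch_create(home, dotfiles, run_dotbot):  # hypothetical test, not part of the suite
    dotfiles.write_config([{"create": ["~/example"]}])
    run_dotbot()
    assert os.path.isdir(os.path.join(home, "example"))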

View file

@ -1,27 +0,0 @@
"""Test that a plugin can be loaded by directory.
This file is copied to a location with the name "directory.py",
and is then loaded from within the `test_cli.py` code.
"""
import os.path
import dotbot
class Directory(dotbot.Plugin):
def can_handle(self, directive):
return directive == "plugin_directory"
def handle(self, directive, data):
self._log.debug("Attempting to get options from Context")
options = self._context.options()
if len(options.plugin_dirs) != 1:
self._log.debug(
"Context.options.plugins length is %i, expected 1" % len(options.plugins)
)
return False
with open(os.path.abspath(os.path.expanduser("~/flag")), "w") as file:
file.write("directory plugin loading works")
return True

View file

@ -1,32 +0,0 @@
"""Test that a plugin can be loaded by filename.
This file is copied to a location with the name "file.py",
and is then loaded from within the `test_cli.py` code.
"""
import os.path
import dotbot
class File(dotbot.Plugin):
def can_handle(self, directive):
return directive == "plugin_file"
def handle(self, directive, data):
self._log.debug("Attempting to get options from Context")
options = self._context.options()
if len(options.plugins) != 1:
self._log.debug(
"Context.options.plugins length is %i, expected 1" % len(options.plugins)
)
return False
if not options.plugins[0].endswith("file.py"):
self._log.debug(
"Context.options.plugins[0] is %s, expected end with file.py" % options.plugins[0]
)
return False
with open(os.path.abspath(os.path.expanduser("~/flag")), "w") as file:
file.write("file plugin loading works")
return True

View file

@ -1,45 +0,0 @@
import os
import shutil
import subprocess
import pytest
@pytest.mark.skipif(
"sys.platform[:5] == 'win32'",
reason="The hybrid sh/Python dotbot script doesn't run on Windows platforms",
)
@pytest.mark.parametrize("python_name", (None, "python", "python3"))
def test_find_python_executable(python_name, home, dotfiles):
"""Verify that the sh/Python hybrid dotbot executable can find Python."""
dotfiles.write_config([])
dotbot_executable = os.path.join(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "bin", "dotbot"
)
# Create a link to sh.
tmp_bin = os.path.join(home, "tmp_bin")
os.makedirs(tmp_bin)
sh_path = shutil.which("sh")
os.symlink(sh_path, os.path.join(tmp_bin, "sh"))
if python_name:
with open(os.path.join(tmp_bin, python_name), "w") as file:
file.write("#!" + tmp_bin + "/sh\n")
file.write("exit 0\n")
os.chmod(os.path.join(tmp_bin, python_name), 0o777)
env = dict(os.environ)
env["PATH"] = tmp_bin
if python_name:
subprocess.check_call(
[dotbot_executable, "-c", dotfiles.config_filename],
env=env,
)
else:
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(
[dotbot_executable, "-c", dotfiles.config_filename],
env=env,
)

View file

@ -1,136 +0,0 @@
import os
import sys
import pytest
def test_clean_default(root, home, dotfiles, run_dotbot):
"""Verify clean uses default unless overridden."""
os.symlink(os.path.join(root, "nowhere"), os.path.join(home, ".g"))
dotfiles.write_config(
[
{
"clean": {
"~/nonexistent": {"force": True},
"~/": None,
},
}
]
)
run_dotbot()
assert not os.path.isdir(os.path.join(home, "nonexistent"))
assert os.path.islink(os.path.join(home, ".g"))
def test_clean_environment_variable_expansion(home, dotfiles, run_dotbot):
"""Verify clean expands environment variables."""
os.symlink(os.path.join(dotfiles.directory, "f"), os.path.join(home, ".f"))
variable = "$HOME"
if sys.platform[:5] == "win32":
variable = "$USERPROFILE"
dotfiles.write_config([{"clean": [variable]}])
run_dotbot()
assert not os.path.islink(os.path.join(home, ".f"))
def test_clean_missing(home, dotfiles, run_dotbot):
"""Verify clean deletes links to missing files."""
dotfiles.write("f")
os.symlink(os.path.join(dotfiles.directory, "f"), os.path.join(home, ".f"))
os.symlink(os.path.join(dotfiles.directory, "g"), os.path.join(home, ".g"))
dotfiles.write_config([{"clean": ["~"]}])
run_dotbot()
assert os.path.islink(os.path.join(home, ".f"))
assert not os.path.islink(os.path.join(home, ".g"))
def test_clean_nonexistent(home, dotfiles, run_dotbot):
"""Verify clean ignores nonexistent directories."""
dotfiles.write_config([{"clean": ["~", "~/fake"]}])
run_dotbot() # Nonexistent directories should not raise exceptions.
assert not os.path.isdir(os.path.join(home, "fake"))
def test_clean_outside_force(root, home, dotfiles, run_dotbot):
"""Verify clean forced to remove files linking outside dotfiles directory."""
os.symlink(os.path.join(root, "nowhere"), os.path.join(home, ".g"))
dotfiles.write_config([{"clean": {"~/": {"force": True}}}])
run_dotbot()
assert not os.path.islink(os.path.join(home, ".g"))
def test_clean_outside(root, home, dotfiles, run_dotbot):
"""Verify clean ignores files linking outside dotfiles directory."""
os.symlink(os.path.join(dotfiles.directory, "f"), os.path.join(home, ".f"))
os.symlink(os.path.join(home, "g"), os.path.join(home, ".g"))
dotfiles.write_config([{"clean": ["~"]}])
run_dotbot()
assert not os.path.islink(os.path.join(home, ".f"))
assert os.path.islink(os.path.join(home, ".g"))
def test_clean_recursive_1(root, home, dotfiles, run_dotbot):
"""Verify clean respects when the recursive directive is off (default)."""
os.makedirs(os.path.join(home, "a", "b"))
os.symlink(os.path.join(root, "nowhere"), os.path.join(home, "c"))
os.symlink(os.path.join(root, "nowhere"), os.path.join(home, "a", "d"))
os.symlink(os.path.join(root, "nowhere"), os.path.join(home, "a", "b", "e"))
dotfiles.write_config([{"clean": {"~": {"force": True}}}])
run_dotbot()
assert not os.path.islink(os.path.join(home, "c"))
assert os.path.islink(os.path.join(home, "a", "d"))
assert os.path.islink(os.path.join(home, "a", "b", "e"))
def test_clean_recursive_2(root, home, dotfiles, run_dotbot):
"""Verify clean respects when the recursive directive is on."""
os.makedirs(os.path.join(home, "a", "b"))
os.symlink(os.path.join(root, "nowhere"), os.path.join(home, "c"))
os.symlink(os.path.join(root, "nowhere"), os.path.join(home, "a", "d"))
os.symlink(os.path.join(root, "nowhere"), os.path.join(home, "a", "b", "e"))
dotfiles.write_config([{"clean": {"~": {"force": True, "recursive": True}}}])
run_dotbot()
assert not os.path.islink(os.path.join(home, "c"))
assert not os.path.islink(os.path.join(home, "a", "d"))
assert not os.path.islink(os.path.join(home, "a", "b", "e"))
def test_clean_defaults_1(root, home, dotfiles, run_dotbot):
"""Verify that clean doesn't erase non-dotfiles links by default."""
os.symlink(os.path.join(root, "nowhere"), os.path.join(home, ".g"))
dotfiles.write_config([{"clean": ["~"]}])
run_dotbot()
assert os.path.islink(os.path.join(home, ".g"))
def test_clean_defaults_2(root, home, dotfiles, run_dotbot):
"""Verify that explicit clean defaults override the implicit default."""
os.symlink(os.path.join(root, "nowhere"), os.path.join(home, ".g"))
dotfiles.write_config(
[
{"defaults": {"clean": {"force": True}}},
{"clean": ["~"]},
]
)
run_dotbot()
assert not os.path.islink(os.path.join(home, ".g"))

View file

@ -1,172 +0,0 @@
import os
import shutil
import pytest
def test_except_create(capfd, home, dotfiles, run_dotbot):
"""Verify that `--except` works as intended."""
dotfiles.write_config(
[
{"create": ["~/a"]},
{
"shell": [
{"command": "echo success", "stdout": True},
]
},
]
)
run_dotbot("--except", "create")
assert not os.path.exists(os.path.join(home, "a"))
stdout = capfd.readouterr().out.splitlines()
assert any(line.startswith("success") for line in stdout)
def test_except_shell(capfd, home, dotfiles, run_dotbot):
"""Verify that `--except` works as intended."""
dotfiles.write_config(
[
{"create": ["~/a"]},
{
"shell": [
{"command": "echo failure", "stdout": True},
]
},
]
)
run_dotbot("--except", "shell")
assert os.path.exists(os.path.join(home, "a"))
stdout = capfd.readouterr().out.splitlines()
assert not any(line.startswith("failure") for line in stdout)
def test_except_multiples(capfd, home, dotfiles, run_dotbot):
"""Verify that `--except` works with multiple exceptions."""
dotfiles.write_config(
[
{"create": ["~/a"]},
{
"shell": [
{"command": "echo failure", "stdout": True},
]
},
]
)
run_dotbot("--except", "create", "shell")
assert not os.path.exists(os.path.join(home, "a"))
stdout = capfd.readouterr().out.splitlines()
assert not any(line.startswith("failure") for line in stdout)
def test_exit_on_failure(capfd, home, dotfiles, run_dotbot):
"""Verify that processing can halt immediately on failures."""
dotfiles.write_config(
[
{"create": ["~/a"]},
{"shell": ["this_is_not_a_command"]},
{"create": ["~/b"]},
]
)
with pytest.raises(SystemExit):
run_dotbot("-x")
assert os.path.isdir(os.path.join(home, "a"))
assert not os.path.isdir(os.path.join(home, "b"))
def test_only(capfd, home, dotfiles, run_dotbot):
"""Verify that `--only` works as intended."""
dotfiles.write_config(
[
{"create": ["~/a"]},
{"shell": [{"command": "echo success", "stdout": True}]},
]
)
run_dotbot("--only", "shell")
assert not os.path.exists(os.path.join(home, "a"))
stdout = capfd.readouterr().out.splitlines()
assert any(line.startswith("success") for line in stdout)
def test_only_with_defaults(capfd, home, dotfiles, run_dotbot):
"""Verify that `--only` does not suppress defaults."""
dotfiles.write_config(
[
{"defaults": {"shell": {"stdout": True}}},
{"create": ["~/a"]},
{"shell": [{"command": "echo success"}]},
]
)
run_dotbot("--only", "shell")
assert not os.path.exists(os.path.join(home, "a"))
stdout = capfd.readouterr().out.splitlines()
assert any(line.startswith("success") for line in stdout)
def test_only_with_multiples(capfd, home, dotfiles, run_dotbot):
"""Verify that `--only` works as intended."""
dotfiles.write_config(
[
{"create": ["~/a"]},
{"shell": [{"command": "echo success", "stdout": True}]},
{"link": ["~/.f"]},
]
)
run_dotbot("--only", "create", "shell")
assert os.path.isdir(os.path.join(home, "a"))
stdout = capfd.readouterr().out.splitlines()
assert any(line.startswith("success") for line in stdout)
assert not os.path.exists(os.path.join(home, ".f"))
def test_plugin_loading_file(home, dotfiles, run_dotbot):
"""Verify that plugins can be loaded by file."""
plugin_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "dotbot_plugin_file.py")
shutil.copy(plugin_file, os.path.join(dotfiles.directory, "file.py"))
dotfiles.write_config([{"plugin_file": "~"}])
run_dotbot("--plugin", os.path.join(dotfiles.directory, "file.py"))
with open(os.path.join(home, "flag"), "r") as file:
assert file.read() == "file plugin loading works"
def test_plugin_loading_directory(home, dotfiles, run_dotbot):
"""Verify that plugins can be loaded from a directory."""
dotfiles.makedirs("plugins")
plugin_file = os.path.join(
os.path.dirname(os.path.abspath(__file__)), "dotbot_plugin_directory.py"
)
shutil.copy(plugin_file, os.path.join(dotfiles.directory, "plugins", "directory.py"))
dotfiles.write_config([{"plugin_directory": "~"}])
run_dotbot("--plugin-dir", os.path.join(dotfiles.directory, "plugins"))
with open(os.path.join(home, "flag"), "r") as file:
assert file.read() == "directory plugin loading works"
def test_disable_builtin_plugins(home, dotfiles, run_dotbot):
"""Verify that builtin plugins can be disabled."""
dotfiles.write("f", "apple")
dotfiles.write_config([{"link": {"~/.f": "f"}}])
# The link directive will be unhandled so dotbot will raise SystemExit.
with pytest.raises(SystemExit):
run_dotbot("--disable-built-in-plugins")
assert not os.path.exists(os.path.join(home, ".f"))

View file

@ -1,36 +0,0 @@
import json
import os
def test_config_blank(dotfiles, run_dotbot):
"""Verify blank configs work."""
dotfiles.write_config([])
run_dotbot()
def test_config_empty(dotfiles, run_dotbot):
"""Verify empty configs work."""
dotfiles.write("config.yaml", "")
run_dotbot("-c", os.path.join(dotfiles.directory, "config.yaml"), custom=True)
def test_json(home, dotfiles, run_dotbot):
"""Verify JSON configs work."""
document = json.dumps([{"create": ["~/d"]}])
dotfiles.write("config.json", document)
run_dotbot("-c", os.path.join(dotfiles.directory, "config.json"), custom=True)
assert os.path.isdir(os.path.join(home, "d"))
def test_json_tabs(home, dotfiles, run_dotbot):
"""Verify JSON configs with tabs work."""
document = """[\n\t{\n\t\t"create": ["~/d"]\n\t}\n]"""
dotfiles.write("config.json", document)
run_dotbot("-c", os.path.join(dotfiles.directory, "config.json"), custom=True)
assert os.path.isdir(os.path.join(home, "d"))

View file

@ -1,55 +0,0 @@
import os
import stat
import pytest
@pytest.mark.parametrize("directory", ("~/a", "~/b/c"))
def test_directory_creation(home, directory, dotfiles, run_dotbot):
"""Test creating directories, including nested directories."""
dotfiles.write_config([{"create": [directory]}])
run_dotbot()
expanded_directory = os.path.abspath(os.path.expanduser(directory))
assert os.path.isdir(expanded_directory)
assert os.stat(expanded_directory).st_mode & 0o777 == 0o777
def test_default_mode(home, dotfiles, run_dotbot):
"""Test creating a directory with an explicit default mode.
Note: `os.chmod()` on Windows only supports changing write permissions.
Therefore, this test is restricted to testing read-only access.
"""
read_only = 0o777 - stat.S_IWUSR - stat.S_IWGRP - stat.S_IWOTH
config = [{"defaults": {"create": {"mode": read_only}}}, {"create": ["~/a"]}]
dotfiles.write_config(config)
run_dotbot()
directory = os.path.abspath(os.path.expanduser("~/a"))
assert os.stat(directory).st_mode & stat.S_IWUSR == 0
assert os.stat(directory).st_mode & stat.S_IWGRP == 0
assert os.stat(directory).st_mode & stat.S_IWOTH == 0
def test_default_mode_override(home, dotfiles, run_dotbot):
"""Test creating a directory that overrides an explicit default mode.
Note: `os.chmod()` on Windows only supports changing write permissions.
Therefore, this test is restricted to testing read-only access.
"""
read_only = 0o777 - stat.S_IWUSR - stat.S_IWGRP - stat.S_IWOTH
config = [
{"defaults": {"create": {"mode": read_only}}},
{"create": {"~/a": {"mode": 0o777}}},
]
dotfiles.write_config(config)
run_dotbot()
directory = os.path.abspath(os.path.expanduser("~/a"))
assert os.stat(directory).st_mode & stat.S_IWUSR == stat.S_IWUSR
assert os.stat(directory).st_mode & stat.S_IWGRP == stat.S_IWGRP
assert os.stat(directory).st_mode & stat.S_IWOTH == stat.S_IWOTH

View file

@ -1,967 +0,0 @@
import os
import sys
import pytest
def test_link_canonicalization(home, dotfiles, run_dotbot):
"""Verify links to symlinked destinations are canonical.
"Canonical", here, means that dotbot does not create symlinks
that point to intermediary symlinks.
"""
dotfiles.write("f", "apple")
dotfiles.write_config([{"link": {"~/.f": {"path": "f"}}}])
# Point to the config file in a symlinked dotfiles directory.
dotfiles_symlink = os.path.join(home, "dotfiles-symlink")
os.symlink(dotfiles.directory, dotfiles_symlink)
config_file = os.path.join(dotfiles_symlink, os.path.basename(dotfiles.config_filename))
run_dotbot("-c", config_file, custom=True)
expected = os.path.join(dotfiles.directory, "f")
actual = os.readlink(os.path.abspath(os.path.expanduser("~/.f")))
if sys.platform[:5] == "win32" and actual.startswith("\\\\?\\"):
actual = actual[4:]
assert expected == actual
@pytest.mark.parametrize("dst", ("~/.f", "~/f"))
@pytest.mark.parametrize("include_force", (True, False))
def test_link_default_source(root, home, dst, include_force, dotfiles, run_dotbot):
"""Verify that default sources are calculated correctly.
This test includes verifying files with and without leading periods,
as well as verifying handling of None dict values.
"""
dotfiles.write("f", "apple")
config = [
{
"link": {
dst: {"force": False} if include_force else None,
}
}
]
dotfiles.write_config(config)
run_dotbot()
with open(os.path.abspath(os.path.expanduser(dst)), "r") as file:
assert file.read() == "apple"
def test_link_environment_user_expansion_target(home, dotfiles, run_dotbot):
"""Verify link expands user in target."""
src = "~/f"
target = "~/g"
with open(os.path.abspath(os.path.expanduser(src)), "w") as file:
file.write("apple")
dotfiles.write_config([{"link": {target: src}}])
run_dotbot()
with open(os.path.abspath(os.path.expanduser(target)), "r") as file:
assert file.read() == "apple"
def test_link_environment_variable_expansion_source(monkeypatch, root, home, dotfiles, run_dotbot):
"""Verify link expands environment variables in source."""
monkeypatch.setenv("APPLE", "h")
target = "~/.i"
src = "$APPLE"
dotfiles.write("h", "grape")
dotfiles.write_config([{"link": {target: src}}])
run_dotbot()
with open(os.path.abspath(os.path.expanduser(target)), "r") as file:
assert file.read() == "grape"
def test_link_environment_variable_expansion_source_extended(
monkeypatch, root, home, dotfiles, run_dotbot
):
"""Verify link expands environment variables in extended config syntax."""
monkeypatch.setenv("APPLE", "h")
target = "~/.i"
src = "$APPLE"
dotfiles.write("h", "grape")
dotfiles.write_config([{"link": {target: {"path": src, "relink": True}}}])
run_dotbot()
with open(os.path.abspath(os.path.expanduser(target)), "r") as file:
assert file.read() == "grape"
def test_link_environment_variable_expansion_target(monkeypatch, root, home, dotfiles, run_dotbot):
"""Verify link expands environment variables in target.
If the variable doesn't exist, the "variable" must not be replaced.
"""
monkeypatch.setenv("ORANGE", ".config")
monkeypatch.setenv("BANANA", "g")
monkeypatch.delenv("PEAR", raising=False)
dotfiles.write("f", "apple")
dotfiles.write("h", "grape")
config = [
{
"link": {
"~/${ORANGE}/$BANANA": {
"path": "f",
"create": True,
},
"~/$PEAR": "h",
}
}
]
dotfiles.write_config(config)
run_dotbot()
with open(os.path.join(home, ".config", "g"), "r") as file:
assert file.read() == "apple"
with open(os.path.join(home, "$PEAR"), "r") as file:
assert file.read() == "grape"
def test_link_environment_variable_unset(monkeypatch, root, home, dotfiles, run_dotbot):
"""Verify link leaves unset environment variables."""
monkeypatch.delenv("ORANGE", raising=False)
dotfiles.write("$ORANGE", "apple")
dotfiles.write_config([{"link": {"~/f": "$ORANGE"}}])
run_dotbot()
with open(os.path.join(home, "f"), "r") as file:
assert file.read() == "apple"
def test_link_force_leaves_when_nonexistent(root, home, dotfiles, run_dotbot):
"""Verify force doesn't erase sources when targets are nonexistent."""
os.mkdir(os.path.join(home, "dir"))
open(os.path.join(home, "file"), "a").close()
config = [
{
"link": {
"~/dir": {"path": "dir", "force": True},
"~/file": {"path": "file", "force": True},
}
}
]
dotfiles.write_config(config)
with pytest.raises(SystemExit):
run_dotbot()
assert os.path.isdir(os.path.join(home, "dir"))
assert os.path.isfile(os.path.join(home, "file"))
def test_link_force_overwrite_symlink(home, dotfiles, run_dotbot):
"""Verify force overwrites a symlinked directory."""
os.mkdir(os.path.join(home, "dir"))
dotfiles.write("dir/f")
os.symlink(home, os.path.join(home, ".dir"))
config = [{"link": {"~/.dir": {"path": "dir", "force": True}}}]
dotfiles.write_config(config)
run_dotbot()
assert os.path.isfile(os.path.join(home, ".dir", "f"))
def test_link_glob_1(home, dotfiles, run_dotbot):
"""Verify globbing works."""
dotfiles.write("bin/a", "apple")
dotfiles.write("bin/b", "banana")
dotfiles.write("bin/c", "cherry")
dotfiles.write_config(
[
{"defaults": {"link": {"glob": True, "create": True}}},
{"link": {"~/bin": "bin/*"}},
]
)
run_dotbot()
with open(os.path.join(home, "bin", "a")) as file:
assert file.read() == "apple"
with open(os.path.join(home, "bin", "b")) as file:
assert file.read() == "banana"
with open(os.path.join(home, "bin", "c")) as file:
assert file.read() == "cherry"
def test_link_glob_2(home, dotfiles, run_dotbot):
"""Verify globbing works with a trailing slash in the source."""
dotfiles.write("bin/a", "apple")
dotfiles.write("bin/b", "banana")
dotfiles.write("bin/c", "cherry")
dotfiles.write_config(
[
{"defaults": {"link": {"glob": True, "create": True}}},
{"link": {"~/bin/": "bin/*"}},
]
)
run_dotbot()
with open(os.path.join(home, "bin", "a")) as file:
assert file.read() == "apple"
with open(os.path.join(home, "bin", "b")) as file:
assert file.read() == "banana"
with open(os.path.join(home, "bin", "c")) as file:
assert file.read() == "cherry"
def test_link_glob_3(home, dotfiles, run_dotbot):
"""Verify globbing works with hidden ("period-prefixed") files."""
dotfiles.write("bin/.a", "dot-apple")
dotfiles.write("bin/.b", "dot-banana")
dotfiles.write("bin/.c", "dot-cherry")
dotfiles.write_config(
[
{"defaults": {"link": {"glob": True, "create": True}}},
{"link": {"~/bin/": "bin/.*"}},
]
)
run_dotbot()
with open(os.path.join(home, "bin", ".a")) as file:
assert file.read() == "dot-apple"
with open(os.path.join(home, "bin", ".b")) as file:
assert file.read() == "dot-banana"
with open(os.path.join(home, "bin", ".c")) as file:
assert file.read() == "dot-cherry"
def test_link_glob_4(home, dotfiles, run_dotbot):
"""Verify globbing works at the root of the home and dotfiles directories."""
dotfiles.write(".a", "dot-apple")
dotfiles.write(".b", "dot-banana")
dotfiles.write(".c", "dot-cherry")
dotfiles.write_config(
[
{
"link": {
"~": {
"path": ".*",
"glob": True,
},
},
}
]
)
run_dotbot()
with open(os.path.join(home, ".a")) as file:
assert file.read() == "dot-apple"
with open(os.path.join(home, ".b")) as file:
assert file.read() == "dot-banana"
with open(os.path.join(home, ".c")) as file:
assert file.read() == "dot-cherry"
@pytest.mark.parametrize("path", ("foo", "foo/"))
def test_link_glob_ignore_no_glob_chars(path, home, dotfiles, run_dotbot):
"""Verify ambiguous link globbing fails."""
dotfiles.makedirs("foo")
dotfiles.write_config(
[
{
"link": {
"~/foo/": {
"path": path,
"glob": True,
}
}
}
]
)
run_dotbot()
assert os.path.islink(os.path.join(home, "foo"))
assert os.path.exists(os.path.join(home, "foo"))
def test_link_glob_exclude_1(home, dotfiles, run_dotbot):
"""Verify link globbing with an explicit exclusion."""
dotfiles.write("config/foo/a", "apple")
dotfiles.write("config/bar/b", "banana")
dotfiles.write("config/bar/c", "cherry")
dotfiles.write("config/baz/d", "donut")
dotfiles.write_config(
[
{
"defaults": {
"link": {
"glob": True,
"create": True,
},
},
},
{
"link": {
"~/.config/": {
"path": "config/*",
"exclude": ["config/baz"],
},
},
},
]
)
run_dotbot()
assert not os.path.exists(os.path.join(home, ".config", "baz"))
assert not os.path.islink(os.path.join(home, ".config"))
assert os.path.islink(os.path.join(home, ".config", "foo"))
assert os.path.islink(os.path.join(home, ".config", "bar"))
with open(os.path.join(home, ".config", "foo", "a")) as file:
assert file.read() == "apple"
with open(os.path.join(home, ".config", "bar", "b")) as file:
assert file.read() == "banana"
with open(os.path.join(home, ".config", "bar", "c")) as file:
assert file.read() == "cherry"
def test_link_glob_exclude_2(home, dotfiles, run_dotbot):
"""Verify deep link globbing with a globbed exclusion."""
dotfiles.write("config/foo/a", "apple")
dotfiles.write("config/bar/b", "banana")
dotfiles.write("config/bar/c", "cherry")
dotfiles.write("config/baz/d", "donut")
dotfiles.write("config/baz/buzz/e", "egg")
dotfiles.write_config(
[
{
"defaults": {
"link": {
"glob": True,
"create": True,
},
},
},
{
"link": {
"~/.config/": {
"path": "config/*/*",
"exclude": ["config/baz/*"],
},
},
},
]
)
run_dotbot()
assert not os.path.exists(os.path.join(home, ".config", "baz"))
assert not os.path.islink(os.path.join(home, ".config"))
assert not os.path.islink(os.path.join(home, ".config", "foo"))
assert not os.path.islink(os.path.join(home, ".config", "bar"))
assert os.path.islink(os.path.join(home, ".config", "foo", "a"))
with open(os.path.join(home, ".config", "foo", "a")) as file:
assert file.read() == "apple"
with open(os.path.join(home, ".config", "bar", "b")) as file:
assert file.read() == "banana"
with open(os.path.join(home, ".config", "bar", "c")) as file:
assert file.read() == "cherry"
def test_link_glob_exclude_3(home, dotfiles, run_dotbot):
"""Verify deep link globbing with an explicit exclusion."""
dotfiles.write("config/foo/a", "apple")
dotfiles.write("config/bar/b", "banana")
dotfiles.write("config/bar/c", "cherry")
dotfiles.write("config/baz/d", "donut")
dotfiles.write("config/baz/buzz/e", "egg")
dotfiles.write("config/baz/bizz/g", "grape")
dotfiles.write_config(
[
{
"defaults": {
"link": {
"glob": True,
"create": True,
},
},
},
{
"link": {
"~/.config/": {
"path": "config/*/*",
"exclude": ["config/baz/buzz"],
},
},
},
]
)
run_dotbot()
assert not os.path.exists(os.path.join(home, ".config", "baz", "buzz"))
assert not os.path.islink(os.path.join(home, ".config"))
assert not os.path.islink(os.path.join(home, ".config", "foo"))
assert not os.path.islink(os.path.join(home, ".config", "bar"))
assert not os.path.islink(os.path.join(home, ".config", "baz"))
assert os.path.islink(os.path.join(home, ".config", "baz", "bizz"))
assert os.path.islink(os.path.join(home, ".config", "foo", "a"))
with open(os.path.join(home, ".config", "foo", "a")) as file:
assert file.read() == "apple"
with open(os.path.join(home, ".config", "bar", "b")) as file:
assert file.read() == "banana"
with open(os.path.join(home, ".config", "bar", "c")) as file:
assert file.read() == "cherry"
with open(os.path.join(home, ".config", "baz", "d")) as file:
assert file.read() == "donut"
with open(os.path.join(home, ".config", "baz", "bizz", "g")) as file:
assert file.read() == "grape"
def test_link_glob_exclude_4(home, dotfiles, run_dotbot):
"""Verify deep link globbing with multiple globbed exclusions."""
dotfiles.write("config/foo/a", "apple")
dotfiles.write("config/bar/b", "banana")
dotfiles.write("config/bar/c", "cherry")
dotfiles.write("config/baz/d", "donut")
dotfiles.write("config/baz/buzz/e", "egg")
dotfiles.write("config/baz/bizz/g", "grape")
dotfiles.write("config/fiz/f", "fig")
dotfiles.write_config(
[
{
"defaults": {
"link": {
"glob": True,
"create": True,
},
},
},
{
"link": {
"~/.config/": {
"path": "config/*/*",
"exclude": ["config/baz/*", "config/fiz/*"],
},
},
},
]
)
run_dotbot()
assert not os.path.exists(os.path.join(home, ".config", "baz"))
assert not os.path.exists(os.path.join(home, ".config", "fiz"))
assert not os.path.islink(os.path.join(home, ".config"))
assert not os.path.islink(os.path.join(home, ".config", "foo"))
assert not os.path.islink(os.path.join(home, ".config", "bar"))
assert os.path.islink(os.path.join(home, ".config", "foo", "a"))
with open(os.path.join(home, ".config", "foo", "a")) as file:
assert file.read() == "apple"
with open(os.path.join(home, ".config", "bar", "b")) as file:
assert file.read() == "banana"
with open(os.path.join(home, ".config", "bar", "c")) as file:
assert file.read() == "cherry"
def test_link_glob_multi_star(home, dotfiles, run_dotbot):
"""Verify link globbing with deep-nested stars."""
dotfiles.write("config/foo/a", "apple")
dotfiles.write("config/bar/b", "banana")
dotfiles.write("config/bar/c", "cherry")
dotfiles.write_config(
[
{"defaults": {"link": {"glob": True, "create": True}}},
{"link": {"~/.config/": "config/*/*"}},
]
)
run_dotbot()
assert not os.path.islink(os.path.join(home, ".config"))
assert not os.path.islink(os.path.join(home, ".config", "foo"))
assert not os.path.islink(os.path.join(home, ".config", "bar"))
assert os.path.islink(os.path.join(home, ".config", "foo", "a"))
with open(os.path.join(home, ".config", "foo", "a")) as file:
assert file.read() == "apple"
with open(os.path.join(home, ".config", "bar", "b")) as file:
assert file.read() == "banana"
with open(os.path.join(home, ".config", "bar", "c")) as file:
assert file.read() == "cherry"
@pytest.mark.parametrize(
"pattern, expect_file",
(
("conf/*", lambda fruit: fruit),
("conf/.*", lambda fruit: "." + fruit),
("conf/[bc]*", lambda fruit: fruit if fruit[0] in "bc" else None),
("conf/*e", lambda fruit: fruit if fruit[-1] == "e" else None),
("conf/??r*", lambda fruit: fruit if fruit[2] == "r" else None),
),
)
def test_link_glob_patterns(pattern, expect_file, home, dotfiles, run_dotbot):
"""Verify link glob pattern matching."""
fruits = ["apple", "apricot", "banana", "cherry", "currant", "cantalope"]
[dotfiles.write("conf/" + fruit, fruit) for fruit in fruits]
[dotfiles.write("conf/." + fruit, "dot-" + fruit) for fruit in fruits]
dotfiles.write_config(
[
{"defaults": {"link": {"glob": True, "create": True}}},
{"link": {"~/globtest": pattern}},
]
)
run_dotbot()
for fruit in fruits:
if expect_file(fruit) is None:
assert not os.path.exists(os.path.join(home, "globtest", fruit))
assert not os.path.exists(os.path.join(home, "globtest", "." + fruit))
elif "." in expect_file(fruit):
assert not os.path.islink(os.path.join(home, "globtest", fruit))
assert os.path.islink(os.path.join(home, "globtest", "." + fruit))
else: # "." not in expect_file(fruit)
assert os.path.islink(os.path.join(home, "globtest", fruit))
assert not os.path.islink(os.path.join(home, "globtest", "." + fruit))
def test_link_glob_recursive(home, dotfiles, run_dotbot):
"""Verify recursive link globbing and exclusions."""
dotfiles.write("config/foo/bar/a", "apple")
dotfiles.write("config/foo/bar/b", "banana")
dotfiles.write("config/foo/bar/c", "cherry")
dotfiles.write_config(
[
{"defaults": {"link": {"glob": True, "create": True}}},
{"link": {"~/.config/": {"path": "config/**", "exclude": ["config/**/b"]}}},
]
)
run_dotbot()
assert not os.path.islink(os.path.join(home, ".config"))
assert not os.path.islink(os.path.join(home, ".config", "foo"))
assert not os.path.islink(os.path.join(home, ".config", "foo", "bar"))
assert os.path.islink(os.path.join(home, ".config", "foo", "bar", "a"))
assert not os.path.exists(os.path.join(home, ".config", "foo", "bar", "b"))
assert os.path.islink(os.path.join(home, ".config", "foo", "bar", "c"))
with open(os.path.join(home, ".config", "foo", "bar", "a")) as file:
assert file.read() == "apple"
with open(os.path.join(home, ".config", "foo", "bar", "c")) as file:
assert file.read() == "cherry"
def test_link_glob_no_match(home, dotfiles, run_dotbot):
"""Verify that a glob with no match doesn't raise an error."""
dotfiles.makedirs("foo")
dotfiles.write_config(
[
{"defaults": {"link": {"glob": True, "create": True}}},
{"link": {"~/.config/foo": "foo/*"}},
]
)
run_dotbot()
def test_link_glob_single_match(home, dotfiles, run_dotbot):
"""Verify linking works even when glob matches exactly one file."""
# regression test for https://github.com/anishathalye/dotbot/issues/282
dotfiles.write("foo/a", "apple")
dotfiles.write_config(
[
{"defaults": {"link": {"glob": True, "create": True}}},
{"link": {"~/.config/foo": "foo/*"}},
]
)
run_dotbot()
assert not os.path.islink(os.path.join(home, ".config"))
assert not os.path.islink(os.path.join(home, ".config", "foo"))
assert os.path.islink(os.path.join(home, ".config", "foo", "a"))
with open(os.path.join(home, ".config", "foo", "a")) as file:
assert file.read() == "apple"
@pytest.mark.skipif(
"sys.platform[:5] == 'win32'",
reason="These if commands won't run on Windows",
)
def test_link_if(home, dotfiles, run_dotbot):
"""Verify 'if' directives are checked when linking."""
os.mkdir(os.path.join(home, "d"))
dotfiles.write("f", "apple")
dotfiles.write_config(
[
{
"link": {
"~/.f": {"path": "f", "if": "true"},
"~/.g": {"path": "f", "if": "false"},
"~/.h": {"path": "f", "if": "[ -d ~/d ]"},
"~/.i": {"path": "f", "if": "badcommand"},
},
}
]
)
run_dotbot()
assert not os.path.exists(os.path.join(home, ".g"))
assert not os.path.exists(os.path.join(home, ".i"))
with open(os.path.join(home, ".f")) as file:
assert file.read() == "apple"
with open(os.path.join(home, ".h")) as file:
assert file.read() == "apple"
@pytest.mark.skipif(
"sys.platform[:5] == 'win32'",
reason="These if commands won't run on Windows.",
)
def test_link_if_defaults(home, dotfiles, run_dotbot):
"""Verify 'if' directive defaults are checked when linking."""
os.mkdir(os.path.join(home, "d"))
dotfiles.write("f", "apple")
dotfiles.write_config(
[
{
"defaults": {
"link": {
"if": "false",
},
},
},
{
"link": {
"~/.j": {"path": "f", "if": "true"},
"~/.k": {"path": "f"}, # default is false
},
},
]
)
run_dotbot()
assert not os.path.exists(os.path.join(home, ".k"))
with open(os.path.join(home, ".j")) as file:
assert file.read() == "apple"
@pytest.mark.skipif(
"sys.platform[:5] != 'win32'",
reason="These if commands only run on Windows.",
)
def test_link_if_windows(home, dotfiles, run_dotbot):
"""Verify 'if' directives are checked when linking (Windows only)."""
os.mkdir(os.path.join(home, "d"))
dotfiles.write("f", "apple")
dotfiles.write_config(
[
{
"link": {
"~/.f": {"path": "f", "if": 'cmd /c "exit 0"'},
"~/.g": {"path": "f", "if": 'cmd /c "exit 1"'},
"~/.h": {"path": "f", "if": 'cmd /c "dir %USERPROFILE%\\d'},
"~/.i": {"path": "f", "if": 'cmd /c "badcommand"'},
},
}
]
)
run_dotbot()
assert not os.path.exists(os.path.join(home, ".g"))
assert not os.path.exists(os.path.join(home, ".i"))
with open(os.path.join(home, ".f")) as file:
assert file.read() == "apple"
with open(os.path.join(home, ".h")) as file:
assert file.read() == "apple"
@pytest.mark.skipif(
"sys.platform[:5] != 'win32'",
reason="These if commands only run on Windows",
)
def test_link_if_defaults_windows(home, dotfiles, run_dotbot):
"""Verify 'if' directive defaults are checked when linking (Windows only)."""
os.mkdir(os.path.join(home, "d"))
dotfiles.write("f", "apple")
dotfiles.write_config(
[
{
"defaults": {
"link": {
"if": 'cmd /c "exit 1"',
},
},
},
{
"link": {
"~/.j": {"path": "f", "if": 'cmd /c "exit 0"'},
"~/.k": {"path": "f"}, # default is false
},
},
]
)
run_dotbot()
assert not os.path.exists(os.path.join(home, ".k"))
with open(os.path.join(home, ".j")) as file:
assert file.read() == "apple"
@pytest.mark.parametrize("ignore_missing", (True, False))
def test_link_ignore_missing(ignore_missing, home, dotfiles, run_dotbot):
"""Verify link 'ignore_missing' is respected when the target is missing."""
dotfiles.write_config(
[
{
"link": {
"~/missing_link": {
"path": "missing",
"ignore-missing": ignore_missing,
},
},
}
]
)
if ignore_missing:
run_dotbot()
assert os.path.islink(os.path.join(home, "missing_link"))
else:
with pytest.raises(SystemExit):
run_dotbot()
def test_link_leaves_file(home, dotfiles, run_dotbot):
"""Verify relink does not overwrite file."""
dotfiles.write("f", "apple")
with open(os.path.join(home, ".f"), "w") as file:
file.write("grape")
dotfiles.write_config([{"link": {"~/.f": "f"}}])
with pytest.raises(SystemExit):
run_dotbot()
with open(os.path.join(home, ".f"), "r") as file:
assert file.read() == "grape"
@pytest.mark.parametrize("key", ("canonicalize-path", "canonicalize"))
def test_link_no_canonicalize(key, home, dotfiles, run_dotbot):
"""Verify link canonicalization can be disabled."""
dotfiles.write("f", "apple")
dotfiles.write_config([{"defaults": {"link": {key: False}}}, {"link": {"~/.f": {"path": "f"}}}])
os.symlink(
dotfiles.directory,
os.path.join(home, "dotfiles-symlink"),
target_is_directory=True,
)
run_dotbot(
"-c",
os.path.join(home, "dotfiles-symlink", os.path.basename(dotfiles.config_filename)),
custom=True,
)
assert "dotfiles-symlink" in os.readlink(os.path.join(home, ".f"))
def test_link_prefix(home, dotfiles, run_dotbot):
"""Verify link prefixes are prepended."""
dotfiles.write("conf/a", "apple")
dotfiles.write("conf/b", "banana")
dotfiles.write("conf/c", "cherry")
dotfiles.write_config(
[
{
"link": {
"~/": {
"glob": True,
"path": "conf/*",
"prefix": ".",
},
},
}
]
)
run_dotbot()
with open(os.path.join(home, ".a")) as file:
assert file.read() == "apple"
with open(os.path.join(home, ".b")) as file:
assert file.read() == "banana"
with open(os.path.join(home, ".c")) as file:
assert file.read() == "cherry"
def test_link_relative(home, dotfiles, run_dotbot):
"""Test relative linking works."""
dotfiles.write("f", "apple")
dotfiles.write("d/e", "grape")
dotfiles.write_config(
[
{
"link": {
"~/.f": {
"path": "f",
},
"~/.frel": {
"path": "f",
"relative": True,
},
"~/nested/.frel": {
"path": "f",
"relative": True,
"create": True,
},
"~/.d": {
"path": "d",
"relative": True,
},
},
}
]
)
run_dotbot()
f = os.readlink(os.path.join(home, ".f"))
if sys.platform[:5] == "win32" and f.startswith("\\\\?\\"):
f = f[4:]
assert f == os.path.join(dotfiles.directory, "f")
frel = os.readlink(os.path.join(home, ".frel"))
if sys.platform[:5] == "win32" and frel.startswith("\\\\?\\"):
frel = frel[4:]
assert frel == os.path.normpath("../../dotfiles/f")
nested_frel = os.readlink(os.path.join(home, "nested", ".frel"))
if sys.platform[:5] == "win32" and nested_frel.startswith("\\\\?\\"):
nested_frel = nested_frel[4:]
assert nested_frel == os.path.normpath("../../../dotfiles/f")
d = os.readlink(os.path.join(home, ".d"))
if sys.platform[:5] == "win32" and d.startswith("\\\\?\\"):
d = d[4:]
assert d == os.path.normpath("../../dotfiles/d")
with open(os.path.join(home, ".f")) as file:
assert file.read() == "apple"
with open(os.path.join(home, ".frel")) as file:
assert file.read() == "apple"
with open(os.path.join(home, "nested", ".frel")) as file:
assert file.read() == "apple"
with open(os.path.join(home, ".d", "e")) as file:
assert file.read() == "grape"


def test_link_relink_leaves_file(home, dotfiles, run_dotbot):
"""Verify relink does not overwrite file."""
dotfiles.write("f", "apple")
with open(os.path.join(home, ".f"), "w") as file:
file.write("grape")
dotfiles.write_config([{"link": {"~/.f": {"path": "f", "relink": True}}}])
with pytest.raises(SystemExit):
run_dotbot()
with open(os.path.join(home, ".f"), "r") as file:
assert file.read() == "grape"


def test_link_relink_overwrite_symlink(home, dotfiles, run_dotbot):
"""Verify relink overwrites symlinks."""
dotfiles.write("f", "apple")
with open(os.path.join(home, "f"), "w") as file:
file.write("grape")
os.symlink(os.path.join(home, "f"), os.path.join(home, ".f"))
dotfiles.write_config([{"link": {"~/.f": {"path": "f", "relink": True}}}])
run_dotbot()
with open(os.path.join(home, ".f"), "r") as file:
assert file.read() == "apple"


def test_link_relink_relative_leaves_file(home, dotfiles, run_dotbot):
"""Verify relink relative does not incorrectly relink file."""
dotfiles.write("f", "apple")
with open(os.path.join(home, ".f"), "w") as file:
file.write("grape")
config = [
{
"link": {
"~/.folder/f": {
"path": "f",
"create": True,
"relative": True,
},
},
}
]
dotfiles.write_config(config)
run_dotbot()
mtime = os.stat(os.path.join(home, ".folder", "f")).st_mtime
config[0]["link"]["~/.folder/f"]["relink"] = True
dotfiles.write_config(config)
run_dotbot()
new_mtime = os.stat(os.path.join(home, ".folder", "f")).st_mtime
assert mtime == new_mtime


def test_link_defaults_1(home, dotfiles, run_dotbot):
"""Verify that link doesn't overwrite non-dotfiles links by default."""
with open(os.path.join(home, "f"), "w") as file:
file.write("grape")
os.symlink(os.path.join(home, "f"), os.path.join(home, ".f"))
dotfiles.write("f", "apple")
dotfiles.write_config(
[
{
"link": {"~/.f": "f"},
}
]
)
with pytest.raises(SystemExit):
run_dotbot()
with open(os.path.join(home, ".f"), "r") as file:
assert file.read() == "grape"


def test_link_defaults_2(home, dotfiles, run_dotbot):
"""Verify that explicit link defaults override the implicit default."""
with open(os.path.join(home, "f"), "w") as file:
file.write("grape")
os.symlink(os.path.join(home, "f"), os.path.join(home, ".f"))
dotfiles.write("f", "apple")
dotfiles.write_config(
[
{"defaults": {"link": {"relink": True}}},
{"link": {"~/.f": "f"}},
]
)
run_dotbot()
with open(os.path.join(home, ".f"), "r") as file:
assert file.read() == "apple"

View file

@ -1,25 +0,0 @@
import os

import pytest


def test_success(root):
path = os.path.join(root, "abc.txt")
with open(path, "wt") as f:
f.write("hello")
with open(path, "rt") as f:
assert f.read() == "hello"


def test_failure():
with pytest.raises(AssertionError):
open("abc.txt", "w")
with pytest.raises(AssertionError):
open(file="abc.txt", mode="w")
with pytest.raises(AssertionError):
os.mkdir("a")
with pytest.raises(AssertionError):
os.mkdir(path="a")

View file

@ -1,261 +0,0 @@
def test_shell_allow_stdout(capfd, dotfiles, run_dotbot):
"""Verify shell command STDOUT works."""
dotfiles.write_config(
[
{
"shell": [
{
"command": "echo apple",
"stdout": True,
}
],
}
]
)
run_dotbot()
output = capfd.readouterr()
    assert any(line.startswith("apple") for line in output.out.splitlines()), output


def test_shell_cli_verbosity_overrides_1(capfd, dotfiles, run_dotbot):
"""Verify that '-vv' overrides the implicit default stdout=False."""
dotfiles.write_config([{"shell": [{"command": "echo apple"}]}])
run_dotbot("-vv")
lines = capfd.readouterr().out.splitlines()
assert any(line.startswith("apple") for line in lines)


def test_shell_cli_verbosity_overrides_2(capfd, dotfiles, run_dotbot):
"""Verify that '-vv' overrides an explicit stdout=False."""
dotfiles.write_config([{"shell": [{"command": "echo apple", "stdout": False}]}])
run_dotbot("-vv")
lines = capfd.readouterr().out.splitlines()
assert any(line.startswith("apple") for line in lines)


def test_shell_cli_verbosity_overrides_3(capfd, dotfiles, run_dotbot):
"""Verify that '-vv' overrides an explicit defaults:shell:stdout=False."""
dotfiles.write_config(
[
{"defaults": {"shell": {"stdout": False}}},
{"shell": [{"command": "echo apple"}]},
]
)
run_dotbot("-vv")
stdout = capfd.readouterr().out.splitlines()
assert any(line.startswith("apple") for line in stdout)


def test_shell_cli_verbosity_stderr(capfd, dotfiles, run_dotbot):
"""Verify that commands can output to STDERR."""
dotfiles.write_config([{"shell": [{"command": "echo apple >&2"}]}])
run_dotbot("-vv")
stderr = capfd.readouterr().err.splitlines()
assert any(line.startswith("apple") for line in stderr)


def test_shell_cli_verbosity_stderr_with_explicit_stdout_off(capfd, dotfiles, run_dotbot):
"""Verify that commands can output to STDERR with STDOUT explicitly off."""
dotfiles.write_config(
[
{
"shell": [
{
"command": "echo apple >&2",
"stdout": False,
}
],
}
]
)
run_dotbot("-vv")
stderr = capfd.readouterr().err.splitlines()
assert any(line.startswith("apple") for line in stderr)


def test_shell_cli_verbosity_stderr_with_defaults_stdout_off(capfd, dotfiles, run_dotbot):
"""Verify that commands can output to STDERR with defaults:shell:stdout=False."""
dotfiles.write_config(
[
{
"defaults": {
"shell": {
"stdout": False,
},
},
},
{
"shell": [
{"command": "echo apple >&2"},
],
},
]
)
run_dotbot("-vv")
stderr = capfd.readouterr().err.splitlines()
assert any(line.startswith("apple") for line in stderr)


def test_shell_single_v_verbosity_stdout(capfd, dotfiles, run_dotbot):
"""Verify that a single '-v' verbosity doesn't override stdout=False."""
dotfiles.write_config([{"shell": [{"command": "echo apple"}]}])
run_dotbot("-v")
stdout = capfd.readouterr().out.splitlines()
assert not any(line.startswith("apple") for line in stdout)


def test_shell_single_v_verbosity_stderr(capfd, dotfiles, run_dotbot):
"""Verify that a single '-v' verbosity doesn't override stderr=False."""
dotfiles.write_config([{"shell": [{"command": "echo apple >&2"}]}])
run_dotbot("-v")
stderr = capfd.readouterr().err.splitlines()
assert not any(line.startswith("apple") for line in stderr)


def test_shell_compact_stdout_1(capfd, dotfiles, run_dotbot):
"""Verify that shell command stdout works in compact form."""
dotfiles.write_config(
[
{"defaults": {"shell": {"stdout": True}}},
{"shell": ["echo apple"]},
]
)
run_dotbot()
stdout = capfd.readouterr().out.splitlines()
assert any(line.startswith("apple") for line in stdout)


def test_shell_compact_stdout_2(capfd, dotfiles, run_dotbot):
    """Verify that shell command stdout and description output work in compact form."""
dotfiles.write_config(
[
{"defaults": {"shell": {"stdout": True}}},
{"shell": [["echo apple", "echoing message"]]},
]
)
run_dotbot()
stdout = capfd.readouterr().out.splitlines()
assert any(line.startswith("apple") for line in stdout)
assert any(line.startswith("echoing message") for line in stdout)


def test_shell_stdout_disabled_by_default(capfd, dotfiles, run_dotbot):
"""Verify that the shell command disables stdout by default."""
dotfiles.write_config(
[
{
"shell": ["echo banana"],
}
]
)
run_dotbot()
stdout = capfd.readouterr().out.splitlines()
assert not any(line.startswith("banana") for line in stdout)


def test_shell_can_override_defaults(capfd, dotfiles, run_dotbot):
"""Verify that the shell command can override defaults."""
dotfiles.write_config(
[
{"defaults": {"shell": {"stdout": True}}},
{"shell": [{"command": "echo apple", "stdout": False}]},
]
)
run_dotbot()
stdout = capfd.readouterr().out.splitlines()
assert not any(line.startswith("apple") for line in stdout)


def test_shell_quiet_default(capfd, dotfiles, run_dotbot):
"""Verify that quiet is off by default."""
dotfiles.write_config(
[
{
"shell": [
{
"command": "echo banana",
"description": "echoing a thing...",
}
],
}
]
)
run_dotbot()
stdout = capfd.readouterr().out.splitlines()
assert not any(line.startswith("banana") for line in stdout)
assert any("echo banana" in line for line in stdout)
assert any(line.startswith("echoing a thing...") for line in stdout)


def test_shell_quiet_enabled_with_description(capfd, dotfiles, run_dotbot):
"""Verify that only the description is shown when quiet is enabled."""
dotfiles.write_config(
[
{
"shell": [
{
"command": "echo banana",
"description": "echoing a thing...",
"quiet": True,
}
],
}
]
)
run_dotbot()
stdout = capfd.readouterr().out.splitlines()
assert not any(line.startswith("banana") for line in stdout)
assert not any("echo banana" in line for line in stdout)
assert any(line.startswith("echoing a thing...") for line in stdout)


def test_shell_quiet_enabled_without_description(capfd, dotfiles, run_dotbot):
"""Verify nothing is shown when quiet is enabled with no description."""
dotfiles.write_config(
[
{
"shell": [
{
"command": "echo banana",
"quiet": True,
}
],
}
]
)
run_dotbot()
stdout = capfd.readouterr().out.splitlines()
assert not any(line.startswith("banana") for line in stdout)
assert not any(line.startswith("echo banana") for line in stdout)

View file

@ -1,49 +0,0 @@
import os
import shutil
import subprocess
import sys

import pytest


def test_shim(root, home, dotfiles, run_dotbot):
"""Verify install shim works."""
# Skip the test if git is unavailable.
git = shutil.which("git")
if git is None:
pytest.skip("git is unavailable")
if sys.platform[:5] == "win32":
install = os.path.join(
dotfiles.directory, "dotbot", "tools", "git-submodule", "install.ps1"
)
shim = os.path.join(dotfiles.directory, "install.ps1")
else:
install = os.path.join(dotfiles.directory, "dotbot", "tools", "git-submodule", "install")
shim = os.path.join(dotfiles.directory, "install")
# Set up the test environment.
git_directory = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
os.chdir(dotfiles.directory)
subprocess.check_call([git, "init"])
subprocess.check_call(
[git, "-c", "protocol.file.allow=always", "submodule", "add", git_directory, "dotbot"]
)
shutil.copy(install, shim)
dotfiles.write("foo", "pear")
dotfiles.write_config([{"link": {"~/.foo": "foo"}}])
# Run the shim script.
env = dict(os.environ)
if sys.platform[:5] == "win32":
args = [shutil.which("powershell"), "-ExecutionPolicy", "RemoteSigned", shim]
env["USERPROFILE"] = home
else:
args = [shim]
env["HOME"] = home
subprocess.check_call(args, env=env, cwd=dotfiles.directory)
assert os.path.islink(os.path.join(home, ".foo"))
with open(os.path.join(home, ".foo"), "r") as file:
assert file.read() == "pear"

View file

@ -9,7 +9,6 @@ DOTBOT_BIN="bin/dotbot"
BASEDIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "${BASEDIR}"
git -C "${DOTBOT_DIR}" submodule sync --quiet --recursive
git submodule update --init --recursive "${DOTBOT_DIR}"
"${BASEDIR}/${DOTBOT_DIR}/${DOTBOT_BIN}" -d "${BASEDIR}" -c "${CONFIG}" "${@}"

View file

@ -1,22 +0,0 @@
$ErrorActionPreference = "Stop"
$CONFIG = "install.conf.yaml"
$DOTBOT_DIR = "dotbot"
$DOTBOT_BIN = "bin/dotbot"
$BASEDIR = $PSScriptRoot
Set-Location $BASEDIR
git -C $DOTBOT_DIR submodule sync --quiet --recursive
git submodule update --init --recursive $DOTBOT_DIR
foreach ($PYTHON in ('python', 'python3')) {
# Python redirects to Microsoft Store in Windows 10 when not installed
if (& { $ErrorActionPreference = "SilentlyContinue"
![string]::IsNullOrEmpty((&$PYTHON -V))
$ErrorActionPreference = "Stop" }) {
&$PYTHON $(Join-Path $BASEDIR -ChildPath $DOTBOT_DIR | Join-Path -ChildPath $DOTBOT_BIN) -d $BASEDIR -c $CONFIG $Args
return
}
}
Write-Error "Error: Cannot find Python."

View file

@ -1,14 +0,0 @@
#!/usr/bin/env bash
set -e
CONFIG="install.conf.yaml"
DOTBOT_DIR="dotbot"
DOTBOT_BIN="bin/dotbot"
BASEDIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "${BASEDIR}"
(cd "${DOTBOT_DIR}" && git submodule update --init --recursive)
"${BASEDIR}/${DOTBOT_DIR}/${DOTBOT_BIN}" -d "${BASEDIR}" -c "${CONFIG}" "${@}"

View file

@ -1,21 +0,0 @@
$ErrorActionPreference = "Stop"
$CONFIG = "install.conf.yaml"
$DOTBOT_DIR = "dotbot"
$DOTBOT_BIN = "bin/dotbot"
$BASEDIR = $PSScriptRoot
Set-Location $BASEDIR
Set-Location $DOTBOT_DIR && git submodule update --init --recursive
foreach ($PYTHON in ('python', 'python3')) {
# Python redirects to Microsoft Store in Windows 10 when not installed
if (& { $ErrorActionPreference = "SilentlyContinue"
![string]::IsNullOrEmpty((&$PYTHON -V))
$ErrorActionPreference = "Stop" }) {
&$PYTHON $(Join-Path $BASEDIR -ChildPath $DOTBOT_DIR | Join-Path -ChildPath $DOTBOT_BIN) -d $BASEDIR -c $CONFIG $Args
return
}
}
Write-Error "Error: Cannot find Python."

76
tox.ini
View file

@ -1,76 +0,0 @@
[tox]
; On Windows, only CPython >= 3.8 is supported.
; All older versions, and PyPy, lack full symlink support.
envlist =
coverage_erase
py{38, 39, 310, 311, 312}-all_platforms
py{36, 37}-most_platforms
pypy3-most_platforms
coverage_report
skip_missing_interpreters = true
[testenv]
platform =
all_platforms: cygwin|darwin|linux|win32
most_platforms: cygwin|darwin|linux
deps =
coverage
pytest
pytest-randomly
pyyaml
commands =
coverage run -m pytest tests/
[testenv:coverage_erase]
skipsdist = true
skip_install = true
deps = coverage
commands = coverage erase
[testenv:coverage_report]
skipsdist = true
skip_install = true
deps = coverage
commands_pre =
coverage combine
commands =
coverage report
coverage html
coverage xml
[coverage:run]
branch = true
parallel = true
source =
dotbot/
tests/
[coverage:html]
directory = htmlcov
[gh-actions]
python =
; Run on all platforms (Linux, Mac, and Windows)
3.8: py38-all_platforms
3.9: py39-all_platforms
3.10: py310-all_platforms
3.11: py311-all_platforms
3.12: py312-all_platforms
; Run on most platforms (Linux and Mac)
pypy-3.9: pypy3-most_platforms
3.6: py36-most_platforms
3.7: py37-most_platforms
; Disable problem matcher because it causes issues when running in a container;
; see https://github.com/ymyzk/tox-gh-actions/issues/126
problem_matcher = False